root/include/asm-sparc/bitops.h

/* [previous][next][first][last][top][bottom][index][help] */

INCLUDED FROM


DEFINITIONS

This source file contains the following definitions.
  1. set_bit
  2. clear_bit
  3. change_bit
  4. test_bit
  5. ffz
  6. find_next_zero_bit

   1 #ifndef _SPARC_BITOPS_H
   2 #define _SPARC_BITOPS_H
   3 
   4 /*
   5  * Copyright 1994, David S. Miller (davem@caip.rutgers.edu).
   6  */
   7 
   8 
   9 /* Set bit 'nr' in 32-bit quantity at address 'addr' where bit '0'
  10  * is in the highest of the four bytes and bit '31' is the high bit
  11  * within the first byte. Sparc is BIG-Endian. Unless noted otherwise
  12  * all bit-ops return 0 if bit was previously clear and != 0 otherwise.
  13  */
  14 
  15 /* For now, the sun4c implementation will disable and enable traps
  16  * in order to insure atomicity. Things will have to be different
  17  * for sun4m (ie. SMP) no doubt.
  18  */
  19 
/* Atomically set bit 'nr' in the 32-bit word at 'addr'.
 * Returns zero if the bit was previously clear and non-zero (the bit's
 * own mask value) if it was already set -- see the bit-numbering comment
 * at the top of this file.
 *
 * Atomicity on sun4c comes from disabling traps around the
 * load/modify/store sequence: the SPARC 'wr' instruction XORs its
 * operands, so "wr %psr, 0x20" toggles the PSR enable-traps (ET) bit.
 *
 * NOTE(review): tmp, mask and psr are written by the asm but are passed
 * as *input* operands ("r" (tmp=0) ...), which GCC's extended-asm rules
 * forbid, and there is no "memory" clobber for the store through addr.
 * The code is left byte-for-byte intact because the trap-disable window
 * is too fragile to restyle without a SPARC target to test on.
 */
extern __inline__ unsigned int set_bit(unsigned int nr, void *addr)
{
  register unsigned long retval, tmp, mask, psr;

  __asm__ __volatile__("or %%g0, 0x1, %3\n\t"     /* produce the mask */
                       "sll %3, %4, %3\n\t"       /* mask = 1 << nr */
                       "rd %%psr, %5\n\t"         /* read the psr */
                       "wr %5, 0x20, %%psr\n\t"   /* traps disabled */
                       "ld [%1], %2\n\t"          /* critical section */
                       "and %3, %2, %0\n\t"       /* retval = old word & mask */
                       "or  %3, %2, %2\n\t"       /* set the bit */
                       "st  %2, [%1]\n\t"         /* store word back */
                       "wr %5, 0x0, %%psr\n\t" :  /* re-enable traps */
                       "=r" (retval) :
                       "r" (addr), "r" (tmp=0), "r" (mask=0),
                       "r" (nr), "r" (psr=0));

  return retval; /* confuse gcc :-) */

}
  40 
/* Atomically clear bit 'nr' in the 32-bit word at 'addr'.
 * Returns zero if the bit was already clear and non-zero (the bit's own
 * mask value) if it was previously set.
 *
 * Same trap-disable technique as set_bit(): "wr %psr, 0x20" XOR-toggles
 * the PSR enable-traps bit around the load/modify/store.
 *
 * NOTE(review): tmp, mask and psr are written by the asm but passed as
 * *input* operands, which GCC's extended-asm rules forbid, and there is
 * no "memory" clobber for the store through addr.  Left untouched -- the
 * critical-section sequence is too fragile to restyle untested.
 */
extern __inline__ unsigned int clear_bit(unsigned int nr, void *addr)
{
  register unsigned long retval, tmp, mask, psr;

  __asm__ __volatile__("or %%g0, 0x1, %3\n\t"     /* produce the mask */
                       "sll %3, %4, %3\n\t"       /* mask = 1 << nr */
                       "rd %%psr, %5\n\t"         /* read the psr */
                       "wr %5, 0x20, %%psr\n\t"   /* disable traps */
                       "ld [%1], %2\n\t"          /* critical section */
                       "and %2, %3, %0\n\t"       /* get old bit */
                       "andn %2, %3, %2\n\t"      /* set new val */
                       "st  %2, [%1]\n\t"         /* store word back */
                       "wr %5, 0x0, %%psr\n\t" :  /* enable traps */
                       "=r" (retval) :
                       "r" (addr), "r" (tmp=0), "r" (mask=0),
                       "r" (nr), "r" (psr=0));

  return retval; /* confuse gcc ;-) */

}
  61 
/* Atomically toggle bit 'nr' in the 32-bit word at 'addr'.
 * Returns zero if the bit was previously clear and non-zero (the bit's
 * own mask value) if it was previously set.
 *
 * Same trap-disable technique as set_bit(): "wr %psr, 0x20" XOR-toggles
 * the PSR enable-traps bit around the load/modify/store.
 *
 * NOTE(review): tmp, mask and psr are written by the asm but passed as
 * *input* operands, which GCC's extended-asm rules forbid, and there is
 * no "memory" clobber for the store through addr.  Left untouched -- the
 * critical-section sequence is too fragile to restyle untested.
 */
extern __inline__ unsigned int change_bit(unsigned int nr, void *addr)
{
  register unsigned long retval, tmp, mask, psr;

  __asm__ __volatile__("or %%g0, 0x1, %3\n\t"     /* produce the mask */
                       "sll %3, %4, %3\n\t"       /* mask = 1 << nr */
                       "rd %%psr, %5\n\t"         /* read the psr */
                       "wr %5, 0x20, %%psr\n\t"   /* disable traps */
                       "ld [%1], %2\n\t"          /* critical section */
                       "and %3, %2, %0\n\t"       /* get old bit val */
                       "xor %3, %2, %2\n\t"       /* set new val */
                       "st  %2, [%1]\n\t"         /* store word back */
                       "wr %5, 0x0, %%psr\n\t" :  /* enable traps */
                       "=r" (retval) :
                       "r" (addr), "r" (tmp=0), "r" (mask=0),
                       "r" (nr), "r" (psr=0));

  return retval; /* confuse gcc ;-) */

}
  82 
  83 /* The following routine need not be atomic. */
  84 
  85 extern __inline__ unsigned int test_bit(int nr, void *addr)
     /* [previous][next][first][last][top][bottom][index][help] */
  86 {
  87   register unsigned long retval, tmp;
  88 
  89   __asm__ __volatile__("ld [%1], %2\n\t"
  90                        "or %%g0, 0x1, %0\n\t"
  91                        "sll %0, %3, %0\n\t"
  92                        "and %0, %2, %0\n\t" :
  93                        "=r" (retval) :
  94                        "r" (addr), "r" (tmp=0),
  95                        "r" (nr));
  96 
  97   return retval; /* confuse gcc :> */
  98 
  99 }
 100 
 101 /* There has to be a faster way to do this, sigh... */
 102 
 103 extern __inline__ unsigned long ffz(unsigned long word)
     /* [previous][next][first][last][top][bottom][index][help] */
 104 {
 105   register unsigned long cnt, tmp, tmp2;
 106 
 107   cnt = 0;
 108 
 109   __asm__("or %%g0, %3, %2\n\t"
 110           "1: and %2, 0x1, %1\n\t"
 111           "srl %2, 0x1, %2\n\t"
 112           "cmp %1, 0\n\t"
 113           "bne,a 1b\n\t"
 114           "add %0, 0x1, %0\n\t" :
 115           "=r" (cnt) :
 116           "r" (tmp=0), "r" (tmp2=0), "r" (word));
 117 
 118   return cnt;
 119 }
 120 
 121 /* find_next_zero_bit() finds the first zero bit in a bit string of length
 122  * 'size' bits, starting the search at bit 'offset'. This is largely based
 123  * on Linus's ALPHA routines, which are pretty portable BTW.
 124  */
 125 
 126 extern __inline__ unsigned long find_next_zero_bit(void *addr, unsigned long size, unsigned long offset)
     /* [previous][next][first][last][top][bottom][index][help] */
 127 {
 128   unsigned long *p = ((unsigned long *) addr) + (offset >> 6);
 129   unsigned long result = offset & ~31UL;
 130   unsigned long tmp;
 131 
 132   if (offset >= size)
 133     return size;
 134   size -= result;
 135   offset &= 31UL;
 136   if (offset) 
 137     {
 138       tmp = *(p++);
 139       tmp |= ~0UL >> (32-offset);
 140       if (size < 32)
 141         goto found_first;
 142       if (~tmp)
 143         goto found_middle;
 144       size -= 32;
 145       result += 32;
 146     }
 147   while (size & ~32UL) 
 148     {
 149       if (~(tmp = *(p++)))
 150         goto found_middle;
 151       result += 32;
 152       size -= 32;
 153     }
 154   if (!size)
 155     return result;
 156   tmp = *p;
 157 
 158 found_first:
 159   tmp |= ~0UL << size;
 160 found_middle:
 161   return result + ffz(tmp);
 162 }
 163 
/* find_first_zero_bit(addr, size): search the whole bit string starting
 * at bit 0 -- just find_next_zero_bit() with offset fixed to zero.
 * Linus sez that gcc can optimize the following correctly, we'll see if
 * this holds on the Sparc as it does for the ALPHA.
 */

#define find_first_zero_bit(addr, size) \
        find_next_zero_bit((addr), (size), 0)
 170 
 171 
 172 #endif /* defined(_SPARC_BITOPS_H) */
 173 

/* [previous][next][first][last][top][bottom][index][help] */