root/include/asm-alpha/bitops.h


DEFINITIONS

This source file includes the following definitions:
  1. set_bit
  2. clear_bit
  3. change_bit
  4. test_bit
  5. ffz
  6. find_next_zero_bit

#ifndef _ALPHA_BITOPS_H
#define _ALPHA_BITOPS_H

/*
 * Copyright 1994, Linus Torvalds.
 */

/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * bit 0 is the LSB of addr; bit 64 is the LSB of (addr+1).
 */

extern __inline__ unsigned long set_bit(unsigned long nr, void * addr)
{
        unsigned long oldbit;
        unsigned long temp;

        __asm__ __volatile__(
                "\n1:\t"
                "ldq_l %0,%1\n\t"       /* load-locked the quadword */
                "and %0,%3,%2\n\t"      /* oldbit = old value & mask */
                "bne %2,2f\n\t"         /* already set: skip the store */
                "xor %0,%3,%0\n\t"      /* set the bit */
                "stq_c %0,%1\n\t"       /* store-conditional */
                "beq %0,1b\n"           /* lost the reservation: retry */
                "2:"
                :"=&r" (temp),
                 "=m" (((unsigned long *) addr)[nr >> 6]),
                 "=&r" (oldbit)
                :"r" (1UL << (nr & 63)),
                 "m" (((unsigned long *) addr)[nr >> 6]));
        return oldbit != 0;
}

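/*
 * Illustrative sketch, not part of the original header: because set_bit
 * reports whether the bit was already set, it doubles as an atomic
 * test-and-set.  The function name and the use of bit 0 are arbitrary
 * examples.
 */
static __inline__ int try_to_claim(unsigned long * lock_word)
{
        /* returns 1 if we just claimed bit 0, 0 if it was already taken */
        return !set_bit(0, lock_word);
}
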
extern __inline__ unsigned long clear_bit(unsigned long nr, void * addr)
{
        unsigned long oldbit;
        unsigned long temp;

        __asm__ __volatile__(
                "\n1:\t"
                "ldq_l %0,%1\n\t"
                "and %0,%3,%2\n\t"
                "beq %2,2f\n\t"
                "xor %0,%3,%0\n\t"
                "stq_c %0,%1\n\t"
                "beq %0,1b\n"
                "2:"
                :"=&r" (temp),
                 "=m" (((unsigned long *) addr)[nr >> 6]),
                 "=&r" (oldbit)
                :"r" (1UL << (nr & 63)),
                 "m" (((unsigned long *) addr)[nr >> 6]));
        return oldbit != 0;
}

extern __inline__ unsigned long change_bit(unsigned long nr, void * addr)
{
        unsigned long oldbit;
        unsigned long temp;

        __asm__ __volatile__(
                "\n1:\t"
                "ldq_l %0,%1\n\t"
                "and %0,%3,%2\n\t"
                "xor %0,%3,%0\n\t"
                "stq_c %0,%1\n\t"
                "beq %0,1b\n"
                :"=&r" (temp),
                 "=m" (((unsigned long *) addr)[nr >> 6]),
                 "=&r" (oldbit)
                :"r" (1UL << (nr & 63)),
                 "m" (((unsigned long *) addr)[nr >> 6]));
        return oldbit != 0;
}

extern __inline__ unsigned long test_bit(int nr, void * addr)
{
        return 1UL & (((unsigned long *) addr)[nr >> 6] >> (nr & 63));
}

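/*
 * Illustrative sketch, not part of the original header: how a caller
 * indexes bits across the quadwords of a bitmap.  The array size and
 * the bit numbers are arbitrary examples.
 */
static __inline__ void bitmap_example(void)
{
        unsigned long map[2] = { 0, 0 };        /* 128 bits, all clear */

        set_bit(3, map);                        /* bit 3 of map[0] */
        set_bit(70, map);                       /* bit 6 of map[1] */
        if (test_bit(3, map))
                change_bit(3, map);             /* toggle it back off */
        clear_bit(70, map);
}
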
/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first..
 *
 * This uses the cmpbge insn to check which byte contains the zero.
 * I don't know if that's actually a good idea, but it's fun and the
 * resulting LSB tests should be natural on the alpha.. Besides, I'm
 * just teaching myself the asm of the alpha anyway.
 */
extern inline unsigned long ffz(unsigned long word)
{
        unsigned long result = 0;
        unsigned long tmp;

        __asm__("cmpbge %1,%0,%0"
                :"=r" (tmp)
                :"r" (word), "0" (~0UL));
        while (tmp & 1) {
                word >>= 8;
                tmp >>= 1;
                result += 8;
        }
        while (word & 1) {
                result++;
                word >>= 1;
        }
        return result;
}

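/*
 * Illustrative sketch, not part of the original header: a portable C
 * version of the same search.  It mirrors what cmpbge computes above
 * (bit i of tmp is set when byte i of word is all ones), so the first
 * loop skips whole bytes with no zero bit and the second scans within
 * the byte.  Undefined for ~0UL, like ffz itself.
 */
static inline unsigned long ffz_portable(unsigned long word)
{
        unsigned long result = 0;

        while ((word & 0xff) == 0xff) {         /* byte has no zero bit */
                word >>= 8;
                result += 8;
        }
        while (word & 1) {                      /* find the zero in the byte */
                word >>= 1;
                result++;
        }
        return result;
}
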
/*
 * Find next zero bit in a bitmap reasonably efficiently..
 */
extern inline unsigned long find_next_zero_bit(void * addr, unsigned long size, unsigned long offset)
{
        unsigned long * p = ((unsigned long *) addr) + (offset >> 6);
        unsigned long result = offset & ~63UL;
        unsigned long tmp;

        if (offset >= size)
                return size;
        size -= result;
        offset &= 63UL;
        if (offset) {
                tmp = *(p++);
                tmp |= ~0UL >> (64-offset);
                if (size < 64)
                        goto found_first;
                if (~tmp)
                        goto found_middle;
                size -= 64;
                result += 64;
        }
        while (size & ~63UL) {
                if (~(tmp = *(p++)))
                        goto found_middle;
                result += 64;
                size -= 64;
        }
        if (!size)
                return result;
        tmp = *p;
found_first:
        tmp |= ~0UL << size;
found_middle:
        return result + ffz(tmp);
}

/*
 * The optimizer actually generates good code for this case..
 */
#define find_first_zero_bit(addr, size) \
        find_next_zero_bit((addr), (size), 0)
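
/*
 * Illustrative sketch, not part of the original header: a typical caller
 * scans a bitmap for a free slot and claims it.  The 128-bit size is an
 * arbitrary example; since the find/set pair is not atomic as a whole,
 * the caller checks set_bit's return value and retries on a lost race.
 */
static inline long grab_free_slot(unsigned long * map)
{
        unsigned long bit;

        do {
                bit = find_first_zero_bit(map, 128);
                if (bit >= 128)
                        return -1;              /* bitmap is full */
        } while (set_bit(bit, map));            /* retry if someone beat us */
        return bit;
}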

#endif /* _ALPHA_BITOPS_H */
