2005-04-16 15:20:36 -07:00
# ifndef _S390_BITOPS_H
# define _S390_BITOPS_H
/*
 *  include/asm-s390/bitops.h
 *
 *  S390 version
 *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 *
 *  Derived from "include/asm-i386/bitops.h"
 *    Copyright (C) 1992, Linus Torvalds
 *
 */
2006-06-29 14:56:13 +02:00
# ifdef __KERNEL__
2007-10-18 23:40:26 -07:00
# ifndef _LINUX_BITOPS_H
# error only <linux / bitops.h> can be included directly
# endif
2005-04-16 15:20:36 -07:00
# include <linux/compiler.h>
/*
 * 32 bit bitops format:
 * bit 0 is the LSB of *addr; bit 31 is the MSB of *addr;
 * bit 32 is the LSB of *(addr+4). That combined with the
 * big endian byte order on S390 give the following bit
 * order in memory:
 *    1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10 \
 *    0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
 * after that follows the next long with bit numbers
 *    3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
 *    2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
 * The reason for this bit ordering is the fact that
 * in the architecture independent code bits operations
 * of the form "flags |= (1 << bitnr)" are used INTERMIXED
 * with operation of the form "set_bit(bitnr, flags)".
 *
 * 64 bit bitops format:
 * bit 0 is the LSB of *addr; bit 63 is the MSB of *addr;
 * bit 64 is the LSB of *(addr+8). That combined with the
 * big endian byte order on S390 give the following bit
 * order in memory:
 *    3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
 *    2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
 *    1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10
 *    0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
 * after that follows the next long with bit numbers
 *    7f 7e 7d 7c 7b 7a 79 78 77 76 75 74 73 72 71 70
 *    6f 6e 6d 6c 6b 6a 69 68 67 66 65 64 63 62 61 60
 *    5f 5e 5d 5c 5b 5a 59 58 57 56 55 54 53 52 51 50
 *    4f 4e 4d 4c 4b 4a 49 48 47 46 45 44 43 42 41 40
 * The reason for this bit ordering is the fact that
 * in the architecture independent code bits operations
 * of the form "flags |= (1 << bitnr)" are used INTERMIXED
 * with operation of the form "set_bit(bitnr, flags)".
 */
2009-03-26 15:24:03 +01:00
/* bitmap tables from arch/s390/kernel/bitmap.c */
2005-04-16 15:20:36 -07:00
extern const char _oi_bitmap [ ] ;
extern const char _ni_bitmap [ ] ;
extern const char _zb_findmap [ ] ;
extern const char _sb_findmap [ ] ;
#ifndef __s390x__

#define __BITOPS_ALIGN		3
#define __BITOPS_WORDSIZE	32
#define __BITOPS_OR		"or"
#define __BITOPS_AND		"nr"
#define __BITOPS_XOR		"xr"

/*
 * Compare-and-swap loop: atomically combine the word at __addr with
 * __val using __op_string (or/and/xor).  __old receives the previous
 * word value, __new the updated one.  The loop retries until the
 * CS instruction succeeds (condition code "low" means mismatch).
 */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	l	%0,%2\n"			\
		"0:	lr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	cs	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=Q" (*(unsigned long *) __addr)		\
		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
		: "cc");

#else /* __s390x__ */

#define __BITOPS_ALIGN		7
#define __BITOPS_WORDSIZE	64
#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"

/* 64 bit variant of the compare-and-swap loop (lg/csg instead of l/cs). */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=Q" (*(unsigned long *) __addr)		\
		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
		: "cc");

#endif /* __s390x__ */

/* Number of unsigned longs needed to hold "bits" bits. */
#define __BITOPS_WORDS(bits) (((bits)+__BITOPS_WORDSIZE-1)/__BITOPS_WORDSIZE)

/* Compiler barrier so the CS based bitops also order surrounding accesses. */
#define __BITOPS_BARRIER() asm volatile("" : : : "memory")
2005-04-16 15:20:36 -07:00
#ifdef CONFIG_SMP
/*
 * SMP safe set_bit routine based on compare and swap (CS)
 */
static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS: byte offset of the word holding bit nr */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make OR mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
}
/*
* SMP safe clear_bit routine based on compare and swap ( CS )
*/
static inline void clear_bit_cs ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr , old , new , mask ;
addr = ( unsigned long ) ptr ;
/* calculate address for CS */
addr + = ( nr ^ ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ) > > 3 ;
/* make AND mask */
mask = ~ ( 1UL < < ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ) ;
/* Do the atomic update. */
__BITOPS_LOOP ( old , new , addr , mask , __BITOPS_AND ) ;
}
/*
* SMP safe change_bit routine based on compare and swap ( CS )
*/
static inline void change_bit_cs ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr , old , new , mask ;
addr = ( unsigned long ) ptr ;
/* calculate address for CS */
addr + = ( nr ^ ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ) > > 3 ;
/* make XOR mask */
mask = 1UL < < ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ;
/* Do the atomic update. */
__BITOPS_LOOP ( old , new , addr , mask , __BITOPS_XOR ) ;
}
/*
* SMP safe test_and_set_bit routine based on compare and swap ( CS )
*/
static inline int
test_and_set_bit_cs ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr , old , new , mask ;
addr = ( unsigned long ) ptr ;
/* calculate address for CS */
addr + = ( nr ^ ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ) > > 3 ;
/* make OR/test mask */
mask = 1UL < < ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ;
/* Do the atomic update. */
__BITOPS_LOOP ( old , new , addr , mask , __BITOPS_OR ) ;
__BITOPS_BARRIER ( ) ;
return ( old & mask ) ! = 0 ;
}
/*
* SMP safe test_and_clear_bit routine based on compare and swap ( CS )
*/
static inline int
test_and_clear_bit_cs ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr , old , new , mask ;
addr = ( unsigned long ) ptr ;
/* calculate address for CS */
addr + = ( nr ^ ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ) > > 3 ;
/* make AND/test mask */
mask = ~ ( 1UL < < ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ) ;
/* Do the atomic update. */
__BITOPS_LOOP ( old , new , addr , mask , __BITOPS_AND ) ;
__BITOPS_BARRIER ( ) ;
return ( old ^ new ) ! = 0 ;
}
/*
* SMP safe test_and_change_bit routine based on compare and swap ( CS )
*/
static inline int
test_and_change_bit_cs ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr , old , new , mask ;
addr = ( unsigned long ) ptr ;
/* calculate address for CS */
addr + = ( nr ^ ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ) > > 3 ;
/* make XOR/test mask */
mask = 1UL < < ( nr & ( __BITOPS_WORDSIZE - 1 ) ) ;
/* Do the atomic update. */
__BITOPS_LOOP ( old , new , addr , mask , __BITOPS_XOR ) ;
__BITOPS_BARRIER ( ) ;
return ( old & mask ) ! = 0 ;
}
# endif /* CONFIG_SMP */
/*
* fast , non - SMP set_bit routine
*/
static inline void __set_bit ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr ;
addr = ( unsigned long ) ptr + ( ( nr ^ ( __BITOPS_WORDSIZE - 8 ) ) > > 3 ) ;
2006-09-28 16:56:43 +02:00
asm volatile (
2010-02-26 22:37:31 +01:00
" oc %O0(1,%R0),%1 "
: " =Q " ( * ( char * ) addr ) : " Q " ( _oi_bitmap [ nr & 7 ] ) : " cc " ) ;
2005-04-16 15:20:36 -07:00
}
/* Variant of __set_bit for compile-time constant bit numbers. */
static inline void
__constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	*(unsigned char *) addr |= 1 << (nr & 7);
}

#define set_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_set_bit((nr),(addr)) : \
 __set_bit((nr),(addr)) )
/*
* fast , non - SMP clear_bit routine
*/
static inline void
__clear_bit ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr ;
addr = ( unsigned long ) ptr + ( ( nr ^ ( __BITOPS_WORDSIZE - 8 ) ) > > 3 ) ;
2006-09-28 16:56:43 +02:00
asm volatile (
2010-02-26 22:37:31 +01:00
" nc %O0(1,%R0),%1 "
: " =Q " ( * ( char * ) addr ) : " Q " ( _ni_bitmap [ nr & 7 ] ) : " cc " ) ;
2005-04-16 15:20:36 -07:00
}
/* Variant of __clear_bit for compile-time constant bit numbers. */
static inline void
__constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	*(unsigned char *) addr &= ~(1 << (nr & 7));
}

#define clear_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_clear_bit((nr),(addr)) : \
 __clear_bit((nr),(addr)) )
/*
* fast , non - SMP change_bit routine
*/
static inline void __change_bit ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr ;
addr = ( unsigned long ) ptr + ( ( nr ^ ( __BITOPS_WORDSIZE - 8 ) ) > > 3 ) ;
2006-09-28 16:56:43 +02:00
asm volatile (
2010-02-26 22:37:31 +01:00
" xc %O0(1,%R0),%1 "
: " =Q " ( * ( char * ) addr ) : " Q " ( _oi_bitmap [ nr & 7 ] ) : " cc " ) ;
2005-04-16 15:20:36 -07:00
}
/* Variant of __change_bit for compile-time constant bit numbers. */
static inline void
__constant_change_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	*(unsigned char *) addr ^= 1 << (nr & 7);
}

#define change_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_change_bit((nr),(addr)) : \
 __change_bit((nr),(addr)) )
/*
* fast , non - SMP test_and_set_bit routine
*/
static inline int
test_and_set_bit_simple ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr ;
unsigned char ch ;
addr = ( unsigned long ) ptr + ( ( nr ^ ( __BITOPS_WORDSIZE - 8 ) ) > > 3 ) ;
ch = * ( unsigned char * ) addr ;
2006-09-28 16:56:43 +02:00
asm volatile (
2010-02-26 22:37:31 +01:00
" oc %O0(1,%R0),%1 "
: " =Q " ( * ( char * ) addr ) : " Q " ( _oi_bitmap [ nr & 7 ] )
: " cc " , " memory " ) ;
2005-04-16 15:20:36 -07:00
return ( ch > > ( nr & 7 ) ) & 1 ;
}
# define __test_and_set_bit(X,Y) test_and_set_bit_simple(X,Y)
/*
* fast , non - SMP test_and_clear_bit routine
*/
static inline int
test_and_clear_bit_simple ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr ;
unsigned char ch ;
addr = ( unsigned long ) ptr + ( ( nr ^ ( __BITOPS_WORDSIZE - 8 ) ) > > 3 ) ;
ch = * ( unsigned char * ) addr ;
2006-09-28 16:56:43 +02:00
asm volatile (
2010-02-26 22:37:31 +01:00
" nc %O0(1,%R0),%1 "
: " =Q " ( * ( char * ) addr ) : " Q " ( _ni_bitmap [ nr & 7 ] )
: " cc " , " memory " ) ;
2005-04-16 15:20:36 -07:00
return ( ch > > ( nr & 7 ) ) & 1 ;
}
# define __test_and_clear_bit(X,Y) test_and_clear_bit_simple(X,Y)
/*
* fast , non - SMP test_and_change_bit routine
*/
static inline int
test_and_change_bit_simple ( unsigned long nr , volatile unsigned long * ptr )
{
unsigned long addr ;
unsigned char ch ;
addr = ( unsigned long ) ptr + ( ( nr ^ ( __BITOPS_WORDSIZE - 8 ) ) > > 3 ) ;
ch = * ( unsigned char * ) addr ;
2006-09-28 16:56:43 +02:00
asm volatile (
2010-02-26 22:37:31 +01:00
" xc %O0(1,%R0),%1 "
: " =Q " ( * ( char * ) addr ) : " Q " ( _oi_bitmap [ nr & 7 ] )
: " cc " , " memory " ) ;
2005-04-16 15:20:36 -07:00
return ( ch > > ( nr & 7 ) ) & 1 ;
}
# define __test_and_change_bit(X,Y) test_and_change_bit_simple(X,Y)
/*
 * Pick the compare-and-swap based variants on SMP kernels and the
 * cheaper non-atomic variants on UP kernels.
 */
#ifdef CONFIG_SMP
#define set_bit             set_bit_cs
#define clear_bit           clear_bit_cs
#define change_bit          change_bit_cs
#define test_and_set_bit    test_and_set_bit_cs
#define test_and_clear_bit  test_and_clear_bit_cs
#define test_and_change_bit test_and_change_bit_cs
#else
#define set_bit             set_bit_simple
#define clear_bit           clear_bit_simple
#define change_bit          change_bit_simple
#define test_and_set_bit    test_and_set_bit_simple
#define test_and_clear_bit  test_and_clear_bit_simple
#define test_and_change_bit test_and_change_bit_simple
#endif
/*
 * This routine doesn't need to be atomic.
 *
 * Returns the current state of bit nr (0 or 1).
 */
static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* byte address of the big-endian byte that contains bit nr */
	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(volatile unsigned char *) addr;
	return (ch >> (nr & 7)) & 1;
}
static inline int
__constant_test_bit ( unsigned long nr , const volatile unsigned long * addr ) {
2006-02-11 17:56:04 -08:00
return ( ( ( volatile char * ) addr )
[ ( nr ^ ( __BITOPS_WORDSIZE - 8 ) ) > > 3 ] & ( 1 < < ( nr & 7 ) ) ) ! = 0 ;
2005-04-16 15:20:36 -07:00
}
# define test_bit(nr,addr) \
( __builtin_constant_p ( ( nr ) ) ? \
__constant_test_bit ( ( nr ) , ( addr ) ) : \
__test_bit ( ( nr ) , ( addr ) ) )
2005-07-27 11:44:58 -07:00
/*
2008-02-05 16:50:43 +01:00
* Optimized find bit helper functions .
2005-07-27 11:44:58 -07:00
*/
2008-02-05 16:50:43 +01:00
/**
* __ffz_word_loop - find byte offset of first long ! = - 1UL
* @ addr : pointer to array of unsigned long
* @ size : size of the array in bits
*/
static inline unsigned long __ffz_word_loop ( const unsigned long * addr ,
unsigned long size )
{
typedef struct { long _ [ __BITOPS_WORDS ( size ) ] ; } addrtype ;
unsigned long bytes = 0 ;
asm volatile (
# ifndef __s390x__
2008-02-19 15:29:28 +01:00
" ahi %1,-1 \n "
" sra %1,5 \n "
" jz 1f \n "
2008-02-05 16:50:43 +01:00
" 0: c %2,0(%0,%3) \n "
" jne 1f \n "
" la %0,4(%0) \n "
" brct %1,0b \n "
" 1: \n "
# else
2008-02-19 15:29:28 +01:00
" aghi %1,-1 \n "
" srag %1,%1,6 \n "
" jz 1f \n "
2008-02-05 16:50:43 +01:00
" 0: cg %2,0(%0,%3) \n "
" jne 1f \n "
" la %0,8(%0) \n "
" brct %1,0b \n "
" 1: \n "
# endif
2008-02-09 18:24:29 +01:00
: " +&a " ( bytes ) , " +&d " ( size )
2008-02-05 16:50:43 +01:00
: " d " ( - 1UL ) , " a " ( addr ) , " m " ( * ( addrtype * ) addr )
: " cc " ) ;
return bytes ;
}
/**
* __ffs_word_loop - find byte offset of first long ! = 0UL
* @ addr : pointer to array of unsigned long
* @ size : size of the array in bits
*/
static inline unsigned long __ffs_word_loop ( const unsigned long * addr ,
unsigned long size )
2005-07-27 11:44:58 -07:00
{
2008-02-05 16:50:43 +01:00
typedef struct { long _ [ __BITOPS_WORDS ( size ) ] ; } addrtype ;
unsigned long bytes = 0 ;
2005-07-27 11:44:58 -07:00
2008-02-05 16:50:43 +01:00
asm volatile (
# ifndef __s390x__
2008-02-19 15:29:28 +01:00
" ahi %1,-1 \n "
" sra %1,5 \n "
" jz 1f \n "
2008-02-05 16:50:43 +01:00
" 0: c %2,0(%0,%3) \n "
" jne 1f \n "
" la %0,4(%0) \n "
" brct %1,0b \n "
" 1: \n "
# else
2008-02-19 15:29:28 +01:00
" aghi %1,-1 \n "
" srag %1,%1,6 \n "
" jz 1f \n "
2008-02-05 16:50:43 +01:00
" 0: cg %2,0(%0,%3) \n "
" jne 1f \n "
" la %0,8(%0) \n "
" brct %1,0b \n "
" 1: \n "
# endif
2008-02-09 18:24:29 +01:00
: " +&a " ( bytes ) , " +&a " ( size )
2008-02-05 16:50:43 +01:00
: " d " ( 0UL ) , " a " ( addr ) , " m " ( * ( addrtype * ) addr )
: " cc " ) ;
return bytes ;
}
/**
* __ffz_word - add number of the first unset bit
* @ nr : base value the bit number is added to
* @ word : the word that is searched for unset bits
*/
static inline unsigned long __ffz_word ( unsigned long nr , unsigned long word )
{
2005-07-27 11:44:58 -07:00
# ifdef __s390x__
2009-03-26 15:24:35 +01:00
if ( ( word & 0xffffffff ) = = 0xffffffff ) {
2005-07-27 11:44:58 -07:00
word > > = 32 ;
2008-02-05 16:50:43 +01:00
nr + = 32 ;
2005-07-27 11:44:58 -07:00
}
# endif
2009-03-26 15:24:35 +01:00
if ( ( word & 0xffff ) = = 0xffff ) {
2005-07-27 11:44:58 -07:00
word > > = 16 ;
2008-02-05 16:50:43 +01:00
nr + = 16 ;
2005-07-27 11:44:58 -07:00
}
2009-03-26 15:24:35 +01:00
if ( ( word & 0xff ) = = 0xff ) {
2005-07-27 11:44:58 -07:00
word > > = 8 ;
2008-02-05 16:50:43 +01:00
nr + = 8 ;
2005-07-27 11:44:58 -07:00
}
2008-02-05 16:50:43 +01:00
return nr + _zb_findmap [ ( unsigned char ) word ] ;
2005-07-27 11:44:58 -07:00
}
2008-02-05 16:50:43 +01:00
/**
* __ffs_word - add number of the first set bit
* @ nr : base value the bit number is added to
* @ word : the word that is searched for set bits
2005-07-27 11:44:58 -07:00
*/
2008-02-05 16:50:43 +01:00
static inline unsigned long __ffs_word ( unsigned long nr , unsigned long word )
2005-07-27 11:44:58 -07:00
{
# ifdef __s390x__
2009-03-26 15:24:35 +01:00
if ( ( word & 0xffffffff ) = = 0 ) {
2005-07-27 11:44:58 -07:00
word > > = 32 ;
2008-02-05 16:50:43 +01:00
nr + = 32 ;
2005-07-27 11:44:58 -07:00
}
# endif
2009-03-26 15:24:35 +01:00
if ( ( word & 0xffff ) = = 0 ) {
2005-07-27 11:44:58 -07:00
word > > = 16 ;
2008-02-05 16:50:43 +01:00
nr + = 16 ;
2005-07-27 11:44:58 -07:00
}
2009-03-26 15:24:35 +01:00
if ( ( word & 0xff ) = = 0 ) {
2005-07-27 11:44:58 -07:00
word > > = 8 ;
2008-02-05 16:50:43 +01:00
nr + = 8 ;
2005-07-27 11:44:58 -07:00
}
2008-02-05 16:50:43 +01:00
return nr + _sb_findmap [ ( unsigned char ) word ] ;
2005-07-27 11:44:58 -07:00
}
2005-04-16 15:20:36 -07:00
2005-07-27 11:44:58 -07:00
2008-02-05 16:50:43 +01:00
/**
 * __load_ulong_be - load big endian unsigned long
 * @p: pointer to array of unsigned long
 * @offset: byte offset of source value in the array
 *
 * s390 is big endian, so a plain load already yields the big endian
 * value; only the byte offset needs to be applied.
 */
static inline unsigned long __load_ulong_be(const unsigned long *p,
					    unsigned long offset)
{
	p = (unsigned long *)((unsigned long) p + offset);
	return *p;
}
2005-07-27 11:44:58 -07:00
2008-02-05 16:50:43 +01:00
/**
* __load_ulong_le - load little endian unsigned long
* @ p : pointer to array of unsigned long
* @ offset : byte offset of source value in the array
*/
static inline unsigned long __load_ulong_le ( const unsigned long * p ,
unsigned long offset )
2005-04-16 15:20:36 -07:00
{
2008-02-05 16:50:43 +01:00
unsigned long word ;
2005-04-16 15:20:36 -07:00
2008-02-05 16:50:43 +01:00
p = ( unsigned long * ) ( ( unsigned long ) p + offset ) ;
# ifndef __s390x__
2006-09-28 16:56:43 +02:00
asm volatile (
2010-02-26 22:37:31 +01:00
" ic %0,%O1(%R1) \n "
" icm %0,2,%O1+1(%R1) \n "
" icm %0,4,%O1+2(%R1) \n "
" icm %0,8,%O1+3(%R1) "
: " =&d " ( word ) : " Q " ( * p ) : " cc " ) ;
2008-02-05 16:50:43 +01:00
# else
asm volatile (
" lrvg %0,%1 "
: " =d " ( word ) : " m " ( * p ) ) ;
# endif
return word ;
2005-04-16 15:20:36 -07:00
}
2008-02-05 16:50:43 +01:00
/*
 * The various find bit functions.
 */

/*
 * ffz - find first zero in word.
 * @word: The word to search
 *
 * Undefined if no zero exists, so code should check against ~0UL first.
 */
static inline unsigned long ffz(unsigned long word)
{
	return __ffz_word(0, word);
}
2005-04-16 15:20:36 -07:00
2008-02-05 16:50:43 +01:00
/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __ffs_word(0, word);
}
2008-02-05 16:50:43 +01:00
/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int x)
{
	if (!x)
		return 0;
	/* ffs() is 1-based, hence the base value 1 */
	return __ffs_word(1, x);
}
2005-04-16 15:20:36 -07:00
2008-02-05 16:50:43 +01:00
/**
 * find_first_zero_bit - find the first zero bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit-number of the first zero bit, not the number of the byte
 * containing a bit.  Returns @size if no zero bit is found.
 */
static inline unsigned long find_first_zero_bit(const unsigned long *addr,
						unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffz_word_loop(addr, size);
	bits = __ffz_word(bytes*8, __load_ulong_be(addr, bytes));
	return (bits < size) ? bits : size;
}
#define find_first_zero_bit find_first_zero_bit
2008-02-05 16:50:43 +01:00
/**
 * find_first_bit - find the first set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit-number of the first set bit, not the number of the byte
 * containing a bit.  Returns @size if no set bit is found.
 */
static inline unsigned long find_first_bit(const unsigned long *addr,
					   unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffs_word_loop(addr, size);
	bits = __ffs_word(bytes*8, __load_ulong_be(addr, bytes));
	return (bits < size) ? bits : size;
}
#define find_first_bit find_first_bit
2005-04-16 15:20:36 -07:00
2008-02-05 16:50:43 +01:00
/**
* find_next_zero_bit - find the first zero bit in a memory region
* @ addr : The address to base the search on
* @ offset : The bitnumber to start searching at
* @ size : The maximum size to search
*/
static inline int find_next_zero_bit ( const unsigned long * addr ,
unsigned long size ,
unsigned long offset )
2005-04-16 15:20:36 -07:00
{
2005-07-27 11:44:58 -07:00
const unsigned long * p ;
unsigned long bit , set ;
if ( offset > = size )
return size ;
bit = offset & ( __BITOPS_WORDSIZE - 1 ) ;
offset - = bit ;
size - = offset ;
p = addr + offset / __BITOPS_WORDSIZE ;
if ( bit ) {
/*
2008-02-05 16:50:43 +01:00
* __ffz_word returns __BITOPS_WORDSIZE
2005-07-27 11:44:58 -07:00
* if no zero bit is present in the word .
*/
2008-08-21 19:46:37 +02:00
set = __ffz_word ( bit , * p > > bit ) ;
2005-07-27 11:44:58 -07:00
if ( set > = size )
return size + offset ;
if ( set < __BITOPS_WORDSIZE )
return set + offset ;
offset + = __BITOPS_WORDSIZE ;
size - = __BITOPS_WORDSIZE ;
p + + ;
2005-04-16 15:20:36 -07:00
}
2005-07-27 11:44:58 -07:00
return offset + find_first_zero_bit ( p , size ) ;
2005-04-16 15:20:36 -07:00
}
2011-05-26 16:26:06 -07:00
# define find_next_zero_bit find_next_zero_bit
2005-04-16 15:20:36 -07:00
2008-02-05 16:50:43 +01:00
/**
* find_next_bit - find the first set bit in a memory region
* @ addr : The address to base the search on
* @ offset : The bitnumber to start searching at
* @ size : The maximum size to search
*/
static inline int find_next_bit ( const unsigned long * addr ,
unsigned long size ,
unsigned long offset )
2005-04-16 15:20:36 -07:00
{
2005-07-27 11:44:58 -07:00
const unsigned long * p ;
unsigned long bit , set ;
if ( offset > = size )
return size ;
bit = offset & ( __BITOPS_WORDSIZE - 1 ) ;
offset - = bit ;
size - = offset ;
p = addr + offset / __BITOPS_WORDSIZE ;
if ( bit ) {
/*
2008-02-05 16:50:43 +01:00
* __ffs_word returns __BITOPS_WORDSIZE
2005-07-27 11:44:58 -07:00
* if no one bit is present in the word .
*/
2008-02-05 16:50:43 +01:00
set = __ffs_word ( 0 , * p & ( ~ 0UL < < bit ) ) ;
2005-07-27 11:44:58 -07:00
if ( set > = size )
return size + offset ;
if ( set < __BITOPS_WORDSIZE )
return set + offset ;
offset + = __BITOPS_WORDSIZE ;
size - = __BITOPS_WORDSIZE ;
p + + ;
2005-04-16 15:20:36 -07:00
}
2005-07-27 11:44:58 -07:00
return offset + find_first_bit ( p , size ) ;
2005-04-16 15:20:36 -07:00
}
2011-05-26 16:26:06 -07:00
# define find_next_bit find_next_bit
2005-04-16 15:20:36 -07:00
/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 140-bit bitmap where the first 100 bits are
 * unlikely to be set. It's guaranteed that at least one of the 140
 * bits is cleared.
 */
static inline int sched_find_first_bit(unsigned long *b)
{
	return find_first_bit(b, 140);
}
2006-03-26 01:39:34 -08:00
# include <asm-generic/bitops/fls.h>
2008-03-15 18:31:49 +01:00
# include <asm-generic/bitops/__fls.h>
2006-03-26 01:39:34 -08:00
# include <asm-generic/bitops/fls64.h>
2005-04-16 15:20:36 -07:00
2006-03-26 01:39:34 -08:00
# include <asm-generic/bitops/hweight.h>
2007-10-18 03:06:39 -07:00
# include <asm-generic/bitops/lock.h>
2005-04-16 15:20:36 -07:00
/*
 * ATTENTION: intel byte ordering convention for ext2 and minix !!
 * bit 0 is the LSB of addr; bit 31 is the MSB of addr;
 * bit 32 is the LSB of (addr+4).
 * That combined with the little endian byte order of Intel gives the
 * following bit order in memory:
 *    07 06 05 04 03 02 01 00 15 14 13 12 11 10 09 08 \
 *    23 22 21 20 19 18 17 16 31 30 29 28 27 26 25 24
 */
2011-03-23 16:41:57 -07:00
/*
 * find_first_zero_bit_le - little endian variant of find_first_zero_bit;
 * each word is byte-swapped on load so the search follows the Intel
 * bit numbering described above.
 */
static inline int find_first_zero_bit_le(void *vaddr, unsigned int size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffz_word_loop(vaddr, size);
	bits = __ffz_word(bytes*8, __load_ulong_le(vaddr, bytes));
	return (bits < size) ? bits : size;
}
#define find_first_zero_bit_le find_first_zero_bit_le
2005-04-16 15:20:36 -07:00
2011-03-23 16:41:57 -07:00
static inline int find_next_zero_bit_le ( void * vaddr , unsigned long size ,
2008-02-05 16:50:43 +01:00
unsigned long offset )
2005-04-16 15:20:36 -07:00
{
2005-07-27 11:44:58 -07:00
unsigned long * addr = vaddr , * p ;
2008-02-05 16:50:43 +01:00
unsigned long bit , set ;
2005-04-16 15:20:36 -07:00
if ( offset > = size )
return size ;
2005-07-27 11:44:58 -07:00
bit = offset & ( __BITOPS_WORDSIZE - 1 ) ;
offset - = bit ;
size - = offset ;
p = addr + offset / __BITOPS_WORDSIZE ;
2005-04-16 15:20:36 -07:00
if ( bit ) {
2005-07-27 11:44:58 -07:00
/*
* s390 version of ffz returns __BITOPS_WORDSIZE
* if no zero bit is present in the word .
*/
2008-08-21 19:46:37 +02:00
set = __ffz_word ( bit , __load_ulong_le ( p , 0 ) > > bit ) ;
2005-07-27 11:44:58 -07:00
if ( set > = size )
return size + offset ;
if ( set < __BITOPS_WORDSIZE )
return set + offset ;
offset + = __BITOPS_WORDSIZE ;
size - = __BITOPS_WORDSIZE ;
p + + ;
2005-04-16 15:20:36 -07:00
}
2011-03-23 16:41:57 -07:00
return offset + find_first_zero_bit_le ( p , size ) ;
2005-04-16 15:20:36 -07:00
}
2011-05-26 16:26:06 -07:00
# define find_next_zero_bit_le find_next_zero_bit_le
2005-04-16 15:20:36 -07:00
2011-03-23 16:41:57 -07:00
/*
 * find_first_bit_le - little endian variant of find_first_bit;
 * each word is byte-swapped on load before being searched.
 */
static inline unsigned long find_first_bit_le(void *vaddr, unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffs_word_loop(vaddr, size);
	bits = __ffs_word(bytes*8, __load_ulong_le(vaddr, bytes));
	return (bits < size) ? bits : size;
}
#define find_first_bit_le find_first_bit_le
2008-02-05 16:50:44 +01:00
2011-03-23 16:41:57 -07:00
static inline int find_next_bit_le ( void * vaddr , unsigned long size ,
2008-02-05 16:50:44 +01:00
unsigned long offset )
{
unsigned long * addr = vaddr , * p ;
unsigned long bit , set ;
if ( offset > = size )
return size ;
bit = offset & ( __BITOPS_WORDSIZE - 1 ) ;
offset - = bit ;
size - = offset ;
p = addr + offset / __BITOPS_WORDSIZE ;
if ( bit ) {
/*
* s390 version of ffz returns __BITOPS_WORDSIZE
* if no zero bit is present in the word .
*/
2008-08-21 19:46:37 +02:00
set = __ffs_word ( 0 , __load_ulong_le ( p , 0 ) & ( ~ 0UL < < bit ) ) ;
2008-02-05 16:50:44 +01:00
if ( set > = size )
return size + offset ;
if ( set < __BITOPS_WORDSIZE )
return set + offset ;
offset + = __BITOPS_WORDSIZE ;
size - = __BITOPS_WORDSIZE ;
p + + ;
}
2011-03-23 16:41:57 -07:00
return offset + find_first_bit_le ( p , size ) ;
}
2011-05-26 16:26:06 -07:00
# define find_next_bit_le find_next_bit_le
2011-03-23 16:41:57 -07:00
2011-05-26 16:26:12 -07:00
#include <asm-generic/bitops/le.h>

/* ext2 atomic bitops: the lock argument is unused on s390, the CS based
 * test_and_*_bit_le variants are already atomic. */
#define ext2_set_bit_atomic(lock, nr, addr)	\
	test_and_set_bit_le(nr, addr)
#define ext2_clear_bit_atomic(lock, nr, addr)	\
	test_and_clear_bit_le(nr, addr)
2008-02-05 16:50:44 +01:00
2005-04-16 15:20:36 -07:00
# endif /* __KERNEL__ */
# endif /* _S390_BITOPS_H */