diff --git a/arch/powerpc/include/asm/word-at-a-time.h b/arch/powerpc/include/asm/word-at-a-time.h
index ea52b51be401..5b3a903adae6 100644
--- a/arch/powerpc/include/asm/word-at-a-time.h
+++ b/arch/powerpc/include/asm/word-at-a-time.h
@@ -42,32 +42,65 @@ static inline bool has_zero(unsigned long val, unsigned long *data, const struct
 
 #else
 
+#ifdef CONFIG_64BIT
+
+/* unused */
+struct word_at_a_time {
+};
+
+#define WORD_AT_A_TIME_CONSTANTS { }
+
+/* This will give us 0xff for a NULL char and 0x00 elsewhere */
+static inline unsigned long has_zero(unsigned long a, unsigned long *bits, const struct word_at_a_time *c)
+{
+	unsigned long ret;
+	unsigned long zero = 0;
+
+	asm("cmpb %0,%1,%2" : "=r" (ret) : "r" (a), "r" (zero));
+	*bits = ret;
+
+	return ret;
+}
+
+static inline unsigned long prep_zero_mask(unsigned long a, unsigned long bits, const struct word_at_a_time *c)
+{
+	return bits;
+}
+
+/* Alan Modra's little-endian strlen tail for 64-bit */
+static inline unsigned long create_zero_mask(unsigned long bits)
+{
+	unsigned long leading_zero_bits;
+	long trailing_zero_bit_mask;
+
+	asm("addi	%1,%2,-1\n\t"
+	    "andc	%1,%1,%2\n\t"
+	    "popcntd	%0,%1"
+	    : "=r" (leading_zero_bits), "=&r" (trailing_zero_bit_mask)
+	    : "r" (bits));
+
+	return leading_zero_bits;
+}
+
+static inline unsigned long find_zero(unsigned long mask)
+{
+	return mask >> 3;
+}
+
+/* This assumes that we never ask for an all 1s bitmask */
+static inline unsigned long zero_bytemask(unsigned long mask)
+{
+	return (1UL << mask) - 1;
+}
+
+#else /* 32-bit case */
+
 struct word_at_a_time {
 	const unsigned long one_bits, high_bits;
 };
 
 #define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }
 
-#ifdef CONFIG_64BIT
-
-/* Alan Modra's little-endian strlen tail for 64-bit */
-#define create_zero_mask(mask) (mask)
-
-static inline unsigned long find_zero(unsigned long mask)
-{
-	unsigned long leading_zero_bits;
-	long trailing_zero_bit_mask;
-
-	asm ("addi	%1,%2,-1\n\t"
-	     "andc	%1,%1,%2\n\t"
-	     "popcntd	%0,%1"
-	     : "=r" (leading_zero_bits), "=&r" (trailing_zero_bit_mask)
-	     : "r" (mask));
-	return leading_zero_bits >> 3;
-}
-
-#else /* 32-bit case */
-
 /*
  * This is largely generic for little-endian machines, but the
  * optimal byte mask counting is probably going to be something
@@ -96,8 +129,6 @@ static inline unsigned long find_zero(unsigned long mask)
 	return count_masked_bytes(mask);
 }
 
-#endif
-
 /* Return nonzero if it has a zero */
 static inline unsigned long has_zero(unsigned long a, unsigned long *bits, const struct word_at_a_time *c)
 {
@@ -114,7 +145,9 @@ static inline unsigned long prep_zero_mask(unsigned long a, unsigned long bits,
 /* The mask we created is directly usable as a bytemask */
 #define zero_bytemask(mask) (mask)
 
-#endif
+#endif /* CONFIG_64BIT */
+
+#endif /* __BIG_ENDIAN__ */
 
 /*
  * We use load_unaligned_zero() in a selftest, which builds a userspace
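
For reference, here is a minimal userspace sketch of how the generic word-at-a-time
callers compose these helpers into a strlen-style scan. It assumes a little-endian
host and GCC/Clang (for __builtin_popcountl); the portable has_zero(),
create_zero_mask() and demo_strlen() below are illustrative stand-ins for the
cmpb/popcntd asm in the patch, not code taken from it.

/* word_at_a_time_demo.c -- illustrative sketch only, not kernel code */
#include <stdio.h>
#include <string.h>

/* Portable stand-in for the cmpb step: 0xff in every byte of 'a' that is zero. */
static unsigned long has_zero(unsigned long a, unsigned long *bits)
{
	unsigned long ret = 0;
	unsigned int i;

	for (i = 0; i < sizeof(a); i++)
		if (((a >> (8 * i)) & 0xff) == 0)
			ret |= 0xffUL << (8 * i);
	*bits = ret;
	return ret;
}

/*
 * Portable stand-in for the addi/andc/popcntd sequence: count the bits below
 * the lowest set bit, i.e. 8 * (index of the first zero byte, little-endian).
 */
static unsigned long create_zero_mask(unsigned long bits)
{
	return (unsigned long)__builtin_popcountl((bits - 1) & ~bits);
}

static unsigned long find_zero(unsigned long mask)
{
	return mask >> 3;	/* bit count -> byte index */
}

/* Word-at-a-time strlen over a zero-padded buffer (little-endian host). */
static size_t demo_strlen(const char *s)
{
	size_t len = 0;

	for (;;) {
		unsigned long a, bits;

		memcpy(&a, s + len, sizeof(a));	/* stands in for the aligned load */
		if (has_zero(a, &bits))
			return len + find_zero(create_zero_mask(bits));
		len += sizeof(a);
	}
}

int main(void)
{
	char buf[32] = "hello, powerpc";	/* zero padded so the word reads stay in bounds */

	printf("%zu %zu\n", demo_strlen(buf), strlen(buf));
	return 0;
}

The (bits - 1) & ~bits step isolates the bits below the lowest set bit, so its
popcount is 8 * (index of the first zero byte), which is exactly what find_zero()
then shifts down by 3 to recover the byte offset.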