From: Mathieu Desnoyers We can use __LL and __SC to select the assembly instruction, which divides by 2 the size of mips local.h. Signed-off-by: Mathieu Desnoyers Cc: Ralf Baechle Signed-off-by: Andrew Morton --- include/asm-mips/local.h | 262 +------------------------------------ 1 files changed, 13 insertions(+), 249 deletions(-) diff -puN include/asm-mips/local.h~local_t-mips-extension-shrink-duplicated-mips-32-64-bits-functions-from-localh include/asm-mips/local.h --- a/include/asm-mips/local.h~local_t-mips-extension-shrink-duplicated-mips-32-64-bits-functions-from-localh +++ a/include/asm-mips/local.h @@ -2,6 +2,7 @@ #define _ARCH_MIPS_LOCAL_H #include +#include #include #include @@ -20,240 +21,6 @@ typedef struct #define local_inc(l) local_long_inc(&(l)->a) #define local_dec(l) local_long_dec(&(l)->a) - -#ifndef CONFIG_64BIT - -/* - * Same as above, but return the result value - */ -static __inline__ int local_add_return(int i, local_t * l) -{ - unsigned long result; - - if (cpu_has_llsc && R10000_LLSC_WAR) { - unsigned long temp; - - __asm__ __volatile__( - " .set mips3 \n" - "1: ll %1, %2 # local_add_return \n" - " addu %0, %1, %3 \n" - " sc %0, %2 \n" - " beqzl %0, 1b \n" - " addu %0, %1, %3 \n" - " .set mips0 \n" - : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) - : "Ir" (i), "m" (l->a.counter) - : "memory"); - } else if (cpu_has_llsc) { - unsigned long temp; - - __asm__ __volatile__( - " .set mips3 \n" - "1: ll %1, %2 # local_add_return \n" - " addu %0, %1, %3 \n" - " sc %0, %2 \n" - " beqz %0, 1b \n" - " addu %0, %1, %3 \n" - " .set mips0 \n" - : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) - : "Ir" (i), "m" (l->a.counter) - : "memory"); - } else { - unsigned long flags; - - local_irq_save(flags); - result = l->a.counter; - result += i; - l->a.counter = result; - local_irq_restore(flags); - } - - return result; - } - -static __inline__ int local_sub_return(int i, local_t * l) -{ - unsigned long result; - - if (cpu_has_llsc && R10000_LLSC_WAR) { - 
unsigned long temp; - - __asm__ __volatile__( - " .set mips3 \n" - "1: ll %1, %2 # local_sub_return \n" - " subu %0, %1, %3 \n" - " sc %0, %2 \n" - " beqzl %0, 1b \n" - " subu %0, %1, %3 \n" - " .set mips0 \n" - : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) - : "Ir" (i), "m" (l->a.counter) - : "memory"); - } else if (cpu_has_llsc) { - unsigned long temp; - - __asm__ __volatile__( - " .set mips3 \n" - "1: ll %1, %2 # local_sub_return \n" - " subu %0, %1, %3 \n" - " sc %0, %2 \n" - " beqz %0, 1b \n" - " subu %0, %1, %3 \n" - " .set mips0 \n" - : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) - : "Ir" (i), "m" (l->a.counter) - : "memory"); - } else { - unsigned long flags; - - local_irq_save(flags); - result = l->a.counter; - result -= i; - l->a.counter = result; - local_irq_restore(flags); - } - - return result; -} - -/* - * local_sub_if_positive - conditionally subtract integer from atomic variable - * @i: integer value to subtract - * @l: pointer of type local_t - * - * Atomically test @l and subtract @i if @l is greater or equal than @i. - * The function returns the old value of @l minus @i. 
- */ -static __inline__ int local_sub_if_positive(int i, local_t * l) -{ - unsigned long result; - - if (cpu_has_llsc && R10000_LLSC_WAR) { - unsigned long temp; - - __asm__ __volatile__( - " .set mips3 \n" - "1: ll %1, %2 # local_sub_if_positive\n" - " subu %0, %1, %3 \n" - " bltz %0, 1f \n" - " sc %0, %2 \n" - " .set noreorder \n" - " beqzl %0, 1b \n" - " subu %0, %1, %3 \n" - " .set reorder \n" - "1: \n" - " .set mips0 \n" - : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) - : "Ir" (i), "m" (l->a.counter) - : "memory"); - } else if (cpu_has_llsc) { - unsigned long temp; - - __asm__ __volatile__( - " .set mips3 \n" - "1: ll %1, %2 # local_sub_if_positive\n" - " subu %0, %1, %3 \n" - " bltz %0, 1f \n" - " sc %0, %2 \n" - " .set noreorder \n" - " beqz %0, 1b \n" - " subu %0, %1, %3 \n" - " .set reorder \n" - "1: \n" - " .set mips0 \n" - : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) - : "Ir" (i), "m" (l->a.counter) - : "memory"); - } else { - unsigned long flags; - - local_irq_save(flags); - result = l->a.counter; - result -= i; - if (result >= 0) - l->a.counter = result; - local_irq_restore(flags); - } - - return result; -} - -#define local_cmpxchg(l, o, n) \ - (cmpxchg_local(&((l)->a.counter), (o), (n))) -#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n))) - -/** - * local_add_unless - add unless the number is a given value - * @l: pointer of type local_t - * @a: the amount to add to l... - * @u: ...unless l is equal to u. - * - * Atomically adds @a to @l, so long as it was not @u. - * Returns non-zero if @l was not @u, and zero otherwise. 
- */ -#define local_add_unless(l, a, u) \ -({ \ - long c, old; \ - c = local_read(l); \ - while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \ - c = old; \ - c != (u); \ -}) -#define local_inc_not_zero(l) local_add_unless((l), 1, 0) - -#define local_dec_return(l) local_sub_return(1,(l)) -#define local_inc_return(l) local_add_return(1,(l)) - -/* - * local_sub_and_test - subtract value from variable and test result - * @i: integer value to subtract - * @l: pointer of type local_t - * - * Atomically subtracts @i from @l and returns - * true if the result is zero, or false for all - * other cases. - */ -#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0) - -/* - * local_inc_and_test - increment and test - * @l: pointer of type local_t - * - * Atomically increments @l by 1 - * and returns true if the result is zero, or false for all - * other cases. - */ -#define local_inc_and_test(l) (local_inc_return(l) == 0) - -/* - * local_dec_and_test - decrement by 1 and test - * @l: pointer of type local_t - * - * Atomically decrements @l by 1 and - * returns true if the result is 0, or false for all other - * cases. - */ -#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0) - -/* - * local_dec_if_positive - decrement by 1 if old value positive - * @l: pointer of type local_t - */ -#define local_dec_if_positive(l) local_sub_if_positive(1, l) - -/* - * local_add_negative - add and test if negative - * @l: pointer of type local_t - * @i: integer value to add - * - * Atomically adds @i to @l and returns true - * if the result is negative, or false when - * result is greater than or equal to zero. 
- */ -#define local_add_negative(i,l) (local_add_return(i, (l)) < 0) - -#else /* CONFIG_64BIT */ - /* * Same as above, but return the result value */ @@ -266,9 +33,9 @@ static __inline__ long local_add_return( __asm__ __volatile__( " .set mips3 \n" - "1: lld %1, %2 # local_add_return \n" + "1:" __LL "%1, %2 # local_add_return \n" " addu %0, %1, %3 \n" - " scd %0, %2 \n" + __SC "%0, %2 \n" " beqzl %0, 1b \n" " addu %0, %1, %3 \n" " .set mips0 \n" @@ -280,9 +47,9 @@ static __inline__ long local_add_return( __asm__ __volatile__( " .set mips3 \n" - "1: lld %1, %2 # local_add_return \n" + "1:" __LL "%1, %2 # local_add_return \n" " addu %0, %1, %3 \n" - " scd %0, %2 \n" + __SC "%0, %2 \n" " beqz %0, 1b \n" " addu %0, %1, %3 \n" " .set mips0 \n" @@ -311,9 +78,9 @@ static __inline__ long local_sub_return( __asm__ __volatile__( " .set mips3 \n" - "1: lld %1, %2 # local_sub_return \n" + "1:" __LL "%1, %2 # local_sub_return \n" " subu %0, %1, %3 \n" - " scd %0, %2 \n" + __SC "%0, %2 \n" " beqzl %0, 1b \n" " subu %0, %1, %3 \n" " .set mips0 \n" @@ -325,9 +92,9 @@ static __inline__ long local_sub_return( __asm__ __volatile__( " .set mips3 \n" - "1: lld %1, %2 # local_sub_return \n" + "1:" __LL "%1, %2 # local_sub_return \n" " subu %0, %1, %3 \n" - " scd %0, %2 \n" + __SC "%0, %2 \n" " beqz %0, 1b \n" " subu %0, %1, %3 \n" " .set mips0 \n" @@ -364,10 +131,10 @@ static __inline__ long local_sub_if_posi __asm__ __volatile__( " .set mips3 \n" - "1: lld %1, %2 # local_sub_if_positive\n" + "1:" __LL "%1, %2 # local_sub_if_positive\n" " dsubu %0, %1, %3 \n" " bltz %0, 1f \n" - " scd %0, %2 \n" + __SC "%0, %2 \n" " .set noreorder \n" " beqzl %0, 1b \n" " dsubu %0, %1, %3 \n" @@ -382,10 +149,10 @@ static __inline__ long local_sub_if_posi __asm__ __volatile__( " .set mips3 \n" - "1: lld %1, %2 # local_sub_if_positive\n" + "1:" __LL "%1, %2 # local_sub_if_positive\n" " dsubu %0, %1, %3 \n" " bltz %0, 1f \n" - " scd %0, %2 \n" + __SC "%0, %2 \n" " .set noreorder \n" " beqz %0, 1b \n" " 
dsubu %0, %1, %3 \n" @@ -483,9 +250,6 @@ static __inline__ long local_sub_if_posi */ #define local_add_negative(i,l) (local_add_return(i, (l)) < 0) -#endif /* !CONFIG_64BIT */ - - /* Use these for per-cpu local_t variables: on some archs they are * much more efficient than these naive implementations. Note they take * a variable, not an address. _