From 8ee5797a91bdb713b4031741b33bd035f9c43870 Mon Sep 17 00:00:00 2001
From: Harvey Harrison
Date: Wed, 30 Jan 2008 13:31:26 +0100
Subject: [PATCH] x86: introduce asm helpers in local_{32|64}.h

Handle the use of long on X86_32 and quad on X86_64

Signed-off-by: Harvey Harrison
Signed-off-by: Thomas Gleixner
Signed-off-by: Ingo Molnar
---
 include/asm-x86/asm.h      | 12 ++++++++++++
 include/asm-x86/local_32.h | 18 +++++++++---------
 include/asm-x86/local_64.h | 18 +++++++++---------
 3 files changed, 30 insertions(+), 18 deletions(-)

diff --git a/include/asm-x86/asm.h b/include/asm-x86/asm.h
index 8661ae75488a..1a6980a60fc6 100644
--- a/include/asm-x86/asm.h
+++ b/include/asm-x86/asm.h
@@ -8,6 +8,12 @@
 # define _ASM_ALIGN	" .balign 4 "
 # define _ASM_MOV_UL	" movl "
 
+# define _ASM_INC	" incl "
+# define _ASM_DEC	" decl "
+# define _ASM_ADD	" addl "
+# define _ASM_SUB	" subl "
+# define _ASM_XADD	" xaddl "
+
 #else
 /* 64 bits */
 
@@ -15,6 +21,12 @@
 # define _ASM_ALIGN	" .balign 8 "
 # define _ASM_MOV_UL	" movq "
 
+# define _ASM_INC	" incq "
+# define _ASM_DEC	" decq "
+# define _ASM_ADD	" addq "
+# define _ASM_SUB	" subq "
+# define _ASM_XADD	" xaddq "
+
 #endif /* CONFIG_X86_32 */
 
 #endif /* _ASM_X86_ASM_H */
diff --git a/include/asm-x86/local_32.h b/include/asm-x86/local_32.h
index 33d9c7bf463c..c219fe56b3df 100644
--- a/include/asm-x86/local_32.h
+++ b/include/asm-x86/local_32.h
@@ -4,21 +4,21 @@
 static inline void local_inc(local_t *l)
 {
 	__asm__ __volatile__(
-		"incl %0"
+		_ASM_INC "%0"
 		:"+m" (l->a.counter));
 }
 
 static inline void local_dec(local_t *l)
 {
 	__asm__ __volatile__(
-		"decl %0"
+		_ASM_DEC "%0"
 		:"+m" (l->a.counter));
 }
 
 static inline void local_add(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"addl %1,%0"
+		_ASM_ADD "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -26,7 +26,7 @@ static inline void local_add(long i, local_t *l)
 static inline void local_sub(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"subl %1,%0"
+		_ASM_SUB "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -45,7 +45,7 @@ static inline int local_sub_and_test(long i, local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"subl %2,%0; sete %1"
+		_ASM_SUB "%2,%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -64,7 +64,7 @@ static inline int local_dec_and_test(local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"decl %0; sete %1"
+		_ASM_DEC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -83,7 +83,7 @@ static inline int local_inc_and_test(local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"incl %0; sete %1"
+		_ASM_INC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -103,7 +103,7 @@ static inline int local_add_negative(long i, local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"addl %2,%0; sets %1"
+		_ASM_ADD "%2,%0; sets %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -127,7 +127,7 @@ static inline long local_add_return(long i, local_t *l)
 	/* Modern 486+ processor */
 	__i = i;
 	__asm__ __volatile__(
-		"xaddl %0, %1;"
+		_ASM_XADD "%0, %1;"
 		:"+r" (i), "+m" (l->a.counter)
 		: : "memory");
 	return i + __i;
diff --git a/include/asm-x86/local_64.h b/include/asm-x86/local_64.h
index 50e99eddd628..d685cd7e014f 100644
--- a/include/asm-x86/local_64.h
+++ b/include/asm-x86/local_64.h
@@ -4,21 +4,21 @@
 static inline void local_inc(local_t *l)
 {
 	__asm__ __volatile__(
-		"incq %0"
+		_ASM_INC "%0"
 		:"+m" (l->a.counter));
 }
 
 static inline void local_dec(local_t *l)
 {
 	__asm__ __volatile__(
-		"decq %0"
+		_ASM_DEC "%0"
 		:"+m" (l->a.counter));
 }
 
 static inline void local_add(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"addq %1,%0"
+		_ASM_ADD "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -26,7 +26,7 @@ static inline void local_add(long i, local_t *l)
 static inline void local_sub(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"subq %1,%0"
+		_ASM_SUB "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -45,7 +45,7 @@ static inline int local_sub_and_test(long i, local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"subq %2,%0; sete %1"
+		_ASM_SUB "%2,%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -64,7 +64,7 @@ static inline int local_dec_and_test(local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"decq %0; sete %1"
+		_ASM_DEC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -83,7 +83,7 @@ static inline int local_inc_and_test(local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"incq %0; sete %1"
+		_ASM_INC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -103,7 +103,7 @@ static inline int local_add_negative(long i, local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"addq %2,%0; sets %1"
+		_ASM_ADD "%2,%0; sets %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -120,7 +120,7 @@ static inline long local_add_return(long i, local_t *l)
 {
 	long __i = i;
 	__asm__ __volatile__(
-		"xaddq %0, %1;"
+		_ASM_XADD "%0, %1;"
 		:"+r" (i), "+m" (l->a.counter)
 		: : "memory");
 	return i + __i;
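
Not part of the patch: the sketch below is a minimal, stand-alone illustration of
the pattern the _ASM_* helpers enable, namely selecting the operand-width suffix
(l vs q) once per architecture so the same inline-asm bodies serve both local_32.h
and local_64.h. It assumes an x86 target and ordinary user-space C, and it
simplifies local_t to a bare long counter instead of the kernel's atomic_long_t
wrapper; only local_inc() and local_add_return() are shown.

/*
 * Stand-alone sketch (not kernel code).  The _ASM_* names mirror the
 * patch; local_t here is a plain long, not atomic_long_t.
 */
#include <stdio.h>

#ifdef __x86_64__
# define _ASM_INC	" incq "
# define _ASM_XADD	" xaddq "
#else	/* assumes 32-bit x86 */
# define _ASM_INC	" incl "
# define _ASM_XADD	" xaddl "
#endif

typedef struct { volatile long counter; } local_t;

/* Identical body for both word sizes; the mnemonic was chosen above. */
static inline void local_inc(local_t *l)
{
	__asm__ __volatile__(
		_ASM_INC "%0"
		:"+m" (l->counter));
}

static inline long local_add_return(long i, local_t *l)
{
	long __i = i;

	__asm__ __volatile__(
		_ASM_XADD "%0, %1;"		/* i <- old counter; counter += i */
		:"+r" (i), "+m" (l->counter)
		: : "memory");
	return i + __i;			/* old value + increment = new value */
}

int main(void)
{
	local_t l = { 0 };

	local_inc(&l);					/* counter is now 1 */
	printf("%ld\n", local_add_return(41, &l));	/* prints 42 */
	return 0;
}

With the helpers in asm.h picking "incl"/"incq", "addl"/"addq", and so on, the
function bodies in local_32.h and local_64.h become textually identical.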