bbs.cooldavid.org Git - net-next-2.6.git/commitdiff
percpu, x86: Generic inc / dec percpu instructions
author: Christoph Lameter <cl@linux-foundation.org>
Tue, 5 Jan 2010 06:34:50 +0000 (15:34 +0900)
committer: Tejun Heo <tj@kernel.org>
Tue, 5 Jan 2010 06:34:50 +0000 (15:34 +0900)
Optimize the code generated for percpu access by checking for increments and
decrements.

tj: fix incorrect usage of __builtin_constant_p() and restructure
    percpu_add_op() macro.

Signed-off-by: Christoph Lameter <cl@linux-foundation.org>
Signed-off-by: Tejun Heo <tj@kernel.org>
arch/x86/include/asm/percpu.h

index 4c170ccc72ed6b70491eca976cd5d45abae8b510..66a272dfd8b8aa0d5fcb788ace9bec17da31e236 100644 (file)
@@ -103,6 +103,64 @@ do {                                                       \
        }                                               \
 } while (0)
 
+/*
+ * Generate a percpu add to memory instruction and optimize code
+ * if a one is added or subtracted.
+ */
+#define percpu_add_op(var, val)                                                \
+do {                                                                   \
+       typedef typeof(var) pao_T__;                                    \
+       const int pao_ID__ = (__builtin_constant_p(val) &&              \
+                             ((val) == 1 || (val) == -1)) ? (val) : 0; \
+       if (0) {                                                        \
+               pao_T__ pao_tmp__;                                      \
+               pao_tmp__ = (val);                                      \
+       }                                                               \
+       switch (sizeof(var)) {                                          \
+       case 1:                                                         \
+               if (pao_ID__ == 1)                                      \
+                       asm("incb "__percpu_arg(0) : "+m" (var));       \
+               else if (pao_ID__ == -1)                                \
+                       asm("decb "__percpu_arg(0) : "+m" (var));       \
+               else                                                    \
+                       asm("addb %1, "__percpu_arg(0)                  \
+                           : "+m" (var)                                \
+                           : "qi" ((pao_T__)(val)));                   \
+               break;                                                  \
+       case 2:                                                         \
+               if (pao_ID__ == 1)                                      \
+                       asm("incw "__percpu_arg(0) : "+m" (var));       \
+               else if (pao_ID__ == -1)                                \
+                       asm("decw "__percpu_arg(0) : "+m" (var));       \
+               else                                                    \
+                       asm("addw %1, "__percpu_arg(0)                  \
+                           : "+m" (var)                                \
+                           : "ri" ((pao_T__)(val)));                   \
+               break;                                                  \
+       case 4:                                                         \
+               if (pao_ID__ == 1)                                      \
+                       asm("incl "__percpu_arg(0) : "+m" (var));       \
+               else if (pao_ID__ == -1)                                \
+                       asm("decl "__percpu_arg(0) : "+m" (var));       \
+               else                                                    \
+                       asm("addl %1, "__percpu_arg(0)                  \
+                           : "+m" (var)                                \
+                           : "ri" ((pao_T__)(val)));                   \
+               break;                                                  \
+       case 8:                                                         \
+               if (pao_ID__ == 1)                                      \
+                       asm("incq "__percpu_arg(0) : "+m" (var));       \
+               else if (pao_ID__ == -1)                                \
+                       asm("decq "__percpu_arg(0) : "+m" (var));       \
+               else                                                    \
+                       asm("addq %1, "__percpu_arg(0)                  \
+                           : "+m" (var)                                \
+                           : "re" ((pao_T__)(val)));                   \
+               break;                                                  \
+       default: __bad_percpu_size();                                   \
+       }                                                               \
+} while (0)
+
 #define percpu_from_op(op, var, constraint)            \
 ({                                                     \
        typeof(var) pfo_ret__;                          \
@@ -144,8 +202,8 @@ do {                                                        \
 #define percpu_read(var)               percpu_from_op("mov", var, "m" (var))
 #define percpu_read_stable(var)                percpu_from_op("mov", var, "p" (&(var)))
 #define percpu_write(var, val)         percpu_to_op("mov", var, val)
-#define percpu_add(var, val)           percpu_to_op("add", var, val)
-#define percpu_sub(var, val)           percpu_to_op("sub", var, val)
+#define percpu_add(var, val)           percpu_add_op(var, val)
+#define percpu_sub(var, val)           percpu_add_op(var, -(val))
 #define percpu_and(var, val)           percpu_to_op("and", var, val)
 #define percpu_or(var, val)            percpu_to_op("or", var, val)
 #define percpu_xor(var, val)           percpu_to_op("xor", var, val)
@@ -157,9 +215,9 @@ do {                                                        \
 #define __this_cpu_write_1(pcp, val)   percpu_to_op("mov", (pcp), val)
 #define __this_cpu_write_2(pcp, val)   percpu_to_op("mov", (pcp), val)
 #define __this_cpu_write_4(pcp, val)   percpu_to_op("mov", (pcp), val)
-#define __this_cpu_add_1(pcp, val)     percpu_to_op("add", (pcp), val)
-#define __this_cpu_add_2(pcp, val)     percpu_to_op("add", (pcp), val)
-#define __this_cpu_add_4(pcp, val)     percpu_to_op("add", (pcp), val)
+#define __this_cpu_add_1(pcp, val)     percpu_add_op((pcp), val)
+#define __this_cpu_add_2(pcp, val)     percpu_add_op((pcp), val)
+#define __this_cpu_add_4(pcp, val)     percpu_add_op((pcp), val)
 #define __this_cpu_and_1(pcp, val)     percpu_to_op("and", (pcp), val)
 #define __this_cpu_and_2(pcp, val)     percpu_to_op("and", (pcp), val)
 #define __this_cpu_and_4(pcp, val)     percpu_to_op("and", (pcp), val)
@@ -176,9 +234,9 @@ do {                                                        \
 #define this_cpu_write_1(pcp, val)     percpu_to_op("mov", (pcp), val)
 #define this_cpu_write_2(pcp, val)     percpu_to_op("mov", (pcp), val)
 #define this_cpu_write_4(pcp, val)     percpu_to_op("mov", (pcp), val)
-#define this_cpu_add_1(pcp, val)       percpu_to_op("add", (pcp), val)
-#define this_cpu_add_2(pcp, val)       percpu_to_op("add", (pcp), val)
-#define this_cpu_add_4(pcp, val)       percpu_to_op("add", (pcp), val)
+#define this_cpu_add_1(pcp, val)       percpu_add_op((pcp), val)
+#define this_cpu_add_2(pcp, val)       percpu_add_op((pcp), val)
+#define this_cpu_add_4(pcp, val)       percpu_add_op((pcp), val)
 #define this_cpu_and_1(pcp, val)       percpu_to_op("and", (pcp), val)
 #define this_cpu_and_2(pcp, val)       percpu_to_op("and", (pcp), val)
 #define this_cpu_and_4(pcp, val)       percpu_to_op("and", (pcp), val)
@@ -189,9 +247,9 @@ do {                                                        \
 #define this_cpu_xor_2(pcp, val)       percpu_to_op("xor", (pcp), val)
 #define this_cpu_xor_4(pcp, val)       percpu_to_op("xor", (pcp), val)
 
-#define irqsafe_cpu_add_1(pcp, val)    percpu_to_op("add", (pcp), val)
-#define irqsafe_cpu_add_2(pcp, val)    percpu_to_op("add", (pcp), val)
-#define irqsafe_cpu_add_4(pcp, val)    percpu_to_op("add", (pcp), val)
+#define irqsafe_cpu_add_1(pcp, val)    percpu_add_op((pcp), val)
+#define irqsafe_cpu_add_2(pcp, val)    percpu_add_op((pcp), val)
+#define irqsafe_cpu_add_4(pcp, val)    percpu_add_op((pcp), val)
 #define irqsafe_cpu_and_1(pcp, val)    percpu_to_op("and", (pcp), val)
 #define irqsafe_cpu_and_2(pcp, val)    percpu_to_op("and", (pcp), val)
 #define irqsafe_cpu_and_4(pcp, val)    percpu_to_op("and", (pcp), val)
@@ -209,19 +267,19 @@ do {                                                      \
 #ifdef CONFIG_X86_64
 #define __this_cpu_read_8(pcp)         percpu_from_op("mov", (pcp), "m"(pcp))
 #define __this_cpu_write_8(pcp, val)   percpu_to_op("mov", (pcp), val)
-#define __this_cpu_add_8(pcp, val)     percpu_to_op("add", (pcp), val)
+#define __this_cpu_add_8(pcp, val)     percpu_add_op((pcp), val)
 #define __this_cpu_and_8(pcp, val)     percpu_to_op("and", (pcp), val)
 #define __this_cpu_or_8(pcp, val)      percpu_to_op("or", (pcp), val)
 #define __this_cpu_xor_8(pcp, val)     percpu_to_op("xor", (pcp), val)
 
 #define this_cpu_read_8(pcp)           percpu_from_op("mov", (pcp), "m"(pcp))
 #define this_cpu_write_8(pcp, val)     percpu_to_op("mov", (pcp), val)
-#define this_cpu_add_8(pcp, val)       percpu_to_op("add", (pcp), val)
+#define this_cpu_add_8(pcp, val)       percpu_add_op((pcp), val)
 #define this_cpu_and_8(pcp, val)       percpu_to_op("and", (pcp), val)
 #define this_cpu_or_8(pcp, val)                percpu_to_op("or", (pcp), val)
 #define this_cpu_xor_8(pcp, val)       percpu_to_op("xor", (pcp), val)
 
-#define irqsafe_cpu_add_8(pcp, val)    percpu_to_op("add", (pcp), val)
+#define irqsafe_cpu_add_8(pcp, val)    percpu_add_op((pcp), val)
 #define irqsafe_cpu_and_8(pcp, val)    percpu_to_op("and", (pcp), val)
 #define irqsafe_cpu_or_8(pcp, val)     percpu_to_op("or", (pcp), val)
 #define irqsafe_cpu_xor_8(pcp, val)    percpu_to_op("xor", (pcp), val)