xtensa: fix type conversion in __get_user_[no]check
author    Max Filippov <jcmvbkbc@gmail.com>
          Fri, 11 Oct 2019 03:55:35 +0000 (20:55 -0700)
committer Max Filippov <jcmvbkbc@gmail.com>
          Mon, 14 Oct 2019 21:14:21 +0000 (14:14 -0700)
__get_user_[no]check uses a temporary buffer of type long to store the
result of __get_user_size and to sign-extend it when necessary. This
doesn't work correctly for 64-bit data. Fix it by moving the temporary
buffer/sign extension logic into __get_user_asm.
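
(Illustration only, not kernel code: a host-side sketch of the old
staging pattern, with int32_t standing in for xtensa's 32-bit long and
made-up variable names.)

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        /* 64-bit value as fetched by __get_user_size */
        uint64_t user_val = 0x123456789abcdef0ULL;
        /* old macro: result staged in a 'long' temporary (32 bits on xtensa) */
        int32_t gu_val = (int32_t)user_val;
        /* sign extension back into the destination */
        uint64_t x = (uint64_t)(int64_t)gu_val;

        /* prints 123456789abcdef0 vs ffffffff9abcdef0: the high half is gone */
        printf("%llx vs %llx\n",
               (unsigned long long)user_val, (unsigned long long)x);
        return 0;
}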

Don't assign the result of __get_user_bad to (x): (x) may no longer be
integer-compatible, so the assignment would issue a warning even when
it is going to be optimized away. Instead do (x) = 0; and call
__get_user_bad separately.
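
(Illustration only: a sketch of the warning being avoided, assuming a
pointer-typed (x); bad_size() is a made-up stand-in for __get_user_bad.
Compile with -c to see the diagnostic.)

/* bad_size() stands in for __get_user_bad(): declared, never defined */
extern long bad_size(void);

void old_way(char **x)
{
        /* warning: assignment to 'char *' from 'long int' makes pointer
         * from integer without a cast, even though this branch is only
         * reached for unsupported sizes and is normally optimized away */
        *x = bad_size();
}

void new_way(char **x)
{
        *x = 0;         /* (x) = 0; is valid for any supported type */
        bad_size();     /* still fails at link time if this branch survives */
}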

Zero-initialize __x in __get_user_asm and use the '+' constraint for
its assembly argument so that its value is preserved in error cases.
This may add at most one cycle to the fast path, but it saves an
instruction and two padding bytes in the fixup section for each use of
this macro, and it works both when the load is misaligned and when it
raises an exception.
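
(Illustration only: a host-compilable sketch of the new pattern with
made-up names; an empty asm stands in for the xtensa load and its
fixup, so it models the error path where the load never writes %[x].)

#include <stdio.h>

#define GET_USER_SKETCH(x_) \
do { \
        unsigned int __x = 0;   /* value the caller sees on a fault */ \
        __asm__ __volatile__("" /* real macro: l8ui/l16ui/l32i + fixup */ \
                : [x] "+r"(__x));       /* '+': the initial 0 is an input too */ \
        (x_) = __x; \
} while (0)

int main(void)
{
        unsigned int v = 0xdeadbeef;

        /* simulate the fault path: the asm never writes %[x] */
        GET_USER_SKETCH(v);
        printf("%u\n", v);      /* prints 0, the preserved initializer */
        return 0;
}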

Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
diff --git a/arch/xtensa/include/asm/uaccess.h b/arch/xtensa/include/asm/uaccess.h
index da4d3544506308d8022bb66019eb7f4706f07102..3f80386f18838902b1a96a32c3839cf9979ec436 100644
--- a/arch/xtensa/include/asm/uaccess.h
+++ b/arch/xtensa/include/asm/uaccess.h
@@ -172,19 +172,19 @@ __asm__ __volatile__(                                     \
 
 #define __get_user_nocheck(x, ptr, size)                       \
 ({                                                             \
-       long __gu_err, __gu_val;                                \
-       __get_user_size(__gu_val, (ptr), (size), __gu_err);     \
-       (x) = (__force __typeof__(*(ptr)))__gu_val;             \
+       long __gu_err;                                          \
+       __get_user_size((x), (ptr), (size), __gu_err);          \
        __gu_err;                                               \
 })
 
 #define __get_user_check(x, ptr, size)                                 \
 ({                                                                     \
-       long __gu_err = -EFAULT, __gu_val = 0;                          \
+       long __gu_err = -EFAULT;                                        \
        const __typeof__(*(ptr)) *__gu_addr = (ptr);                    \
-       if (access_ok(__gu_addr, size))                 \
-               __get_user_size(__gu_val, __gu_addr, (size), __gu_err); \
-       (x) = (__force __typeof__(*(ptr)))__gu_val;                     \
+       if (access_ok(__gu_addr, size))                                 \
+               __get_user_size((x), __gu_addr, (size), __gu_err);      \
+       else                                                            \
+               (x) = 0;                                                \
        __gu_err;                                                       \
 })
 
@@ -208,7 +208,7 @@ do {                                                                        \
                }                                                       \
                break;                                                  \
        }                                                               \
-       default: (x) = __get_user_bad();                                \
+       default: (x) = 0; __get_user_bad();                             \
        }                                                               \
 } while (0)
 
@@ -218,24 +218,27 @@ do {                                                                      \
  * __check_align_* macros still work.
  */
 #define __get_user_asm(x_, addr_, err_, align, insn, cb) \
-__asm__ __volatile__(                          \
-       __check_align_##align                   \
-       "1: "insn"  %[x], %[addr], 0    \n"     \
-       "2:                             \n"     \
-       "   .section  .fixup,\"ax\"     \n"     \
-       "   .align 4                    \n"     \
-       "   .literal_position           \n"     \
-       "5:                             \n"     \
-       "   movi   %[tmp], 2b           \n"     \
-       "   movi   %[x], 0              \n"     \
-       "   movi   %[err], %[efault]    \n"     \
-       "   jx     %[tmp]               \n"     \
-       "   .previous                   \n"     \
-       "   .section  __ex_table,\"a\"  \n"     \
-       "   .long       1b, 5b          \n"     \
-       "   .previous"                          \
-       :[err] "+r"(err_), [tmp] "=r"(cb), [x] "=r"(x_)\
-       :[addr] "r"(addr_), [efault] "i"(-EFAULT))
+do {                                                   \
+       u32 __x = 0;                                    \
+       __asm__ __volatile__(                           \
+               __check_align_##align                   \
+               "1: "insn"  %[x], %[addr], 0    \n"     \
+               "2:                             \n"     \
+               "   .section  .fixup,\"ax\"     \n"     \
+               "   .align 4                    \n"     \
+               "   .literal_position           \n"     \
+               "5:                             \n"     \
+               "   movi   %[tmp], 2b           \n"     \
+               "   movi   %[err], %[efault]    \n"     \
+               "   jx     %[tmp]               \n"     \
+               "   .previous                   \n"     \
+               "   .section  __ex_table,\"a\"  \n"     \
+               "   .long       1b, 5b          \n"     \
+               "   .previous"                          \
+               :[err] "+r"(err_), [tmp] "=r"(cb), [x] "+r"(__x) \
+               :[addr] "r"(addr_), [efault] "i"(-EFAULT)); \
+       (x_) = (__force __typeof__(*(addr_)))__x;       \
+} while (0)
 
 
 /*