arch/x86/lib/getuser.S

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:      %[r|e]ax contains the address.
 *
 * Outputs:     %[r|e]ax is error code (0 or -EFAULT)
 *              %[r|e]dx contains zero-extended value
 *              %ecx contains the high half for 32-bit __get_user_8
 *
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */
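/*
 * Illustrative sketch only: the real entry point for C code is the
 * get_user() macro in <asm/uaccess.h>, which dispatches on sizeof(*ptr)
 * and also passes ASM_CALL_CONSTRAINT.  Conceptually, a one-byte fetch
 * boils down to inline assembly along these lines, where user_ptr is just
 * a stand-in name for the userspace pointer being read:
 *
 *      int err;
 *      unsigned int val;
 *
 *      asm volatile("call __get_user_1"
 *                   : "=a" (err), "=d" (val)
 *                   : "0" (user_ptr)
 *                   : "cc");
 *
 * Because the helpers clobber nothing beyond the documented output
 * registers, the caller needs no further clobber list, which is what makes
 * this calling convention cheaper than a normal C function call.
 */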

#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
#include <asm/export.h>

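/*
 * Each routine below follows the same pattern:
 *
 *  1. Range-check the address against the task's addr_limit (loaded via
 *     PER_CPU_VAR(current_task) and TASK_addr_limit) and branch to the
 *     bad_get_user path if it is not a user address.
 *  2. Clamp the address with an sbb/and mask (the open-coded
 *     array_index_mask_nospec()) so that a mispredicted range check cannot
 *     be used to speculatively read kernel memory (Spectre v1).
 *  3. Bracket the user access with ASM_STAC/ASM_CLAC so it is permitted
 *     under SMAP.
 *  4. Register the access in the exception table (the numbered labels and
 *     the _ASM_EXTABLE_UA entries at the bottom) so a fault lands in the
 *     fixup code instead of oopsing.
 */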
        .text
ENTRY(__get_user_1)
        mov PER_CPU_VAR(current_task), %_ASM_DX
        cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
        jae bad_get_user
        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
        and %_ASM_DX, %_ASM_AX
        ASM_STAC
1:      movzbl (%_ASM_AX),%edx
        xor %eax,%eax
        ASM_CLAC
        ret
ENDPROC(__get_user_1)
EXPORT_SYMBOL(__get_user_1)

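/*
 * For the multi-byte variants it is the address of the *last* byte of the
 * access that gets range-checked: the leading add $(size-1) both computes
 * that address and, via jc, catches a wrap-around past the top of the
 * address space.  The negative displacement on the load (-1, -3 or -7)
 * then undoes the adjustment so the data is still read from the original
 * address.
 */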
ENTRY(__get_user_2)
        add $1,%_ASM_AX
        jc bad_get_user
        mov PER_CPU_VAR(current_task), %_ASM_DX
        cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
        jae bad_get_user
        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
        and %_ASM_DX, %_ASM_AX
        ASM_STAC
2:      movzwl -1(%_ASM_AX),%edx
        xor %eax,%eax
        ASM_CLAC
        ret
ENDPROC(__get_user_2)
EXPORT_SYMBOL(__get_user_2)

ENTRY(__get_user_4)
        add $3,%_ASM_AX
        jc bad_get_user
        mov PER_CPU_VAR(current_task), %_ASM_DX
        cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
        jae bad_get_user
        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
        and %_ASM_DX, %_ASM_AX
        ASM_STAC
3:      movl -3(%_ASM_AX),%edx
        xor %eax,%eax
        ASM_CLAC
        ret
ENDPROC(__get_user_4)
EXPORT_SYMBOL(__get_user_4)

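/*
 * The 8-byte variant is the only one that differs by configuration: on
 * 64-bit it is a single quadword load into %rdx, while on 32-bit it is two
 * longword loads, low half into %edx and high half into %ecx (as noted in
 * the header above), with a dedicated fixup that clears both halves on
 * failure.
 */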
ENTRY(__get_user_8)
#ifdef CONFIG_X86_64
        add $7,%_ASM_AX
        jc bad_get_user
        mov PER_CPU_VAR(current_task), %_ASM_DX
        cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
        jae bad_get_user
        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
        and %_ASM_DX, %_ASM_AX
        ASM_STAC
4:      movq -7(%_ASM_AX),%rdx
        xor %eax,%eax
        ASM_CLAC
        ret
#else
        add $7,%_ASM_AX
        jc bad_get_user_8
        mov PER_CPU_VAR(current_task), %_ASM_DX
        cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
        jae bad_get_user_8
        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
        and %_ASM_DX, %_ASM_AX
        ASM_STAC
4:      movl -7(%_ASM_AX),%edx
5:      movl -3(%_ASM_AX),%ecx
        xor %eax,%eax
        ASM_CLAC
        ret
#endif
ENDPROC(__get_user_8)
EXPORT_SYMBOL(__get_user_8)


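/*
 * Fault and range-check failure paths.  The exception table entries below
 * enter at the .L*_clac labels, where EFLAGS.AC may still be set from the
 * ASM_STAC above, so it is cleared first; the direct jae/jc branches jump
 * past the CLAC straight to bad_get_user*, since they fire before STAC.
 * Either way the value register(s) are zeroed and -EFAULT is returned in
 * %[r|e]ax.
 */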
.Lbad_get_user_clac:
        ASM_CLAC
bad_get_user:
        xor %edx,%edx
        mov $(-EFAULT),%_ASM_AX
        ret

#ifdef CONFIG_X86_32
.Lbad_get_user_8_clac:
        ASM_CLAC
bad_get_user_8:
        xor %edx,%edx
        xor %ecx,%ecx
        mov $(-EFAULT),%_ASM_AX
        ret
#endif

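/*
 * Exception table entries: each numbered label above (the actual user
 * accesses) is paired with its fixup here, so a fault during the load is
 * redirected to the bad_get_user path instead of oopsing the kernel.
 */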
        _ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
        _ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
        _ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
        _ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
        _ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
        _ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif
