/* arch/x86/lib/msr-reg.S */
   1 /* SPDX-License-Identifier: GPL-2.0 */
   2 #include <linux/linkage.h>
   3 #include <linux/errno.h>
   4 #include <asm/asm.h>
   5 #include <asm/msr.h>
   6 
   7 #ifdef CONFIG_X86_64
   8 /*
   9  * int {rdmsr,wrmsr}_safe_regs(u32 gprs[8]);
  10  *
  11  * reg layout: u32 gprs[eax, ecx, edx, ebx, esp, ebp, esi, edi]
  12  *
  13  */
  14 .macro op_safe_regs op
  15 ENTRY(\op\()_safe_regs)
  16         pushq %rbx
  17         pushq %r12
  18         movq    %rdi, %r10      /* Save pointer */
  19         xorl    %r11d, %r11d    /* Return value */
  20         movl    (%rdi), %eax
  21         movl    4(%rdi), %ecx
  22         movl    8(%rdi), %edx
  23         movl    12(%rdi), %ebx
  24         movl    20(%rdi), %r12d
  25         movl    24(%rdi), %esi
  26         movl    28(%rdi), %edi
  27 1:      \op
  28 2:      movl    %eax, (%r10)
  29         movl    %r11d, %eax     /* Return value */
  30         movl    %ecx, 4(%r10)
  31         movl    %edx, 8(%r10)
  32         movl    %ebx, 12(%r10)
  33         movl    %r12d, 20(%r10)
  34         movl    %esi, 24(%r10)
  35         movl    %edi, 28(%r10)
  36         popq %r12
  37         popq %rbx
  38         ret
  39 3:
  40         movl    $-EIO, %r11d
  41         jmp     2b
  42 
  43         _ASM_EXTABLE(1b, 3b)
  44 ENDPROC(\op\()_safe_regs)
  45 .endm
  46 
  47 #else /* X86_32 */
  48 
  49 .macro op_safe_regs op
  50 ENTRY(\op\()_safe_regs)
  51         pushl %ebx
  52         pushl %ebp
  53         pushl %esi
  54         pushl %edi
  55         pushl $0              /* Return value */
  56         pushl %eax
  57         movl    4(%eax), %ecx
  58         movl    8(%eax), %edx
  59         movl    12(%eax), %ebx
  60         movl    20(%eax), %ebp
  61         movl    24(%eax), %esi
  62         movl    28(%eax), %edi
  63         movl    (%eax), %eax
  64 1:      \op
  65 2:      pushl %eax
  66         movl    4(%esp), %eax
  67         popl (%eax)
  68         addl    $4, %esp
  69         movl    %ecx, 4(%eax)
  70         movl    %edx, 8(%eax)
  71         movl    %ebx, 12(%eax)
  72         movl    %ebp, 20(%eax)
  73         movl    %esi, 24(%eax)
  74         movl    %edi, 28(%eax)
  75         popl %eax
  76         popl %edi
  77         popl %esi
  78         popl %ebp
  79         popl %ebx
  80         ret
  81 3:
  82         movl    $-EIO, 4(%esp)
  83         jmp     2b
  84 
  85         _ASM_EXTABLE(1b, 3b)
  86 ENDPROC(\op\()_safe_regs)
  87 .endm
  88 
  89 #endif
  90 
  91 op_safe_regs rdmsr
  92 op_safe_regs wrmsr
  93 

/* [<][>][^][v][top][bottom][index][help] */