root/arch/x86/power/hibernate_asm_32.S

/* [<][>][^][v][top][bottom][index][help] */
   1 /* SPDX-License-Identifier: GPL-2.0 */
   2 /*
   3  * This may not use any stack, nor any variable that is not "NoSave":
   4  *
   5  * Its rewriting one kernel image with another. What is stack in "old"
   6  * image could very well be data page in "new" image, and overwriting
   7  * your own stack under you is bad idea.
   8  */
   9 
  10 #include <linux/linkage.h>
  11 #include <asm/segment.h>
  12 #include <asm/page_types.h>
  13 #include <asm/asm-offsets.h>
  14 #include <asm/processor-flags.h>
  15 #include <asm/frame.h>
  16 
  17 .text
  18 
   19 ENTRY(swsusp_arch_suspend)
        /*
         * Save the callee-saved registers into the NoSave
         * saved_context_* variables (plain memory, not the stack --
         * per the header comment, the stack may be overwritten during
         * resume, so nothing here may rely on it surviving).
         */
   20         movl %esp, saved_context_esp
   21         movl %ebx, saved_context_ebx
   22         movl %ebp, saved_context_ebp
   23         movl %esi, saved_context_esi
   24         movl %edi, saved_context_edi
        /* EFLAGS has no direct mov form; bounce it through the stack. */
   25         pushfl
   26         popl saved_context_eflags
   27 
   28         /* save cr3 */
   29         movl    %cr3, %eax
   30         movl    %eax, restore_cr3
   31 
        /*
         * Snapshot memory.  swsusp_save's result is left in %eax
         * (i386 cdecl) and becomes this function's return value.
         */
   32         FRAME_BEGIN
   33         call swsusp_save
   34         FRAME_END
   35         ret
   36 ENDPROC(swsusp_arch_suspend)
  37 
   38 ENTRY(restore_image)
        /*
         * Load everything needed after the page copy into registers
         * the copy loop does not touch:
         *   %ebx = entry point in the image kernel (restore_jump_address)
         *   %ebp = image kernel's cr3 (restored in restore_registers)
         *   %ecx = saved cr4 feature bits (tested in core_restore_code)
         */
   39         /* prepare to jump to the image kernel */
   40         movl    restore_jump_address, %ebx
   41         movl    restore_cr3, %ebp
   42 
   43         movl    mmu_cr4_features, %ecx
   44 
        /*
         * Jump to the copy of core_restore_code that lives in a safe
         * page, so the page-copy loop cannot overwrite the very code
         * it is executing.
         */
   45         /* jump to relocated restore code */
   46         movl    relocated_restore_code, %eax
   47         jmpl    *%eax
  48 
  49 /* code below has been relocated to a safe page */
  50 ENTRY(core_restore_code)
  51         movl    temp_pgt, %eax
  52         movl    %eax, %cr3
  53 
  54         jecxz   1f      # cr4 Pentium and higher, skip if zero
  55         andl    $~(X86_CR4_PGE), %ecx
  56         movl    %ecx, %cr4;  # turn off PGE
  57         movl    %cr3, %eax;  # flush TLB
  58         movl    %eax, %cr3
  59 1:
  60         movl    restore_pblist, %edx
  61         .p2align 4,,7
  62 
  63 copy_loop:
  64         testl   %edx, %edx
  65         jz      done
  66 
  67         movl    pbe_address(%edx), %esi
  68         movl    pbe_orig_address(%edx), %edi
  69 
  70         movl    $(PAGE_SIZE >> 2), %ecx
  71         rep
  72         movsl
  73 
  74         movl    pbe_next(%edx), %edx
  75         jmp     copy_loop
  76         .p2align 4,,7
  77 
  78 done:
  79         jmpl    *%ebx
  80 
   81         /* code below belongs to the image kernel */
   82         .align PAGE_SIZE
   83 ENTRY(restore_registers)
   84         /* go back to the original page tables */
   85         movl    %ebp, %cr3
        /*
         * Re-read the cr4 features (the copy loop clobbered %ecx) and
         * turn PGE back on -- skipped entirely on CPUs without cr4.
         */
   86         movl    mmu_cr4_features, %ecx
   87         jecxz   1f      # cr4 Pentium and higher, skip if zero
   88         movl    %ecx, %cr4;  # turn PGE back on
   89 1:
   90 
        /*
         * Restore the registers saved by swsusp_arch_suspend.  %esp
         * comes first: only after this is the stack valid again.
         */
   91         movl saved_context_esp, %esp
   92         movl saved_context_ebp, %ebp
   93         movl saved_context_ebx, %ebx
   94         movl saved_context_esi, %esi
   95         movl saved_context_edi, %edi
   96 
        /* EFLAGS must go through the (now restored) stack. */
   97         pushl saved_context_eflags
   98         popfl
   99 
  100         /* Saved in save_processor_state. */
  101         movl $saved_context, %eax
  102         lgdt saved_context_gdt_desc(%eax)
  103 
        /*
         * %eax = 0 both clears in_suspend below and serves as this
         * function's return value (i386 cdecl).
         */
  104         xorl    %eax, %eax
  105 
  106         /* tell the hibernation core that we've just restored the memory */
  107         movl    %eax, in_suspend
  108 
  109         ret
  110 ENDPROC(restore_registers)

/* [<][>][^][v][top][bottom][index][help] */