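/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Low-level CPU initialisation
 * Based on arch/arm/kernel/head.S
 *
 * Copyright (C) 1994-2002 Russell King
 * Copyright (C) 2003-2012 ARM Ltd.
 * Authors:	Catalin Marinas <catalin.marinas@arm.com>
 *		Will Deacon <will.deacon@arm.com>
 */
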
#include <linux/linkage.h>
#include <linux/init.h>
#include <linux/irqchip/arm-gic-v3.h>

#include <asm/assembler.h>
#include <asm/boot.h>
#include <asm/ptrace.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>
#include <asm/cputype.h>
#include <asm/elf.h>
#include <asm/image.h>
#include <asm/kernel-pgtable.h>
#include <asm/kvm_arm.h>
#include <asm/memory.h>
#include <asm/pgtable-hwdef.h>
#include <asm/pgtable.h>
#include <asm/page.h>
#include <asm/smp.h>
#include <asm/sysreg.h>
#include <asm/thread_info.h>
#include <asm/virt.h>

#include "efi-header.S"

#define __PHYS_OFFSET	(KERNEL_START - TEXT_OFFSET)

#if (TEXT_OFFSET & 0xfff) != 0
#error TEXT_OFFSET must be at least 4KB aligned
#elif (PAGE_OFFSET & 0x1fffff) != 0
#error PAGE_OFFSET must be at least 2MB aligned
#elif TEXT_OFFSET > 0x1fffff
#error TEXT_OFFSET must be less than 2MB
#endif
	__HEAD
_head:
	/*
	 * DO NOT MODIFY. Image header expected by Linux boot-loaders.
	 */
#ifdef CONFIG_EFI
	/*
	 * This add instruction has no meaningful effect except that
	 * its opcode forms the magic "MZ" signature required by UEFI.
	 */
	add	x13, x18, #0x16
	b	stext
#else
	b	stext				// branch to kernel start, magic
	.long	0				// reserved
#endif
	le64sym	_kernel_offset_le		// Image load offset from start of RAM, little-endian
	le64sym	_kernel_size_le			// Effective size of kernel image, little-endian
	le64sym	_kernel_flags_le		// Informative flags, little-endian
	.quad	0				// reserved
	.quad	0				// reserved
	.quad	0				// reserved
	.ascii	ARM64_IMAGE_MAGIC		// Magic number
#ifdef CONFIG_EFI
	.long	pe_header - _head		// Offset to the PE header.

pe_header:
	__EFI_PE_HEADER
#else
	.long	0				// reserved
#endif

	__INIT
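
	/*
	 * The following callee saved general purpose registers are used on the
	 * primary lowlevel boot path:
	 *
	 *  Register   Scope                      Purpose
	 *  x21        stext() .. start_kernel()  FDT pointer passed at boot in x0
	 *  x23        stext() .. start_kernel()  physical misalignment/KASLR offset
	 *  x28        __create_page_tables()     callee preserved temp register
	 *  x19/x20    __primary_switch()         callee preserved temp registers
	 *  x24        __primary_switch() .. relocate_kernel()
	 *                                        current RELR displacement
	 */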
ENTRY(stext)
	bl	preserve_boot_args
	bl	el2_setup			// Drop to EL1, w0=cpu_boot_mode
	adrp	x23, __PHYS_OFFSET
	and	x23, x23, MIN_KIMG_ALIGN - 1	// KASLR offset, defaults to 0
	bl	set_cpu_boot_mode_flag
	bl	__create_page_tables
	/*
	 * The following calls CPU setup code, see arch/arm64/mm/proc.S for
	 * details.
	 * On return, the CPU will be ready for the MMU to be turned on and
	 * the TCR will have been set.
	 */
	bl	__cpu_setup			// initialise processor
	b	__primary_switch
ENDPROC(stext)
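
/*
 * Preserve the arguments passed by the bootloader in x0 .. x3
 */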
preserve_boot_args:
	mov	x21, x0				// x21=FDT

	adr_l	x0, boot_args			// record the contents of
	stp	x21, x1, [x0]			// x0 .. x3 at kernel entry
	stp	x2, x3, [x0, #16]

	dmb	sy				// needed before dc ivac with
						// MMU off

	mov	x1, #0x20			// 4 x 8 bytes
	b	__inval_dcache_area		// tail call
ENDPROC(preserve_boot_args)
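
/*
 * Macro to create a table entry to the next page.
 *
 *	tbl:	page table address
 *	virt:	virtual address
 *	shift:	#imm page table shift
 *	ptrs:	#imm pointers per table page
 *
 * Preserves:	virt
 * Corrupts:	ptrs, tmp1, tmp2
 * Returns:	tbl -> next level table page address
 */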
	.macro	create_table_entry, tbl, virt, shift, ptrs, tmp1, tmp2
	add	\tmp1, \tbl, #PAGE_SIZE
	phys_to_pte \tmp2, \tmp1
	orr	\tmp2, \tmp2, #PMD_TYPE_TABLE	// address of next table and entry type
	lsr	\tmp1, \virt, #\shift
	sub	\ptrs, \ptrs, #1
	and	\tmp1, \tmp1, \ptrs		// table index
	str	\tmp2, [\tbl, \tmp1, lsl #3]
	add	\tbl, \tbl, #PAGE_SIZE		// next level table page
	.endm
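
/*
 * Macro to populate page table entries, these entries can be pointers to the next level
 * or last level entries pointing to physical memory.
 *
 *	tbl:	page table address
 *	rtbl:	pointer to page table or physical memory
 *	index:	start index to write
 *	eindex:	end index to write - [index, eindex] written to
 *	flags:	flags for pagetable entry to or in
 *	inc:	increment to rtbl between each entry
 *	tmp1:	temporary variable
 *
 * Preserves:	tbl, eindex, flags, inc
 * Corrupts:	index, tmp1
 * Returns:	rtbl
 */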
	.macro populate_entries, tbl, rtbl, index, eindex, flags, inc, tmp1
.Lpe\@:	phys_to_pte \tmp1, \rtbl
	orr	\tmp1, \tmp1, \flags	// tmp1 = table entry
	str	\tmp1, [\tbl, \index, lsl #3]
	add	\rtbl, \rtbl, \inc	// rtbl = pa next level
	add	\index, \index, #1
	cmp	\index, \eindex
	b.ls	.Lpe\@
	.endm
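
/*
 * Compute indices of table entries from virtual address range. If multiple entries
 * were needed in the previous page table level then the next page table level is assumed
 * to be composed of multiple pages. (This effectively scales the end index).
 *
 *	vstart:	virtual address of start of range
 *	vend:	virtual address of end of range
 *	shift:	shift used to transform virtual address into index
 *	ptrs:	number of entries in page table
 *	istart:	index in table corresponding to vstart
 *	iend:	index in table corresponding to vend
 *	count:	On entry: how many extra entries were required in previous level, scales
 *			  our end index.
 *		On exit: returns how many extra entries required for next page level
 *
 * Preserves:	vstart, vend, shift, ptrs
 * Returns:	istart, iend, count
 */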
	.macro compute_indices, vstart, vend, shift, ptrs, istart, iend, count
	lsr	\iend, \vend, \shift
	mov	\istart, \ptrs
	sub	\istart, \istart, #1
	and	\iend, \iend, \istart	// iend = (vend >> shift) & (ptrs - 1)
	mov	\istart, \ptrs
	mul	\istart, \istart, \count
	add	\iend, \iend, \istart	// iend += (count - 1) * ptrs
					// our entries span multiple tables

	lsr	\istart, \vstart, \shift
	mov	\count, \ptrs
	sub	\count, \count, #1
	and	\istart, \istart, \count

	sub	\count, \iend, \istart
	.endm
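
/*
 * Map memory for specified virtual address range. Each level of page table needed supports
 * multiple entries. If a level requires n entries the next page table level is assumed to be
 * formed from n pages.
 *
 *	tbl:	location of page table
 *	rtbl:	address to be used for first level page table entry (typically tbl + PAGE_SIZE)
 *	vstart:	start address to map
 *	vend:	end address to map - we map [vstart, vend]
 *	flags:	flags to use to map last level entries
 *	phys:	physical address corresponding to vstart - physical memory is contiguous
 *	pgds:	the number of pgd entries
 *
 * Temporaries:	istart, iend, tmp, count, sv - these need to be different registers
 * Preserves:	vstart, vend, flags
 * Corrupts:	tbl, rtbl, istart, iend, tmp, count, sv
 */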
	.macro map_memory, tbl, rtbl, vstart, vend, flags, phys, pgds, istart, iend, tmp, count, sv
	add \rtbl, \tbl, #PAGE_SIZE
	mov \sv, \rtbl
	mov \count, #0
	compute_indices \vstart, \vend, #PGDIR_SHIFT, \pgds, \istart, \iend, \count
	populate_entries \tbl, \rtbl, \istart, \iend, #PMD_TYPE_TABLE, #PAGE_SIZE, \tmp
	mov \tbl, \sv
	mov \sv, \rtbl

#if SWAPPER_PGTABLE_LEVELS > 3
	compute_indices \vstart, \vend, #PUD_SHIFT, #PTRS_PER_PUD, \istart, \iend, \count
	populate_entries \tbl, \rtbl, \istart, \iend, #PMD_TYPE_TABLE, #PAGE_SIZE, \tmp
	mov \tbl, \sv
	mov \sv, \rtbl
#endif

#if SWAPPER_PGTABLE_LEVELS > 2
	compute_indices \vstart, \vend, #SWAPPER_TABLE_SHIFT, #PTRS_PER_PMD, \istart, \iend, \count
	populate_entries \tbl, \rtbl, \istart, \iend, #PMD_TYPE_TABLE, #PAGE_SIZE, \tmp
	mov \tbl, \sv
#endif

	compute_indices \vstart, \vend, #SWAPPER_BLOCK_SHIFT, #PTRS_PER_PTE, \istart, \iend, \count
	bic \count, \phys, #SWAPPER_BLOCK_SIZE - 1
	populate_entries \tbl, \count, \istart, \iend, \flags, #SWAPPER_BLOCK_SIZE, \tmp
	.endm
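
/*
 * Setup the initial page tables. We only setup the barest amount which is
 * required to get the kernel running. The following sections are required:
 *   - identity mapping to enable the MMU (low address, TTBR0)
 *   - first few MB of the kernel linear mapping to jump to once the MMU has
 *     been enabled
 */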
__create_page_tables:
	mov	x28, lr
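
	/*
	 * Invalidate the init page tables to avoid potential dirty cache lines
	 * being evicted. Other page tables are allocated in rodata as part of
	 * the kernel image, and thus are clean to the PoC per the boot
	 * protocol.
	 */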
	adrp	x0, init_pg_dir
	adrp	x1, init_pg_end
	sub	x1, x1, x0
	bl	__inval_dcache_area
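
	/*
	 * Clear the init page tables.
	 */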
	adrp	x0, init_pg_dir
	adrp	x1, init_pg_end
	sub	x1, x1, x0
1:	stp	xzr, xzr, [x0], #16
	stp	xzr, xzr, [x0], #16
	stp	xzr, xzr, [x0], #16
	stp	xzr, xzr, [x0], #16
	subs	x1, x1, #64
	b.ne	1b

	mov	x7, SWAPPER_MM_MMUFLAGS
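
	/*
	 * Create the identity mapping.
	 */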
	adrp	x0, idmap_pg_dir
	adrp	x3, __idmap_text_start		// __pa(__idmap_text_start)

#ifdef CONFIG_ARM64_VA_BITS_52
	mrs_s	x6, SYS_ID_AA64MMFR2_EL1
	and	x6, x6, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
	mov	x5, #52
	cbnz	x6, 1f
#endif
	mov	x5, #VA_BITS_MIN
1:
	adr_l	x6, vabits_actual
	str	x5, [x6]
	dmb	sy
	dc	ivac, x6		// Invalidate potentially stale cache line
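
	/*
	 * VA_BITS may be too small to allow for an ID mapping to be created
	 * that covers system RAM if that is located sufficiently high in the
	 * physical address space. So for the ID map, use an extended virtual
	 * range in that case, and configure an additional translation level
	 * if needed.
	 *
	 * Calculate the maximum allowed value for TCR_EL1.T0SZ so that the
	 * entire ID map region can be mapped. As T0SZ == (64 - #bits used),
	 * this number conveniently equals the number of leading zeroes in
	 * the physical address of __idmap_text_end.
	 */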
	adrp	x5, __idmap_text_end
	clz	x5, x5
	cmp	x5, TCR_T0SZ(VA_BITS)	// default T0SZ small enough?
	b.ge	1f			// .. then skip VA range extension

	adr_l	x6, idmap_t0sz
	str	x5, [x6]
	dmb	sy
	dc	ivac, x6		// Invalidate potentially stale cache line

#if (VA_BITS < 48)
#define EXTRA_SHIFT	(PGDIR_SHIFT + PAGE_SHIFT - 3)
#define EXTRA_PTRS	(1 << (PHYS_MASK_SHIFT - EXTRA_SHIFT))
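
	/*
	 * If VA_BITS < 48, we have to configure an additional table level.
	 * First, we have to verify our assumption that the current value of
	 * VA_BITS was chosen such that all translation levels are fully
	 * utilised, and that lowering T0SZ will always result in an additional
	 * translation level to be configured.
	 */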
#if VA_BITS != EXTRA_SHIFT
#error "Mismatch between VA_BITS and page size/number of translation levels"
#endif

	mov	x4, EXTRA_PTRS
	create_table_entry x0, x3, EXTRA_SHIFT, x4, x5, x6
#else
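	/*
	 * If VA_BITS == 48, we don't have to configure an additional
	 * translation level, but the top-level table has more entries.
	 */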
	mov	x4, #1 << (PHYS_MASK_SHIFT - PGDIR_SHIFT)
	str_l	x4, idmap_ptrs_per_pgd, x5
#endif
1:
	ldr_l	x4, idmap_ptrs_per_pgd
	mov	x5, x3				// __pa(__idmap_text_start)
	adr_l	x6, __idmap_text_end		// __pa(__idmap_text_end)

	map_memory x0, x1, x3, x6, x7, x3, x4, x10, x11, x12, x13, x14
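
	/*
	 * Map the kernel image (starting with PHYS_OFFSET).
	 */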
	adrp	x0, init_pg_dir
	mov_q	x5, KIMAGE_VADDR + TEXT_OFFSET	// compile time __va(_text)
	add	x5, x5, x23			// add KASLR displacement
	mov	x4, PTRS_PER_PGD
	adrp	x6, _end			// runtime __pa(_end)
	adrp	x3, _text			// runtime __pa(_text)
	sub	x6, x6, x3			// _end - _text
	add	x6, x6, x5			// runtime __va(_end)

	map_memory x0, x1, x5, x6, x7, x3, x4, x10, x11, x12, x13, x14
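
	/*
	 * Since the page tables have been populated with non-cacheable
	 * accesses (MMU disabled), invalidate those tables again to
	 * remove any speculatively loaded cache lines.
	 */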
	adrp	x0, idmap_pg_dir
	adrp	x1, init_pg_end
	sub	x1, x1, x0
	dmb	sy
	bl	__inval_dcache_area

	ret	x28
ENDPROC(__create_page_tables)
	.ltorg
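
/*
 * The following fragment of code is executed with the MMU enabled.
 *
 *   x0 = __PHYS_OFFSET
 */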
__primary_switched:
	adrp	x4, init_thread_union
	add	sp, x4, #THREAD_SIZE
	adr_l	x5, init_task
	msr	sp_el0, x5			// Save thread_info

	adr_l	x8, vectors			// load VBAR_EL1 with virtual
	msr	vbar_el1, x8			// vector table address
	isb

	stp	xzr, x30, [sp, #-16]!
	mov	x29, sp

	str_l	x21, __fdt_pointer, x5		// Save FDT pointer

	ldr_l	x4, kimage_vaddr		// Save the offset between
	sub	x4, x4, x0			// the kernel virtual and
	str_l	x4, kimage_voffset, x5		// physical mappings

	// Clear BSS
	adr_l	x0, __bss_start
	mov	x1, xzr
	adr_l	x2, __bss_stop
	sub	x2, x2, x0
	bl	__pi_memset
	dsb	ishst				// Make zero page visible to PTW

#ifdef CONFIG_KASAN
	bl	kasan_early_init
#endif
#ifdef CONFIG_RANDOMIZE_BASE
	tst	x23, ~(MIN_KIMG_ALIGN - 1)	// already running randomized?
	b.ne	0f
	mov	x0, x21				// pass FDT address in x0
	bl	kaslr_early_init		// parse FDT for KASLR options
	cbz	x0, 0f				// KASLR disabled? just proceed
	orr	x23, x23, x0			// record KASLR offset
	ldp	x29, x30, [sp], #16		// we must enable KASLR, return
	ret					// to __primary_switch()
0:
#endif
	add	sp, sp, #16
	mov	x29, #0
	mov	x30, #0
	b	start_kernel
ENDPROC(__primary_switched)
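
/*
 * end early head section, begin head code that is also used for
 * hotplug and needs to have the same protections as the text region
 */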
	.section ".idmap.text","awx"

ENTRY(kimage_vaddr)
	.quad		_text - TEXT_OFFSET
EXPORT_SYMBOL(kimage_vaddr)
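
/*
 * If we're fortunate enough to boot at EL2, ensure that the world is
 * sane before dropping to EL1.
 *
 * Returns either BOOT_CPU_MODE_EL1 or BOOT_CPU_MODE_EL2 in w0 if
 * booted in EL1 or EL2 respectively.
 */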
ENTRY(el2_setup)
	msr	SPsel, #1			// We want to use SP_EL{1,2}
	mrs	x0, CurrentEL
	cmp	x0, #CurrentEL_EL2
	b.eq	1f
	mov_q	x0, (SCTLR_EL1_RES1 | ENDIAN_SET_EL1)
	msr	sctlr_el1, x0
	mov	w0, #BOOT_CPU_MODE_EL1		// This cpu booted in EL1
	isb
	ret

1:	mov_q	x0, (SCTLR_EL2_RES1 | ENDIAN_SET_EL2)
	msr	sctlr_el2, x0

#ifdef CONFIG_ARM64_VHE
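	/*
	 * Check for VHE being present. For the rest of the EL2 setup,
	 * x2 being non-zero indicates that we do have VHE, and that the
	 * kernel is intended to run at EL2.
	 */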
	mrs	x2, id_aa64mmfr1_el1
	ubfx	x2, x2, #ID_AA64MMFR1_VHE_SHIFT, #4
#else
	mov	x2, xzr
#endif

	/* Hyp configuration. */
	mov_q	x0, HCR_HOST_NVHE_FLAGS
	cbz	x2, set_hcr
	mov_q	x0, HCR_HOST_VHE_FLAGS
set_hcr:
	msr	hcr_el2, x0
	isb
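
	/*
	 * Allow Non-secure EL1 and EL0 to access physical timer and counter.
	 * This is not necessary for VHE, since the host kernel runs in EL2,
	 * and EL0 accesses are configured in the later stage of boot process.
	 * Note that when HCR_EL2.E2H == 1, CNTHCTL_EL2 has the same bit
	 * positions with CNTKCTL_EL1, and the EL1 instructions that would
	 * normally access CNTKCTL_EL1 are redefined to access CNTHCTL_EL2
	 * instead; the VHE kernel therefore configures timer access later,
	 * so the write below is skipped when x2 indicates VHE.
	 */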
	cbnz	x2, 1f
	mrs	x0, cnthctl_el2
	orr	x0, x0, #3			// Enable EL1 physical timers
	msr	cnthctl_el2, x0
1:
	msr	cntvoff_el2, xzr		// Clear virtual offset

#ifdef CONFIG_ARM_GIC_V3
	/* GICv3 system register access */
	mrs	x0, id_aa64pfr0_el1
	ubfx	x0, x0, #ID_AA64PFR0_GIC_SHIFT, #4
	cbz	x0, 3f

	mrs_s	x0, SYS_ICC_SRE_EL2
	orr	x0, x0, #ICC_SRE_EL2_SRE	// Set ICC_SRE_EL2.SRE==1
	orr	x0, x0, #ICC_SRE_EL2_ENABLE	// Set ICC_SRE_EL2.Enable==1
	msr_s	SYS_ICC_SRE_EL2, x0
	isb					// Make sure SRE is now set
	mrs_s	x0, SYS_ICC_SRE_EL2		// Read SRE back,
	tbz	x0, #0, 3f			// and check that it sticks
	msr_s	SYS_ICH_HCR_EL2, xzr		// Reset ICH_HCR_EL2 to defaults

3:
#endif

	/* Populate ID registers. */
	mrs	x0, midr_el1
	mrs	x1, mpidr_el1
	msr	vpidr_el2, x0
	msr	vmpidr_el2, x1

#ifdef CONFIG_COMPAT
	msr	hstr_el2, xzr			// Disable CP15 traps to EL2
#endif

	/* EL2 debug */
	mrs	x1, id_aa64dfr0_el1
	sbfx	x0, x1, #ID_AA64DFR0_PMUVER_SHIFT, #4
	cmp	x0, #1
	b.lt	4f				// Skip if no PMU present
	mrs	x0, pmcr_el0			// Disable debug access traps
	ubfx	x0, x0, #11, #5			// to EL2 and allow access to
4:
	csel	x3, xzr, x0, lt			// all PMU counters from EL1

	/* Statistical profiling */
	ubfx	x0, x1, #ID_AA64DFR0_PMSVER_SHIFT, #4
	cbz	x0, 7f				// Skip if SPE not present
	cbnz	x2, 6f				// VHE?
	mrs_s	x4, SYS_PMBIDR_EL1		// If SPE available at EL2,
	and	x4, x4, #(1 << SYS_PMBIDR_EL1_P_SHIFT)
	cbnz	x4, 5f				// then permit sampling of physical
	mov	x4, #(1 << SYS_PMSCR_EL2_PCT_SHIFT | \
		      1 << SYS_PMSCR_EL2_PA_SHIFT)
	msr_s	SYS_PMSCR_EL2, x4		// addresses and physical counter
5:
	mov	x1, #(MDCR_EL2_E2PB_MASK << MDCR_EL2_E2PB_SHIFT)
	orr	x3, x3, x1			// If we don't have VHE, then
	b	7f				// use EL1&0 translation.
6:
	orr	x3, x3, #MDCR_EL2_TPMS		// For VHE, use EL2 translation
						// and disable access from EL1
7:
	msr	mdcr_el2, x3			// Configure debug traps

	/* LORegions */
	mrs	x1, id_aa64mmfr1_el1
	ubfx	x0, x1, #ID_AA64MMFR1_LOR_SHIFT, 4
	cbz	x0, 1f
	msr_s	SYS_LORC_EL1, xzr
1:

	/* Stage-2 translation */
	msr	vttbr_el2, xzr

	cbz	x2, install_el2_stub

	mov	w0, #BOOT_CPU_MODE_EL2		// This CPU booted in EL2
	isb
	ret

install_el2_stub:
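	/*
	 * When VHE is not in use, early init of EL2 and EL1 needs to be
	 * done here.
	 * When VHE _is_ in use, EL1 is selected, and the final SCTLR_EL1
	 * setup will be done via the _EL1 system register aliases in
	 * __cpu_setup.
	 */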
	mov_q	x0, (SCTLR_EL1_RES1 | ENDIAN_SET_EL1)
	msr	sctlr_el1, x0

	/* Coprocessor traps. */
	mov	x0, #0x33ff
	msr	cptr_el2, x0			// Disable copro. traps to EL2

	/* SVE register access */
	mrs	x1, id_aa64pfr0_el1
	ubfx	x1, x1, #ID_AA64PFR0_SVE_SHIFT, #4
	cbz	x1, 7f

	bic	x0, x0, #CPTR_EL2_TZ		// Also disable SVE traps
	msr	cptr_el2, x0			// Disable copro. traps to EL2
	isb
	mov	x1, #ZCR_ELx_LEN_MASK		// SVE: Enable full vector
	msr_s	SYS_ZCR_EL2, x1			// length for EL1.

	/* Hypervisor stub */
7:	adr_l	x0, __hyp_stub_vectors
	msr	vbar_el2, x0

	/* spsr */
	mov	x0, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
		      PSR_MODE_EL1h)
	msr	spsr_el2, x0
	msr	elr_el2, lr
	mov	w0, #BOOT_CPU_MODE_EL2		// This CPU booted in EL2
	eret
ENDPROC(el2_setup)
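
/*
 * Sets the __boot_cpu_mode flag depending on the CPU boot mode passed
 * in w0. See arch/arm64/include/asm/virt.h for more info.
 */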
set_cpu_boot_mode_flag:
	adr_l	x1, __boot_cpu_mode
	cmp	w0, #BOOT_CPU_MODE_EL2
	b.ne	1f
	add	x1, x1, #4
1:	str	w0, [x1]			// Save CPU boot mode
	dmb	sy
	dc	ivac, x1			// Invalidate potentially stale cache line
	ret
ENDPROC(set_cpu_boot_mode_flag)
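
/*
 * These values are written with the MMU off, but read with the MMU on.
 * Writers will invalidate the corresponding address, discarding up to a
 * 'Cache Writeback Granule' (CWG) worth of data. The linker script ensures
 * sufficient alignment that the CWG doesn't overlap another section.
 */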
	.pushsection ".mmuoff.data.write", "aw"
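/*
 * We need to find out the CPU boot mode long after boot, so we need to
 * store it in a writable variable.
 *
 * This is not in .bss, because we set it sufficiently early that the boot-time
 * zeroing of .bss would clobber it.
 */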
ENTRY(__boot_cpu_mode)
	.long	BOOT_CPU_MODE_EL2
	.long	BOOT_CPU_MODE_EL1
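
/*
 * The booting CPU updates the failed status @__early_cpu_boot_status,
 * with MMU turned off.
 */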
ENTRY(__early_cpu_boot_status)
	.quad	0

	.popsection
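
/*
 * This provides a "holding pen" for platforms to hold all secondary
 * cores until we're ready for them to initialise.
 */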
ENTRY(secondary_holding_pen)
	bl	el2_setup			// Drop to EL1, w0=cpu_boot_mode
	bl	set_cpu_boot_mode_flag
	mrs	x0, mpidr_el1
	mov_q	x1, MPIDR_HWID_BITMASK
	and	x0, x0, x1
	adr_l	x3, secondary_holding_pen_release
pen:	ldr	x4, [x3]
	cmp	x4, x0
	b.eq	secondary_startup
	wfe
	b	pen
ENDPROC(secondary_holding_pen)
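
/*
 * Secondary entry point that jumps straight into the kernel. Only to
 * be used where CPUs are brought online dynamically by the kernel.
 */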
ENTRY(secondary_entry)
	bl	el2_setup			// Drop to EL1
	bl	set_cpu_boot_mode_flag
	b	secondary_startup
ENDPROC(secondary_entry)

secondary_startup:
	/*
	 * Common entry point for secondary CPUs.
	 */
	bl	__cpu_secondary_check52bitva
	bl	__cpu_setup			// initialise processor
	adrp	x1, swapper_pg_dir
	bl	__enable_mmu
	ldr	x8, =__secondary_switched
	br	x8
ENDPROC(secondary_startup)

__secondary_switched:
	adr_l	x5, vectors
	msr	vbar_el1, x5
	isb

	adr_l	x0, secondary_data
	ldr	x1, [x0, #CPU_BOOT_STACK]	// get secondary_data.stack
	cbz	x1, __secondary_too_slow
	mov	sp, x1
	ldr	x2, [x0, #CPU_BOOT_TASK]
	cbz	x2, __secondary_too_slow
	msr	sp_el0, x2
	mov	x29, #0
	mov	x30, #0
	b	secondary_start_kernel
ENDPROC(__secondary_switched)

__secondary_too_slow:
	wfe
	wfi
	b	__secondary_too_slow
ENDPROC(__secondary_too_slow)
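
/*
 * The booting CPU updates the failed status @__early_cpu_boot_status,
 * with MMU turned off.
 *
 * update_early_cpu_boot_status tmp, status
 *  - Corrupts tmp1, tmp2
 *  - Writes 'status' to __early_cpu_boot_status and makes sure
 *    it is committed to memory.
 */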
	.macro	update_early_cpu_boot_status status, tmp1, tmp2
	mov	\tmp2, #\status
	adr_l	\tmp1, __early_cpu_boot_status
	str	\tmp2, [\tmp1]
	dmb	sy
	dc	ivac, \tmp1			// Invalidate potentially stale cache line
	.endm
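
/*
 * Enable the MMU.
 *
 *  x0  = SCTLR_EL1 value for turning on the MMU.
 *  x1  = TTBR1_EL1 value
 *
 * Returns to the caller via x30/lr. This requires the caller to be covered
 * by the .idmap.text section.
 *
 * Checks if the selected granule size is supported by the CPU.
 * If it isn't, park the CPU
 */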
ENTRY(__enable_mmu)
	mrs	x2, ID_AA64MMFR0_EL1
	ubfx	x2, x2, #ID_AA64MMFR0_TGRAN_SHIFT, 4
	cmp	x2, #ID_AA64MMFR0_TGRAN_SUPPORTED
	b.ne	__no_granule_support
	update_early_cpu_boot_status 0, x2, x3
	adrp	x2, idmap_pg_dir
	phys_to_ttbr x1, x1
	phys_to_ttbr x2, x2
	msr	ttbr0_el1, x2			// load TTBR0
	offset_ttbr1 x1, x3
	msr	ttbr1_el1, x1			// load TTBR1
	isb
	msr	sctlr_el1, x0
	isb
	/*
	 * Invalidate the local I-cache so that any instructions fetched
	 * speculatively from the PoC are discarded, since they may have
	 * been dynamically patched at the PoU.
	 */
	ic	iallu
	dsb	nsh
	isb
	ret
ENDPROC(__enable_mmu)

ENTRY(__cpu_secondary_check52bitva)
#ifdef CONFIG_ARM64_VA_BITS_52
	ldr_l	x0, vabits_actual
	cmp	x0, #52
	b.ne	2f

	mrs_s	x0, SYS_ID_AA64MMFR2_EL1
	and	x0, x0, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
	cbnz	x0, 2f

	update_early_cpu_boot_status \
		CPU_STUCK_IN_KERNEL | CPU_STUCK_REASON_52_BIT_VA, x0, x1
1:	wfe
	wfi
	b	1b

#endif
2:	ret
ENDPROC(__cpu_secondary_check52bitva)

__no_granule_support:
	/* Indicate that this CPU can't boot and is stuck in the kernel */
	update_early_cpu_boot_status \
		CPU_STUCK_IN_KERNEL | CPU_STUCK_REASON_NO_GRAN, x1, x2
1:
	wfe
	wfi
	b	1b
ENDPROC(__no_granule_support)

#ifdef CONFIG_RELOCATABLE
__relocate_kernel:
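	/*
	 * Iterate over each entry in the relocation table, and apply the
	 * relocations in place.
	 */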
	ldr	w9, =__rela_offset		// offset to reloc table
	ldr	w10, =__rela_size		// size of reloc table

	mov_q	x11, KIMAGE_VADDR		// default virtual offset
	add	x11, x11, x23			// actual virtual offset
	add	x9, x9, x11			// __va(.rela)
	add	x10, x9, x10			// __va(.rela) + sizeof(.rela)

0:	cmp	x9, x10
	b.hs	1f
	ldp	x12, x13, [x9], #24
	ldr	x14, [x9, #-8]
	cmp	w13, #R_AARCH64_RELATIVE
	b.ne	0b
	add	x14, x14, x23			// relocate
	str	x14, [x12, x23]
	b	0b

1:
#ifdef CONFIG_RELR
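	/*
	 * Apply RELR relocations.
	 *
	 * RELR is a compressed format for storing relative relocations.
	 * The encoded sequence of entries looks like:
	 * [ AAAAAAAA BBBBBBB1 BBBBBBB1 ... AAAAAAAA BBBBBB1 ... ]
	 *
	 * i.e. start with an address, followed by any number of bitmaps. The
	 * address entry encodes 1 relocation. The subsequent bitmap entries
	 * encode up to 63 relocations each, at subsequent offsets following
	 * the last address entry.
	 *
	 * The bitmap entries must have 1 in the least significant bit. The
	 * assumption here is that an address cannot have 1 in lsb. Odd
	 * addresses are not supported. Any odd addresses are stored in the
	 * RELA section, which is handled above.
	 *
	 * Excluding the least significant bit in the bitmap, each non-zero
	 * bit in the bitmap represents a relocation to be applied to
	 * a corresponding offset following the last address entry.
	 *
	 * The kernel may be relocated a second time when KASLR is in effect,
	 * so x24 records the displacement that has already been applied: it
	 * is zero on the first pass, and on each pass the delta between the
	 * new offset (x23) and x24 is the adjustment that needs applying.
	 */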
	ldr	w9, =__relr_offset		// offset to reloc table
	ldr	w10, =__relr_size		// size of reloc table
	add	x9, x9, x11			// __va(.relr)
	add	x10, x9, x10			// __va(.relr) + sizeof(.relr)

	sub	x15, x23, x24			// delta from previous offset
	cbz	x15, 7f				// nothing to do if unchanged
	mov	x24, x23			// save new offset

2:	cmp	x9, x10
	b.hs	7f
	ldr	x11, [x9], #8
	tbnz	x11, #0, 3f			// branch to handle bitmaps
	add	x13, x11, x23
	ldr	x12, [x13]			// relocate address entry
	add	x12, x12, x15
	str	x12, [x13], #8			// adjust to start of bitmap
	b	2b

3:	mov	x14, x13
4:	lsr	x11, x11, #1
	cbz	x11, 6f
	tbz	x11, #0, 5f			// skip bit if not set
	ldr	x12, [x14]			// relocate bit
	add	x12, x12, x15
	str	x12, [x14]

5:	add	x14, x14, #8			// move to next bit's address
	b	4b

6:	/*
	 * Move to the next bitmap's address. 8 is the word size, and 63 is the
	 * number of significant bits in a bitmap entry.
	 */
	add	x13, x13, #(8 * 63)
	b	2b

7:
#endif
	ret

ENDPROC(__relocate_kernel)
#endif

__primary_switch:
#ifdef CONFIG_RANDOMIZE_BASE
	mov	x19, x0				// preserve new SCTLR_EL1 value
	mrs	x20, sctlr_el1			// preserve old SCTLR_EL1 value
#endif

	adrp	x1, init_pg_dir
	bl	__enable_mmu
#ifdef CONFIG_RELOCATABLE
#ifdef CONFIG_RELR
	mov	x24, #0				// no RELR displacement yet
#endif
	bl	__relocate_kernel
#ifdef CONFIG_RANDOMIZE_BASE
	ldr	x8, =__primary_switched
	adrp	x0, __PHYS_OFFSET
	blr	x8

	/*
	 * If we return here, we have a KASLR displacement in x23 which we need
	 * to take into account by discarding the current kernel mapping and
	 * creating a new one.
	 */
	pre_disable_mmu_workaround
	msr	sctlr_el1, x20			// disable the MMU
	isb
	bl	__create_page_tables		// recreate kernel mapping

	tlbi	vmalle1				// Remove any stale TLB entries
	dsb	nsh

	msr	sctlr_el1, x19			// re-enable the MMU
	isb
	ic	iallu				// flush instructions fetched
	dsb	nsh				// via old mapping
	isb

	bl	__relocate_kernel
#endif
#endif
	ldr	x8, =__primary_switched
	adrp	x0, __PHYS_OFFSET
	br	x8
ENDPROC(__primary_switch)