/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/assembler.h, arch/arm/mm/proc-macros.S
 *
 * Copyright (C) 1996-2000 Russell King
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#ifndef __ASM_ASSEMBLER_H
#define __ASM_ASSEMBLER_H

#include <asm-generic/export.h>

#include <asm/asm-offsets.h>
#include <asm/cpufeature.h>
#include <asm/cputype.h>
#include <asm/debug-monitors.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/ptrace.h>
#include <asm/thread_info.h>

/*
 * Save/disable/enable/restore the whole DAIF exception mask
 * (Debug, SError, IRQ, FIQ).
 */
	.macro	save_and_disable_daif, flags
	mrs	\flags, daif
	msr	daifset, #0xf
	.endm

	.macro	disable_daif
	msr	daifset, #0xf
	.endm

	.macro	enable_daif
	msr	daifclr, #0xf
	.endm

	.macro	restore_daif, flags:req
	msr	daif, \flags
	.endm
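
/*
 * Usage sketch (illustrative only, not taken from in-tree code): bracket
 * a critical section so that all exceptions are masked and the previous
 * mask is restored afterwards. The choice of x9 as scratch register is
 * arbitrary.
 *
 *	save_and_disable_daif x9	// x9 := old DAIF, all exceptions masked
 *	...				// critical section
 *	restore_daif x9			// put the old mask back
 */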

	/* Mask/unmask exactly the D, A, I and F bits of a saved PSTATE value */
	.macro	inherit_daif, pstate:req, tmp:req
	and	\tmp, \pstate, #(PSR_D_BIT | PSR_A_BIT | PSR_I_BIT | PSR_F_BIT)
	msr	daif, \tmp
	.endm

	/* IRQ is the lowest priority flag, unconditionally unmask the rest. */
	.macro	enable_da_f
	msr	daifclr, #(8 | 4 | 1)
	.endm

/*
 * Save/restore interrupts.
 */
	.macro	save_and_disable_irq, flags
	mrs	\flags, daif
	msr	daifset, #2
	.endm

	.macro	restore_irq, flags
	msr	daif, \flags
	.endm

	.macro	enable_dbg
	msr	daifclr, #8
	.endm

	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
	isb	// Synchronise with enable_dbg
9990:
	.endm

	/* call with daif masked */
	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
9990:
	.endm

/*
 * SMP data memory barrier
 */
	.macro	smp_dmb, opt
	dmb	\opt
	.endm

/*
 * RAS Error Synchronization barrier
 */
	.macro	esb
#ifdef CONFIG_ARM64_RAS_EXTN
	hint	#16
#else
	nop
#endif
	.endm

/*
 * Value prediction barrier
 */
	.macro	csdb
	hint	#20
	.endm

/*
 * Speculation barrier
 */
	.macro	sb
alternative_if_not ARM64_HAS_SB
	dsb	nsh
	isb
alternative_else
	SB_BARRIER_INSN
	nop
alternative_endif
	.endm

/*
 * NOP sequence
 */
	.macro	nops, num
	.rept	\num
	nop
	.endr
	.endm

/*
 * Emit an entry into the exception table
 */
	.macro	_asm_extable, from, to
	.pushsection	__ex_table, "a"
	.align	3
	.long	(\from - .), (\to - .)
	.popsection
	.endm

#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l
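
/*
 * Usage sketch (illustrative, not from in-tree code): wrap an access to a
 * user pointer so that a fault branches to a local fixup label instead of
 * taking the kernel down. Label 8888 and the registers are hypothetical,
 * and the usual uaccess enable/disable sequence is elided for brevity.
 *
 *	USER(8888f, ldr x0, [x1])	// may fault on the user pointer in x1
 *	...
 * 8888:				// fault fixup, e.g. return -EFAULT
 */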

/*
 * Register aliases.
 */
lr	.req	x30		// link register

/*
 * Vector entry
 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm

/*
 * Select code when configured for BE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * Select code when configured for LE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif

/*
 * Define a macro that constructs a 64-bit value by concatenating two
 * 32-bit registers. Note that on big endian systems the order of the
 * registers is swapped.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm

/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC.
 */
	/*
	 * @dst: destination register (64 bit wide)
	 * @sym: name of the symbol
	 */
	.macro	adr_l, dst, sym
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.endm

	/*
	 * @dst: destination register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: optional 64-bit scratch register to be used if <dst> is a
	 *       32-bit wide register, in which case it cannot be used to hold
	 *       the address
	 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm

	/*
	 * @src: source register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: mandatory 64-bit scratch register to calculate the address
	 *       while <src> needs to be preserved.
	 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm
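
/*
 * Usage sketch (illustrative): load, update and store a global counter.
 * The symbol name my_counter and the registers are hypothetical; the
 * point is that each pseudo-op expands to an adrp + lo12 pair, so the
 * symbol must be within +/- 4 GB of the PC.
 *
 *	ldr_l	x0, my_counter		// x0 := my_counter
 *	add	x0, x0, #1
 *	str_l	x0, my_counter, x1	// my_counter := x0, x1 clobbered
 */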

	/*
	 * @dst: Result of per_cpu(sym, smp_processor_id()) (can be SP)
	 * @sym: The name of the per-cpu variable
	 * @tmp: scratch register
	 */
	.macro	adr_this_cpu, dst, sym, tmp
	adrp	\tmp, \sym
	add	\dst, \tmp, #:lo12:\sym
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	\tmp, tpidr_el1
alternative_else
	mrs	\tmp, tpidr_el2
alternative_endif
	add	\dst, \dst, \tmp
	.endm

	/*
	 * @dst: Result of READ_ONCE(per_cpu(sym, smp_processor_id()))
	 * @sym: The name of the per-cpu variable
	 * @tmp: scratch register
	 */
	.macro	ldr_this_cpu dst, sym, tmp
	adr_l	\dst, \sym
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	\tmp, tpidr_el1
alternative_else
	mrs	\tmp, tpidr_el2
alternative_endif
	ldr	\dst, [\dst, \tmp]
	.endm
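
/*
 * Usage sketch (illustrative): read this CPU's copy of a hypothetical
 * per-cpu variable my_percpu_count. The per-cpu offset comes from
 * tpidr_el1 (or tpidr_el2 when running at EL2 with VHE), selected at
 * boot by the alternatives above.
 *
 *	ldr_this_cpu x0, my_percpu_count, x1	// x0 := this CPU's count
 */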

/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

/*
 * mmid - get context id from mm pointer (mm->context.id)
 */
	.macro	mmid, rd, rn
	ldr	\rd, [\rn, #MM_CONTEXT_ID]
	.endm

/*
 * read_ctr - read CTR_EL0. If the system has mismatched register fields,
 * provide the system wide safe value from arm64_ftr_reg_ctrel0.sys_val
 */
	.macro	read_ctr, reg
alternative_if_not ARM64_MISMATCHED_CACHE_TYPE
	mrs	\reg, ctr_el0			// read CTR
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
	.endm

/*
 * raw_dcache_line_size - get the minimum D-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * dcache_line_size - get the safe D-cache line size across all CPUs
 */
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * raw_icache_line_size - get the minimum I-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * icache_line_size - get the safe I-cache line size across all CPUs
 */
	.macro	icache_line_size, reg, tmp
	read_ctr	\tmp
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm
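
/*
 * Worked example (illustrative): CTR_EL0.DminLine/IminLine encode
 * log2(line size in words), so a DminLine of 4 yields 4 << 4 = 64 bytes.
 * A typical pattern is to round an address down to a line boundary:
 *
 *	dcache_line_size x2, x3		// x2 := line size in bytes
 *	sub	x3, x2, #1
 *	bic	x0, x0, x3		// align x0 down to a D-cache line
 */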

/*
 * tcr_set_t0sz - update TCR.T0SZ so that we can load the kernel page tables
 */
	.macro	tcr_set_t0sz, valreg, t0sz
	bfi	\valreg, \t0sz, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

/*
 * tcr_set_t1sz - update TCR.T1SZ
 */
	.macro	tcr_set_t1sz, valreg, t1sz
	bfi	\valreg, \t1sz, #TCR_T1SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

/*
 * tcr_compute_pa_size - set TCR.(I)PS to the highest supported
 * ID_AA64MMFR0_EL1.PARange value
 *
 *	tcr:		register with the TCR_ELx value to be updated
 *	pos:		IPS or PS bitfield position
 *	tmp{0,1}:	temporary registers
 */
	.macro	tcr_compute_pa_size, tcr, pos, tmp0, tmp1
	mrs	\tmp0, ID_AA64MMFR0_EL1
	// Narrow PARange to fit the PS field in TCR_ELx
	ubfx	\tmp0, \tmp0, #ID_AA64MMFR0_PARANGE_SHIFT, #3
	mov	\tmp1, #ID_AA64MMFR0_PARANGE_MAX
	cmp	\tmp0, \tmp1
	csel	\tmp0, \tmp1, \tmp0, hi
	bfi	\tcr, \tmp0, \pos, #3
	.endm

/*
 * Macro to perform a data cache maintenance for the interval
 * [kaddr, kaddr + size)
 *
 *	op:		operation passed to dc instruction
 *	domain:		domain used in dsb instruction
 *	kaddr:		starting virtual address of the region
 *	size:		size of the region
 *	Corrupts:	kaddr, size, tmp1, tmp2
 */
	.macro	__dcache_op_workaround_clean_cache, op, kaddr
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \kaddr
alternative_else
	dc	civac, \kaddr
alternative_endif
	.endm

	.macro	dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
	dcache_line_size \tmp1, \tmp2
	add	\size, \kaddr, \size
	sub	\tmp2, \tmp1, #1
	bic	\kaddr, \kaddr, \tmp2
9998:
	.ifc	\op, cvau
	__dcache_op_workaround_clean_cache \op, \kaddr
	.else
	.ifc	\op, cvac
	__dcache_op_workaround_clean_cache \op, \kaddr
	.else
	.ifc	\op, cvap
	sys	3, c7, c12, 1, \kaddr	// dc cvap
	.else
	.ifc	\op, cvadp
	sys	3, c7, c13, 1, \kaddr	// dc cvadp
	.else
	dc	\op, \kaddr
	.endif
	.endif
	.endif
	.endif
	add	\kaddr, \kaddr, \tmp1
	cmp	\kaddr, \size
	b.lo	9998b
	dsb	\domain
	.endm
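
/*
 * Usage sketch (illustrative): clean and invalidate a buffer to the point
 * of coherency, e.g. before handing it to a non-coherent DMA master.
 * x0/x1 hold the base address and size; x2/x3 are scratch. Note that
 * x0 and x1 are corrupted by the expansion.
 *
 *	dcache_by_line_op civac, sy, x0, x1, x2, x3
 */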

/*
 * Macro to perform an instruction cache maintenance for the interval
 * [start, end)
 *
 *	start, end:	virtual addresses describing the region
 *	label:		A label to branch to on user fault.
 *	Corrupts:	tmp1, tmp2
 */
	.macro	invalidate_icache_by_line start, end, tmp1, tmp2, label
	icache_line_size \tmp1, \tmp2
	sub	\tmp2, \tmp1, #1
	bic	\tmp2, \start, \tmp2
9997:
USER(\label, ic	ivau, \tmp2)			// invalidate I line PoU
	add	\tmp2, \tmp2, \tmp1
	cmp	\tmp2, \end
	b.lo	9997b
	dsb	ish
	isb
	.endm

/*
 * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present
 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1
	sbfx	\tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
	cmp	\tmpreg, #1			// Skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
9000:
	.endm

/*
 * copy_page - copy src to dest using temp registers t1-t8
 */
	.macro	copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)
	b.ne	9998b
	.endm
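
/*
 * Usage sketch (illustrative): copy one page using non-temporal stores,
 * 64 bytes per iteration. Both pointers should be page aligned, since
 * the loop terminates when \src crosses a PAGE_SIZE boundary; x2-x9 are
 * clobbered as temporaries.
 *
 *	copy_page x0, x1, x2, x3, x4, x5, x6, x7, x8, x9
 */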

/*
 * Annotate a function as position independent, i.e., safe to be called
 * before the kernel virtual mapping is activated.
 */
#define ENDPIPROC(x)			\
	.globl	__pi_##x;		\
	.type	__pi_##x, %function;	\
	.set	__pi_##x, x;		\
	.size	__pi_##x, . - x;	\
	ENDPROC(x)

/*
 * Annotate a function as being unsuitable for kprobes.
 */
#ifdef CONFIG_KPROBES
#define NOKPROBE(x)				\
	.pushsection "_kprobe_blacklist", "aw";	\
	.quad	x;				\
	.popsection;
#else
#define NOKPROBE(x)
#endif

#ifdef CONFIG_KASAN
#define EXPORT_SYMBOL_NOKASAN(name)
#else
#define EXPORT_SYMBOL_NOKASAN(name)	EXPORT_SYMBOL(name)
#endif

	/*
	 * Emit a 64-bit absolute little endian symbol reference in a way that
	 * ensures that it will be resolved at build time, even when building a
	 * PIE binary. This requires cooperation from the linker script, which
	 * must emit the lo32/hi32 halves individually.
	 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm

	/*
	 * mov_q - move an immediate constant into a 64-bit register using
	 *         between 2 and 4 movz/movk instructions (depending on the
	 *         magnitude and sign of the operand)
	 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm
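
/*
 * Worked example (illustrative): mov_q x0, 0xffff000012345678 needs all
 * four 16-bit chunks, so it expands to
 *
 *	movz	x0, #0xffff, lsl #48
 *	movk	x0, #0x0000, lsl #32
 *	movk	x0, #0x1234, lsl #16
 *	movk	x0, #0x5678
 *
 * whereas a value that sign-extends from 32 bits gets by with two
 * instructions (movz :abs_g1_s: plus movk :abs_g0_nc:).
 */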

/*
 * Return the current task_struct.
 */
	.macro	get_current_task, rd
	mrs	\rd, sp_el0
	.endm

/*
 * Offset ttbr1 to allow for 48-bit kernel VAs set with 52-bit PTRS_PER_PGD,
 * unless the hardware supports 52-bit VAs (LVA), in which case no offset is
 * needed. orr is used as it can cover the immediate value (and is idempotent).
 *	ttbr: Value of ttbr to set, modified.
 */
	.macro	offset_ttbr1, ttbr, tmp
#ifdef CONFIG_ARM64_VA_BITS_52
	mrs_s	\tmp, SYS_ID_AA64MMFR2_EL1
	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
	cbnz	\tmp, .Lskipoffs_\@
	orr	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
.Lskipoffs_\@ :
#endif
	.endm

/*
 * Perform the reverse of offset_ttbr1.
 * bic is used as it can cover the immediate value and is idempotent.
 */
	.macro	restore_ttbr1, ttbr
#ifdef CONFIG_ARM64_VA_BITS_52
	bic	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
#endif
	.endm

/*
 * Arrange a physical address in a TTBR register, taking care of 52-bit
 * addresses.
 *
 *	phys:	physical address, preserved
 *	ttbr:	returns the TTBR value
 */
	.macro	phys_to_ttbr, ttbr, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	orr	\ttbr, \phys, \phys, lsr #46
	and	\ttbr, \ttbr, #TTBR_BADDR_MASK_52
#else
	mov	\ttbr, \phys
#endif
	.endm

	.macro	phys_to_pte, pte, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	/*
	 * We assume \phys is 64K aligned and this is guaranteed by only
	 * supporting this configuration with 64K pages.
	 */
	orr	\pte, \phys, \phys, lsr #36
	and	\pte, \pte, #PTE_ADDR_MASK
#else
	mov	\pte, \phys
#endif
	.endm

	.macro	pte_to_phys, phys, pte
#ifdef CONFIG_ARM64_PA_BITS_52
	ubfiz	\phys, \pte, #(48 - 16 - 12), #16
	bfxil	\phys, \pte, #16, #32
	lsl	\phys, \phys, #16
#else
	and	\phys, \pte, #PTE_ADDR_MASK
#endif
	.endm

/*
 * tcr_clear_errata_bits - Clear TCR bits that trigger an errata on this CPU.
 */
	.macro	tcr_clear_errata_bits, tcr, tmp1, tmp2
#ifdef CONFIG_FUJITSU_ERRATUM_010001
	mrs	\tmp1, midr_el1

	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001_MASK
	and	\tmp1, \tmp1, \tmp2
	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001
	cmp	\tmp1, \tmp2
	b.ne	10f

	mov_q	\tmp2, TCR_CLEAR_FUJITSU_ERRATUM_010001
	bic	\tcr, \tcr, \tmp2
10:
#endif /* CONFIG_FUJITSU_ERRATUM_010001 */
	.endm

/*
 * Errata workaround prior to disable MMU. Insert an ISB immediately prior
 * to executing the MSR that will change SCTLR_ELn[M] from a value of 1 to 0.
 */
	.macro	pre_disable_mmu_workaround
#ifdef CONFIG_QCOM_FALKOR_ERRATUM_E1041
	isb
#endif
	.endm

	/*
	 * frame_push - Push @regcount callee saved registers to the stack,
	 *              starting at x19, as well as x29/x30, and set x29 to
	 *              the new value of sp. Add @extra bytes of stack space
	 *              for locals.
	 */
	.macro	frame_push, regcount:req, extra
	__frame	st, \regcount, \extra
	.endm

	/*
	 * frame_pop  - Pop the callee saved registers from the stack that were
	 *              pushed in the most recent call to frame_push, as well
	 *              as x29/x30 and any extra stack space that may have been
	 *              allocated.
	 */
	.macro	frame_pop
	__frame	ld
	.endm

	.macro	__frame_regs, reg1, reg2, op, num
	.if	.Lframe_regcount == \num
	\op\()r	\reg1, [sp, #(\num + 1) * 8]
	.elseif	.Lframe_regcount > \num
	\op\()p	\reg1, \reg2, [sp, #(\num + 1) * 8]
	.endif
	.endm

	.macro	__frame, op, regcount, extra=0
	.ifc	\op, st
	.if	(\regcount) < 0 || (\regcount) > 10
	.error	"regcount should be in the range [0 ... 10]"
	.endif
	.if	((\extra) % 16) != 0
	.error	"extra should be a multiple of 16 bytes"
	.endif
	.ifdef	.Lframe_regcount
	.if	.Lframe_regcount != -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	.endif
	.set	.Lframe_regcount, \regcount
	.set	.Lframe_extra, \extra
	.set	.Lframe_local_offset, ((\regcount + 3) / 2) * 16
	stp	x29, x30, [sp, #-.Lframe_local_offset - .Lframe_extra]!
	mov	x29, sp
	.endif

	__frame_regs	x19, x20, \op, 1
	__frame_regs	x21, x22, \op, 3
	__frame_regs	x23, x24, \op, 5
	__frame_regs	x25, x26, \op, 7
	__frame_regs	x27, x28, \op, 9

	.ifc	\op, ld
	.if	.Lframe_regcount == -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	ldp	x29, x30, [sp], #.Lframe_local_offset + .Lframe_extra
	.set	.Lframe_regcount, -1
	.endif
	.endm
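
/*
 * Usage sketch (illustrative): an assembly function that needs four
 * callee saved registers and 16 bytes of local storage. my_func is a
 * hypothetical name.
 *
 * ENTRY(my_func)
 *	frame_push	4, 16		// saves x29/x30 and x19-x22
 *	...				// body may use x19-x22 freely
 *	frame_pop			// restores registers and SP
 *	ret
 * ENDPROC(my_func)
 */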

/*
 * Check whether to yield to another runnable task from kernel mode NEON code
 * (which runs with preemption disabled).
 *
 * if_will_cond_yield_neon
 *        // pre-yield patchup code
 * do_cond_yield_neon
 *        // post-yield patchup code
 * endif_yield_neon    <label>
 *
 * where <label> is optional, and marks the point where execution will resume
 * after a yield has been performed. If omitted, execution resumes right after
 * the endif_yield_neon invocation. Note that the entire sequence, including
 * the provided patchup code, will be omitted from the image if CONFIG_PREEMPT
 * is not defined.
 *
 * As a convenience, in the case where no patchup code is required, the above
 * sequence may be abbreviated to
 *
 * cond_yield_neon <label>
 *
 * Note that the patchup code does not support assembler directives that change
 * the output section, any use of such directives is undefined.
 *
 * The yield itself consists of the following:
 * - Check whether the preempt count is exactly 1, in which case disabling
 *   preemption once will make the task preemptible. If this is not the case,
 *   yielding is pointless.
 * - Disable and re-enable kernel mode NEON, and branch to the yield fixup
 *   code.
 *
 * This macro sequence may clobber all CPU state that is not guaranteed by the
 * AAPCS to be preserved across an ordinary function call.
 */
	.macro	cond_yield_neon, lbl
	if_will_cond_yield_neon
	do_cond_yield_neon
	endif_yield_neon	\lbl
	.endm

	.macro	if_will_cond_yield_neon
#ifdef CONFIG_PREEMPT
	get_current_task	x0
	ldr	x0, [x0, #TSK_TI_PREEMPT]
	sub	x0, x0, #PREEMPT_DISABLE_OFFSET
	cbz	x0, .Lyield_\@
	/* fall through to endif_yield_neon */
	.subsection	1
.Lyield_\@ :
#else
	.section ".discard.cond_yield_neon", "ax"
#endif
	.endm

	.macro	do_cond_yield_neon
	bl	kernel_neon_end
	bl	kernel_neon_begin
	.endm

	.macro	endif_yield_neon, lbl
	.ifnb	\lbl
	b	\lbl
	.else
	b	.Lyield_out_\@
	.endif
	.previous
.Lyield_out_\@ :
	.endm
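
/*
 * Usage sketch (illustrative): a long-running NEON loop in a hypothetical
 * crypto routine, yielding between blocks when a reschedule is due. The
 * block count is assumed to be in x2.
 *
 * 0:	...				// process one block with NEON
 *	subs	x2, x2, #1
 *	b.eq	1f
 *	cond_yield_neon	0b		// maybe yield, then resume at 0b
 *	b	0b
 * 1:	ret
 */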

#endif	/* __ASM_ASSEMBLER_H */