/*
 * Low-level exception handling (Xtensa)
 *
 * First-level exception dispatch, fast syscall paths, register-window
 * spill handling, and context switch for the Xtensa architecture.
 * See the includes below for the register/offset definitions used here.
 */
15 #include <linux/linkage.h>
16 #include <asm/asm-offsets.h>
17 #include <asm/asmmacro.h>
18 #include <asm/processor.h>
19 #include <asm/coprocessor.h>
20 #include <asm/thread_info.h>
21 #include <asm/asm-uaccess.h>
22 #include <asm/unistd.h>
23 #include <asm/ptrace.h>
24 #include <asm/current.h>
25 #include <asm/pgtable.h>
26 #include <asm/page.h>
27 #include <asm/signal.h>
28 #include <asm/tlbflush.h>
29 #include <variant/tie-asm.h>
30
31
32
33 #undef KERNEL_STACK_OVERFLOW_CHECK
34
35
36
37
38
39
40
41
42
43
44
45
46
47
/*
 * ffs_ws bit mask
 *
 * Find the topmost (most significant) bit set in \mask, which is a
 * WSBITS-wide windowstart-style mask, and return its position counted
 * from the left in \bit: uppermost bit set -> 1, lowest -> WSBITS.
 * Uses the NSA instruction when available; otherwise a branchy binary
 * search that clobbers \mask.  \bit and \mask must be distinct registers.
 */
	.macro	ffs_ws bit mask

#if XCHAL_HAVE_NSA
	nsau	\bit, \mask			# 32-WSBITS ... 31 (32 iff 0)
	addi	\bit, \bit, WSBITS - 32 + 1	# uppest bit set -> return 1
#else
	movi	\bit, WSBITS
#if WSBITS > 16
	_bltui	\mask, 0x10000, 99f
	addi	\bit, \bit, -16
	extui	\mask, \mask, 16, 16
#endif
#if WSBITS > 8
99:	_bltui	\mask, 0x100, 99f
	addi	\bit, \bit, -8
	srli	\mask, \mask, 8
#endif
99:	_bltui	\mask, 0x10, 99f
	addi	\bit, \bit, -4
	srli	\mask, \mask, 4
99:	_bltui	\mask, 0x4, 99f
	addi	\bit, \bit, -2
	srli	\mask, \mask, 2
99:	_bltui	\mask, 0x2, 99f
	addi	\bit, \bit, -1
99:

#endif
	.endm
77
78
/*
 * irq_save flags tmp
 *
 * Raise PS.INTLEVEL to LOCKLEVEL, returning the previous PS value in
 * \flags so the caller can restore it later with "wsr \flags, ps".
 * With XTENSA_FAKE_NMI the interrupt level must never be lowered, so
 * either branch around the rsil when already at/above LOCKLEVEL
 * (DEBUG_KERNEL variant) or OR LOCKLEVEL into PS.INTLEVEL via xsr.
 * \tmp is clobbered.
 */
	.macro	irq_save flags tmp
#if XTENSA_FAKE_NMI
#if defined(CONFIG_DEBUG_KERNEL) && (LOCKLEVEL | TOPLEVEL) >= XCHAL_DEBUGLEVEL
	rsr	\flags, ps
	extui	\tmp, \flags, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH
	bgei	\tmp, LOCKLEVEL, 99f	# already locked out? skip the rsil
	rsil	\tmp, LOCKLEVEL
99:
#else
	movi	\tmp, LOCKLEVEL
	rsr	\flags, ps
	or	\flags, \flags, \tmp	# INTLEVEL |= LOCKLEVEL (never lowers)
	xsr	\flags, ps
	rsync
#endif
#else
	rsil	\flags, LOCKLEVEL
#endif
	.endm
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129 .literal_position
130
/*
 * First-level exception handler for user exceptions.
 *
 * Saves special registers and all live register-window frames of the
 * user task into the pt_regs frame pointed to by a2, then jumps to the
 * common exception code.
 *
 * Entry: a1 = user stack pointer, a2 = kernel stack frame (pt_regs),
 *        depc = original a2, original a0 already saved at PT_AREG0
 *        (set up by the dispatcher — assumption based on the stores
 *        below; confirm against the vector code).
 */
ENTRY(user_exception)

	/* Save a1, a2, a3, and set stack pointer a1 to the pt_regs frame. */

	rsr	a0, depc
	s32i	a1, a2, PT_AREG1
	s32i	a0, a2, PT_AREG2
	s32i	a3, a2, PT_AREG3
	mov	a1, a2

	.globl _user_exception
_user_exception:

	/* Save SAR and turn off single stepping */

	movi	a2, 0
	wsr	a2, depc		# terminate user stack trace with 0
	rsr	a3, sar
	xsr	a2, icountlevel
	s32i	a3, a1, PT_SAR
	s32i	a2, a1, PT_ICOUNTLEVEL

#if XCHAL_HAVE_THREADPTR
	rur	a2, threadptr
	s32i	a2, a1, PT_THREADPTR
#endif

	/* Rotate ws so that the current windowbase is at bit0;
	 * assume ws = xxwww1yy, compute windowmask = 00www1. */

	rsr	a2, windowbase
	rsr	a3, windowstart
	ssr	a2
	s32i	a2, a1, PT_WINDOWBASE
	s32i	a3, a1, PT_WINDOWSTART
	slli	a2, a3, 32-WSBITS
	src	a2, a3, a2
	srli	a2, a2, 32-WSBITS
	s32i	a2, a1, PT_WMASK	# needed for restoring registers

	/* Save only live registers of the current frame (wmask bits 1..3
	 * tell how many 4-register groups the caller frames occupy). */

	_bbsi.l	a2, 1, 1f
	s32i	a4, a1, PT_AREG4
	s32i	a5, a1, PT_AREG5
	s32i	a6, a1, PT_AREG6
	s32i	a7, a1, PT_AREG7
	_bbsi.l	a2, 2, 1f
	s32i	a8, a1, PT_AREG8
	s32i	a9, a1, PT_AREG9
	s32i	a10, a1, PT_AREG10
	s32i	a11, a1, PT_AREG11
	_bbsi.l	a2, 3, 1f
	s32i	a12, a1, PT_AREG12
	s32i	a13, a1, PT_AREG13
	s32i	a14, a1, PT_AREG14
	s32i	a15, a1, PT_AREG15
	_bnei	a2, 1, 1f		# only one valid frame?

	/* Only one valid frame, skip saving the remaining register file. */

	j	2f

	/* Save the remaining (live) register-window frames to the stack
	 * area above pt_regs (PT_AREG_END). */

1:	addi	a3, a2, -1		# eliminate '1' in bit 0: yyyyxxww0
	neg	a3, a3			# yyyyxxww0 -> YYYYXXWW1+1
	and	a3, a3, a2		# max. only one bit is set

	/* Find number of frames to save */

	ffs_ws	a0, a3			# number of frames to the '1' from left

	/* Store information into WMASK:
	 * bits 0..3: xxx1 masked lower 4 bits of the windowstart,
	 * bits 4...: number of valid 4-register frames. */

	slli	a3, a0, 4		# number of frames to save in bits 8..4
	extui	a2, a2, 0, 4		# mask for the first 16 registers
	or	a2, a3, a2
	s32i	a2, a1, PT_WMASK	# needed when we restore the reg-file

	/* Save 4 registers at a time, rotating the window each iteration. */

1:	rotw	-1
	s32i	a0, a5, PT_AREG_END - 16
	s32i	a1, a5, PT_AREG_END - 12
	s32i	a2, a5, PT_AREG_END - 8
	s32i	a3, a5, PT_AREG_END - 4
	addi	a0, a4, -1
	addi	a1, a5, -16
	_bnez	a0, 1b

	/* WINDOWBASE still in SAR! */

	rsr	a2, sar			# original WINDOWBASE
	movi	a3, 1
	ssl	a2
	sll	a3, a3
	wsr	a3, windowstart		# set corresponding WINDOWSTART bit
	wsr	a2, windowbase		# and WINDOWSTART
	rsync

	/* We are back to the original stack pointer (a1) */

2:	/* Now, jump to the common exception handler. */

	j	common_exception

ENDPROC(user_exception)
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
/*
 * First-level exit handler for kernel exceptions.
 *
 * Like user_exception, but only the current-frame registers need to be
 * saved (kernel frames stay in place); falls through to common_exception.
 * Entry condition mirrors user_exception: a2 = pt_regs frame,
 * depc = original a2, a0 already saved at PT_AREG0 by the dispatcher
 * (assumption from the stores below — confirm against the vector code).
 */
ENTRY(kernel_exception)

	/* Save a1, a2, a3, and set stack pointer a1 to the pt_regs frame. */

	rsr	a0, depc		# get a2
	s32i	a1, a2, PT_AREG1
	s32i	a0, a2, PT_AREG2
	s32i	a3, a2, PT_AREG3
	mov	a1, a2

	.globl _kernel_exception
_kernel_exception:

	/* Save SAR and turn off single stepping */

	movi	a2, 0
	rsr	a3, sar
	xsr	a2, icountlevel
	s32i	a3, a1, PT_SAR
	s32i	a2, a1, PT_ICOUNTLEVEL

	/* Rotate ws so that the current windowbase is at bit0;
	 * assume ws = xxwww1yy, compute windowmask = 00www1. */

	rsr	a2, windowbase		# don't need to save these, we only
	rsr	a3, windowstart		# need shifted windowstart: windowmask
	ssr	a2
	slli	a2, a3, 32-WSBITS
	src	a2, a3, a2
	srli	a2, a2, 32-WSBITS
	s32i	a2, a1, PT_WMASK	# needed for kernel_exception_exit

	/* Save only the live window-frame (wmask bits select groups). */

	_bbsi.l	a2, 1, 1f
	s32i	a4, a1, PT_AREG4
	s32i	a5, a1, PT_AREG5
	s32i	a6, a1, PT_AREG6
	s32i	a7, a1, PT_AREG7
	_bbsi.l	a2, 2, 1f
	s32i	a8, a1, PT_AREG8
	s32i	a9, a1, PT_AREG9
	s32i	a10, a1, PT_AREG10
	s32i	a11, a1, PT_AREG11
	_bbsi.l	a2, 3, 1f
	s32i	a12, a1, PT_AREG12
	s32i	a13, a1, PT_AREG13
	s32i	a14, a1, PT_AREG14
	s32i	a15, a1, PT_AREG15

	_bnei	a2, 1, 1f

	/* Copy spill slots of a0 and a1 to imitate movsp
	 * in order to keep exception stack continuous. */

	l32i	a3, a1, PT_SIZE
	l32i	a0, a1, PT_SIZE + 4
	s32e	a3, a1, -16
	s32e	a0, a1, -12
1:
	l32i	a0, a1, PT_AREG0	# restore saved a0
	wsr	a0, depc

#ifdef KERNEL_STACK_OVERFLOW_CHECK
	/* Stack overflow check, for debugging — pseudo-code placeholders
	 * (XX / SIZE??), dead under the #undef above. */
	extui	a2, a1, TASK_SIZE_BITS,XX
	movi	a3, SIZE??
	_bge	a2, a3, out_of_stack_panic

#endif
348
349
350
351
352
353
354
355
356
357
/*
 * common_exception
 *
 * Shared tail of user_exception/kernel_exception: saves the remaining
 * special registers (debugcause, pc, excvaddr, loop regs, exccause,
 * ps, scompare1, optional TIE state), computes the PS value to run the
 * C handler with (WOE set; INTLEVEL = LOCKLEVEL for interrupts), and
 * dispatches through the default-handler table in excsave1.
 */
common_exception:

	/* Save some registers exception handlers need. */

	rsr	a2, debugcause
	rsr	a3, epc1
	s32i	a2, a1, PT_DEBUGCAUSE
	s32i	a3, a1, PT_PC

	movi	a2, NO_SYSCALL
	rsr	a3, excvaddr
	s32i	a2, a1, PT_SYSCALL
	movi	a2, 0
	s32i	a3, a1, PT_EXCVADDR
#if XCHAL_HAVE_LOOPS
	xsr	a2, lcount		# also clears LCOUNT
	s32i	a2, a1, PT_LCOUNT
#endif

	/* It is now save to restore the EXC_TABLE_FIXUP variable. */

	rsr	a2, exccause
	movi	a3, 0
	rsr	a0, excsave1
	s32i	a2, a1, PT_EXCCAUSE
	s32i	a3, a0, EXC_TABLE_FIXUP

	/* Save PS; figure out what INTLEVEL to run the handler at. */

	rsr	a3, ps
	s32i	a3, a1, PT_PS		# save ps

#if XTENSA_FAKE_NMI
	/* With fake NMI, medium-priority interrupts are mapped to
	 * EXCCAUSE_MAPPED_NMI and their PS was stashed in excsave2. */

	movi	a0, EXCCAUSE_MAPPED_NMI
	extui	a3, a3, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH
	beq	a2, a0, .Lmedium_level_irq
	bnei	a2, EXCCAUSE_LEVEL1_INTERRUPT, .Lexception
	beqz	a3, .Llevel1_irq	# level-1 IRQ sets ps.intlevel to 0

.Lmedium_level_irq:
	rsr	a0, excsave2
	s32i	a0, a1, PT_PS		# save medium-level interrupt ps
	bgei	a3, LOCKLEVEL, .Lexception

.Llevel1_irq:
	movi	a3, LOCKLEVEL

.Lexception:
	movi	a0, PS_WOE_MASK
	or	a3, a3, a0
#else
	addi	a2, a2, -EXCCAUSE_LEVEL1_INTERRUPT
	movi	a0, LOCKLEVEL
	extui	a3, a3, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH
					# a3 = PS.INTLEVEL
	moveqz	a3, a0, a2		# a3 = LOCKLEVEL iff interrupt
	movi	a2, PS_WOE_MASK
	or	a3, a3, a2
	rsr	a2, exccause
#endif

	/* restore return address (really needed?) and set it to the new PS. */

	rsr	a0, depc
	wsr	a3, ps
	rsync				# PS.WOE => rsync => overflow

	/* Save lbeg, lend */
#if XCHAL_HAVE_LOOPS
	rsr	a4, lbeg
	rsr	a3, lend
	s32i	a4, a1, PT_LBEG
	s32i	a3, a1, PT_LEND
#endif

	/* Save SCOMPARE1 */

#if XCHAL_HAVE_S32C1I
	rsr	a3, scompare1
	s32i	a3, a1, PT_SCOMPARE1
#endif

	/* Save optional registers. */

	save_xtregs_opt a1 a3 a4 a5 a6 a7 PT_XTREGS_OPT

	/* Go to second-level dispatcher. Set up parameters to pass to the
	 * exception handler and call the exception handler. */

	rsr	a4, excsave1
	mov	a6, a1			# pass stack frame
	mov	a7, a2			# pass EXCCAUSE
	addx4	a4, a2, a4
	l32i	a4, a4, EXC_TABLE_DEFAULT	# load handler

	/* Call the second-level handler */

	callx4	a4
468
	/* Jump here after the C handler returns: handle pending work
	 * (signals, reschedule, notify-resume) before returning to the
	 * interrupted context; loops back to 1 after each work item. */

	.global common_exception_return
common_exception_return:

#if XTENSA_FAKE_NMI
	l32i	a2, a1, PT_EXCCAUSE
	movi	a3, EXCCAUSE_MAPPED_NMI
	beq	a2, a3, .LNMIexit	# NMI path skips the work loop
#endif
1:
	irq_save a2, a3
#ifdef CONFIG_TRACE_IRQFLAGS
	call4	trace_hardirqs_off
#endif

	/* Jump if we are returning from kernel exceptions. */

	l32i	a3, a1, PT_PS
	GET_THREAD_INFO(a2, a1)
	l32i	a4, a2, TI_FLAGS
	_bbci.l	a3, PS_UM_BIT, 6f

	/* Specific to a user exception exit:
	 * We need to check some flags for signal handling and rescheduling,
	 * and have to restore WB and WS, extra states, and all registers
	 * in the register file that were in use in the user task. */

	_bbsi.l	a4, TIF_NEED_RESCHED, 3f
	_bbsi.l	a4, TIF_NOTIFY_RESUME, 2f
	_bbci.l	a4, TIF_SIGPENDING, 5f

2:	l32i	a4, a1, PT_DEPC
	bgeui	a4, VALID_DOUBLE_EXCEPTION_ADDRESS, 4f

	/* Call do_signal() */

#ifdef CONFIG_TRACE_IRQFLAGS
	call4	trace_hardirqs_on
#endif
	rsil	a2, 0
	mov	a6, a1
	call4	do_notify_resume	# int do_notify_resume(struct pt_regs*)
	j	1b

3:	/* Reschedule */

#ifdef CONFIG_TRACE_IRQFLAGS
	call4	trace_hardirqs_on
#endif
	rsil	a2, 0
	call4	schedule	# void schedule (void)
	j	1b

#ifdef CONFIG_PREEMPT
6:
	_bbci.l	a4, TIF_NEED_RESCHED, 4f

	/* Check current_thread_info->preempt_count */

	l32i	a4, a2, TI_PRE_COUNT
	bnez	a4, 4f
	call4	preempt_schedule_irq
	j	1b
#endif

#if XTENSA_FAKE_NMI
.LNMIexit:
	l32i	a3, a1, PT_PS
	_bbci.l	a3, PS_UM_BIT, 4f
#endif

5:
#ifdef CONFIG_HAVE_HW_BREAKPOINT
	_bbci.l	a4, TIF_DB_DISABLED, 7f
	call4	restore_dbreak
7:
#endif
#ifdef CONFIG_DEBUG_TLB_SANITY
	l32i	a4, a1, PT_DEPC
	bgeui	a4, VALID_DOUBLE_EXCEPTION_ADDRESS, 4f
	call4	check_tlb_sanity
#endif
6:
4:
#ifdef CONFIG_TRACE_IRQFLAGS
	extui	a4, a3, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH
	bgei	a4, LOCKLEVEL, 1f
	call4	trace_hardirqs_on
1:
#endif
560
561
	/* Restore optional registers. */

	load_xtregs_opt a1 a2 a4 a5 a6 a7 PT_XTREGS_OPT

	/* Restore SCOMPARE1 */

#if XCHAL_HAVE_S32C1I
	l32i	a2, a1, PT_SCOMPARE1
	wsr	a2, scompare1
#endif
	wsr	a3, ps		/* disable interrupts */

	_bbci.l	a3, PS_UM_BIT, kernel_exception_exit

user_exception_exit:

	/* Restore the state of the task and return from the exception. */

	/* Switch to the user thread WINDOWBASE. Save SP temporarily in DEPC */

	l32i	a2, a1, PT_WINDOWBASE
	l32i	a3, a1, PT_WINDOWSTART
	wsr	a1, depc		# use DEPC as temp storage
	wsr	a3, windowstart		# restore WINDOWSTART
	ssr	a2			# preserve user's WB in the SAR
	wsr	a2, windowbase		# switch to user's saved WB
	rsync
	rsr	a1, depc		# restore stack pointer
	l32i	a2, a1, PT_WMASK	# register frames saved (in bits 4...9)
	rotw	-1			# we restore a4..a7
	_bltui	a6, 16, 1f		# only have to restore current window?

	/* The working registers are a0 and a3.  We are restoring to
	 * a4..a7.  Be careful not to destroy what we have just restored.
	 * Note: wmask has the format YYYYM:
	 *       Y: number of registers saved in groups of 4
	 *       M: 4 bit mask of first 16 registers */

	mov	a2, a6
	mov	a3, a5

2:	rotw	-1			# a0..a3 become a4..a7
	addi	a3, a7, -4*4		# next iteration
	addi	a2, a6, -16		# decrementing Y in WMASK
	l32i	a4, a3, PT_AREG_END + 0
	l32i	a5, a3, PT_AREG_END + 4
	l32i	a6, a3, PT_AREG_END + 8
	l32i	a7, a3, PT_AREG_END + 12
	_bgeui	a2, 16, 2b

	/* Clear unrestored registers (don't leak anything to user-space) */

1:	rsr	a0, windowbase
	rsr	a3, sar
	sub	a3, a0, a3
	beqz	a3, 2f
	extui	a3, a3, 0, WBBITS

1:	rotw	-1
	addi	a3, a7, -1
	movi	a4, 0
	movi	a5, 0
	movi	a6, 0
	movi	a7, 0
	bgei	a3, 1, 1b

	/* We are back were we were when we started.
	 * Note: a2 still contains WMASK (if we've returned to the original
	 *	 frame where we had loaded a2), or at least the lower 4 bits
	 *	 (if we have restored WSBITS-1 frames). */

2:
#if XCHAL_HAVE_THREADPTR
	l32i	a3, a1, PT_THREADPTR
	wur	a3, threadptr
#endif

	j	common_exception_exit
640
641
642
643
644
645
	/* This is the kernel exception exit.
	 * We avoided to do a MOVSP when we entered the exception, but we
	 * have to do it here. */

kernel_exception_exit:

	/* Check if we have to do a movsp.
	 *
	 * We only have to do a movsp if the previous window-frame has
	 * been spilled to the *temporary* exception stack instead of the
	 * task's stack. This is the case if the corresponding bit in
	 * WINDOWSTART for the previous window-frame was set before
	 * (not spilled) but is zero now (spilled). */

	l32i	a2, a1, PT_WMASK
	_beqi	a2, 1, common_exception_exit	# Spilled before exception,jump

	/* Test if WINDOWSTART has more than one bit set: if so, we spilled
	 * anything while handling this exception, so nothing to do here. */

	rsr	a3, windowstart
	addi	a0, a3, -1
	and	a3, a3, a0
	_bnez	a3, common_exception_exit

	/* Do a 'movsp' by copying the previous frame's spill slots (a0..a3
	 * save area at SP-16) to the area just above the pt_regs frame. */

	addi	a0, a1, -16
	l32i	a3, a0, 0
	l32i	a4, a0, 4
	s32i	a3, a1, PT_SIZE+0
	s32i	a4, a1, PT_SIZE+4
	l32i	a3, a0, 8
	l32i	a4, a0, 12
	s32i	a3, a1, PT_SIZE+8
	s32i	a4, a1, PT_SIZE+12
693
694
695
696
697
698
699
700
	/* Common exception exit: restore address registers, special
	 * registers, and return from the exception (rfe for a regular
	 * exception, rfde if PT_DEPC marks a double exception). */

common_exception_exit:

	/* Restore address registers (groups selected by WMASK bits). */

	_bbsi.l	a2, 1, 1f
	l32i	a4, a1, PT_AREG4
	l32i	a5, a1, PT_AREG5
	l32i	a6, a1, PT_AREG6
	l32i	a7, a1, PT_AREG7
	_bbsi.l	a2, 2, 1f
	l32i	a8, a1, PT_AREG8
	l32i	a9, a1, PT_AREG9
	l32i	a10, a1, PT_AREG10
	l32i	a11, a1, PT_AREG11
	_bbsi.l	a2, 3, 1f
	l32i	a12, a1, PT_AREG12
	l32i	a13, a1, PT_AREG13
	l32i	a14, a1, PT_AREG14
	l32i	a15, a1, PT_AREG15

	/* Restore PC, SAR */

1:	l32i	a2, a1, PT_PC
	l32i	a3, a1, PT_SAR
	wsr	a2, epc1
	wsr	a3, sar

	/* Restore LBEG, LEND, LCOUNT */
#if XCHAL_HAVE_LOOPS
	l32i	a2, a1, PT_LBEG
	l32i	a3, a1, PT_LEND
	wsr	a2, lbeg
	l32i	a2, a1, PT_LCOUNT
	wsr	a3, lend
	wsr	a2, lcount
#endif

	/* We control single stepping through the ICOUNTLEVEL register. */

	l32i	a2, a1, PT_ICOUNTLEVEL
	movi	a3, -2
	wsr	a2, icountlevel
	wsr	a3, icount

	/* Check if it was double exception. */

	l32i	a0, a1, PT_DEPC
	l32i	a3, a1, PT_AREG3
	l32i	a2, a1, PT_AREG2
	_bgeui	a0, VALID_DOUBLE_EXCEPTION_ADDRESS, 1f

	/* Restore a0...a1 and return */

	l32i	a0, a1, PT_AREG0
	l32i	a1, a1, PT_AREG1
	rfe

1:	wsr	a0, depc
	l32i	a0, a1, PT_AREG0
	l32i	a1, a1, PT_AREG1
	rfde

ENDPROC(kernel_exception)
764
765
766
767
768
769
770
771
772
773 .literal_position
774
/*
 * Debug exception handler.
 *
 * Currently, we don't support KGDB, so only user application can be
 * debugged.  Maps the debug exception to a regular exception
 * (EXCCAUSE_MAPPED_DEBUG) and dispatches through the normal
 * user/kernel exception path; when entered from exception mode with
 * HW breakpoints, saves/disables DBREAK state and single-steps the
 * faulting instruction instead.
 * Entry: a3 saved in debug-level excsave (DT_* save area), a0 usable
 * after the EPS read.
 */
ENTRY(debug_exception)

	rsr	a0, SREG_EPS + XCHAL_DEBUGLEVEL
	bbsi.l	a0, PS_EXCM_BIT, 1f	# exception mode

	/* Set EPC1 and EXCCAUSE */

	wsr	a2, depc		# save a2 temporarily
	rsr	a2, SREG_EPC + XCHAL_DEBUGLEVEL
	wsr	a2, epc1

	movi	a2, EXCCAUSE_MAPPED_DEBUG
	wsr	a2, exccause

	/* Restore PS to the value before the debug exc but with PS.EXCM set.*/

	movi	a2, 1 << PS_EXCM_BIT
	or	a2, a0, a2
	wsr	a2, ps

	/* Switch to kernel/user stack, restore jump vector, and save a0 */

	bbsi.l	a2, PS_UM_BIT, 2f	# jump if user mode

	addi	a2, a1, -16-PT_SIZE	# assume kernel stack
3:
	l32i	a0, a3, DT_DEBUG_SAVE
	s32i	a1, a2, PT_AREG1
	s32i	a0, a2, PT_AREG0
	movi	a0, 0
	s32i	a0, a2, PT_DEPC		# mark it as a regular exception
	xsr	a3, SREG_EXCSAVE + XCHAL_DEBUGLEVEL
	xsr	a0, depc
	s32i	a3, a2, PT_AREG3
	s32i	a0, a2, PT_AREG2
	mov	a1, a2

	/* Debug exception is handled as an exception, so interrupts will
	 * likely be enabled in the common exception handler. Disable
	 * preemption if we have HW breakpoints to preserve DEBUGCAUSE.DBNUM
	 * meaning. */

#if defined(CONFIG_PREEMPT_COUNT) && defined(CONFIG_HAVE_HW_BREAKPOINT)
	GET_THREAD_INFO(a2, a1)
	l32i	a3, a2, TI_PRE_COUNT
	addi	a3, a3, 1
	s32i	a3, a2, TI_PRE_COUNT
#endif

	rsr	a2, ps
	bbsi.l	a2, PS_UM_BIT, _user_exception
	j	_kernel_exception

2:	rsr	a2, excsave1
	l32i	a2, a2, EXC_TABLE_KSTK	# load kernel stack pointer
	j	3b

#ifdef CONFIG_HAVE_HW_BREAKPOINT
	/* Debug exception while in exception mode. This may happen when
	 * window overflow/underflow handler or fast exception handler hits
	 * data breakpoint, in which case save and disable all data
	 * breakpoints, single-step faulting instruction and restore data
	 * breakpoint afterwards. */
1:
	bbci.l	a0, PS_UM_BIT, 1b	# jump if kernel mode

	rsr	a0, debugcause
	bbsi.l	a0, DEBUGCAUSE_DBREAK_BIT, .Ldebug_save_dbreak

	/* Re-arm saved data breakpoints and continue single-stepping. */

	.set	_index, 0
	.rept	XCHAL_NUM_DBREAK
	l32i	a0, a3, DT_DBREAKC_SAVE + _index * 4
	wsr	a0, SREG_DBREAKC + _index
	.set	_index, _index + 1
	.endr

	l32i	a0, a3, DT_ICOUNT_LEVEL_SAVE
	wsr	a0, icountlevel

	l32i	a0, a3, DT_ICOUNT_SAVE
	xsr	a0, icount

	l32i	a0, a3, DT_DEBUG_SAVE
	xsr	a3, SREG_EXCSAVE + XCHAL_DEBUGLEVEL
	rfi	XCHAL_DEBUGLEVEL

.Ldebug_save_dbreak:

	/* Disable all data breakpoints, saving their control registers,
	 * and set up ICOUNT to single-step the faulting instruction. */

	.set	_index, 0
	.rept	XCHAL_NUM_DBREAK
	movi	a0, 0
	xsr	a0, SREG_DBREAKC + _index
	s32i	a0, a3, DT_DBREAKC_SAVE + _index * 4
	.set	_index, _index + 1
	.endr

	movi	a0, XCHAL_EXCM_LEVEL + 1
	xsr	a0, icountlevel
	s32i	a0, a3, DT_ICOUNT_LEVEL_SAVE

	movi	a0, 0xfffffffe
	xsr	a0, icount
	s32i	a0, a3, DT_ICOUNT_SAVE

	l32i	a0, a3, DT_DEBUG_SAVE
	xsr	a3, SREG_EXCSAVE + XCHAL_DEBUGLEVEL
	rfi	XCHAL_DEBUGLEVEL
#else
	/* Debug exception while in exception mode. Should not happen. */
1:	j	1b	# FIXME!!
#endif

ENDPROC(debug_exception)
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
/*
 * We get here in case of an unrecoverable exception.
 * The only thing we can do is to be nice and print a panic message.
 * We only produce a single stack frame for panic, so ???
 */
	.align	4
unrecoverable_text:
	.ascii	"Unrecoverable error in exception handler\0"

	.literal_position

ENTRY(unrecoverable_exception)

	/* Reset window state: single frame at windowbase 0. */

	movi	a0, 1
	movi	a1, 0

	wsr	a0, windowstart
	wsr	a1, windowbase
	rsync

	movi	a1, PS_WOE_MASK | LOCKLEVEL
	wsr	a1, ps
	rsync

	/* Run panic() on init_task's stack. */

	movi	a1, init_task
	movi	a0, 0
	addi	a1, a1, PT_REGS_OFFSET

	movi	a6, unrecoverable_text
	call4	panic

1:	j	1b

ENDPROC(unrecoverable_exception)
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
/*
 * Fast-path alloca exception handler.
 *
 * Restores the exception state (PS.OWB from the pre-exception
 * windowbase, a4/DEPC from the saved frame) and redirects to the
 * appropriate window-underflow handler (4/8/12-register frame)
 * selected by the return-address call-size bits (a4 bits 31:30).
 */
ENTRY(fast_alloca)
	rsr	a0, windowbase
	rotw	-1
	rsr	a2, ps
	extui	a3, a2, PS_OWB_SHIFT, PS_OWB_WIDTH
	xor	a3, a3, a4
	l32i	a4, a6, PT_AREG0
	l32i	a1, a6, PT_DEPC
	rsr	a6, depc
	wsr	a1, depc
	slli	a3, a3, PS_OWB_SHIFT
	xor	a2, a2, a3		# fix PS.OWB to the rotated window
	wsr	a2, ps
	rsync

	_bbci.l	a4, 31, 4f		# call4 frame?
	rotw	-1
	_bbci.l	a8, 30, 8f		# call8 frame?
	rotw	-1
	j	_WindowUnderflow12
8:	j	_WindowUnderflow8
4:	j	_WindowUnderflow4
ENDPROC(fast_alloca)
1005
1006 #ifdef CONFIG_USER_ABI_CALL0_PROBE
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
/*
 * fast illegal instruction handler (CALL0 ABI probe).
 *
 * If PS.WOE is already set this is a genuine illegal instruction:
 * fall through to the full user_exception path.  Otherwise set PS.WOE
 * (the process is probing for the windowed ABI), restore the scratch
 * registers, and retry the faulting instruction via rfe.
 */
ENTRY(fast_illegal_instruction_user)

	rsr	a0, ps
	bbsi.l	a0, PS_WOE_BIT, user_exception
	s32i	a3, a2, PT_AREG3
	movi	a3, PS_WOE_MASK
	or	a0, a0, a3
	wsr	a0, ps
	l32i	a3, a2, PT_AREG3
	l32i	a0, a2, PT_AREG0
	rsr	a2, depc
	rfe

ENDPROC(fast_illegal_instruction_user)
1038 #endif
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
/*
 * fast system calls.
 *
 * Only the 'spill registers' (nr 0) and 'xtensa' (nr __NR_xtensa)
 * syscalls are handled here; everything else falls back to the slow
 * user_exception path.  The syscall number arrives in a2 (stashed in
 * DEPC by the dispatcher); EPC1 is advanced past the 3-byte syscall
 * instruction.
 */
ENTRY(fast_syscall_user)

	/* Skip syscall. */

	rsr	a0, epc1
	addi	a0, a0, 3
	wsr	a0, epc1

	l32i	a0, a2, PT_DEPC
	bgeui	a0, VALID_DOUBLE_EXCEPTION_ADDRESS, fast_syscall_unrecoverable

	rsr	a0, depc		# get syscall-nr
	_beqz	a0, fast_syscall_spill_registers
	_beqi	a0, __NR_xtensa, fast_syscall_xtensa

	j	user_exception

ENDPROC(fast_syscall_user)
1077
/*
 * A fast syscall arrived as a double exception: the state cannot be
 * recovered, so restore what we can and panic via
 * unrecoverable_exception (does not return).
 */
ENTRY(fast_syscall_unrecoverable)

	/* Restore all states. */

	l32i	a0, a2, PT_AREG0	# restore a0
	xsr	a2, depc		# restore a2, depc

	wsr	a0, excsave1
	call0	unrecoverable_exception

ENDPROC(fast_syscall_unrecoverable)
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115 .literal_position
1116
1117 #ifdef CONFIG_FAST_SYSCALL_XTENSA
1118
/*
 * sysxtensa syscall handler: atomic compare-and-swap / set / add /
 * exchange on a user word.
 *
 * In:  a6 = sub-operation code (SYS_XTENSA_*), a3 = user address,
 *      a4/a5 = operands.  Out: result in a2 (1/0 for cmp-swap,
 *      previous value for the others, -EFAULT/-EINVAL on error).
 * NOTE(review): not SMP-safe — load and store are separate
 * instructions with no interlock visible here.
 */
ENTRY(fast_syscall_xtensa)

	s32i	a7, a2, PT_AREG7	# we need an additional register
	movi	a7, 4			# sizeof(unsigned int)
	access_ok a3, a7, a0, a2, .Leac	# a0: scratch reg, a2: sp

	_bgeui	a6, SYS_XTENSA_COUNT, .Lill
	_bnei	a6, SYS_XTENSA_ATOMIC_CMP_SWP, .Lnswp

	/* Fall through for ATOMIC_CMP_SWP. */

.Lswp:	/* Atomic compare and swap */

EX(.Leac) l32i	a0, a3, 0		# read old value
	bne	a0, a4, 1f		# same as old value? jump
EX(.Leac) s32i	a5, a3, 0		# different, modify value
	l32i	a7, a2, PT_AREG7	# restore a7
	l32i	a0, a2, PT_AREG0	# restore a0
	movi	a2, 1			# and return 1
	rfe

1:	l32i	a7, a2, PT_AREG7	# restore a7
	l32i	a0, a2, PT_AREG0	# restore a0
	movi	a2, 0			# return 0 (note that we cannot set
	rfe

.Lnswp:	/* Atomic set, add, and exg_add. */

EX(.Leac) l32i	a7, a3, 0		# orig
	addi	a6, a6, -SYS_XTENSA_ATOMIC_SET
	add	a0, a4, a7		# + arg
	moveqz	a0, a4, a6		# set
	addi	a6, a6, SYS_XTENSA_ATOMIC_SET
EX(.Leac) s32i	a0, a3, 0		# write new value

	mov	a0, a2
	mov	a2, a7
	l32i	a7, a0, PT_AREG7	# restore a7
	l32i	a0, a0, PT_AREG0	# restore a0
	rfe

.Leac:	l32i	a7, a2, PT_AREG7	# restore a7
	l32i	a0, a2, PT_AREG0	# restore a0
	movi	a2, -EFAULT
	rfe

.Lill:	l32i	a7, a2, PT_AREG7	# restore a7
	l32i	a0, a2, PT_AREG0	# restore a0
	movi	a2, -EINVAL
	rfe

ENDPROC(fast_syscall_xtensa)
1171
1172 #else
1173
/* CONFIG_FAST_SYSCALL_XTENSA disabled: report ENOSYS. */
ENTRY(fast_syscall_xtensa)

	l32i	a0, a2, PT_AREG0	# restore a0
	movi	a2, -ENOSYS
	rfe

ENDPROC(fast_syscall_xtensa)
1181
1182 #endif
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199 #ifdef CONFIG_FAST_SYSCALL_SPILL_REGISTERS
1200
/*
 * fast system calls: spill all live register windows of the current
 * task to its stack, leaving only the current frame live.
 *
 * A window overflow during the spill restarts via the registered
 * FIXUP handler (fast_syscall_spill_registers_fixup).  On an
 * inconsistent windowstart mask the task is killed (.Linvalid_mask).
 */
ENTRY(fast_syscall_spill_registers)

	/* Register a FIXUP handler (pass current wb as a parameter) */

	xsr	a3, excsave1
	movi	a0, fast_syscall_spill_registers_fixup
	s32i	a0, a3, EXC_TABLE_FIXUP
	rsr	a0, windowbase
	s32i	a0, a3, EXC_TABLE_PARAM
	xsr	a3, excsave1		# restore a3 and excsave_1

	/* Save a3, a4 and SAR on stack. */

	rsr	a0, sar
	s32i	a3, a2, PT_AREG3
	s32i	a0, a2, PT_SAR

	/* The spill routine might clobber a4, a7, a8, a11, a12, and a15. */

	s32i	a4, a2, PT_AREG4
	s32i	a7, a2, PT_AREG7
	s32i	a8, a2, PT_AREG8
	s32i	a11, a2, PT_AREG11
	s32i	a12, a2, PT_AREG12
	s32i	a15, a2, PT_AREG15

	/*
	 * Rotate ws so that the current windowbase is at bit 0.
	 * Assume ws = xxxwww1yy (www1 current window frame).
	 * Rotate ws right so that a4 = yyxxxwww1.
	 */

	rsr	a0, windowbase
	rsr	a3, windowstart		# a3 = xxxwww1yy
	ssr	a0			# holds WB
	slli	a0, a3, WSBITS
	or	a3, a3, a0		# a3 = xxxwww1yyxxxwww1yy
	srl	a3, a3			# a3 = 00xxxwww1yyxxxwww1

	/* We are done if there are no more than the current register frame. */

	extui	a3, a3, 1, WSBITS-1	# a3 = 0yyxxxwww
	movi	a0, (1 << (WSBITS-1))
	_beqz	a3, .Lnospill		# only one active frame? jump

	/* We want 1 at the top, so that we return to the current windowbase */

	or	a3, a3, a0		# 1yyxxxwww

	/* Skip empty frames - get 'oldest' WINDOWSTART-bit. */

	wsr	a3, windowstart		# save shifted windowstart
	neg	a0, a3
	and	a3, a0, a3		# first bit set from right: 000010000

	ffs_ws	a0, a3			# a0: shifts to skip empty frames
	movi	a3, WSBITS
	sub	a0, a3, a0		# WSBITS-a0:number of 0-bits from right
	ssr	a0			# save in SAR for later.

	rsr	a3, windowbase
	add	a3, a3, a0
	wsr	a3, windowbase
	rsync

	rsr	a3, windowstart
	srl	a3, a3			# shift windowstart

	/* WB is now just one frame below the oldest frame in the register
	   window. WS is shifted so the oldest frame is in bit 0, thus, WB
	   and WS differ by one 4-register frame. */

	/* Save frames. Depending what call was used (call4, call8, call12),
	 * we have to save 4,8, or 12 registers. */

.Lloop: _bbsi.l	a3, 1, .Lc4
	_bbci.l	a3, 2, .Lc12

.Lc8:	s32e	a4, a13, -16
	l32e	a4, a5, -12
	s32e	a8, a4, -32
	s32e	a5, a13, -12
	s32e	a6, a13, -8
	s32e	a7, a13, -4
	s32e	a9, a4, -28
	s32e	a10, a4, -24
	s32e	a11, a4, -20
	srli	a11, a3, 2		# shift windowbase by 2
	rotw	2
	_bnei	a3, 1, .Lloop
	j	.Lexit

.Lc4:	s32e	a4, a9, -16
	s32e	a5, a9, -12
	s32e	a6, a9, -8
	s32e	a7, a9, -4

	srli	a7, a3, 1
	rotw	1
	_bnei	a3, 1, .Lloop
	j	.Lexit

.Lc12:	_bbci.l	a3, 3, .Linvalid_mask	# bit 2 shouldn't be zero!

	/* 12-register frame (call12) */

	l32e	a0, a5, -12
	s32e	a8, a0, -48
	mov	a8, a0

	s32e	a9, a8, -44
	s32e	a10, a8, -40
	s32e	a11, a8, -36
	s32e	a12, a8, -32
	s32e	a13, a8, -28
	s32e	a14, a8, -24
	s32e	a15, a8, -20
	srli	a15, a3, 3

	/* The stack pointer for a4..a7 is out of reach, so we rotate the
	 * window, grab the stackpointer, and rotate back. */

	rotw	1
	mov	a4, a13
	rotw	-1

	s32e	a4, a8, -16
	s32e	a5, a8, -12
	s32e	a6, a8, -8
	s32e	a7, a8, -4

	rotw	3

	_beqi	a3, 1, .Lexit
	j	.Lloop

.Lexit:

	/* Done. Do the final rotation and set WS */

	rotw	1
	rsr	a3, windowbase
	ssl	a3
	movi	a3, 1
	sll	a3, a3
	wsr	a3, windowstart
.Lnospill:

	/* Advance PC, restore registers and SAR, and return from exception. */

	l32i	a3, a2, PT_SAR
	l32i	a0, a2, PT_AREG0
	wsr	a3, sar
	l32i	a3, a2, PT_AREG3

	/* Restore clobbered registers. */

	l32i	a4, a2, PT_AREG4
	l32i	a7, a2, PT_AREG7
	l32i	a8, a2, PT_AREG8
	l32i	a11, a2, PT_AREG11
	l32i	a12, a2, PT_AREG12
	l32i	a15, a2, PT_AREG15

	movi	a2, 0
	rfe

.Linvalid_mask:

	/* We get here because of an unrecoverable error in the window
	 * registers, so set up a dummy frame and kill the user application.
	 * Note: We assume EXC_TABLE_KSTK contains a valid stack pointer. */

	movi	a0, 1
	movi	a1, 0

	wsr	a0, windowstart
	wsr	a1, windowbase
	rsync

	movi	a0, 0

	rsr	a3, excsave1
	l32i	a1, a3, EXC_TABLE_KSTK

	movi	a4, PS_WOE_MASK | LOCKLEVEL
	wsr	a4, ps
	rsync

	movi	a6, SIGSEGV
	call4	do_exit

	/* shouldn't return, so panic */

	wsr	a0, excsave1
	call0	unrecoverable_exception		# should not return
1:	j	1b

ENDPROC(fast_syscall_spill_registers)
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
/* Fixup handler.
 *
 * We get here if the spill routine causes an exception (e.g. tlb miss).
 * We basically restore WINDOWBASE and WINDOWSTART to the condition when
 * we entered the spill routine and jump to the user exception handler.
 *
 * Note that we only need to restore the bits in windowstart that have not
 * been spilled yet by the _spill_register routine. Luckily, a3 contains a
 * rotated windowstart with only those bits set for frames that haven't
 * been spilled yet.  Because a3 is rotated such that bit 0 represents the
 * current frame for the current WINDOWBASE, we need to rotate a3 left by
 * the value of the current WINDOWBASE + 1 and move it to WINDOWSTART.
 *
 * Entry: a0/a2 saved by the dispatcher, DEPC holds a0's value,
 *        excsave1 = exception table, EXC_TABLE_PARAM = original WB.
 */
ENTRY(fast_syscall_spill_registers_fixup)

	rsr	a2, windowbase	# get current windowbase (a2 is saved)
	xsr	a0, depc	# restore depc and a0
	ssl	a2		# set shift (32 - WB)

	/* We need to make sure the current registers (a0-a3) are preserved.
	 * To do this, we simply set the bit for the current window frame
	 * in WS, so that the exception handlers save them to the task stack.
	 *
	 * Note: we use a3 to set the windowbase, so we take a special care
	 * of it, saving it in the original _spill_registers frame across
	 * the exception handler call. */

	xsr	a3, excsave1	# get spill-mask
	slli	a3, a3, 1	# shift left by one
	addi	a3, a3, 1	# set the bit for the current window frame

	slli	a2, a3, 32-WSBITS
	src	a2, a3, a2	# a2 = xxwww1yyxxxwww1yy......
	wsr	a2, windowstart	# set corrected windowstart

	srli	a3, a3, 1
	rsr	a2, excsave1
	l32i	a2, a2, EXC_TABLE_DOUBLE_SAVE	# restore a2
	xsr	a2, excsave1
	s32i	a3, a2, EXC_TABLE_DOUBLE_SAVE	# save a3
	l32i	a3, a2, EXC_TABLE_PARAM	# original WB (in user task)
	xsr	a2, excsave1

	/* Return to the original (user task) WINDOWBASE.
	 * We leave the following frame behind:
	 *	a0, a1, a2	same
	 *	a3:		trashed (saved in EXC_TABLE_DOUBLE_SAVE)
	 *	depc:		depc (we have to return to that address)
	 *	excsave_1:	exc_table */

	wsr	a3, windowbase
	rsync

	/* Setup stack pointer. */

	addi	a2, a2, -PT_USER_SIZE
	s32i	a0, a2, PT_AREG0

	/* Make sure we return to this fixup handler. */

	movi	a3, fast_syscall_spill_registers_fixup_return
	s32i	a3, a2, PT_DEPC		# setup depc

	/* Jump to the exception handler. */

	rsr	a3, excsave1
	rsr	a0, exccause
	addx4	a0, a0, a3		# find entry in table
	l32i	a0, a0, EXC_TABLE_FAST_USER	# load handler
	l32i	a3, a3, EXC_TABLE_DOUBLE_SAVE
	jx	a0

ENDPROC(fast_syscall_spill_registers_fixup)
1502
/*
 * Return path of the fixup handler: re-registers the fixup handler,
 * restores the windowbase at the time the exception occurred (kept in
 * SAR as 32-WB), and resumes the interrupted spill via rfde.
 */
ENTRY(fast_syscall_spill_registers_fixup_return)

	/* When we return here, all registers have been restored (a2: DEPC) */

	wsr	a2, depc		# exception address

	/* Restore fixup handler. */

	rsr	a2, excsave1
	s32i	a3, a2, EXC_TABLE_DOUBLE_SAVE
	movi	a3, fast_syscall_spill_registers_fixup
	s32i	a3, a2, EXC_TABLE_FIXUP
	rsr	a3, windowbase
	s32i	a3, a2, EXC_TABLE_PARAM
	l32i	a2, a2, EXC_TABLE_KSTK

	/* Load WB at the time the exception occurred. */

	rsr	a3, sar			# WB is still in SAR
	neg	a3, a3
	wsr	a3, windowbase
	rsync

	rsr	a3, excsave1
	l32i	a3, a3, EXC_TABLE_DOUBLE_SAVE

	rfde

ENDPROC(fast_syscall_spill_registers_fixup_return)
1532
1533 #else
1534
/* CONFIG_FAST_SYSCALL_SPILL_REGISTERS disabled: report ENOSYS. */
ENTRY(fast_syscall_spill_registers)

	l32i	a0, a2, PT_AREG0	# restore a0
	movi	a2, -ENOSYS
	rfe

ENDPROC(fast_syscall_spill_registers)
1542
1543 #endif
1544
1545 #ifdef CONFIG_MMU
1546
1547
1548
1549
/* A TLB miss while in a double exception in kernel mode cannot be
 * handled; panic via unrecoverable_exception. */
ENTRY(fast_second_level_miss_double_kernel)

1:
	call0	unrecoverable_exception		# should not return
1:	j	1b

ENDPROC(fast_second_level_miss_double_kernel)
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575
1576
1577
/* First-level handler for user or kernel mode TLB misses.
 *
 * Walks the page table: reads the PGD entry for the fault address and,
 * if valid, installs the page-table page itself into the DTLB way for
 * the PTE-virtual (ptevaddr) mapping, so the hardware refill can then
 * find the PTE.  On an invalid PGD, falls back to the slow C path via
 * _kernel_exception/_user_exception.  Includes a special case for
 * TLBTEMP cache-alias mappings when DCACHE_WAY_SIZE > PAGE_SIZE.
 */
ENTRY(fast_second_level_miss)

	/* Save a1 and a3 (a0/a2/DEPC handled by the dispatcher). */

	s32i	a1, a2, PT_AREG1
	s32i	a3, a2, PT_AREG3

	GET_CURRENT(a1,a2)
	l32i	a0, a1, TASK_MM		# tsk->mm
	beqz	a0, 9f

8:	rsr	a3, excvaddr		# fault address
	_PGD_OFFSET(a0, a3, a1)
	l32i	a0, a0, 0		# read pmdval
	beqz	a0, 2f

	/* Convert pmdval to a physical page-directory entry:
	 * (pmdval - PAGE_OFFSET + PHYS_OFFSET) & PAGE_MASK | _PAGE_DIRECTORY */

	movi	a1, (PHYS_OFFSET - PAGE_OFFSET) & 0xffffffff
	add	a0, a0, a1		# pmdval - PAGE_OFFSET
	extui	a1, a0, 0, PAGE_SHIFT	# ... & PAGE_MASK
	xor	a0, a0, a1

	movi	a1, _PAGE_DIRECTORY
	or	a0, a0, a1		# ... | PAGE_DIRECTORY

	/* Compute the DTLB entry address: (ptevaddr & PAGE_MASK) + way,
	 * where way is derived from fault-address bits 28..29. */

	extui	a3, a3, 28, 2		# addr. bit 28 and 29	0,1,2,3
	rsr	a1, ptevaddr
	addx2	a3, a3, a3		# ->			0,3,6,9
	srli	a1, a1, PAGE_SHIFT
	extui	a3, a3, 2, 2		# ->			0,0,1,2
	slli	a1, a1, PAGE_SHIFT	# ptevaddr & PAGE_MASK
	addi	a3, a3, DTLB_WAY_PGD
	add	a1, a1, a3		# ... + way_number

3:	wdtlb	a0, a1
	dsync

	/* Exit critical section. */

4:	rsr	a3, excsave1
	movi	a0, 0
	s32i	a0, a3, EXC_TABLE_FIXUP

	/* Restore the working registers, and return. */

	l32i	a0, a2, PT_AREG0
	l32i	a1, a2, PT_AREG1
	l32i	a3, a2, PT_AREG3
	l32i	a2, a2, PT_DEPC

	bgeui	a2, VALID_DOUBLE_EXCEPTION_ADDRESS, 1f

	/* Restore excsave1 and return. */

	rsr	a2, depc
	rfe

	/* Return from a double exception. */

1:	xsr	a2, depc
	esync
	rfde

9:	l32i	a0, a1, TASK_ACTIVE_MM	# unlikely case mm == 0
	bnez	a0, 8b

	/* Even more unlikely case active_mm == 0.
	 * We can get here with NMI in the middle of context_switch that
	 * touches vmalloc area. */

	movi	a0, init_mm
	j	8b

#if (DCACHE_WAY_SIZE > PAGE_SIZE)

2:	/* Special case for cache aliasing:
	 * access to a TLBTEMP page from one of the special copy/clear
	 * helpers between __tlbtemp_mapping_start/_end. */

	/* We shouldn't be in a double exception */

	l32i	a0, a2, PT_DEPC
	bgeui	a0, VALID_DOUBLE_EXCEPTION_ADDRESS, 2f

	/* Make sure the exception originated in the special functions */

	movi	a0, __tlbtemp_mapping_start
	rsr	a3, epc1
	bltu	a3, a0, 2f
	movi	a0, __tlbtemp_mapping_end
	bgeu	a3, a0, 2f

	/* Check if excvaddr was in one of the TLBTEMP_BASE areas. */

	movi	a3, TLBTEMP_BASE_1
	rsr	a0, excvaddr
	bltu	a0, a3, 2f

	addi	a1, a0, -TLBTEMP_SIZE
	bgeu	a1, a3, 2f

	/* Check if we have to restore an ITLB mapping. */

	movi	a1, __tlbtemp_mapping_itlb
	rsr	a3, epc1
	sub	a3, a3, a1

	/* Calculate VPN */

	movi	a1, PAGE_MASK
	and	a1, a1, a0

	/* Jump for ITLB entry */

	bgez	a3, 1f

	/* We can use up to two TLBTEMP areas; select by the alias bit. */

	extui	a3, a0, PAGE_SHIFT + DCACHE_ALIAS_ORDER, 1
	add	a1, a3, a1

	/* PPN is in a6 for the first TLBTEMP area and in a7 for the second. */

	mov	a0, a6
	movnez	a0, a7, a3
	j	3b

	/* ITLB entry. We only use dst in a6. */

1:	witlb	a6, a1
	isync
	j	4b


#endif	// DCACHE_WAY_SIZE > PAGE_SIZE

2:	/* Invalid PGD, default exception handling */

	rsr	a1, depc
	s32i	a1, a2, PT_AREG2
	mov	a1, a2

	rsr	a2, ps
	bbsi.l	a2, PS_UM_BIT, 1f
	j	_kernel_exception
1:	j	_user_exception

ENDPROC(fast_second_level_miss)
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
/* StoreProhibitedException.
 *
 * Update the pte and invalidate the itlb mapping for this pte.
 * Fast path: if the page is present and writable, set the
 * ACCESSED/DIRTY/HW_WRITE bits in the PTE and rewrite the DTLB entry.
 * Otherwise fall back to the C fault handler via
 * _kernel_exception/_user_exception.
 */
ENTRY(fast_store_prohibited)

	/* Save a1 and a3. */

	s32i	a1, a2, PT_AREG1
	s32i	a3, a2, PT_AREG3

	GET_CURRENT(a1,a2)
	l32i	a0, a1, TASK_MM		# tsk->mm
	beqz	a0, 9f

8:	rsr	a1, excvaddr		# fault address
	_PGD_OFFSET(a0, a1, a3)
	l32i	a0, a0, 0
	beqz	a0, 2f

	/* Note that we test _PAGE_WRITABLE_BIT only if PTE is present
	 * and is not PAGE_NONE (CA bits not all _PAGE_CA_INVALID). */

	_PTE_OFFSET(a0, a1, a3)
	l32i	a3, a0, 0		# read pteval
	movi	a1, _PAGE_CA_INVALID
	ball	a3, a1, 2f
	bbci.l	a3, _PAGE_WRITABLE_BIT, 2f

	movi	a1, _PAGE_ACCESSED | _PAGE_DIRTY | _PAGE_HW_WRITE
	or	a3, a3, a1
	rsr	a1, excvaddr
	s32i	a3, a0, 0

	/* We need to flush the cache if we have page coloring. */
#if (DCACHE_WAY_SIZE > PAGE_SIZE) && XCHAL_DCACHE_IS_WRITEBACK
	dhwb	a0, 0
#endif
	pdtlb	a0, a1
	wdtlb	a3, a0

	/* Exit critical section. */

	movi	a0, 0
	rsr	a3, excsave1
	s32i	a0, a3, EXC_TABLE_FIXUP

	/* Restore the working registers, and return. */

	l32i	a3, a2, PT_AREG3
	l32i	a1, a2, PT_AREG1
	l32i	a0, a2, PT_AREG0
	l32i	a2, a2, PT_DEPC

	bgeui	a2, VALID_DOUBLE_EXCEPTION_ADDRESS, 1f

	rsr	a2, depc
	rfe

	/* Double exception. Restore FIXUP handler and return. */

1:	xsr	a2, depc
	esync
	rfde

9:	l32i	a0, a1, TASK_ACTIVE_MM	# unlikely case mm == 0
	j	8b

2:	/* If there was a problem, handle fault in C */

	rsr	a3, depc	# still holds a2
	s32i	a3, a2, PT_AREG2
	mov	a1, a2

	rsr	a2, ps
	bbsi.l	a2, PS_UM_BIT, 1f
	j	_kernel_exception
1:	j	_user_exception

ENDPROC(fast_store_prohibited)
1866
1867 #endif
1868
1869
1870
1871
1872
1873
1874
1875 .literal_position
1876
/* void system_call (struct pt_regs* regs, int exccause)
 *
 * a2: pt_regs, syscall number taken from regs->areg[2]; result is
 * stored back into regs->areg[2].  Handles syscall tracing
 * (do_syscall_trace_enter/leave) when any _TIF_WORK_MASK flag is set.
 */
ENTRY(system_call)

	abi_entry(4)

	/* regs->syscall = regs->areg[2] */

	l32i	a7, a2, PT_AREG2
	s32i	a7, a2, PT_SYSCALL

	GET_THREAD_INFO(a4, a1)
	l32i	a3, a4, TI_FLAGS
	movi	a4, _TIF_WORK_MASK
	and	a3, a3, a4
	beqz	a3, 1f

	mov	a6, a2
	call4	do_syscall_trace_enter
	l32i	a7, a2, PT_SYSCALL

1:
	s32i	a7, a1, 4	# stash (possibly traced) syscall nr on stack

	/* syscall = sys_call_table[syscall_nr] */

	movi	a4, sys_call_table
	movi	a5, __NR_syscalls
	movi	a6, -ENOSYS
	bgeu	a7, a5, 1f

	addx4	a4, a7, a4
	l32i	a4, a4, 0
	movi	a5, sys_ni_syscall;
	beq	a4, a5, 1f

	/* Load args: arg0 - arg5 are passed via regs */

	l32i	a6, a2, PT_AREG6
	l32i	a7, a2, PT_AREG3
	l32i	a8, a2, PT_AREG4
	l32i	a9, a2, PT_AREG5
	l32i	a10, a2, PT_AREG8
	l32i	a11, a2, PT_AREG9

	/* Pass one additional argument to the syscall: pt_regs (on stack) */
	s32i	a2, a1, 0

	callx4	a4

1:	/* regs->areg[2] = return_value */

	s32i	a6, a2, PT_AREG2
	bnez	a3, 1f		# trace flags set? do the leave hook
	abi_ret(4)

1:
	l32i	a4, a1, 4
	l32i	a3, a2, PT_SYSCALL
	s32i	a4, a2, PT_SYSCALL	# report the stashed nr to the tracer
	mov	a6, a2
	call4	do_syscall_trace_leave
	s32i	a3, a2, PT_SYSCALL	# restore the original nr
	abi_ret(4)

ENDPROC(system_call)
1942
1943
1944
1945
1946
1947
1948
1949
/*
 * spill_registers_kernel
 *
 * Spill live register windows to the kernel stack by walking down a
 * chain of _entry frames (forcing window overflows) and returning.
 * The mov aN, aN at the tail keeps the assembler's window bookkeeping
 * consistent for the chosen XCHAL_NUM_AREGS remainder.
 */
	.macro	spill_registers_kernel

#if XCHAL_NUM_AREGS > 16
	call12	1f
	_j	2f
	retw
	.align	4
1:
	_entry	a1, 48
	addi	a12, a0, 3
#if XCHAL_NUM_AREGS > 32
	.rept	(XCHAL_NUM_AREGS - 32) / 12
	_entry	a1, 48
	mov	a12, a0
	.endr
#endif
	_entry	a1, 16
#if XCHAL_NUM_AREGS % 12 == 0
	mov	a8, a8
#elif XCHAL_NUM_AREGS % 12 == 4
	mov	a12, a12
#elif XCHAL_NUM_AREGS % 12 == 8
	mov	a4, a4
#endif
	retw
2:
#else
	mov	a12, a12
#endif
	.endm
1980
1981
1982
1983
1984
1985
1986
1987
/*
 * Task switch.
 *
 * struct task* _switch_to (struct task* prev, struct task* next)
 *         a2                              a2                 a3
 *
 * Saves prev's return address, stack pointer, coprocessor enable and
 * user TIE state; spills all register windows; points EXC_TABLE_KSTK
 * at next's kernel exception stack; then restores next's state.
 */
ENTRY(_switch_to)

	abi_entry(XTENSA_SPILL_STACK_RESERVE)

	mov	a11, a3			# and 'next' (a3)

	l32i	a4, a2, TASK_THREAD_INFO
	l32i	a5, a3, TASK_THREAD_INFO

	save_xtregs_user a4 a6 a8 a9 a12 a13 THREAD_XTREGS_USER

#if THREAD_RA > 1020 || THREAD_SP > 1020
	addi	a10, a2, TASK_THREAD
	s32i	a0, a10, THREAD_RA - TASK_THREAD	# save return address
	s32i	a1, a10, THREAD_SP - TASK_THREAD	# save stack pointer
#else
	s32i	a0, a2, THREAD_RA	# save return address
	s32i	a1, a2, THREAD_SP	# save stack pointer
#endif

#if defined(CONFIG_STACKPROTECTOR) && !defined(CONFIG_SMP)
	movi	a6, __stack_chk_guard
	l32i	a8, a3, TASK_STACK_CANARY
	s32i	a8, a6, 0
#endif

	/* Disable ints while we manipulate the stack pointer. */

	irq_save a14, a3
	rsync

	/* Switch CPENABLE */

#if (XTENSA_HAVE_COPROCESSORS || XTENSA_HAVE_IO_PORTS)
	l32i	a3, a5, THREAD_CPENABLE
	xsr	a3, cpenable
	s32i	a3, a4, THREAD_CPENABLE
#endif

	/* Flush register file. */

	spill_registers_kernel

	/* Set kernel stack (and leave critical section)
	 * Note: It's save to set it here. The stack will not be overwritten
	 *       because the kernel stack will only be used again after
	 *       we return from kernel space. */

	rsr	a3, excsave1		# exc_table
	addi	a7, a5, PT_REGS_OFFSET
	s32i	a7, a3, EXC_TABLE_KSTK

	/* restore context of the task 'next' */

	l32i	a0, a11, THREAD_RA	# restore return address
	l32i	a1, a11, THREAD_SP	# restore stack pointer

	load_xtregs_user a5 a6 a8 a9 a12 a13 THREAD_XTREGS_USER

	wsr	a14, ps
	rsync

	abi_ret(XTENSA_SPILL_STACK_RESERVE)

ENDPROC(_switch_to)
2054
/* First scheduled entry of a newly forked user task: finish the
 * scheduler bookkeeping, report syscall exit to the tracer, and
 * return to user space through the common exception exit. */
ENTRY(ret_from_fork)

	/* void schedule_tail (struct task_struct *prev)
	 * Note: prev is still in a6 (return value from fake call4 frame) */

	call4	schedule_tail

	mov	a6, a1
	call4	do_syscall_trace_leave

	j	common_exception_return

ENDPROC(ret_from_fork)
2068
2069
2070
2071
2072
2073
/*
 * Kernel thread creation helper.
 * On entry, set up by copy_thread: a2 = thread_fn, a3 = thread_fn arg.
 */
ENTRY(ret_from_kernel_thread)

	call4	schedule_tail
	mov	a6, a3			# pass thread_fn arg
	callx4	a2			# call thread_fn
	j	common_exception_return

ENDPROC(ret_from_kernel_thread)