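| entry.S -- m68k low-level entry code: system call, exception and
| interrupt entry points, signal-return stubs and the context-switch
| helper (resume).
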
#include <linux/linkage.h>
#include <asm/errno.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/traps.h>
#include <asm/unistd.h>
#include <asm/asm-offsets.h>
#include <asm/entry.h>

.globl system_call, buserr, trap, resume
.globl sys_call_table
.globl __sys_fork, __sys_clone, __sys_vfork
.globl bad_interrupt
.globl auto_irqhandler_fixup
.globl user_irqvec_fixup

.text
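
| The fork/clone/vfork entry points build the extra switch-stack frame
| that the C implementations expect on top of pt_regs and discard it
| (24 bytes) again before returning.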
ENTRY(__sys_fork)
	SAVE_SWITCH_STACK
	jbsr	sys_fork
	lea	%sp@(24),%sp
	rts

ENTRY(__sys_clone)
	SAVE_SWITCH_STACK
	pea	%sp@(SWITCH_STACK_SIZE)
	jbsr	m68k_clone
	lea	%sp@(28),%sp
	rts

ENTRY(__sys_vfork)
	SAVE_SWITCH_STACK
	jbsr	sys_vfork
	lea	%sp@(24),%sp
	rts

ENTRY(sys_sigreturn)
	SAVE_SWITCH_STACK
	movel	%sp,%sp@-			| switch_stack pointer
	pea	%sp@(SWITCH_STACK_SIZE+4)	| pt_regs pointer
	jbsr	do_sigreturn
	addql	#8,%sp
	RESTORE_SWITCH_STACK
	rts

ENTRY(sys_rt_sigreturn)
	SAVE_SWITCH_STACK
	movel	%sp,%sp@-			| switch_stack pointer
	pea	%sp@(SWITCH_STACK_SIZE+4)	| pt_regs pointer
	jbsr	do_rt_sigreturn
	addql	#8,%sp
	RESTORE_SWITCH_STACK
	rts

ENTRY(buserr)
	SAVE_ALL_INT
	GET_CURRENT(%d0)
	movel	%sp,%sp@-		| stack frame pointer argument
	jbsr	buserr_c
	addql	#4,%sp
	jra	ret_from_exception

ENTRY(trap)
	SAVE_ALL_INT
	GET_CURRENT(%d0)
	movel	%sp,%sp@-		| stack frame pointer argument
	jbsr	trap_c
	addql	#4,%sp
	jra	ret_from_exception

| After a fork we jump here directly from resume,
| so that %d1 contains the previous task
| schedule_tail now used regardless of CONFIG_SMP
ENTRY(ret_from_fork)
	movel	%d1,%sp@-
	jsr	schedule_tail
	addql	#4,%sp
	jra	ret_from_exception

ENTRY(ret_from_kernel_thread)
	| a3 contains the kernel thread payload, d7 - its argument
	movel	%d1,%sp@-
	jsr	schedule_tail
	movel	%d7,(%sp)
	jsr	%a3@
	addql	#4,%sp
	jra	ret_from_exception

#if defined(CONFIG_COLDFIRE) || !defined(CONFIG_MMU)
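| Reschedule and signal-return stubs (and the optional debug-interrupt
| handler) used on ColdFire and no-MMU configurations.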

#ifdef TRAP_DBG_INTERRUPT

.globl dbginterrupt
ENTRY(dbginterrupt)
	SAVE_ALL_INT
	GET_CURRENT(%d0)
	movel	%sp,%sp@-		| stack frame pointer argument
	jsr	dbginterrupt_c
	addql	#4,%sp
	jra	ret_from_exception
#endif

ENTRY(reschedule)
	| save top of frame
	pea	%sp@
	jbsr	set_esp0
	addql	#4,%sp
	pea	ret_from_exception
	jmp	schedule

ENTRY(ret_from_user_signal)
	moveq	#__NR_sigreturn,%d0
	trap	#0

ENTRY(ret_from_user_rt_signal)
	movel	#__NR_rt_sigreturn,%d0
	trap	#0

#else
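
| Full system-call, exception-return and signal paths used on
| classic MMU m68k kernels.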

do_trace_entry:
	movel	#-ENOSYS,%sp@(PT_OFF_D0)| needed for strace
	subql	#4,%sp
	SAVE_SWITCH_STACK
	jbsr	syscall_trace
	RESTORE_SWITCH_STACK
	addql	#4,%sp
	movel	%sp@(PT_OFF_ORIG_D0),%d0
	cmpl	#NR_syscalls,%d0
	jcs	syscall
badsys:
	movel	#-ENOSYS,%sp@(PT_OFF_D0)
	jra	ret_from_syscall

do_trace_exit:
	subql	#4,%sp
	SAVE_SWITCH_STACK
	jbsr	syscall_trace
	RESTORE_SWITCH_STACK
	addql	#4,%sp
	jra	.Lret_from_exception

ENTRY(ret_from_signal)
	movel	%curptr@(TASK_STACK),%a1
	tstb	%a1@(TINFO_FLAGS+2)
	jge	1f
	jbsr	syscall_trace
1:	RESTORE_SWITCH_STACK
	addql	#4,%sp

#ifdef CONFIG_M68040
	bfextu	%sp@(PT_OFF_FORMATVEC){#0,#4},%d0
	subql	#7,%d0				| bus error frame ?
	jbne	1f
	movel	%sp,%sp@-
	jbsr	berr_040cleanup
	addql	#4,%sp
1:
#endif
	jra	.Lret_from_exception
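
| System call entry: the syscall number arrives in %d0, is bounds
| checked against NR_syscalls and used to index sys_call_table; the
| return value is stored back into the saved %d0 in pt_regs.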
ENTRY(system_call)
	SAVE_ALL_SYS

	GET_CURRENT(%d1)
	movel	%d1,%a1

	| save top of frame
	movel	%sp,%curptr@(TASK_THREAD+THREAD_ESP0)

	| syscall trace?
	tstb	%a1@(TINFO_FLAGS+2)
	jmi	do_trace_entry
	cmpl	#NR_syscalls,%d0
	jcc	badsys
syscall:
	jbsr	@(sys_call_table,%d0:l:4)@(0)
	movel	%d0,%sp@(PT_OFF_D0)	| save the return value
ret_from_syscall:
	|oriw	#0x0700,%sr
	movel	%curptr@(TASK_STACK),%a1
	movew	%a1@(TINFO_FLAGS+2),%d0
	jne	syscall_exit_work
1:	RESTORE_ALL
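
| Slow syscall exit: the thread has work flags set, so dispatch to
| syscall trace exit, delayed trace, signal delivery or reschedule.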
syscall_exit_work:
	btst	#5,%sp@(PT_OFF_SR)	| check if returning to kernel
	bnes	1b			| if so, skip resched, signals
	lslw	#1,%d0
	jcs	do_trace_exit
	jmi	do_delayed_trace
	lslw	#8,%d0
	jne	do_signal_return
	pea	resume_userspace
	jra	schedule
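
| Common return path for exceptions and interrupts: when going back to
| user space, pending work (signal delivery, reschedule) is handled
| first.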
ENTRY(ret_from_exception)
.Lret_from_exception:
	btst	#5,%sp@(PT_OFF_SR)	| check if returning to kernel
	bnes	1f			| if so, skip resched, signals
	| only allow interrupts when we are really the last one on the
	| kernel stack, otherwise stack overflow can occur during
	| heavy interrupt load
	andw	#ALLOWINT,%sr

resume_userspace:
	movel	%curptr@(TASK_STACK),%a1
	moveb	%a1@(TINFO_FLAGS+3),%d0
	jne	exit_work
1:	RESTORE_ALL

exit_work:
	| save top of frame
	movel	%sp,%curptr@(TASK_THREAD+THREAD_ESP0)
	lslb	#1,%d0
	jne	do_signal_return
	pea	resume_userspace
	jra	schedule
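
| Deliver pending signals and other notify-resume work, then retry the
| return to user space.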
do_signal_return:
	|andw	#ALLOWINT,%sr
	subql	#4,%sp			| dummy return address
	SAVE_SWITCH_STACK
	pea	%sp@(SWITCH_STACK_SIZE)
	bsrl	do_notify_resume
	addql	#4,%sp
	RESTORE_SWITCH_STACK
	addql	#4,%sp
	jbra	resume_userspace

do_delayed_trace:
	bclr	#7,%sp@(PT_OFF_SR)	| clear trace bit in SR
	pea	1			| send SIGTRAP
	movel	%curptr,%sp@-
	pea	LSIGTRAP
	jbsr	send_sig
	addql	#8,%sp
	addql	#4,%sp
	jbra	resume_userspace
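
| Main handler for auto-vectored interrupts: extract the vector number
| from the exception frame and hand it to do_IRQ().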
ENTRY(auto_inthandler)
	SAVE_ALL_INT
	GET_CURRENT(%d0)
	| put exception # in d0
	bfextu	%sp@(PT_OFF_FORMATVEC){#4,#10},%d0
	subw	#VEC_SPUR,%d0

	movel	%sp,%sp@-
	movel	%d0,%sp@-		| put vector # on stack
auto_irqhandler_fixup = . + 2
	jsr	do_IRQ			| process the IRQ
	addql	#8,%sp			| pop parameters off stack
	jra	ret_from_exception
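
| Handler for user-vectored interrupts: like auto_inthandler, but the
| vector number is rebased with VEC_USER.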
ENTRY(user_inthandler)
	SAVE_ALL_INT
	GET_CURRENT(%d0)
	| put exception # in d0
	bfextu	%sp@(PT_OFF_FORMATVEC){#4,#10},%d0
user_irqvec_fixup = . + 2
	subw	#VEC_USER,%d0

	movel	%sp,%sp@-
	movel	%d0,%sp@-		| put vector # on stack
	jsr	do_IRQ			| process the IRQ
	addql	#8,%sp			| pop parameters off stack
	jra	ret_from_exception
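
| Handler for unexpected interrupts: pass the saved frame to
| handle_badint().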
ENTRY(bad_inthandler)
	SAVE_ALL_INT
	GET_CURRENT(%d0)

	movel	%sp,%sp@-
	jsr	handle_badint
	addql	#4,%sp
	jra	ret_from_exception
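
| resume() performs the actual task switch: %a0 holds the previous
| task, %a1 the next task, and the previous task is returned in %d1.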
resume:
	| Beware - when entering resume, prev (the current task) is
	| in a0, next (the new task) is in a1, so don't change these
	| registers until their contents are no longer needed.

	| save sr
	movew	%sr,%a0@(TASK_THREAD+THREAD_SR)

	| save fs (%sfc/%dfc) - may be pointing to kernel memory
	movec	%sfc,%d0
	movew	%d0,%a0@(TASK_THREAD+THREAD_FS)

	| save usp
	movec	%usp,%d0
	movel	%d0,%a0@(TASK_THREAD+THREAD_USP)

	| save non-scratch registers on stack
	SAVE_SWITCH_STACK

	| save current kernel stack pointer
	movel	%sp,%a0@(TASK_THREAD+THREAD_KSP)

	| save floating point context
#ifndef CONFIG_M68KFPU_EMU_ONLY
#ifdef CONFIG_M68KFPU_EMU
	tstl	m68k_fputype
	jeq	3f
#endif
	fsave	%a0@(TASK_THREAD+THREAD_FPSTATE)

#if defined(CONFIG_M68060)
#if !defined(CPU_M68060_ONLY)
	btst	#3,m68k_cputype+3	| 68060 ?
	beqs	1f
#endif
	| skip the FP register save if the fsave frame is null
	| (the 060 flags this at offset 2)
	tstb	%a0@(TASK_THREAD+THREAD_FPSTATE+2)
	jeq	3f
#if !defined(CPU_M68060_ONLY)
	jra	2f
#endif
#endif /* CONFIG_M68060 */
#if !defined(CPU_M68060_ONLY)
1:	tstb	%a0@(TASK_THREAD+THREAD_FPSTATE)
	jeq	3f
#endif
2:	fmovemx	%fp0-%fp7,%a0@(TASK_THREAD+THREAD_FPREG)
	fmoveml	%fpcr/%fpsr/%fpiar,%a0@(TASK_THREAD+THREAD_FPCNTL)
3:
#endif /* CONFIG_M68KFPU_EMU_ONLY */

	| return previous task in %d1
	movel	%curptr,%d1

	| switch to new task (a1 contains new task)
	movel	%a1,%curptr

	| restore floating point context
#ifndef CONFIG_M68KFPU_EMU_ONLY
#ifdef CONFIG_M68KFPU_EMU
	tstl	m68k_fputype
	jeq	4f
#endif
#if defined(CONFIG_M68060)
#if !defined(CPU_M68060_ONLY)
	btst	#3,m68k_cputype+3	| 68060 ?
	beqs	1f
#endif
	| skip the FP register restore if the saved frame is null
	| (offset 2 on the 060)
	tstb	%a1@(TASK_THREAD+THREAD_FPSTATE+2)
	jeq	3f
#if !defined(CPU_M68060_ONLY)
	jra	2f
#endif
#endif /* CONFIG_M68060 */
#if !defined(CPU_M68060_ONLY)
1:	tstb	%a1@(TASK_THREAD+THREAD_FPSTATE)
	jeq	3f
#endif
2:	fmovemx	%a1@(TASK_THREAD+THREAD_FPREG),%fp0-%fp7
	fmoveml	%a1@(TASK_THREAD+THREAD_FPCNTL),%fpcr/%fpsr/%fpiar
3:	frestore %a1@(TASK_THREAD+THREAD_FPSTATE)
4:
#endif /* CONFIG_M68KFPU_EMU_ONLY */

	| restore the kernel stack pointer
	movel	%a1@(TASK_THREAD+THREAD_KSP),%sp

	| restore non-scratch registers
	RESTORE_SWITCH_STACK

	| restore user stack pointer
	movel	%a1@(TASK_THREAD+THREAD_USP),%a0
	movel	%a0,%usp

	| restore fs (%sfc/%dfc)
	movew	%a1@(TASK_THREAD+THREAD_FS),%a0
	movec	%a0,%sfc
	movec	%a0,%dfc

	| restore status register
	movew	%a1@(TASK_THREAD+THREAD_SR),%sr

	rts

#endif /* CONFIG_COLDFIRE || !CONFIG_MMU */