1
2
3
4
5
6
7
8
9 #include <linux/linkage.h>
10 #include <linux/init.h>
11
12 #include <asm/assembler.h>
13 #include <asm/ptrace.h>
14 #include <generated/asm-offsets.h>
15 #include <asm/memory.h>
16 #include <asm/thread_info.h>
17 #include <asm/hwdef-copro.h>
18 #include <asm/pgtable-hwdef.h>
19
/*
 * The boot page table maps 4 MiB sections, so the kernel's physical
 * load address must sit on a 4 MiB boundary.
 */
#if (PHYS_OFFSET & 0x003fffff)
#error "PHYS_OFFSET must be at an even 4MiB boundary!"
#endif

/* Start of the kernel image, as a virtual and a physical address. */
#define KERNEL_RAM_VADDR (PAGE_OFFSET + KERNEL_IMAGE_START)
#define KERNEL_RAM_PADDR (PHYS_OFFSET + KERNEL_IMAGE_START)

/*
 * The initial page directory lives in the 4 KiB (0x1000 bytes)
 * immediately below the kernel image; stext zeroes and fills it.
 */
#define KERNEL_PGD_PADDR (KERNEL_RAM_PADDR - 0x1000)
#define KERNEL_PGD_VADDR (KERNEL_RAM_VADDR - 0x1000)

#define KERNEL_START KERNEL_RAM_VADDR
#define KERNEL_END _end

/*
 * The kernel image must start 0x8000 bytes into a 64 KiB-aligned
 * region (room below it for the page directory, among other things).
 */
#if (KERNEL_RAM_VADDR & 0xffff) != 0x8000
#error KERNEL_RAM_VADDR must start at 0xXXXX8000
#endif

/* Export the virtual address of the initial page directory. */
.globl swapper_pg_dir
.equ swapper_pg_dir, KERNEL_RAM_VADDR - 0x1000
46
47
48
49
50
51
52
53
54
55
56
/*
 * Kernel entry point, entered with the MMU off at a physical address.
 *
 *  1. Force privileged mode with interrupts disabled.
 *  2. Verify the processor ID; bail to __error_p on mismatch.
 *  3. Build a minimal section-mapped page table: an identity mapping of
 *     the currently-executing section, the kernel image at its virtual
 *     address, and one section at PAGE_OFFSET.
 *  4. Invalidate caches and TLBs, then enable the MMU via
 *     __turn_mmu_on, continuing at the address stored in __switch_data.
 */
__HEAD
ENTRY(stext)
	@ set asr
	mov	r0, #PRIV_MODE			@ ensure priv mode
	or	r0, #PSR_R_BIT | PSR_I_BIT	@ disable irqs
	mov.a	asr, r0

	@ process identify
	movc	r0, p0.c0, #0		@ cpuid
	movl	r1, 0xff00ffff		@ mask
	movl	r2, 0x4d000863		@ value
	and	r0, r1, r0		@ r0 = cpuid & mask
	cxor.a	r0, r2
	bne	__error_p		@ invalid processor id
					@ NOTE(review): __error_p prints r9 as
					@ the id, but this path leaves it in r0
					@ -- confirm r9 is set up elsewhere

	@ Zero the whole 4 KiB page directory, 4 entries per iteration.
	movl	r0, #KERNEL_PGD_PADDR	@ page table address
	mov	r1, #0
	add	r2, r0, #0x1000		@ r2 = one past end of table
101:	stw.w	r1, [r0]+, #4
	stw.w	r1, [r0]+, #4
	stw.w	r1, [r0]+, #4
	stw.w	r1, [r0]+, #4
	cxor.a	r0, r2
	bne	101b

	@ Section descriptor template in r7: present, cacheable, RWX.
	movl	r4, #KERNEL_PGD_PADDR	@ page table address
	mov	r7, #PMD_TYPE_SECT | PMD_PRESENT	@ page size: section
	or	r7, r7, #PMD_SECT_CACHEABLE		@ cacheable
	or	r7, r7, #PMD_SECT_READ | PMD_SECT_WRITE | PMD_SECT_EXEC

	@ Identity-map the 4 MiB section we are executing from, so the
	@ instruction stream stays fetchable when the MMU turns on.
	mov	r6, pc
	mov	r6, r6 >> #22		@ start of kernel section
	or	r1, r7, r6 << #22	@ flags + kernel base
	stw	r1, [r4+], r6 << #2	@ identity mapping

	@ Map the kernel image at its virtual address: one section entry
	@ per 4 MiB from KERNEL_START through KERNEL_END - 1, advancing
	@ the physical address by one section per entry.
	add	r0, r4, #(KERNEL_START & 0xff000000) >> 20
	stw.w	r1, [r0+], #(KERNEL_START & 0x00c00000) >> 20
	movl	r6, #(KERNEL_END - 1)
	add	r0, r0, #4
	add	r6, r4, r6 >> #20	@ r6 = table slot for last section
102:	csub.a	r0, r6
	add	r1, r1, #1 << 22	@ next physical 4 MiB section
	bua	103f
	stw.w	r1, [r0]+, #4
	b	102b
103:

	@ Map one section of physical RAM at PAGE_OFFSET.
	add	r0, r4, #PAGE_OFFSET >> 20
	or	r6, r7, #(PHYS_OFFSET & 0xffc00000)
	stw	r6, [r0]

	ldw	r15, __switch_data	@ address to jump to after
					@ the MMU is on (__turn_mmu_on
					@ does "mov pc, r15")

	@ Invalidate caches and TLBs before enabling the MMU; nop8
	@ padding follows each coprocessor write.
	mov	r0, #0
	movc	p0.c5, r0, #28		@ cache invalidate all
	nop8
	movc	p0.c6, r0, #6		@ TLB invalidate all
	nop8

	@ Compose the control-register value in r0, clearing bits per the
	@ cache/alignment configuration.
	movl	r0, #0x201f		@ control register setting
					@ NOTE(review): magic value -- bit
					@ meanings not visible here; confirm
					@ against the CPU control-register spec
#ifndef CONFIG_ALIGNMENT_TRAP
	andn	r0, r0, #CR_A		@ no alignment fault checking
#endif
#ifdef CONFIG_CPU_DCACHE_DISABLE
	andn	r0, r0, #CR_D		@ disable D-cache
#endif
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	andn	r0, r0, #CR_B		@ write-through instead of write-back
#endif
#ifdef CONFIG_CPU_ICACHE_DISABLE
	andn	r0, r0, #CR_I		@ disable I-cache
#endif

	movc	p0.c2, r4, #0		@ set pgd
	b	__turn_mmu_on
ENDPROC(stext)
162
163
164
165
166
167
168
169
/*
 * Enable the MMU.  On entry:
 *   r0  = control-register value composed in stext
 *   r15 = virtual address to continue at (loaded from __switch_data)
 *
 * Aligned to 32 bytes -- presumably so the whole sequence sits in one
 * cache line while the address map changes underneath it (confirm
 * against the CPU manual).
 */
.align 5
__turn_mmu_on:
	mov	r0, r0			@ no-op spacer before the write
	movc	p0.c1, r0, #0		@ write control reg
	nop				@ fetch inst by phys addr
	mov	pc, r15			@ jump to the virtual-address world
	nop8				@ fetch inst by phys addr
ENDPROC(__turn_mmu_on)
178
179
180
181
182
183
184
185
186
187
188
189
190
.ltorg		@ flush literal pool for the code above

.align 2
.type __switch_data, %object
/*
 * Parameter table for the MMU switch.  stext loads the first word
 * ("ldw r15, __switch_data") as the post-MMU jump target; the rest is
 * read by __mmap_switched into the registers noted below.
 */
__switch_data:
	.long	__mmap_switched		@ jump target after MMU enable
	.long	__bss_start		@ r6
	.long	_end			@ r7
	.long	cr_alignment		@ r8: where control regs are saved
	.long	init_thread_union + THREAD_START_SP	@ sp
201
202
203
204
205
206
207
/*
 * Second-stage setup, running with the MMU on at the kernel's virtual
 * address: clear BSS, install the init task's stack, record the control
 * register values at cr_alignment, then call start_kernel.
 *
 * Assumes r0 still holds the control-register value composed in stext
 * (preserved across __turn_mmu_on) -- TODO confirm.
 */
__mmap_switched:
	adr	r3, __switch_data + 4	@ skip the jump-target word

	ldm.w	(r6, r7, r8), [r3]+	@ r6 = __bss_start, r7 = _end,
					@ r8 = &cr_alignment
	ldw	sp, [r3]		@ sp = init_thread_union + THREAD_START_SP

	mov	fp, #0			@ Clear BSS (and zero fp)
203:	csub.a	r6, r7
	bea	204f			@ done when r6 reaches _end
	stw.w	fp, [r6]+,#4
	b	203b
204:
	andn	r1, r0, #CR_A		@ Clear 'A' bit
	stm	(r0, r1), [r8]+		@ Save control register values
	b	start_kernel
ENDPROC(__mmap_switched)
224
225
226
227
228
229
230
231
232
233
/*
 * Fatal: unrecognized processor.  With CONFIG_DEBUG_LL, print the id
 * via the low-level debug console and spin forever; without it, there
 * is no body at all and execution falls through past ENDPROC.
 *
 * NOTE(review): the value printed is taken from r9, but the caller in
 * stext leaves the masked cpuid in r0 -- confirm r9 actually holds the
 * id here, otherwise the printed value is garbage.
 */
__error_p:
#ifdef CONFIG_DEBUG_LL
	adr	r0, str_p1
	b.l	printascii		@ print error-message prefix
	mov	r0, r9			@ processor id to print
	b.l	printhex8
	adr	r0, str_p2
	b.l	printascii		@ print closing "
901:	nop8				@ hang here forever
	b	901b
str_p1:	.asciz	"\nError: unrecognized processor variant (0x"
str_p2:	.asciz	").\n"
	.align
#endif
ENDPROC(__error_p)
249