/*
* Broadcom bcm2835 SoC, as used in Raspberry Pi
* arm1176jzf-s processor (armv6)
*/
#include "arm.s"
TEXT _start(SB), 1, $-4
/*
* load physical base for SB addressing while mmu is off
* keep a handy zero in R0 until first function call
*/
MOVW $setR12(SB), R12
SUB $KZERO, R12
ADD $PHYSDRAM, R12
MOVW $0, R0
/*
* SVC mode, interrupts disabled
*/
MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R1
MOVW R1, CPSR
/*
* disable the mmu and L1 caches
* invalidate caches and tlb
*/
MRC CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
BIC $(CpCdcache|CpCicache|CpCpredict|CpCmmu), R1
MCR CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvu), CpCACHEall
MCR CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
ISB
/*
* clear mach and page tables
*/
MOVW $PADDR(MACHADDR), R1
MOVW $PADDR(KTZERO), R2
_ramZ:
MOVW R0, (R1)
ADD $4, R1
CMP R1, R2
BNE _ramZ
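/*
 * the loop above stores the zero kept in R0 a word at a time,
 * clearing physical memory from PADDR(MACHADDR) up to (but not
 * including) PADDR(KTZERO), i.e. the mach and page table area
 */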
/*
* start stack at top of mach (physical addr)
* set up page tables for kernel
*/
MOVW $PADDR(MACHADDR+MACHSIZE-4), R13
BL ,mmuinit(SB)
/*
* set up domain access control and page table base
*/
MOVW $Client, R1
MCR CpSC, 0, R1, C(CpDAC), C(0)
MOVW $PADDR(L1), R1
MCR CpSC, 0, R1, C(CpTTB), C(0)
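/*
 * Client domain access defers permission checks to the page-table
 * access bits; the translation table base gets the physical address
 * of the L1 table because the mmu is still off at this point
 */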
/*
* enable caches, mmu, and high vectors
*/
MRC CpSC, 0, R0, C(CpCONTROL), C(0), CpMainctl
ORR $(CpChv|CpCdcache|CpCicache|CpCmmu), R0
MCR CpSC, 0, R0, C(CpCONTROL), C(0), CpMainctl
ISB
/*
* switch SB, SP, and PC into KZERO space
*/
MOVW $setR12(SB), R12
MOVW $(MACHADDR+MACHSIZE-4), R13
MOVW $_startpg(SB), R15
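/*
 * $_startpg(SB) is a KZERO virtual address, so loading it into R15
 * completes the move from physical to virtual addressing now that
 * the mmu is on; execution continues at _startpg below
 */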
TEXT _startpg(SB), 1, $-4
/*
* enable cycle counter
*/
MOVW $1, R1
MCR CpSC, 0, R1, C(CpSPM), C(CpSPMperf), CpSPMctl
/*
* call main and loop forever if it returns
*/
BL ,main(SB)
B ,0(PC)
BL _div(SB) /* hack to load _div, etc. */
TEXT fsrget(SB), 1, $-4 /* data fault status */
MRC CpSC, 0, R0, C(CpFSR), C(0), CpFSRdata
RET
TEXT ifsrget(SB), 1, $-4 /* instruction fault status */
MRC CpSC, 0, R0, C(CpFSR), C(0), CpFSRinst
RET
TEXT farget(SB), 1, $-4 /* fault address */
MRC CpSC, 0, R0, C(CpFAR), C(0x0)
RET
TEXT lcycles(SB), 1, $-4
MRC CpSC, 0, R0, C(CpSPM), C(CpSPMperf), CpSPMcyc
RET
TEXT splhi(SB), 1, $-4
MOVW $(MACHADDR+4), R2 /* save caller pc in Mach */
MOVW R14, 0(R2)
MOVW CPSR, R0 /* turn off irqs (but not fiqs) */
ORR $(PsrDirq), R0, R1
MOVW R1, CPSR
RET
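/*
 * splhi returns the previous CPSR in R0 so the caller can restore it
 * later; the usual pattern in the C code is s = splhi(); ...; splx(s);
 */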
TEXT splfhi(SB), 1, $-4
MOVW $(MACHADDR+4), R2 /* save caller pc in Mach */
MOVW R14, 0(R2)
MOVW CPSR, R0 /* turn off irqs and fiqs */
ORR $(PsrDirq|PsrDfiq), R0, R1
MOVW R1, CPSR
RET
TEXT splflo(SB), 1, $-4
MOVW CPSR, R0 /* turn on fiqs */
BIC $(PsrDfiq), R0, R1
MOVW R1, CPSR
RET
TEXT spllo(SB), 1, $-4
MOVW CPSR, R0 /* turn on irqs and fiqs */
BIC $(PsrDirq|PsrDfiq), R0, R1
MOVW R1, CPSR
RET
TEXT splx(SB), 1, $-4
MOVW $(MACHADDR+0x04), R2 /* save caller pc in Mach */
MOVW R14, 0(R2)
MOVW R0, R1 /* reset interrupt level */
MOVW CPSR, R0
MOVW R1, CPSR
RET
TEXT spldone(SB), 1, $0 /* end marker for devkprof.c */
RET
TEXT islo(SB), 1, $-4
MOVW CPSR, R0
AND $(PsrDirq), R0
EOR $(PsrDirq), R0
RET
TEXT tas(SB), $-4
TEXT _tas(SB), $-4
MOVW R0,R1
MOVW $1,R0
SWPW R0,(R1) /* fix: deprecated in armv6 */
RET
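/*
 * tas(&lock) swaps 1 into the word and returns the old value in R0,
 * so callers spin until they read back 0; SWP is deprecated on armv6
 * (see the comment above) and the recommended replacement on newer
 * cores is an LDREX/STREX loop
 */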
TEXT setlabel(SB), 1, $-4
MOVW R13, 0(R0) /* sp */
MOVW R14, 4(R0) /* pc */
MOVW $0, R0
RET
TEXT gotolabel(SB), 1, $-4
MOVW 0(R0), R13 /* sp */
MOVW 4(R0), R14 /* pc */
MOVW $1, R0
RET
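/*
 * setlabel/gotolabel behave like setjmp/longjmp for the kernel:
 * setlabel saves sp and the return pc into the Label at R0 and
 * returns 0; gotolabel reloads them so the saved call site appears
 * to return again, this time with 1
 */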
TEXT getcallerpc(SB), 1, $-4
MOVW 0(R13), R0
RET
TEXT idlehands(SB), $-4
BARRIERS
MOVW CPSR, R3
BIC $(PsrDirq|PsrDfiq), R3, R1 /* spllo */
MOVW R1, CPSR
MOVW $0, R0 /* wait for interrupt */
MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEintr), CpCACHEwait
ISB
MOVW R3, CPSR /* splx */
RET
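/*
 * idlehands drops to spllo, then the MCR above issues the arm1176
 * cp15 "wait for interrupt" operation (the armv6 equivalent of WFI);
 * the caller's CPSR, saved in R3, is restored once an interrupt wakes us
 */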
TEXT coherence(SB), $-4
BARRIERS
RET
/*
* invalidate tlb
*/
TEXT mmuinvalidate(SB), 1, $-4
MOVW $0, R0
MCR CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
BARRIERS
RET
/*
* mmuinvalidateaddr(va)
* invalidate tlb entry for virtual page address va, ASID 0
*/
TEXT mmuinvalidateaddr(SB), 1, $-4
MCR CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinvse
BARRIERS
RET
/*
* drain write buffer
* writeback and invalidate data cache
*/
TEXT cachedwbinv(SB), 1, $-4
DSB
MOVW $0, R0
MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEwbi), CpCACHEall
RET
/*
* cachedwbinvse(va, n)
* drain write buffer
* writeback and invalidate data cache range [va, va+n)
*/
TEXT cachedwbinvse(SB), 1, $-4
MOVW R0, R1 /* DSB clears R0 */
DSB
MOVW n+4(FP), R2
ADD R1, R2
SUB $1, R2
BIC $(CACHELINESZ-1), R1
BIC $(CACHELINESZ-1), R2
MCRR(CpSC, 0, 2, 1, CpCACHERANGEdwbi)
RET
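/*
 * the MCRR macro (presumably from the included arm.s) issues the
 * arm1176 block cache maintenance operation on the range: R1 holds
 * the cache-line-aligned start and R2 the aligned address of the
 * last byte, computed above as va+n-1
 */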
/*
* cachedwbse(va, n)
* drain write buffer
* writeback data cache range [va, va+n)
*/
TEXT cachedwbse(SB), 1, $-4
MOVW R0, R1 /* DSB clears R0 */
DSB
MOVW n+4(FP), R2
ADD R1, R2
BIC $(CACHELINESZ-1), R1
BIC $(CACHELINESZ-1), R2
MCRR(CpSC, 0, 2, 1, CpCACHERANGEdwb)
RET
/*
* drain write buffer and prefetch buffer
* writeback and invalidate data cache
* invalidate instruction cache
*/
TEXT cacheuwbinv(SB), 1, $-4
BARRIERS
MOVW $0, R0
MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEwbi), CpCACHEall
MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
RET
/*
* invalidate instruction cache
*/
TEXT cacheiinv(SB), 1, $-4
MOVW $0, R0
MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
RET