]> git.gir.st - tmk_keyboard.git/blob - tmk_core/tool/mbed/mbed-sdk/libraries/rtos/rtx/TARGET_CORTEX_A/TOOLCHAIN_GCC/HAL_CA9.s
Merge commit '20b787fc1284176834cbe7ca2134e4b36bec5828'
[tmk_keyboard.git] / tmk_core / tool / mbed / mbed-sdk / libraries / rtos / rtx / TARGET_CORTEX_A / TOOLCHAIN_GCC / HAL_CA9.s
1 /*----------------------------------------------------------------------------
2 * RL-ARM - RTX
3 *----------------------------------------------------------------------------
4 * Name: HAL_CA9.c
5 * Purpose: Hardware Abstraction Layer for Cortex-A9
6 * Rev.: 3 Sept 2013
7 *----------------------------------------------------------------------------
8 *
9 * Copyright (c) 2012 - 2013 ARM Limited
10 * All rights reserved.
11 * Redistribution and use in source and binary forms, with or without
12 * modification, are permitted provided that the following conditions are met:
13 * - Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer.
15 * - Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in the
17 * documentation and/or other materials provided with the distribution.
18 * - Neither the name of ARM nor the names of its contributors may be used
19 * to endorse or promote products derived from this software without
20 * specific prior written permission.
21 *
22 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
25 * ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
26 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
27 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
28 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
29 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
30 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
31 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
32 * POSSIBILITY OF SUCH DAMAGE.
33 *---------------------------------------------------------------------------*/
34
@ Entry points exported to the RTX kernel and application startup code
.global rt_set_PSP
.global rt_get_PSP
.global _alloc_box
.global _free_box
.global PendSV_Handler
.global OS_Tick_Handler

@ CPSR bit masks: Thumb state, IRQ disable, FIQ disable
.EQU CPSR_T_BIT, 0x20
.EQU CPSR_I_BIT, 0x80
.EQU CPSR_F_BIT, 0x40

@ ARM processor mode encodings (CPSR[4:0])
.EQU MODE_USR, 0x10
.EQU MODE_FIQ, 0x11
.EQU MODE_IRQ, 0x12
.EQU MODE_SVC, 0x13
.EQU MODE_ABT, 0x17
.EQU MODE_UND, 0x1B
.EQU MODE_SYS, 0x1F

@ Byte offsets into the RTX task control block (TCB). These mirror the
@ kernel's C struct layout -- confirm against the RTX TCB definition
@ (rt_TypeDef.h) if the kernel version changes.
.EQU TCB_TID, 3 /* 'task id' offset */
.EQU TCB_STACKF, 32 /* 'stack_frame' offset */
.EQU TCB_TSTACK, 36 /* 'tsk_stack' offset */

@ Kernel functions and data defined elsewhere in RTX
.extern rt_alloc_box
.extern os_tsk
.extern GICInterface_BASE
.extern rt_pop_req
.extern os_tick_irqack
.extern rt_systick
/*----------------------------------------------------------------------------
 *      Functions
 *---------------------------------------------------------------------------*/
.text
@ For A-class, set USR/SYS stack
@ Sets the banked USR/SYS stack pointer to 'stack' (R0), preserving the
@ caller's mode. Safe to call from any privileged mode; from USR mode the
@ CPS/MSR are no-ops and SP is written directly.
@ __asm void rt_set_PSP (U32 stack) {
rt_set_PSP:
.arm

MRS R1, CPSR @ remember the caller's mode/state
CPS #MODE_SYS @ switch to SYS mode to reach banked SP_usr/sys; no effect in USR mode
ISB @ synchronize the mode change before touching SP
MOV SP, R0 @ SP_usr/sys = stack argument
MSR CPSR_c, R1 @ return to the caller's original mode; no effect in USR mode
ISB
BX LR

@ }
83
@ For A-class, get USR/SYS stack
@ Returns the banked USR/SYS stack pointer in R0, preserving the caller's
@ mode (counterpart of rt_set_PSP above).
@ __asm U32 rt_get_PSP (void) {
rt_get_PSP:
.arm

MRS R1, CPSR @ remember the caller's mode/state
CPS #MODE_SYS @ switch to SYS mode to read banked SP_usr/sys; no effect in USR mode
ISB @ synchronize the mode change before reading SP
MOV R0, SP @ return value = SP_usr/sys
MSR CPSR_c, R1 @ return to the caller's original mode; no effect in USR mode
ISB
BX LR

@ }
98
/*--------------------------- _alloc_box ------------------------------------*/
@ __asm void *_alloc_box (void *box_mem) {
_alloc_box:
/* Function wrapper for Unprivileged/Privileged mode. */
/* Privileged callers tail-call rt_alloc_box directly; USR-mode callers
 * trap via SVC 0, which (see SVC_Handler) calls the address left in R12. */
.arm

LDR R12,=rt_alloc_box @ __cpp(rt_alloc_box)
MRS R2, CPSR
LSLS R2, #28 @ mode bits [3:0] -> [31:28]: Z set only for USR mode (0x10)
BXNE R12 @ privileged mode: call rt_alloc_box directly
SVC 0 @ USR mode: SVC 0 executes the function in R12 privileged
BX LR
@ }
112
113
/*--------------------------- _free_box -------------------------------------*/
@ __asm int _free_box (void *box_mem, void *box) {
_free_box:
/* Function wrapper for Unprivileged/Privileged mode. */
/* Same privilege trampoline as _alloc_box above, targeting rt_free_box
 * (resolved at link time; GAS does not require an explicit .extern). */
.arm

LDR R12,=rt_free_box @ __cpp(rt_free_box)
MRS R2, CPSR
LSLS R2, #28 @ mode bits [3:0] -> [31:28]: Z set only for USR mode (0x10)
BXNE R12 @ privileged mode: call rt_free_box directly
SVC 0 @ USR mode: SVC 0 executes the function in R12 privileged
BX LR

@ }
128
/*-------------------------- SVC_Handler -----------------------------------*/

@ #pragma push
@ #pragma arm
@ __asm void SVC_Handler (void) {

@ SVC exception entry.
@   SVC 0   : kernel request -- calls the function whose address the SVC
@             veneer left in R12, then falls into the context switcher.
@   SVC n>0 : dispatched through the application-provided SVC_Table.
@ PendSV_Handler and OS_Tick_Handler branch to Sys_Switch below after
@ building an equivalent SVC-mode stack frame, so the switcher code here
@ serves all three exception paths.
.type SVC_Handler, %function
.global SVC_Handler
SVC_Handler:
@ PRESERVE8
.arm
.extern rt_tsk_lock
.extern rt_tsk_unlock
.extern SVC_Count
.extern SVC_Table
.extern rt_stk_check
.extern FPUEnable

.EQU Mode_SVC, 0x13

SRSDB SP!, #Mode_SVC @ Push LR_SVC and SPSR_SVC onto SVC mode stack
PUSH {R4} @ Push R4 so we can use it as a temp


@ Extract the SVC immediate from the instruction that trapped: the encoding
@ differs between Thumb (8-bit imm, halfword insn) and ARM (24-bit imm).
MRS R4,SPSR @ Get SPSR
TST R4,#CPSR_T_BIT @ Check Thumb Bit
LDRNEH R4,[LR,#-2] @ Thumb: Load Halfword
BICNE R4,R4,#0xFF00 @ Extract SVC Number
LDREQ R4,[LR,#-4] @ ARM: Load Word
BICEQ R4,R4,#0xFF000000 @ Extract SVC Number

/* Lock out systick and re-enable interrupts */
PUSH {R0-R3,R12,LR}

AND R12, SP, #4 @ Ensure stack is 8-byte aligned
SUB SP, SP, R12 @ Adjust stack
PUSH {R12, LR} @ Store stack adjustment and dummy LR to SVC stack

BLX rt_tsk_lock @ hold off the scheduler tick while we work
CPSIE i @ re-enable IRQs (SVC entry disabled them)

POP {R12, LR} @ Get stack adjustment & discard dummy LR
ADD SP, SP, R12 @ Unadjust stack

POP {R0-R3,R12,LR}

CMP R4,#0 @ SVC 0 = kernel request
BNE SVC_User @ anything else goes to the user SVC table

@ SVC 0: call the kernel function (address passed in R12 by the veneer),
@ preserving SPSR around the call.
MRS R4,SPSR
PUSH {R4} @ Push R4 so we can use it as a temp
AND R4, SP, #4 @ Ensure stack is 8-byte aligned
SUB SP, SP, R4 @ Adjust stack
PUSH {R4, LR} @ Store stack adjustment and dummy LR
BLX R12 @ call kernel function via address in R12
POP {R4, LR} @ Get stack adjustment & discard dummy LR
ADD SP, SP, R4 @ Unadjust stack
POP {R4} @ Restore R4
MSR SPSR_cxsf,R4 @ restore the saved SPSR

/* Here we will be in SVC mode (even if coming in from PendSV_Handler or OS_Tick_Handler) */
Sys_Switch:
LDR LR,=os_tsk @ __cpp(&os_tsk)
LDM LR,{R4,LR} @ os_tsk.run, os_tsk.new
CMP R4,LR @ same task? then no context switch is needed
BNE switching

@ No task switch: unlock the scheduler and return to the interrupted context.
PUSH {R0-R3,R12,LR}

AND R12, SP, #4 @ Ensure stack is 8-byte aligned
SUB SP, SP, R12 @ Adjust stack
PUSH {R12, LR} @ Store stack adjustment and dummy LR to SVC stack

CPSID i @ IRQs off while the tick is re-enabled
BLX rt_tsk_unlock

POP {R12, LR} @ Get stack adjustment & discard dummy LR
ADD SP, SP, R12 @ Unadjust stack

POP {R0-R3,R12,LR}
POP {R4}
RFEFD SP! @ Return from exception, no task switch

switching:
CLREX @ drop any exclusive-monitor state owned by the outgoing task
CMP R4,#0 @ os_tsk.run == NULL (running task was deleted)?
ADDEQ SP,SP,#12 @ Original R4, LR & SPSR do not need to be popped when we are paging in a different task
BEQ SVC_Next @ Runtask deleted?

@ --- Save the outgoing task's context onto its own (user) stack ---
PUSH {R8-R11} @ R4 and LR already stacked
MOV R10,R4 @ Preserve os_tsk.run
MOV R11,LR @ Preserve os_tsk.new

ADD R8,SP,#16 @ Unstack R4,LR
LDMIA R8,{R4,LR}

SUB SP,SP,#4 @ Make space on the stack for the next instn
STMIA SP,{SP}^ @ Put User SP onto stack (user-mode banked SP via ^)
POP {R8} @ Pop User SP into R8

MRS R9,SPSR
STMDB R8!,{R9} @ User CPSR
STMDB R8!,{LR} @ User PC
STMDB R8,{LR}^ @ User LR (banked; STM^ has no writeback)
SUB R8,R8,#4 @ No writeback for store of User LR
STMDB R8!,{R0-R3,R12} @ User R0-R3,R12
MOV R3,R10 @ os_tsk.run
MOV LR,R11 @ os_tsk.new
POP {R9-R12}
ADD SP,SP,#12 @ Fix up SP for unstack of R4, LR & SPSR
STMDB R8!,{R4-R7,R9-R12} @ User R4-R11

@ If applicable, stack VFP state
MRC p15,0,R1,c1,c0,2 @ VFP/NEON access enabled? (CPACR)
AND R2,R1,#0x00F00000 @ isolate cp10/cp11 access fields
CMP R2,#0x00F00000 @ both fully enabled => task has VFP state to save
BNE no_outgoing_vfp
VMRS R2,FPSCR
STMDB R8!,{R2,R4} @ Push FPSCR, maintain 8-byte alignment
VSTMDB R8!,{S0-S31} @ save all 32 single-precision VFP registers
LDRB R2,[R3,#TCB_STACKF] @ Record in TCB that VFP state is stacked
ORR R2,#2
STRB R2,[R3,#TCB_STACKF]

no_outgoing_vfp:
STR R8,[R3,#TCB_TSTACK] @ store final stack pointer into the outgoing TCB
MOV R4,LR @ keep os_tsk.new across the call below

PUSH {R4} @ Push R4 so we can use it as a temp
AND R4, SP, #4 @ Ensure stack is 8-byte aligned
SUB SP, SP, R4 @ Adjust stack
PUSH {R4, LR} @ Store stack adjustment and dummy LR to SVC stack

BLX rt_stk_check @ check the outgoing task for stack overflow

POP {R4, LR} @ Get stack adjustment & discard dummy LR
ADD SP, SP, R4 @ Unadjust stack
POP {R4} @ Restore R4

MOV LR,R4 @ LR = os_tsk.new again

@ --- Page in the incoming task (os_tsk.new) ---
SVC_Next: @ R4 == os_tsk.run, LR == os_tsk.new, R0-R3, R5-R12 corruptible
LDR R1,=os_tsk @ __cpp(&os_tsk), os_tsk.run = os_tsk.new
STR LR,[R1]
LDRB R1,[LR,#TCB_TID] @ os_tsk.run->task_id
LSL R1,#8 @ Store PROCID
MCR p15,0,R1,c13,c0,1 @ Write CONTEXTIDR

LDR R0,[LR,#TCB_TSTACK] @ os_tsk.run->tsk_stack

@ Does incoming task have VFP state in stack?
LDRB R3,[LR,#TCB_STACKF]
TST R3,#0x2
MRC p15,0,R1,c1,c0,2 @ Read CPACR
ANDEQ R1,R1,#0xFF0FFFFF @ Disable VFP access if incoming task does not have stacked VFP state
ORRNE R1,R1,#0x00F00000 @ Enable VFP access if incoming task does have stacked VFP state
MCR p15,0,R1,c1,c0,2 @ Write CPACR
BEQ no_incoming_vfp
ISB @ We only need the sync if we enabled, otherwise we will context switch before next VFP instruction anyway
VLDMIA R0!,{S0-S31} @ restore all 32 single-precision VFP registers
LDR R2,[R0]
VMSR FPSCR,R2
ADD R0,R0,#8 @ skip FPSCR + alignment padding word

no_incoming_vfp:
LDR R1,[R0,#60] @ Restore User CPSR
MSR SPSR_cxsf,R1
LDMIA R0!,{R4-R11} @ Restore User R4-R11
ADD R0,R0,#4 @ Restore User R1-R3,R12
LDMIA R0!,{R1-R3,R12}
LDMIA R0,{LR}^ @ Restore User LR (banked; LDM^ has no writeback)
ADD R0,R0,#4 @ No writeback for load to user LR
LDMIA R0!,{LR} @ Restore User PC
ADD R0,R0,#4 @ Correct User SP for unstacked user CPSR

PUSH {R0} @ Push R0 onto stack
LDMIA SP,{SP}^ @ Get R0 off stack into User SP
ADD SP,SP,#4 @ Put SP back

LDR R0,[R0,#-32] @ Restore R0 from the frame just consumed

@ Unlock the scheduler, then return into the incoming task.
PUSH {R0-R3,R12,LR}

AND R12, SP, #4 @ Ensure stack is 8-byte aligned
SUB SP, SP, R12 @ Adjust stack
PUSH {R12, LR} @ Store stack adjustment and dummy LR to SVC stack

CPSID i @ IRQs off while the tick is re-enabled
BLX rt_tsk_unlock

POP {R12, LR} @ Get stack adjustment & discard dummy LR
ADD SP, SP, R12 @ Unadjust stack

POP {R0-R3,R12,LR}

MOVS PC,LR @ Return from exception (restores CPSR from SPSR)


/*------------------- User SVC -------------------------------*/

@ Dispatch SVC numbers > 0 through the application-provided SVC_Table.
SVC_User:
LDR R12,=SVC_Count
LDR R12,[R12]
CMP R4,R12 @ Check for overflow
BHI SVC_Done @ out-of-range SVC numbers are silently ignored

LDR R12,=SVC_Table-4 @ table is 1-based: SVC #1 -> SVC_Table[0]
LDR R12,[R12,R4,LSL #2] @ Load SVC Function Address
MRS R4,SPSR @ Save SPSR
PUSH {R4} @ Push R4 so we can use it as a temp
AND R4, SP, #4 @ Ensure stack is 8-byte aligned
SUB SP, SP, R4 @ Adjust stack
PUSH {R4, LR} @ Store stack adjustment and dummy LR
BLX R12 @ Call SVC Function
POP {R4, LR} @ Get stack adjustment & discard dummy LR
ADD SP, SP, R4 @ Unadjust stack
POP {R4} @ Restore R4
MSR SPSR_cxsf,R4 @ Restore SPSR

@ Common exit for user SVCs: unlock the scheduler and return.
SVC_Done:
PUSH {R0-R3,R12,LR}

PUSH {R4} @ Push R4 so we can use it as a temp
AND R4, SP, #4 @ Ensure stack is 8-byte aligned
SUB SP, SP, R4 @ Adjust stack
PUSH {R4, LR} @ Store stack adjustment and dummy LR

CPSID i @ IRQs off while the tick is re-enabled
BLX rt_tsk_unlock

POP {R4, LR} @ Get stack adjustment & discard dummy LR
ADD SP, SP, R4 @ Unadjust stack
POP {R4} @ Restore R4

POP {R0-R3,R12,LR}
POP {R4}
RFEFD SP! @ Return from exception

@ }

@ #pragma pop
371
372
@ #pragma push
@ @ #pragma arm
@ __asm void PendSV_Handler (U32 IRQn) {

@ Task-switch request interrupt. Entered with R0 = GIC interrupt ID.
@ NOTE(review): the SP fixups here undo a frame built by an external IRQ
@ wrapper (R0/R1 push, alignment word, dummy LR, saved R0-R3/R12/SPSR) --
@ confirm the exact layout against the RTX IRQ dispatch code.
PendSV_Handler:
.arm

.extern rt_tsk_lock
.extern IRQNestLevel

ADD SP,SP,#8 @ fix up stack pointer (R0 has been pushed and will never be popped, R1 was pushed for stack alignment)

@ Disable systick interrupts, then write EOIR. We want interrupts disabled before we enter the context switcher.
PUSH {R0, R1} @ preserve the interrupt ID across the call
BLX rt_tsk_lock
POP {R0, R1}
LDR R1, =GICInterface_BASE @ __cpp(&GICInterface_BASE)
LDR R1, [R1, #0]
STR R0, [R1, #0x10] @ GICC_EOIR: signal end-of-interrupt for this IRQ

LDR R0, =IRQNestLevel @ Get address of nesting counter
LDR R1, [R0]
SUB R1, R1, #1 @ Decrement nesting counter
STR R1, [R0]

BLX rt_pop_req @ __cpp(rt_pop_req) -- process queued kernel requests

POP {R1, LR} @ Get stack adjustment & discard dummy LR
ADD SP, SP, R1 @ Unadjust stack

LDR R0,[SP,#24] @ reload the SPSR saved on the stack
MSR SPSR_cxsf,R0
POP {R0-R3,R12} @ Leave SPSR & LR on the stack
PUSH {R4} @ mimic SVC_Handler's frame before joining it
B Sys_Switch @ run the shared context switcher
@ }
@ #pragma pop
409
@ #pragma push
@ #pragma arm
@ __asm void OS_Tick_Handler (U32 IRQn) {

@ System tick interrupt. Entered with R0 = GIC interrupt ID; acknowledges
@ the tick source, runs the kernel tick, then joins the shared context
@ switcher. Same hand-built frame handling as PendSV_Handler above.
OS_Tick_Handler:
.arm

ADD SP,SP,#8 @ fix up stack pointer (R0 has been pushed and will never be popped, R1 was pushed for stack alignment)

PUSH {R0, R1} @ preserve the interrupt ID across the call
BLX rt_tsk_lock
POP {R0, R1}
LDR R1, =GICInterface_BASE @ __cpp(&GICInterface_BASE)
LDR R1, [R1, #0]
STR R0, [R1, #0x10] @ GICC_EOIR: signal end-of-interrupt for this IRQ

LDR R0, =IRQNestLevel @ Get address of nesting counter
LDR R1, [R0]
SUB R1, R1, #1 @ Decrement nesting counter
STR R1, [R0]

BLX os_tick_irqack @ __cpp(os_tick_irqack) -- acknowledge the tick timer
BLX rt_systick @ __cpp(rt_systick) -- kernel tick processing

POP {R1, LR} @ Get stack adjustment & discard dummy LR
ADD SP, SP, R1 @ Unadjust stack

LDR R0,[SP,#24] @ reload the SPSR saved on the stack
MSR SPSR_cxsf,R0
POP {R0-R3,R12} @ Leave SPSR & LR on the stack
PUSH {R4} @ mimic SVC_Handler's frame before joining it
B Sys_Switch @ run the shared context switcher
@ }
@ #pragma pop
443
.global __set_PSP
@ __STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
@ {
@ Set the USR/SYS stack pointer, forcing 8-byte alignment on the value first.
__set_PSP:
@ PRESERVE8
.arm

BIC R0, R0, #7 @ensure stack is 8-byte aligned
MRS R1, CPSR @ remember the caller's mode
CPS #MODE_SYS @no effect in USR mode
@ NOTE(review): unlike rt_set_PSP there is no ISB between CPS and the SP
@ write here -- confirm this is intentional.
MOV SP, R0 @ SP_usr/sys = aligned stack top
MSR CPSR_c, R1 @no effect in USR mode
ISB
BX LR

@ }
460
.global __set_CPS_USR
@ __STATIC_ASM void __set_CPS_USR(void)
@ {
@ Drop the processor into unprivileged User mode. This is one-way from
@ straight-line code: returning to a privileged mode afterwards requires
@ taking an exception (e.g. SVC).
__set_CPS_USR:
.arm

CPS #MODE_USR @ switch to USR mode
BX LR
@ }
470
471 .END
472 /*----------------------------------------------------------------------------
473 * end of file
474 *---------------------------------------------------------------------------*/
Imprint / Impressum