]> git.gir.st - tmk_keyboard.git/blob - tmk_core/tool/mbed/mbed-sdk/libraries/rtos/rtx/TARGET_CORTEX_A/TOOLCHAIN_ARM/HAL_CA9.c
Merge commit '5a0132f1c1c9a14fd2941f0a5e29bbf5e31da20c' into master-core-pull
[tmk_keyboard.git] / tmk_core / tool / mbed / mbed-sdk / libraries / rtos / rtx / TARGET_CORTEX_A / TOOLCHAIN_ARM / HAL_CA9.c
1 /*----------------------------------------------------------------------------
2 * RL-ARM - RTX
3 *----------------------------------------------------------------------------
4 * Name: HAL_CA9.c
5 * Purpose: Hardware Abstraction Layer for Cortex-A9
6 * Rev.: 3 Sept 2013
7 *----------------------------------------------------------------------------
8 *
9 * Copyright (c) 2012 - 2013 ARM Limited
10 * All rights reserved.
11 * Redistribution and use in source and binary forms, with or without
12 * modification, are permitted provided that the following conditions are met:
13 * - Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer.
15 * - Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in the
17 * documentation and/or other materials provided with the distribution.
18 * - Neither the name of ARM nor the names of its contributors may be used
19 * to endorse or promote products derived from this software without
20 * specific prior written permission.
21 *
22 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
25 * ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
26 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
27 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
28 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
29 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
30 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
31 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
32 * POSSIBILITY OF SUCH DAMAGE.
33 *---------------------------------------------------------------------------*/
34
35 #include "rt_TypeDef.h"
36 #include "RTX_Config.h"
37 #include "rt_System.h"
38 #include "rt_Task.h"
39 #include "rt_List.h"
40 #include "rt_MemBox.h"
41 #include "rt_HAL_CA.h"
42
43
44 /*----------------------------------------------------------------------------
45 * Functions
46 *---------------------------------------------------------------------------*/
47
48 //For A-class, set USR/SYS stack pointer (Cortex-A analogue of Cortex-M PSP).
//Briefly switches to System mode (which banks the same SP as User mode),
//writes the new SP, then restores the caller's mode.
49 __asm void rt_set_PSP (U32 stack) {
50 ARM
51 
52 MRS R1, CPSR ;save caller's mode so it can be restored below
53 CPS #MODE_SYS ;no effect in USR mode
54 ISB
55 MOV SP, R0 ;stack argument (R0) becomes the USR/SYS SP
56 MSR CPSR_c, R1 ;no effect in USR mode
57 ISB
58 BX LR
59 
60 }
61
62 //For A-class, get USR/SYS stack pointer (Cortex-A analogue of Cortex-M PSP).
//Mirror of rt_set_PSP: hop to System mode, read the banked SP, restore mode.
63 __asm U32 rt_get_PSP (void) {
64 ARM
65 
66 MRS R1, CPSR ;save caller's mode so it can be restored below
67 CPS #MODE_SYS ;no effect in USR mode
68 ISB
69 MOV R0, SP ;return value: the USR/SYS SP
70 MSR CPSR_c, R1 ;no effect in USR mode
71 ISB
72 BX LR
73 }
74
75 /*--------------------------- _alloc_box ------------------------------------*/
76 __asm void *_alloc_box (void *box_mem) {
77 /* Function wrapper for Unprivileged/Privileged mode.
 * Privileged callers jump straight to rt_alloc_box; unprivileged (USR)
 * callers trap via SVC 0 so the kernel performs the call for them. */
78 ARM
79 
80 LDR R12,=__cpp(rt_alloc_box) ; target for direct call / SVC 0 dispatch
81 MRS R2, CPSR
82 LSLS R2, #28 ; keep CPSR mode bits[3:0]; USR mode (0x10) gives 0 -> Z set
83 BXNE R12 ; privileged mode: call rt_alloc_box directly
84 SVC 0 ; USR mode: request the call via the SVC handler
85 BX LR
86 }
87
88
89 /*--------------------------- _free_box -------------------------------------*/
90 __asm int _free_box (void *box_mem, void *box) {
91 /* Function wrapper for Unprivileged/Privileged mode.
 * Same dispatch trick as _alloc_box: privileged callers branch directly to
 * rt_free_box; unprivileged (USR) callers go through SVC 0. */
92 ARM
93 
94 LDR R12,=__cpp(rt_free_box) ; target for direct call / SVC 0 dispatch
95 MRS R2, CPSR
96 LSLS R2, #28 ; keep CPSR mode bits[3:0]; USR mode (0x10) gives 0 -> Z set
97 BXNE R12 ; privileged mode: call rt_free_box directly
98 SVC 0 ; USR mode: request the call via the SVC handler
99 BX LR
100 
101 }
102
103 /*-------------------------- SVC_Handler -----------------------------------*/
/*
 * SVC exception entry. Decodes the SVC number: SVC 0 is an OS service call
 * (target address already in R12, see _alloc_box/_free_box), any other
 * number indexes the user SVC_Table. After the service call it falls into
 * Sys_Switch, the common context switcher also reached from
 * PendSV_Handler and OS_Tick_Handler, which saves the outgoing task's
 * register (and, if enabled, VFP) state and restores the incoming task's.
 */
104
105 #pragma push
106 #pragma arm
107 __asm void SVC_Handler (void) {
108 PRESERVE8
109 ARM
110
111 IMPORT rt_tsk_lock
112 IMPORT rt_tsk_unlock
113 IMPORT SVC_Count
114 IMPORT SVC_Table
115 IMPORT rt_stk_check
116 IMPORT FPUEnable
117
118 Mode_SVC EQU 0x13
119
120 SRSFD SP!, #Mode_SVC ; Push LR_SVC and SPSR_SVC onto SVC mode stack
121 PUSH {R4} ; Push R4 so we can use it as a temp
122
123 MRS R4,SPSR ; Get SPSR
124 TST R4,#CPSR_T_BIT ; Check Thumb Bit
125 LDRNEH R4,[LR,#-2] ; Thumb: Load Halfword
126 BICNE R4,R4,#0xFF00 ; Extract SVC Number
127 LDREQ R4,[LR,#-4] ; ARM: Load Word
128 BICEQ R4,R4,#0xFF000000 ; Extract SVC Number
129
130 /* Lock out systick and re-enable interrupts */
131 PUSH {R0-R3,R12,LR}
132
133 AND R12, SP, #4 ; Ensure stack is 8-byte aligned
134 SUB SP, SP, R12 ; Adjust stack
135 PUSH {R12, LR} ; Store stack adjustment and dummy LR to SVC stack
136
137 BLX rt_tsk_lock
138 CPSIE i
139
140 POP {R12, LR} ; Get stack adjustment & discard dummy LR
141 ADD SP, SP, R12 ; Unadjust stack
142
143 POP {R0-R3,R12,LR}
144
145 CMP R4,#0 ; SVC 0 = OS service call (address in R12)
146 BNE SVC_User ; any other number goes to the user SVC table
147
148 MRS R4,SPSR ; preserve SPSR across the service call
149 PUSH {R4} ; Push R4 so we can use it as a temp
150 AND R4, SP, #4 ; Ensure stack is 8-byte aligned
151 SUB SP, SP, R4 ; Adjust stack
152 PUSH {R4, LR} ; Store stack adjustment and dummy LR
153 BLX R12 ; call the OS service routine
154 POP {R4, LR} ; Get stack adjustment & discard dummy LR
155 ADD SP, SP, R4 ; Unadjust stack
156 POP {R4} ; Restore R4
157 MSR SPSR_CXSF,R4 ; restore preserved SPSR
158
159 /* Here we will be in SVC mode (even if coming in from PendSV_Handler or OS_Tick_Handler) */
160 Sys_Switch ; common context-switch entry point
161 LDR LR,=__cpp(&os_tsk)
162 LDM LR,{R4,LR} ; os_tsk.run, os_tsk.new
163 CMP R4,LR ; same task? then no switch needed
164 BNE switching
165
166 PUSH {R0-R3,R12,LR}
167
168 AND R12, SP, #4 ; Ensure stack is 8-byte aligned
169 SUB SP, SP, R12 ; Adjust stack
170 PUSH {R12, LR} ; Store stack adjustment and dummy LR to SVC stack
171
172 CPSID i
173 BLX rt_tsk_unlock
174
175 POP {R12, LR} ; Get stack adjustment & discard dummy LR
176 ADD SP, SP, R12 ; Unadjust stack
177
178 POP {R0-R3,R12,LR}
179 POP {R4}
180 RFEFD SP! ; Return from exception, no task switch
181
182 switching
183 CLREX ; clear any exclusive monitor reservation
184 CMP R4,#0 ; os_tsk.run == NULL?
185 ADDEQ SP,SP,#12 ; Original R4, LR & SPSR do not need to be popped when we are paging in a different task
186 BEQ SVC_Next ; Runtask deleted?
187
188 /* Save outgoing task's context onto its own (user) stack. */
189 PUSH {R8-R11} //R4 and LR already stacked
190 MOV R10,R4 ; Preserve os_tsk.run
191 MOV R11,LR ; Preserve os_tsk.new
192
193 ADD R8,SP,#16 ; Unstack R4,LR
194 LDMIA R8,{R4,LR}
195
196 SUB SP,SP,#4 ; Make space on the stack for the next instn
197 STMIA SP,{SP}^ ; Put User SP onto stack
198 POP {R8} ; Pop User SP into R8
199
200 MRS R9,SPSR
201 STMDB R8!,{R9} ; User CPSR
202 STMDB R8!,{LR} ; User PC
203 STMDB R8,{LR}^ ; User LR
204 SUB R8,R8,#4 ; No writeback for store of User LR
205 STMDB R8!,{R0-R3,R12} ; User R0-R3,R12
206 MOV R3,R10 ; os_tsk.run
207 MOV LR,R11 ; os_tsk.new
208 POP {R9-R12}
209 ADD SP,SP,#12 ; Fix up SP for unstack of R4, LR & SPSR
210 STMDB R8!,{R4-R7,R9-R12} ; User R4-R11
211
212 //If applicable, stack VFP state
213 MRC p15,0,R1,c1,c0,2 ; VFP/NEON access enabled? (CPACR)
214 AND R2,R1,#0x00F00000 ; isolate CPACR cp10/cp11 access fields
215 CMP R2,#0x00F00000 ; both fully enabled?
216 BNE no_outgoing_vfp
217 VMRS R2,FPSCR
218 STMDB R8!,{R2,R4} ; Push FPSCR, maintain 8-byte alignment
219 VSTMDB R8!,{S0-S31}
220 LDRB R2,[R3,#TCB_STACKF] ; Record in TCB that VFP state is stacked
221 ORR R2,#2
222 STRB R2,[R3,#TCB_STACKF]
223
224 no_outgoing_vfp
225 STR R8,[R3,#TCB_TSTACK] ; save final stack pointer into outgoing TCB
226 MOV R4,LR ; keep os_tsk.new across the rt_stk_check call
227
228 PUSH {R4} ; Push R4 so we can use it as a temp
229 AND R4, SP, #4 ; Ensure stack is 8-byte aligned
230 SUB SP, SP, R4 ; Adjust stack
231 PUSH {R4, LR} ; Store stack adjustment and dummy LR to SVC stack
232
233 BLX rt_stk_check ; check outgoing task for stack overflow
234
235 POP {R4, LR} ; Get stack adjustment & discard dummy LR
236 ADD SP, SP, R4 ; Unadjust stack
237 POP {R4} ; Restore R4
238
239 MOV LR,R4 ; LR = os_tsk.new again
240
241 SVC_Next //R4 == os_tsk.run, LR == os_tsk.new, R0-R3, R5-R12 corruptible
242 LDR R1,=__cpp(&os_tsk) ; os_tsk.run = os_tsk.new
243 STR LR,[R1]
244 LDRB R1,[LR,#TCB_TID] ; os_tsk.run->task_id
245 LSL R1,#8 ; Store PROCID
246 MCR p15,0,R1,c13,c0,1 ; Write CONTEXTIDR
247
248 LDR R0,[LR,#TCB_TSTACK] ; os_tsk.run->tsk_stack
249
250 //Does incoming task have VFP state in stack?
251 LDRB R3,[LR,#TCB_STACKF]
252 TST R3,#0x2
253 MRC p15,0,R1,c1,c0,2 ; Read CPACR
254 ANDEQ R1,R1,#0xFF0FFFFF ; Disable VFP access if incoming task does not have stacked VFP state
255 ORRNE R1,R1,#0x00F00000 ; Enable VFP access if incoming task does have stacked VFP state
256 MCR p15,0,R1,c1,c0,2 ; Write CPACR
257 BEQ no_incoming_vfp
258 ISB ; We only need the sync if we enabled, otherwise we will context switch before next VFP instruction anyway
259 VLDMIA R0!,{S0-S31}
260 LDR R2,[R0]
261 VMSR FPSCR,R2
262 ADD R0,R0,#8 ; skip FPSCR + alignment word
263
264 no_incoming_vfp
265 LDR R1,[R0,#60] ; Restore User CPSR
266 MSR SPSR_CXSF,R1
267 LDMIA R0!,{R4-R11} ; Restore User R4-R11
268 ADD R0,R0,#4 ; Restore User R1-R3,R12
269 LDMIA R0!,{R1-R3,R12}
270 LDMIA R0,{LR}^ ; Restore User LR
271 ADD R0,R0,#4 ; No writeback for load to user LR
272 LDMIA R0!,{LR} ; Restore User PC
273 ADD R0,R0,#4 ; Correct User SP for unstacked user CPSR
274
275 PUSH {R0} ; Push R0 onto stack
276 LDMIA SP,{SP}^ ; Get R0 off stack into User SP
277 ADD SP,SP,#4 ; Put SP back
278
279 LDR R0,[R0,#-32] ; Restore R0
280
281 PUSH {R0-R3,R12,LR}
282
283 AND R12, SP, #4 ; Ensure stack is 8-byte aligned
284 SUB SP, SP, R12 ; Adjust stack
285 PUSH {R12, LR} ; Store stack adjustment and dummy LR to SVC stack
286
287 CPSID i
288 BLX rt_tsk_unlock
289
290 POP {R12, LR} ; Get stack adjustment & discard dummy LR
291 ADD SP, SP, R12 ; Unadjust stack
292
293 POP {R0-R3,R12,LR}
294
295 MOVS PC,LR ; Return from exception
296
297
298 /*------------------- User SVC -------------------------------*/
299
300 SVC_User
301 LDR R12,=SVC_Count
302 LDR R12,[R12]
303 CMP R4,R12 ; Check for overflow
304 BHI SVC_Done ; out-of-range SVC numbers are silently ignored
305
306 LDR R12,=SVC_Table-4 ; table is 1-based (entry 1 at offset 0)
307 LDR R12,[R12,R4,LSL #2] ; Load SVC Function Address
308 MRS R4,SPSR ; Save SPSR
309 PUSH {R4} ; Push R4 so we can use it as a temp
310 AND R4, SP, #4 ; Ensure stack is 8-byte aligned
311 SUB SP, SP, R4 ; Adjust stack
312 PUSH {R4, LR} ; Store stack adjustment and dummy LR
313 BLX R12 ; Call SVC Function
314 POP {R4, LR} ; Get stack adjustment & discard dummy LR
315 ADD SP, SP, R4 ; Unadjust stack
316 POP {R4} ; Restore R4
317 MSR SPSR_CXSF,R4 ; Restore SPSR
318
319 SVC_Done
320 PUSH {R0-R3,R12,LR}
321
322 PUSH {R4} ; Push R4 so we can use it as a temp
323 AND R4, SP, #4 ; Ensure stack is 8-byte aligned
324 SUB SP, SP, R4 ; Adjust stack
325 PUSH {R4, LR} ; Store stack adjustment and dummy LR
326
327 CPSID i
328 BLX rt_tsk_unlock
329
330 POP {R4, LR} ; Get stack adjustment & discard dummy LR
331 ADD SP, SP, R4 ; Unadjust stack
332 POP {R4} ; Restore R4
333
334 POP {R0-R3,R12,LR}
335 POP {R4}
336 RFEFD SP! ; Return from exception
337 }
338 #pragma pop
339
340 #pragma push
341 #pragma arm
/*
 * Software-triggered context-switch request (entered from the common IRQ
 * wrapper — the stack fixups below undo state that wrapper pushed).
 * Acknowledges the interrupt at the GIC, drops the IRQ nesting level,
 * runs rt_pop_req, then branches into Sys_Switch in SVC_Handler.
 * NOTE(review): on entry R0 appears to hold the interrupt ID written to
 * the GIC EOIR — set up by the IRQ wrapper outside this file; confirm there.
 */
342 __asm void PendSV_Handler (U32 IRQn) {
343 ARM
344
345 IMPORT rt_tsk_lock
346 IMPORT IRQNestLevel
347
348 ADD SP,SP,#8 //fix up stack pointer (R0 has been pushed and will never be popped, R1 was pushed for stack alignment)
349
350 //Disable systick interrupts, then write EOIR. We want interrupts disabled before we enter the context switcher.
351 PUSH {R0, R1}
352 BLX rt_tsk_lock
353 POP {R0, R1}
354 LDR R1, =__cpp(&GICInterface_BASE)
355 LDR R1, [R1, #0]
356 STR R0, [R1, #0x10] ; write End Of Interrupt register (EOIR, offset 0x10)
357
358 LDR R0, =IRQNestLevel ; Get address of nesting counter
359 LDR R1, [R0]
360 SUB R1, R1, #1 ; Decrement nesting counter
361 STR R1, [R0]
362
363 BLX __cpp(rt_pop_req) ; process queued task-switch requests
364
365 POP {R1, LR} ; Get stack adjustment & discard dummy LR
366 ADD SP, SP, R1 ; Unadjust stack
367
368 LDR R0,[SP,#24] ; fetch saved SPSR from the IRQ wrapper's frame
369 MSR SPSR_CXSF,R0
370 POP {R0-R3,R12} ; Leave SPSR & LR on the stack
371 PUSH {R4} ; match the frame layout Sys_Switch expects
372 B Sys_Switch ; continue in the common context switcher
373 }
374 #pragma pop
375
376
377 #pragma push
378 #pragma arm
/*
 * OS tick (systick) interrupt handler (entered from the common IRQ
 * wrapper — the stack fixups below undo state that wrapper pushed).
 * Acknowledges the interrupt at the GIC, drops the IRQ nesting level,
 * acks the tick timer and runs rt_systick, then branches into Sys_Switch
 * in SVC_Handler to perform any resulting task switch.
 * NOTE(review): on entry R0 appears to hold the interrupt ID written to
 * the GIC EOIR — set up by the IRQ wrapper outside this file; confirm there.
 */
379 __asm void OS_Tick_Handler (U32 IRQn) {
380 ARM
381
382 IMPORT rt_tsk_lock
383 IMPORT IRQNestLevel
384
385 ADD SP,SP,#8 //fix up stack pointer (R0 has been pushed and will never be popped, R1 was pushed for stack alignment)
386
387 PUSH {R0, R1}
388 BLX rt_tsk_lock ; lock out further systick before switching context
389 POP {R0, R1}
390 LDR R1, =__cpp(&GICInterface_BASE)
391 LDR R1, [R1, #0]
392 STR R0, [R1, #0x10] ; write End Of Interrupt register (EOIR, offset 0x10)
393
394 LDR R0, =IRQNestLevel ; Get address of nesting counter
395 LDR R1, [R0]
396 SUB R1, R1, #1 ; Decrement nesting counter
397 STR R1, [R0]
398
399 BLX __cpp(os_tick_irqack) ; acknowledge the tick timer interrupt
400 BLX __cpp(rt_systick) ; run the kernel's periodic tick processing
401
402 POP {R1, LR} ; Get stack adjustment & discard dummy LR
403 ADD SP, SP, R1 ; Unadjust stack
404
405 LDR R0,[SP,#24] ; fetch saved SPSR from the IRQ wrapper's frame
406 MSR SPSR_CXSF,R0
407 POP {R0-R3,R12} ; Leave SPSR & LR on the stack
408 PUSH {R4} ; match the frame layout Sys_Switch expects
409 B Sys_Switch ; continue in the common context switcher
410 }
411 #pragma pop
412
413
414 /*----------------------------------------------------------------------------
415 * end of file
416 *---------------------------------------------------------------------------*/
Imprint / Impressum