1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
|
//
// Copyright (c) 2011, ARM Limited. All rights reserved.
//
// This program and the accompanying materials
// are licensed and made available under the terms and conditions of the BSD License
// which accompanies this distribution. The full text of the license may be found at
// http://opensource.org/licenses/bsd-license.php
//
// THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
// WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
//
//
//
// Entry points exported to the rest of the SEC phase for entering and
// leaving Secure Monitor mode (ARMv7 Security Extensions).
//
EXPORT monitor_vector_table
EXPORT return_from_exception
EXPORT enter_monitor_mode
EXPORT copy_cpsr_into_spsr
AREA Helper, CODE, READONLY
// The Monitor Vector Base Address Register (MVBAR) ignores the low 5 bits
// of the programmed address, so the vector table must be 32-byte aligned.
ALIGN 32
// Secure Monitor exception vector table: eight 4-byte entries (one per
// architectural vector slot, including the SMC entry). Every entry jumps
// to the 'dead' infinite loop — this firmware does not expect to take any
// exception while in Monitor mode, so any that arrives is treated as fatal.
monitor_vector_table
ldr pc, dead
ldr pc, dead
ldr pc, dead
ldr pc, dead
ldr pc, dead
ldr pc, dead
ldr pc, dead
ldr pc, dead
// enter_monitor_mode
//
// Switch the CPU from its current mode into Secure Monitor mode (mode
// bits 0x16) and install the caller-supplied Monitor stack, while
// preserving the return address so the caller regains control (still in
// Monitor mode) via the final 'bx lr'.
//
// The caller's original CPSR is stashed in SPSR_mon so that a subsequent
// exception return ('movs pc, lr' — see return_from_exception) restores
// the pre-entry mode and state.
//
// arg0: Secure Monitor mode stack
// In:       r0 = stack pointer to use while in Monitor mode
// Clobbers: r1, r2, r3, CPSR (mode change), SPSR_mon
enter_monitor_mode
mov r2, lr // Save current lr across the mode switch (lr is banked per mode)
mrs r1, cpsr // Save current mode (SVC) in r1
bic r3, r1, #0x1f // Clear all mode bits
orr r3, r3, #0x16 // Set bits for Monitor mode
msr cpsr_cxsf, r3 // We are now in Monitor Mode
mov sp, r0 // Use the passed sp
mov lr, r2 // Use the same lr as before
msr spsr_cxsf, r1 // Use saved mode for the MOVS jump to the kernel
bx lr
// return_from_exception
//
// Perform an exception return out of Monitor mode and then branch to the
// address in r0 (the non-secure entry point).
//
// We cannot simply execute 'movs pc, <target>' with the caller's address:
// the caller may be built as either ARM or Thumb2 code, and 'movs pc, lr'
// restores CPSR from SPSR *without* performing interworking, so the Thumb
// state bit would not be derived from the target address and execution
// could resume in the wrong instruction set and crash.
//
// Instead, 'movs pc, lr' returns (copying SPSR into CPSR) to the local
// ARM-state label below, and from there a 'bx r0' performs a proper
// interworking branch to the real destination.
//
// In:       r0 = address to branch to after the exception return
//           SPSR_mon = CPSR to restore (set up by the caller, e.g. via
//                      copy_cpsr_into_spsr or enter_monitor_mode)
// Clobbers: lr, CPSR (replaced by SPSR)
return_from_exception
adr lr, returned_exception // lr = address of the ARM-state trampoline below
movs pc, lr // Exception return: pc = lr, CPSR = SPSR
returned_exception // We are now in non-secure state
bx r0 // Interworking branch to the real target (ARM or Thumb)
// copy_cpsr_into_spsr
//
// Save the current Program Status Register (PSR) into the Saved PSR of
// the current mode, so that a later exception return ('movs pc, lr')
// resumes with exactly the current processor state.
//
// Clobbers: r0, SPSR of the current mode
copy_cpsr_into_spsr
mrs r0, cpsr // r0 = current CPSR
msr spsr_cxsf, r0 // SPSR (all fields) = r0
bx lr
// Fatal-trap handler: all Monitor vector table entries land here.
// Spin forever so a debugger can inspect the hung state.
dead
B dead // Infinite loop — unexpected exception taken in Monitor mode
END
|