@@ -10,10 +10,13 @@
#include <linux/linkage.h>
#include <asm/gic.h>
#include <asm/armv7.h>
#include <asm/proc-armv/ptrace.h>
.arch_extension sec
.arch_extension virt
.pushsection ._secure.text, "ax"
.align 5
/* the vector table for secure state and HYP mode */
_monitor_vectors:
@@ -22,51 +25,86 @@ _monitor_vectors:
	adr	pc, _secure_monitor
	.word	0
	.word	0
	adr	pc, _hyp_trap
	.word	0
	.word	0
	.word	0
.macro is_cpu_virt_capable tmp
	mrc	p15, 0, \tmp, c0, c1, 1		@ read ID_PFR1
	and	\tmp, \tmp, #CPUID_ARM_VIRT_MASK	@ mask virtualization bits
	cmp	\tmp, #(1 << CPUID_ARM_VIRT_SHIFT)
.endm
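/*
 * is_cpu_virt_capable leaves the condition flags set so that "eq" means the
 * core implements the Virtualization Extensions (ID_PFR1 virtualization
 * field == 1); callers below use eq/ne suffixed instructions on that result.
 */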
/*
 * secure monitor handler
 * U-Boot calls this "software interrupt" in start.S
 * This is executed on an "smc" instruction; we use "smc #0" to switch
 * to non-secure state.
 * We use only r0 and r1 here, due to constraints in the caller.
 * r0, r1, r2: passed to the callee
 * ip: target PC
 */
_secure_monitor:
	mrc	p15, 0, r1, c1, c1, 0		@ read SCR
	bic	r1, r1, #0x4e			@ clear IRQ, FIQ, EA, nET bits
	orr	r1, r1, #0x31			@ enable NS, AW, FW bits
	mrc	p15, 0, r5, c1, c1, 0		@ read SCR
	bic	r5, r5, #0x4e			@ clear IRQ, FIQ, EA, nET bits
	orr	r5, r5, #0x31			@ enable NS, AW, FW bits
	mrc	p15, 0, r0, c0, c1, 1		@ read ID_PFR1
	and	r0, r0, #CPUID_ARM_VIRT_MASK	@ mask virtualization bits
	cmp	r0, #(1 << CPUID_ARM_VIRT_SHIFT)
	mov	r6, #SVC_MODE			@ default mode is SVC
	is_cpu_virt_capable r4
#ifdef CONFIG_ARMV7_VIRT
	orreq	r1, r1, #0x100			@ allow HVC instruction
	orreq	r5, r5, #0x100			@ allow HVC instruction
	moveq	r6, #HYP_MODE			@ Enter the kernel as HYP
#endif
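	@ The 0x100 above is SCR.HCE (Hyp Call Enable). It is only writable
	@ from secure state, so HVC has to be enabled here, before the switch.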
	mcr	p15, 0, r1, c1, c1, 0		@ write SCR (with NS bit set)
	mcr	p15, 0, r5, c1, c1, 0		@ write SCR (with NS bit set)
	isb
#ifdef CONFIG_ARMV7_VIRT
	mrceq	p15, 0, r0, c12, c0, 1		@ get MVBAR value
	mcreq	p15, 4, r0, c12, c0, 0		@ write HVBAR
#endif
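	@ Copying MVBAR into HVBAR makes the same vector table serve HYP mode,
	@ so an hvc from the non-secure world traps through the _hyp_trap entry.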
	bne	1f
	@ Reset CNTVOFF to 0 before leaving monitor mode
	mrc	p15, 0, r0, c0, c1, 1		@ read ID_PFR1
	ands	r0, r0, #CPUID_ARM_GENTIMER_MASK	@ test arch timer bits
	movne	r0, #0
	mcrrne	p15, 4, r0, r0, c14		@ Reset CNTVOFF to zero
	mrc	p15, 0, r4, c0, c1, 1		@ read ID_PFR1
	ands	r4, r4, #CPUID_ARM_GENTIMER_MASK	@ test arch timer bits
	movne	r4, #0
	mcrrne	p15, 4, r4, r4, c14		@ Reset CNTVOFF to zero
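	@ CNTVOFF is only writable from HYP mode, or from MON with SCR.NS set,
	@ so this is the last chance to zero it for a kernel entered in SVC.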
1:
	movs	pc, lr				@ return to non-secure SVC
_hyp_trap:
	mrs	lr, elr_hyp	@ for older asm: .byte 0x00, 0xe3, 0x0e, 0xe1
	mov	pc, lr				@ do not switch modes, but
						@ return to caller
	mov	lr, ip
	mov	ip, #(F_BIT | I_BIT | A_BIT)	@ Set A, I and F
	tst	lr, #1				@ Check for Thumb PC
	orrne	ip, ip, #T_BIT			@ Set T if Thumb
	orr	ip, ip, r6			@ Slot target mode in
	msr	spsr_cxfs, ip			@ Set full SPSR
	movs	pc, lr				@ ERET to non-secure
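	@ With the target mode and A/I/F bits slotted into SPSR_mon above,
	@ "movs pc, lr" performs the exception return that leaves monitor mode
	@ and jumps to the address passed in ip, already in non-secure state.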
ENTRY(_do_nonsec_entry)
	mov	ip, r0
	mov	r0, r1
	mov	r1, r2
	mov	r2, r3
	smc	#0
ENDPROC(_do_nonsec_entry)
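/*
 * _do_nonsec_entry calling convention: r0 holds the target PC, r1-r3 are
 * forwarded to the callee as r0-r2. The "smc #0" traps into _secure_monitor,
 * which performs the switch and returns to the address stashed in ip.
 *
 * Illustrative call sketch only (kernel_entry is a placeholder symbol, not
 * part of this patch):
 *	ldr	r0, =kernel_entry	@ non-secure entry point
 *	mov	r1, #0			@ first argument for the callee
 *	bl	_do_nonsec_entry	@ never returns to the caller
 */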
.macro get_cbar_addr addr
#ifdef CONFIG_ARM_GIC_BASE_ADDRESS
	ldr	\addr, =CONFIG_ARM_GIC_BASE_ADDRESS
#else
	mrc	p15, 4, \addr, c15, c0, 0	@ read CBAR
	bfc	\addr, #0, #15			@ clear reserved bits
#endif
.endm

.macro get_gicd_addr addr
	get_cbar_addr \addr
	add	\addr, \addr, #GIC_DIST_OFFSET	@ GIC dist i/f offset
.endm

.macro get_gicc_addr addr, tmp
	get_cbar_addr \addr
	is_cpu_virt_capable \tmp
	movne	\tmp, #GIC_CPU_OFFSET_A9	@ GIC CPU offset for A9
	moveq	\tmp, #GIC_CPU_OFFSET_A15	@ GIC CPU offset for A15/A7
	add	\addr, \addr, \tmp
.endm
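/*
 * The GIC CPU interface sits at a core specific offset from the CBAR
 * peripheral base: GIC_CPU_OFFSET_A9 on A9-class cores, GIC_CPU_OFFSET_A15
 * on A15/A7. Virtualization capability is used as the discriminator, since
 * of these cores only A15/A7 implement the Virtualization Extensions.
 */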
#ifndef CONFIG_ARMV7_PSCI
/*
 * Secondary CPUs start here and call the code for the core specific parts
 * of the non-secure and HYP mode transition. The GIC distributor specific
@@ -74,31 +112,21 @@ _hyp_trap:
 * Then they go back to wfi and wait to be woken up by the kernel again.
 */
ENTRY(_smp_pen)
	mrs	r0, cpsr
	orr	r0, r0, #0xc0
	msr	cpsr, r0			@ disable interrupts
	ldr	r1, =_start
	mcr	p15, 0, r1, c12, c0, 0		@ set VBAR
	cpsid	i
	cpsid	f
	bl	_nonsec_init
	mov	r12, r0				@ save GICC address
#ifdef CONFIG_ARMV7_VIRT
	bl	_switch_to_hyp
#endif
	ldr	r1, [r12, #GICC_IAR]		@ acknowledge IPI
	str	r1, [r12, #GICC_EOIR]		@ signal end of interrupt
	adr	r0, _smp_pen			@ do not use this address again
	b	smp_waitloop			@ wait for IPIs, board specific
ENDPROC(_smp_pen)
#endif

/*
 * Switch a core to non-secure state.
 *
 *  1. initialize the GIC per-core interface
 *  2. allow coprocessor access in non-secure modes
 *  3. switch the cpu mode (by calling "smc #0")
 *
 * Called from smp_pen by secondary cores and directly by the BSP.
 * Do not assume that the stack is available and only use registers
@@ -108,38 +136,23 @@ ENDPROC(_smp_pen)
 * though, but we check this in C before calling this function.
 */
ENTRY(_nonsec_init)
#ifdef CONFIG_ARM_GIC_BASE_ADDRESS
	ldr	r2, =CONFIG_ARM_GIC_BASE_ADDRESS
#else
	mrc	p15, 4, r2, c15, c0, 0		@ read CBAR
	bfc	r2, #0, #15			@ clear reserved bits
#endif
	add	r3, r2, #GIC_DIST_OFFSET	@ GIC dist i/f offset
	get_gicd_addr r3
	mvn	r1, #0				@ all bits to 1
	str	r1, [r3, #GICD_IGROUPRn]	@ allow private interrupts
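	@ Writing all ones to the banked GICD_IGROUPR0 moves this core's SGIs
	@ and PPIs into group 1, so they remain usable after the switch to
	@ non-secure state.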
	mrc	p15, 0, r0, c0, c0, 0		@ read MIDR
	ldr	r1, =MIDR_PRIMARY_PART_MASK
	and	r0, r0, r1			@ mask out variant and revision
	get_gicc_addr r3, r1
	ldr	r1, =MIDR_CORTEX_A7_R0P0 & MIDR_PRIMARY_PART_MASK
	cmp	r0, r1				@ check for Cortex-A7
	ldr	r1, =MIDR_CORTEX_A15_R0P0 & MIDR_PRIMARY_PART_MASK
	cmpne	r0, r1				@ check for Cortex-A15
	movne	r1, #GIC_CPU_OFFSET_A9		@ GIC CPU offset for A9
	moveq	r1, #GIC_CPU_OFFSET_A15		@ GIC CPU offset for A15/A7
	add	r3, r2, r1			@ r3 = GIC CPU i/f addr
	mov	r1, #1				@ set GICC_CTLR[enable]
	mov	r1, #3				@ Enable both groups
	str	r1, [r3, #GICC_CTLR]		@ and clear all other bits
	mov	r1, #0xff
	str	r1, [r3, #GICC_PMR]		@ set priority mask register
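	@ A priority mask of 0xff lets interrupts of any priority through the
	@ CPU interface once it is enabled.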
	mrc	p15, 0, r0, c1, c1, 2
	movw	r1, #0x3fff
	movt	r1, #0x0006
	mcr	p15, 0, r1, c1, c1, 2		@ NSACR = all copros to non-sec
	movt	r1, #0x0004
	orr	r0, r0, r1
	mcr	p15, 0, r0, c1, c1, 2		@ NSACR = all copros to non-sec
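	@ The constant is 0x00043fff: NSACR bits 0-13 grant non-secure access
	@ to cp0..cp13, and bit 18 (NS_SMP) lets the non-secure world write the
	@ SMP bit in ACTLR. It is now ORed into the existing NSACR value
	@ instead of overwriting it.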
	/* The CNTFRQ register of the generic timer needs to be
	 * programmed in secure state. Some primary bootloaders / firmware
@@ -157,21 +170,9 @@ ENTRY(_nonsec_init)
	adr	r1, _monitor_vectors
	mcr	p15, 0, r1, c12, c0, 1		@ set MVBAR to secure vectors
	mrc	p15, 0, ip, c12, c0, 0		@ save secure copy of VBAR
	isb
	smc	#0				@ call into MONITOR mode
	mcr	p15, 0, ip, c12, c0, 0		@ write non-secure copy of VBAR
	mov	r1, #1
	str	r1, [r3, #GICC_CTLR]		@ enable non-secure CPU i/f
	add	r2, r2, #GIC_DIST_OFFSET
	str	r1, [r2, #GICD_CTLR]		@ allow private interrupts
	mov	r0, r3				@ return GICC address
	bx	lr
ENDPROC(_nonsec_init)
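	@ With this change _nonsec_init only prepares the secure-side state and
	@ returns the GIC CPU interface address in r0; the smc that actually
	@ enters non-secure state is deferred to _do_nonsec_entry.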
@@ -183,18 +184,10 @@ ENTRY(smp_waitloop)
	ldr	r1, [r1]
	cmp	r0, r1				@ make sure we don't execute this code
	beq	smp_waitloop			@ again (due to a spurious wakeup)
	mov	pc, r1
	mov	r0, r1
	b	_do_nonsec_entry
ENDPROC(smp_waitloop)
.weak smp_waitloop
#endif
ENTRY(_switch_to_hyp)
	mov	r0, lr
	mov	r1, sp				@ save SVC copy of LR and SP
	isb
	hvc	#0	@ for older asm: .byte 0x70, 0x00, 0x40, 0xe1
	mov	sp, r1
	mov	lr, r0				@ restore SVC copy of LR and SP
	bx	lr
ENDPROC(_switch_to_hyp)

.popsection