MIPS: Use unchecked immediate addition/subtraction

In MIPS assembly there have historically been two variants of immediate
addition: the standard "addi", which traps if the addition overflows,
and the unchecked "addiu", which never traps. Release 6 of the MIPS
architecture removes the trapping variants of immediate addition and
subtraction. In preparation for supporting MIPSr6, stop using the
trapping instructions in assembly and switch to their unchecked
variants.

Signed-off-by: Paul Burton <paul.burton@imgtec.com>
Committed-by: Daniel Schwierzeck
(commit 9f8ac82452, parent 400df30998, branch master)
---
 arch/mips/cpu/start.S      | 22 ++++++++++++----------
 arch/mips/lib/cache_init.S |  2 +-
 2 files changed, 13 insertions(+), 11 deletions(-)
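
As the commit message notes, the two immediate-add encodings differ only
in how they treat signed overflow. A minimal illustration (register
values are assumed for the example, not taken from the patch):

	lui	t1, 0x7fff
	ori	t1, t1, 0xffff		# t1 = 0x7fffffff (INT32_MAX)

	addi	t0, t1, 1		# traps: two's-complement overflow raises
					# an Integer Overflow exception; this
					# encoding is removed in MIPS32/64 R6
	addiu	t0, t1, 1		# no trap: the result wraps to 0x80000000;
					# valid on every architecture revision

Despite the "u" suffix, addiu still sign-extends its 16-bit immediate;
the only behavioural difference from addi is the absence of the overflow
trap, which is what makes it a drop-in replacement throughout this patch.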

diff --git a/arch/mips/cpu/start.S b/arch/mips/cpu/start.S
--- a/arch/mips/cpu/start.S
+++ b/arch/mips/cpu/start.S
@@ -164,12 +164,14 @@ reset:
 	li	t0, -16
 	PTR_LI	t1, CONFIG_SYS_INIT_SP_ADDR
 	and	sp, t1, t0		# force 16 byte alignment
-	PTR_SUB	sp, sp, GD_SIZE		# reserve space for gd
+	PTR_SUBU \
+		sp, sp, GD_SIZE		# reserve space for gd
 	and	sp, sp, t0		# force 16 byte alignment
 	move	k0, sp			# save gd pointer
 #ifdef CONFIG_SYS_MALLOC_F_LEN
 	li	t2, CONFIG_SYS_MALLOC_F_LEN
-	PTR_SUB	sp, sp, t2		# reserve space for early malloc
+	PTR_SUBU \
+		sp, sp, t2		# reserve space for early malloc
 	and	sp, sp, t0		# force 16 byte alignment
 #endif
 	move	fp, sp
@@ -179,7 +181,7 @@ reset:
 1:
 	PTR_S	zero, 0(t0)
 	blt	t0, t1, 1b
-	 PTR_ADDI t0, PTRSIZE
+	 PTR_ADDIU t0, PTRSIZE

 #ifdef CONFIG_SYS_MALLOC_F_LEN
 	PTR_S	sp, GD_MALLOC_BASE(k0)	# gd->malloc_base offset
@@ -237,7 +239,7 @@ ENTRY(relocate_code)
 	move	a0, s2			# a0 <-- destination address

 	/* Jump to where we've relocated ourselves */
-	PTR_ADDI t0, s2, in_ram - _start
+	PTR_ADDIU t0, s2, in_ram - _start
 	jr	t0
 	 nop

@@ -257,7 +259,7 @@ in_ram:
 	PTR_L	t3, -(1 * PTRSIZE)(t0)	# t3 <-- num_got_entries
 	PTR_L	t8, -(2 * PTRSIZE)(t0)	# t8 <-- _GLOBAL_OFFSET_TABLE_
 	PTR_ADD	t8, s1			# t8 now holds relocated _G_O_T_
-	PTR_ADDI t8, t8, 2 * PTRSIZE	# skipping first two entries
+	PTR_ADDIU t8, t8, 2 * PTRSIZE	# skipping first two entries
 	PTR_LI	t2, 2
 1:
 	PTR_L	t1, 0(t8)
@@ -265,16 +267,16 @@ in_ram:
 	PTR_ADD	t1, s1
 	PTR_S	t1, 0(t8)
 2:
-	PTR_ADDI t2, 1
+	PTR_ADDIU t2, 1
 	blt	t2, t3, 1b
-	 PTR_ADDI t8, PTRSIZE
+	 PTR_ADDIU t8, PTRSIZE

 	/* Update dynamic relocations */
 	PTR_L	t1, -(4 * PTRSIZE)(t0)	# t1 <-- __rel_dyn_start
 	PTR_L	t2, -(5 * PTRSIZE)(t0)	# t2 <-- __rel_dyn_end

 	b	2f			# skip first reserved entry
-	 PTR_ADDI t1, 2 * PTRSIZE
+	 PTR_ADDIU t1, 2 * PTRSIZE

 1:
 	lw	t8, -4(t1)		# t8 <-- relocation info
@@ -293,7 +295,7 @@ in_ram:
 2:
 	blt	t1, t2, 1b
-	 PTR_ADDI t1, 2 * PTRSIZE	# each rel.dyn entry is 2*PTRSIZE bytes
+	 PTR_ADDIU t1, 2 * PTRSIZE	# each rel.dyn entry is 2*PTRSIZE bytes

 	/*
 	 * Clear BSS
@@ -307,7 +309,7 @@ in_ram:
 1:
 	PTR_S	zero, 0(t1)
 	blt	t1, t2, 1b
-	 PTR_ADDI t1, PTRSIZE
+	 PTR_ADDIU t1, PTRSIZE

 	move	a0, s0			# a0 <-- gd
 	move	a1, s2

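A note on the PTR_* mnemonics in start.S: they are width-agnostic macros
in the asm.h style rather than raw opcodes, so a single source change
covers both 32-bit and 64-bit builds. A sketch of how such definitions
commonly look (illustrative, not copied from the U-Boot header):

	#if _MIPS_SZPTR == 32
	# define PTR_ADDIU	addiu	/* 32-bit: pointer-sized add, no trap */
	# define PTR_SUBU	subu
	#else
	# define PTR_ADDIU	daddiu	/* 64-bit: doubleword variants */
	# define PTR_SUBU	dsubu
	#endif

Note also that subu with a constant operand is itself an assembler macro
which GAS typically expands to addiu with the negated immediate (and sub
to a trapping addi), so the trapping vs. non-trapping choice carries over
to the PTR_SUB/PTR_SUBU uses above as well.
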
diff --git a/arch/mips/lib/cache_init.S b/arch/mips/lib/cache_init.S
--- a/arch/mips/lib/cache_init.S
+++ b/arch/mips/lib/cache_init.S
@@ -64,7 +64,7 @@
 	/* detect associativity */
 	srl	\sz, $1, \off + MIPS_CONF1_DA_SHF - MIPS_CONF1_DA_SHF
 	andi	\sz, \sz, (MIPS_CONF1_DA >> MIPS_CONF1_DA_SHF)
-	addi	\sz, \sz, 1
+	addiu	\sz, \sz, 1

 	/* sz *= line_sz */
 	mul	\sz, \sz, \line_sz
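
For reference on the cache_init.S hunk: the Config1 associativity fields
encode the number of ways minus one, so the addiu merely converts the
extracted field into a way count. A worked example with assumed values
(not taken from the patch):

	# Assume Config1.DA = 3 (a 4-way data cache) and a 32-byte line:
	#   srl/andi               -> \sz = 3
	#   addiu \sz, \sz, 1      -> \sz = 4                 (ways)
	#   mul \sz, \sz, \line_sz -> \sz = 4 * 32 = 128      (bytes)
	# Later steps of the macro (not shown here) scale this by the
	# number of sets to arrive at the total cache size.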
