|
|
|
# (review: stray unified-diff hunk marker neutralized — was: @ -5,6 +5,9 @@)
|
|
|
|
|
# xcause
|
|
|
|
|
# xtval
|
|
|
|
|
# XRET
|
|
|
|
|
# XLENB
|
|
|
|
|
# LOAD
|
|
|
|
|
# STORE
|
|
|
|
|
|
|
|
|
|
# SAVE_ALL — build a 36-slot trap frame on the kernel stack and save all
# general registers plus xstatus/xepc/xtval/xcause into it.
# Frame layout (slot index * XLENB): 1..31 = x1..x31 (x2 slot = 2 holds the
# pre-trap sp, x4 slot intentionally unused), 32 = xstatus, 33 = xepc,
# 34 = xtval, 35 = xcause.
# Uses the XLEN-generic STORE macro (resolves to sw/sd) so the same code
# serves RV32 and RV64; the legacy raw `sw` duplicates left over from an
# unresolved diff have been removed.
.macro SAVE_ALL
    # If coming from userspace, preserve the user stack pointer and load
    # the kernel stack pointer. If we came from the kernel, xscratch
    # holds 0 and we continue on the current stack.
    # NOTE(review): the next four lines fell in a gap of the diff residue
    # this file was recovered from; reconstructed from the visible
    # `_save_context` target and the "set sscratch = 0" protocol below —
    # TODO confirm against the upstream file.
    csrrw sp, (xscratch), sp
    bnez sp, _save_context
_restore_kernel_sp:
    csrr sp, (xscratch)
_save_context:
    # provide room for trap frame
    addi sp, sp, -36 * XLENB
    # save x registers except x2 (sp)
    STORE x1, 1
    STORE x3, 3
    # tp(x4) = hartid. DON'T change.
    # STORE x4, 4
    STORE x5, 5
    STORE x6, 6
    STORE x7, 7
    STORE x8, 8
    STORE x9, 9
    STORE x10, 10
    STORE x11, 11
    STORE x12, 12
    STORE x13, 13
    STORE x14, 14
    STORE x15, 15
    STORE x16, 16
    STORE x17, 17
    STORE x18, 18
    STORE x19, 19
    STORE x20, 20
    STORE x21, 21
    STORE x22, 22
    STORE x23, 23
    STORE x24, 24
    STORE x25, 25
    STORE x26, 26
    STORE x27, 27
    STORE x28, 28
    STORE x29, 29
    STORE x30, 30
    STORE x31, 31
    # get sp, sstatus, sepc, stval, scause
    # set sscratch = 0
    # NOTE(review): the next three CSR reads fell in a diff gap;
    # reconstructed from the comment above and the s0..s4 stores below —
    # TODO confirm. s0 = pre-trap sp (old xscratch), and xscratch is
    # cleared so a nested kernel trap stays on the current stack.
    csrrw s0, (xscratch), x0
    csrr s1, (xstatus)
    csrr s2, (xepc)
    csrr s3, (xtval)
    csrr s4, (xcause)
    # store sp, sstatus, sepc, sbadvaddr, scause
    STORE s0, 2
    STORE s1, 32
    STORE s2, 33
    STORE s3, 34
    STORE s4, 35
.endm
|
|
|
|
|
|
|
|
|
|
# RESTORE_ALL — inverse of SAVE_ALL: restore xstatus/xepc and all general
# registers from the 36-slot trap frame, popping it implicitly by
# reloading x2 (sp) from slot 2 as the last step.
# If returning to U-mode (sstatus.SPP = 0), first stash the kernel stack
# top (sp + frame size) in xscratch so the next user->kernel trap can find
# it; if returning to S-mode, xscratch stays 0 per SAVE_ALL's protocol.
# Uses the XLEN-generic LOAD macro; the legacy raw `lw` duplicates left
# over from an unresolved diff have been removed.
.macro RESTORE_ALL
    LOAD s1, 32             # s1 = sstatus
    LOAD s2, 33             # s2 = sepc
    andi s0, s1, 1 << 8     # isolate sstatus.SPP
    bnez s0, _restore_context # back to S-mode? (sstatus.SPP = 1)
_save_kernel_sp:
    # returning to U-mode: xscratch = kernel sp above this frame
    # NOTE(review): these two lines and the xstatus write below fell in a
    # gap of the diff residue; reconstructed from the visible
    # `_restore_context` branch and the `csrw (xepc), s2` pairing —
    # TODO confirm against the upstream file.
    addi s0, sp, 36 * XLENB
    csrw (xscratch), s0
_restore_context:
    # restore sstatus, sepc
    csrw (xstatus), s1
    csrw (xepc), s2
    # restore x registers except x2 (sp)
    LOAD x1, 1
    LOAD x3, 3
    # tp(x4) = hartid. DON'T change.
    # LOAD x4, 4
    LOAD x5, 5
    LOAD x6, 6
    LOAD x7, 7
    LOAD x8, 8
    LOAD x9, 9
    LOAD x10, 10
    LOAD x11, 11
    LOAD x12, 12
    LOAD x13, 13
    LOAD x14, 14
    LOAD x15, 15
    LOAD x16, 16
    LOAD x17, 17
    LOAD x18, 18
    LOAD x19, 19
    LOAD x20, 20
    LOAD x21, 21
    LOAD x22, 22
    LOAD x23, 23
    LOAD x24, 24
    LOAD x25, 25
    LOAD x26, 26
    LOAD x27, 27
    LOAD x28, 28
    LOAD x29, 29
    LOAD x30, 30
    LOAD x31, 31
    # restore sp last — everything above addresses the frame through sp
    LOAD x2, 2
.endm
|
|
|
|
|
|
|
|
|
|
# Code section begins here; its contents continue past this excerpt.
.section .text
|
|
|
|
|