fix trap for rv64

master
WangRunji 6 years ago
parent 8529d9fe4e
commit 3a4b8f5dac

@@ -1,14 +1,14 @@
 .section .text.boot
 boot:
-//lui x1, 0x40000
-//jalr x0, x1, 8
 csrwi mie, 0
 csrwi mip, 0
 csrwi mscratch, 0
 csrwi medeleg, 0
 csrwi mideleg, 0
-csrwi mstatus, 0
+// enable float unit
+li t0, 0x00006000 // MSTATUS_FS
+csrw mstatus, t0
 // uart init
 lui x1, 0x38000
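
Side note on the new constant: a minimal sketch, assuming the standard RISC-V privileged-spec layout of mstatus, of how 0x00006000 decomposes. The constant names below are illustrative, not taken from this commit. mstatus.FS is the two-bit floating-point status field at bits [14:13]; leaving it at zero makes every FP instruction trap, so boot code sets it before anything touches the FPU.

const MSTATUS_FS_SHIFT: u32 = 13;                        // FS field starts at bit 13
const MSTATUS_FS_DIRTY: u64 = 0b11 << MSTATUS_FS_SHIFT;  // FS = 0b11 ("Dirty"): FPU enabled

fn main() {
    // Matches the immediate loaded into t0 above.
    assert_eq!(MSTATUS_FS_DIRTY, 0x0000_6000);
}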

@@ -41,7 +41,7 @@ SECTIONS
 .bss : {
 sbss = .;
-*(.bss .bss.*)
+*(.bss .bss.* .sbss.*)
 ebss = .;
 }
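
Why pulling in .sbss.* matters: the sbss/ebss symbols bound the region the kernel presumably zeroes at startup, and the RISC-V toolchain places small zero-initialized objects in .sbss/.sbss.* rather than .bss. A hedged sketch of such a clearing routine follows; it is not code from this repository, and the helper name is hypothetical.

use core::ptr;

extern "C" {
    static mut sbss: u8; // start of .bss, defined by the linker script
    static mut ebss: u8; // end of .bss
}

// Hypothetical helper: zero everything between sbss and ebss. Without the
// .sbss.* input sections folded into .bss, small zero-initialized data would
// land outside this range and never get cleared.
unsafe fn clear_bss() {
    let start = ptr::addr_of_mut!(sbss) as usize;
    let end = ptr::addr_of_mut!(ebss) as usize;
    ptr::write_bytes(start as *mut u8, 0, end - start);
}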

@@ -5,6 +5,9 @@
 # xcause
 # xtval
 # XRET
+# XLENB
+# LOAD
+# STORE
 .macro SAVE_ALL
 # If coming from userspace, preserve the user stack pointer and load
@@ -19,37 +22,37 @@ _save_context:
 # provide room for trap frame
 addi sp, sp, -36 * XLENB
 # save x registers except x2 (sp)
-sw x1, 1*XLENB(sp)
-sw x3, 3*XLENB(sp)
+STORE x1, 1
+STORE x3, 3
 # tp(x4) = hartid. DON'T change.
-# sw x4, 4*XLENB(sp)
-sw x5, 5*XLENB(sp)
-sw x6, 6*XLENB(sp)
-sw x7, 7*XLENB(sp)
-sw x8, 8*XLENB(sp)
-sw x9, 9*XLENB(sp)
-sw x10, 10*XLENB(sp)
-sw x11, 11*XLENB(sp)
-sw x12, 12*XLENB(sp)
-sw x13, 13*XLENB(sp)
-sw x14, 14*XLENB(sp)
-sw x15, 15*XLENB(sp)
-sw x16, 16*XLENB(sp)
-sw x17, 17*XLENB(sp)
-sw x18, 18*XLENB(sp)
-sw x19, 19*XLENB(sp)
-sw x20, 20*XLENB(sp)
-sw x21, 21*XLENB(sp)
-sw x22, 22*XLENB(sp)
-sw x23, 23*XLENB(sp)
-sw x24, 24*XLENB(sp)
-sw x25, 25*XLENB(sp)
-sw x26, 26*XLENB(sp)
-sw x27, 27*XLENB(sp)
-sw x28, 28*XLENB(sp)
-sw x29, 29*XLENB(sp)
-sw x30, 30*XLENB(sp)
-sw x31, 31*XLENB(sp)
+# STORE x4, 4
+STORE x5, 5
+STORE x6, 6
+STORE x7, 7
+STORE x8, 8
+STORE x9, 9
+STORE x10, 10
+STORE x11, 11
+STORE x12, 12
+STORE x13, 13
+STORE x14, 14
+STORE x15, 15
+STORE x16, 16
+STORE x17, 17
+STORE x18, 18
+STORE x19, 19
+STORE x20, 20
+STORE x21, 21
+STORE x22, 22
+STORE x23, 23
+STORE x24, 24
+STORE x25, 25
+STORE x26, 26
+STORE x27, 27
+STORE x28, 28
+STORE x29, 29
+STORE x30, 30
+STORE x31, 31
 # get sp, sstatus, sepc, stval, scause
 # set sscratch = 0
@@ -59,16 +62,16 @@ _save_context:
 csrr s3, (xtval)
 csrr s4, (xcause)
 # store sp, sstatus, sepc, sbadvaddr, scause
-sw s0, 2*XLENB(sp)
-sw s1, 32*XLENB(sp)
-sw s2, 33*XLENB(sp)
-sw s3, 34*XLENB(sp)
-sw s4, 35*XLENB(sp)
+STORE s0, 2
+STORE s1, 32
+STORE s2, 33
+STORE s3, 34
+STORE s4, 35
 .endm
 .macro RESTORE_ALL
-lw s1, 32*XLENB(sp) # s1 = sstatus
-lw s2, 33*XLENB(sp) # s2 = sepc
+LOAD s1, 32 # s1 = sstatus
+LOAD s2, 33 # s2 = sepc
 andi s0, s1, 1 << 8
 bnez s0, _restore_context # back to S-mode? (sstatus.SPP = 1)
 _save_kernel_sp:
@@ -80,38 +83,38 @@ _restore_context:
 csrw (xepc), s2
 # restore x registers except x2 (sp)
-lw x1, 1*XLENB(sp)
-lw x3, 3*XLENB(sp)
-# lw x4, 4*XLENB(sp)
-lw x5, 5*XLENB(sp)
-lw x6, 6*XLENB(sp)
-lw x7, 7*XLENB(sp)
-lw x8, 8*XLENB(sp)
-lw x9, 9*XLENB(sp)
-lw x10, 10*XLENB(sp)
-lw x11, 11*XLENB(sp)
-lw x12, 12*XLENB(sp)
-lw x13, 13*XLENB(sp)
-lw x14, 14*XLENB(sp)
-lw x15, 15*XLENB(sp)
-lw x16, 16*XLENB(sp)
-lw x17, 17*XLENB(sp)
-lw x18, 18*XLENB(sp)
-lw x19, 19*XLENB(sp)
-lw x20, 20*XLENB(sp)
-lw x21, 21*XLENB(sp)
-lw x22, 22*XLENB(sp)
-lw x23, 23*XLENB(sp)
-lw x24, 24*XLENB(sp)
-lw x25, 25*XLENB(sp)
-lw x26, 26*XLENB(sp)
-lw x27, 27*XLENB(sp)
-lw x28, 28*XLENB(sp)
-lw x29, 29*XLENB(sp)
-lw x30, 30*XLENB(sp)
-lw x31, 31*XLENB(sp)
+LOAD x1, 1
+LOAD x3, 3
+# LOAD x4, 4
+LOAD x5, 5
+LOAD x6, 6
+LOAD x7, 7
+LOAD x8, 8
+LOAD x9, 9
+LOAD x10, 10
+LOAD x11, 11
+LOAD x12, 12
+LOAD x13, 13
+LOAD x14, 14
+LOAD x15, 15
+LOAD x16, 16
+LOAD x17, 17
+LOAD x18, 18
+LOAD x19, 19
+LOAD x20, 20
+LOAD x21, 21
+LOAD x22, 22
+LOAD x23, 23
+LOAD x24, 24
+LOAD x25, 25
+LOAD x26, 26
+LOAD x27, 27
+LOAD x28, 28
+LOAD x29, 29
+LOAD x30, 30
+LOAD x31, 31
 # restore sp last
-lw x2, 2*XLENB(sp)
+LOAD x2, 2
 .endm
 .section .text
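
For orientation, the 36 * XLENB bytes reserved by SAVE_ALL and the slot numbers used above imply the following frame layout. This is a sketch only; the struct and field names are illustrative, not lifted from this commit.

#[repr(C)]
pub struct TrapFrame {
    pub x: [usize; 32], // slots 0-31: general registers; slot 0 unused, slot 2 holds the trapped sp
    pub sstatus: usize, // slot 32
    pub sepc: usize,    // slot 33
    pub stval: usize,   // slot 34
    pub scause: usize,  // slot 35
}

// 36 slots of XLEN/8 bytes each, matching "addi sp, sp, -36 * XLENB".
const _: () = assert!(core::mem::size_of::<TrapFrame>() == 36 * core::mem::size_of::<usize>());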

@@ -67,16 +67,28 @@ global_asm!("
 global_asm!("
 .equ XLENB, 4
 .equ XLENb, 32
+.macro LOAD a1, a2
+lw \\a1, \\a2*XLENB(sp)
+.endm
+.macro STORE a1, a2
+sw \\a1, \\a2*XLENB(sp)
+.endm
 ");
 #[cfg(target_pointer_width = "64")]
 global_asm!("
 .equ XLENB, 8
 .equ XLENb, 64
+.macro LOAD a1, a2
+ld \\a1, \\a2*XLENB(sp)
+.endm
+.macro STORE a1, a2
+sd \\a1, \\a2*XLENB(sp)
+.endm
 ");
 #[cfg(feature = "board_k210")]
-global_asm!(include_str!("boot/boot_k210.asm"));
+global_asm!(include_str!("board/k210/boot.asm"));
 global_asm!(include_str!("boot/entry.asm"));
 global_asm!(include_str!("boot/trap.asm"));
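
The rv64 fix in a nutshell: trap.asm previously hard-coded sw/lw, which on a 64-bit target save and restore only the low 32 bits of each register even though the frame slots are already XLENB (8) bytes apart. The new LOAD/STORE macros expand to lw/sw on rv32 and to ld/sd on rv64, so "STORE s2, 33" becomes "sd s2, 33*8(sp)" on rv64. Below is a minimal sketch of the same width selection in ordinary Rust; the constant name is illustrative.

// XLENB mirrors the .equ above: bytes per general register / trap-frame slot.
#[cfg(target_pointer_width = "32")]
const XLENB: usize = 4;
#[cfg(target_pointer_width = "64")]
const XLENB: usize = 8;

fn main() {
    // A whole register fits in one slot on either width...
    assert_eq!(XLENB, core::mem::size_of::<usize>());
    // ...and this is the byte offset that "STORE s2, 33" (sepc) ends up using.
    println!("slot 33 = sp + {} bytes", 33 * XLENB);
}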
