diff --git a/kernel/src/arch/riscv32/boot/trap.asm b/kernel/src/arch/riscv32/boot/trap.asm
index 0f3bc3d..04d206d 100644
--- a/kernel/src/arch/riscv32/boot/trap.asm
+++ b/kernel/src/arch/riscv32/boot/trap.asm
@@ -17,39 +17,39 @@ _restore_kernel_sp:
     # sscratch = previous-sp, sp = kernel-sp
 _save_context:
     # provide room for trap frame
-    addi sp, sp, -36 * 4
+    addi sp, sp, -36 * XLENB
     # save x registers except x2 (sp)
-    sw x1, 1*4(sp)
-    sw x3, 3*4(sp)
+    sw x1, 1*XLENB(sp)
+    sw x3, 3*XLENB(sp)
     # tp(x4) = hartid. DON'T change.
-    # sw x4, 4*4(sp)
-    sw x5, 5*4(sp)
-    sw x6, 6*4(sp)
-    sw x7, 7*4(sp)
-    sw x8, 8*4(sp)
-    sw x9, 9*4(sp)
-    sw x10, 10*4(sp)
-    sw x11, 11*4(sp)
-    sw x12, 12*4(sp)
-    sw x13, 13*4(sp)
-    sw x14, 14*4(sp)
-    sw x15, 15*4(sp)
-    sw x16, 16*4(sp)
-    sw x17, 17*4(sp)
-    sw x18, 18*4(sp)
-    sw x19, 19*4(sp)
-    sw x20, 20*4(sp)
-    sw x21, 21*4(sp)
-    sw x22, 22*4(sp)
-    sw x23, 23*4(sp)
-    sw x24, 24*4(sp)
-    sw x25, 25*4(sp)
-    sw x26, 26*4(sp)
-    sw x27, 27*4(sp)
-    sw x28, 28*4(sp)
-    sw x29, 29*4(sp)
-    sw x30, 30*4(sp)
-    sw x31, 31*4(sp)
+    # sw x4, 4*XLENB(sp)
+    sw x5, 5*XLENB(sp)
+    sw x6, 6*XLENB(sp)
+    sw x7, 7*XLENB(sp)
+    sw x8, 8*XLENB(sp)
+    sw x9, 9*XLENB(sp)
+    sw x10, 10*XLENB(sp)
+    sw x11, 11*XLENB(sp)
+    sw x12, 12*XLENB(sp)
+    sw x13, 13*XLENB(sp)
+    sw x14, 14*XLENB(sp)
+    sw x15, 15*XLENB(sp)
+    sw x16, 16*XLENB(sp)
+    sw x17, 17*XLENB(sp)
+    sw x18, 18*XLENB(sp)
+    sw x19, 19*XLENB(sp)
+    sw x20, 20*XLENB(sp)
+    sw x21, 21*XLENB(sp)
+    sw x22, 22*XLENB(sp)
+    sw x23, 23*XLENB(sp)
+    sw x24, 24*XLENB(sp)
+    sw x25, 25*XLENB(sp)
+    sw x26, 26*XLENB(sp)
+    sw x27, 27*XLENB(sp)
+    sw x28, 28*XLENB(sp)
+    sw x29, 29*XLENB(sp)
+    sw x30, 30*XLENB(sp)
+    sw x31, 31*XLENB(sp)
 
     # get sp, sstatus, sepc, stval, scause
     # set sscratch = 0
@@ -59,20 +59,20 @@ _save_context:
     csrr s3, (xtval)
     csrr s4, (xcause)
     # store sp, sstatus, sepc, sbadvaddr, scause
-    sw s0, 2*4(sp)
-    sw s1, 32*4(sp)
-    sw s2, 33*4(sp)
-    sw s3, 34*4(sp)
-    sw s4, 35*4(sp)
+    sw s0, 2*XLENB(sp)
+    sw s1, 32*XLENB(sp)
+    sw s2, 33*XLENB(sp)
+    sw s3, 34*XLENB(sp)
+    sw s4, 35*XLENB(sp)
 .endm
 
 .macro RESTORE_ALL
-    lw s1, 32*4(sp)             # s1 = sstatus
-    lw s2, 33*4(sp)             # s2 = sepc
+    lw s1, 32*XLENB(sp)         # s1 = sstatus
+    lw s2, 33*XLENB(sp)         # s2 = sepc
     andi s0, s1, 1 << 8
     bnez s0, _restore_context   # back to S-mode? (sstatus.SPP = 1)
 _save_kernel_sp:
-    addi s0, sp, 36*4
+    addi s0, sp, 36*XLENB
     csrw (xscratch), s0         # sscratch = kernel-sp
 _restore_context:
     # restore sstatus, sepc
@@ -80,38 +80,38 @@ _restore_context:
     csrw (xepc), s2
 
     # restore x registers except x2 (sp)
-    lw x1, 1*4(sp)
-    lw x3, 3*4(sp)
-    # lw x4, 4*4(sp)
-    lw x5, 5*4(sp)
-    lw x6, 6*4(sp)
-    lw x7, 7*4(sp)
-    lw x8, 8*4(sp)
-    lw x9, 9*4(sp)
-    lw x10, 10*4(sp)
-    lw x11, 11*4(sp)
-    lw x12, 12*4(sp)
-    lw x13, 13*4(sp)
-    lw x14, 14*4(sp)
-    lw x15, 15*4(sp)
-    lw x16, 16*4(sp)
-    lw x17, 17*4(sp)
-    lw x18, 18*4(sp)
-    lw x19, 19*4(sp)
-    lw x20, 20*4(sp)
-    lw x21, 21*4(sp)
-    lw x22, 22*4(sp)
-    lw x23, 23*4(sp)
-    lw x24, 24*4(sp)
-    lw x25, 25*4(sp)
-    lw x26, 26*4(sp)
-    lw x27, 27*4(sp)
-    lw x28, 28*4(sp)
-    lw x29, 29*4(sp)
-    lw x30, 30*4(sp)
-    lw x31, 31*4(sp)
+    lw x1, 1*XLENB(sp)
+    lw x3, 3*XLENB(sp)
+    # lw x4, 4*XLENB(sp)
+    lw x5, 5*XLENB(sp)
+    lw x6, 6*XLENB(sp)
+    lw x7, 7*XLENB(sp)
+    lw x8, 8*XLENB(sp)
+    lw x9, 9*XLENB(sp)
+    lw x10, 10*XLENB(sp)
+    lw x11, 11*XLENB(sp)
+    lw x12, 12*XLENB(sp)
+    lw x13, 13*XLENB(sp)
+    lw x14, 14*XLENB(sp)
+    lw x15, 15*XLENB(sp)
+    lw x16, 16*XLENB(sp)
+    lw x17, 17*XLENB(sp)
+    lw x18, 18*XLENB(sp)
+    lw x19, 19*XLENB(sp)
+    lw x20, 20*XLENB(sp)
+    lw x21, 21*XLENB(sp)
+    lw x22, 22*XLENB(sp)
+    lw x23, 23*XLENB(sp)
+    lw x24, 24*XLENB(sp)
+    lw x25, 25*XLENB(sp)
+    lw x26, 26*XLENB(sp)
+    lw x27, 27*XLENB(sp)
+    lw x28, 28*XLENB(sp)
+    lw x29, 29*XLENB(sp)
+    lw x30, 30*XLENB(sp)
+    lw x31, 31*XLENB(sp)
     # restore sp last
-    lw x2, 2*4(sp)
+    lw x2, 2*XLENB(sp)
 .endm
 
 .section .text
diff --git a/kernel/src/arch/riscv32/compiler_rt.c b/kernel/src/arch/riscv32/compiler_rt.c
index 8746716..930c564 100644
--- a/kernel/src/arch/riscv32/compiler_rt.c
+++ b/kernel/src/arch/riscv32/compiler_rt.c
@@ -49,6 +49,7 @@ int __atomic_fetch_sub_4(int* ptr, int val) {
     return res;
 }
 
+#ifdef TARGET_IS_64BITS
 typedef unsigned long long u64;
 
 u64 __atomic_load_8(u64 *src) {
@@ -87,3 +88,4 @@ u64 __atomic_fetch_sub_8(u64* ptr, u64 val) {
     __asm__ __volatile__("amoadd.d.rl %0, %1, (%2)" : "=r"(res) : "r"(-val), "r"(ptr) : "memory");
     return res;
 }
+#endif
diff --git a/kernel/src/arch/riscv32/mod.rs b/kernel/src/arch/riscv32/mod.rs
index 72dae42..32b7dd8 100644
--- a/kernel/src/arch/riscv32/mod.rs
+++ b/kernel/src/arch/riscv32/mod.rs
@@ -63,6 +63,18 @@ global_asm!("
     .macro XRET\n sret\n .endm
 ");
 
+#[cfg(target_pointer_width = "32")]
+global_asm!("
+    .equ XLENB, 4
+    .equ XLENb, 32
+");
+#[cfg(target_pointer_width = "64")]
+global_asm!("
+    .equ XLENB, 8
+    .equ XLENb, 64
+");
+
+
 #[cfg(feature = "board_k210")]
 global_asm!(include_str!("boot/boot_k210.asm"));
 global_asm!(include_str!("boot/entry.asm"));
@@ -84,4 +96,4 @@ static mut BBL_FUNCTIONS_PTR: *const BBLFunctions = ::core::ptr::null();
 use lazy_static::lazy_static;
 lazy_static! {
     static ref BBL: BBLFunctions = unsafe { BBL_FUNCTIONS_PTR.read() };
-}
\ No newline at end of file
+}
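
One caveat with the trap.asm change above: only the frame offsets are parameterized. `sw` and `lw` always transfer 32 bits regardless of XLEN, so a 64-bit build of this trap code would still truncate every saved register to its low word. Below is a minimal sketch of how the same `#[cfg(target_pointer_width)]` selection in mod.rs could also pick the access width; the `STORE`/`LOAD` macro names and this exact shape are an assumption for illustration, not part of this patch:

```rust
// Sketch only: width-aware load/store macros defined alongside XLENB.
// STORE/LOAD are hypothetical names; the patch itself still emits sw/lw.
#[cfg(target_pointer_width = "32")]
global_asm!(r"
    .macro STORE reg, offset
        sw \reg, \offset*XLENB(sp)
    .endm
    .macro LOAD reg, offset
        lw \reg, \offset*XLENB(sp)
    .endm
");
#[cfg(target_pointer_width = "64")]
global_asm!(r"
    .macro STORE reg, offset
        sd \reg, \offset*XLENB(sp)    # sd/ld move the full 64-bit register
    .endm
    .macro LOAD reg, offset
        ld \reg, \offset*XLENB(sp)
    .endm
");
```

With macros like these, trap.asm could write `STORE x1, 1` instead of `sw x1, 1*XLENB(sp)`, and the same source would assemble correctly at either register width.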