summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAnup Patel <anup.patel@wdc.com>2019-05-21 13:42:33 +0300
committerAnup Patel <anup@brainfault.org>2019-05-24 05:52:47 +0300
commita22c6891b718a155b2b8429be62760860b4d6109 (patch)
treee7002877a42905408cb7b3345d5405ecb0b97b3d
parent95b7480ab4b438cf545afae200b40012e83f1dda (diff)
downloadopensbi-a22c6891b718a155b2b8429be62760860b4d6109.tar.xz
include: Make unprivilege load/store functions as non-inline functions
Currently, the unprivileged load/store functions are inline functions. We will be extending these functions to track whether a page/access fault occurs when we execute an unprivileged load/store instruction. To make things simpler and more debuggable, we reduce the number of places which can potentially generate a page/access fault by making all unprivileged load/store functions regular (non-inline) functions. Signed-off-by: Anup Patel <anup.patel@wdc.com> Reviewed-by: Atish Patra <atish.patra@wdc.com>
-rw-r--r--include/sbi/riscv_unpriv.h122
-rw-r--r--lib/objects.mk1
-rw-r--r--lib/riscv_unpriv.c114
3 files changed, 135 insertions, 102 deletions
diff --git a/include/sbi/riscv_unpriv.h b/include/sbi/riscv_unpriv.h
index a2a5851..ed2dbcc 100644
--- a/include/sbi/riscv_unpriv.h
+++ b/include/sbi/riscv_unpriv.h
@@ -10,109 +10,27 @@
#ifndef __RISCV_UNPRIV_H__
#define __RISCV_UNPRIV_H__
-#include <sbi/riscv_encoding.h>
-#include <sbi/sbi_bits.h>
#include <sbi/sbi_types.h>
-#define DECLARE_UNPRIVILEGED_LOAD_FUNCTION(type, insn) \
- static inline type load_##type(const type *addr) \
- { \
- register ulong __mstatus asm("a2"); \
- type val; \
- asm("csrrs %0, " STR(CSR_MSTATUS) ", %3\n" #insn " %1, %2\n" \
- "csrw " STR( \
- CSR_MSTATUS) ", %0" \
- : "+&r"(__mstatus), "=&r"(val) \
- : "m"(*addr), "r"(MSTATUS_MPRV)); \
- return val; \
- }
-
-#define DECLARE_UNPRIVILEGED_STORE_FUNCTION(type, insn) \
- static inline void store_##type(type *addr, type val) \
- { \
- register ulong __mstatus asm("a3"); \
- asm volatile( \
- "csrrs %0, " STR( \
- CSR_MSTATUS) ", %3\n" #insn " %1, %2\n" \
- "csrw " STR(CSR_MSTATUS) ", %0" \
- : "+&r"(__mstatus) \
- : "r"(val), "m"(*addr), "r"(MSTATUS_MPRV)); \
- }
-
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u8, lbu)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u16, lhu)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s8, lb)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s16, lh)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s32, lw)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u8, sb)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u16, sh)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u32, sw)
-#if __riscv_xlen == 64
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lwu)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u64, ld)
-DECLARE_UNPRIVILEGED_STORE_FUNCTION(u64, sd)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, ld)
-#else
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lw)
-DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, lw)
-
-static inline u64 load_u64(const u64 *addr)
-{
- return load_u32((u32 *)addr) + ((u64)load_u32((u32 *)addr + 1) << 32);
-}
-
-static inline void store_u64(u64 *addr, u64 val)
-{
- store_u32((u32 *)addr, val);
- store_u32((u32 *)addr + 1, val >> 32);
-}
-#endif
-
-static inline ulong get_insn(ulong mepc, ulong *mstatus)
-{
- register ulong __mepc asm("a2") = mepc;
- register ulong __mstatus asm("a3");
- ulong val;
-#ifndef __riscv_compressed
- asm("csrrs %[mstatus], " STR(CSR_MSTATUS) ", %[mprv]\n"
-#if __riscv_xlen == 64
- STR(LWU) " %[insn], (%[addr])\n"
-#else
- STR(LW) " %[insn], (%[addr])\n"
-#endif
- "csrw " STR(CSR_MSTATUS) ", %[mstatus]"
- : [mstatus] "+&r"(__mstatus), [insn] "=&r"(val)
- : [mprv] "r"(MSTATUS_MPRV | MSTATUS_MXR), [addr] "r"(__mepc));
-#else
- ulong rvc_mask = 3, tmp;
- asm("csrrs %[mstatus], " STR(CSR_MSTATUS) ", %[mprv]\n"
- "and %[tmp], %[addr], 2\n"
- "bnez %[tmp], 1f\n"
-#if __riscv_xlen == 64
- STR(LWU) " %[insn], (%[addr])\n"
-#else
- STR(LW) " %[insn], (%[addr])\n"
-#endif
- "and %[tmp], %[insn], %[rvc_mask]\n"
- "beq %[tmp], %[rvc_mask], 2f\n"
- "sll %[insn], %[insn], %[xlen_minus_16]\n"
- "srl %[insn], %[insn], %[xlen_minus_16]\n"
- "j 2f\n"
- "1:\n"
- "lhu %[insn], (%[addr])\n"
- "and %[tmp], %[insn], %[rvc_mask]\n"
- "bne %[tmp], %[rvc_mask], 2f\n"
- "lhu %[tmp], 2(%[addr])\n"
- "sll %[tmp], %[tmp], 16\n"
- "add %[insn], %[insn], %[tmp]\n"
- "2: csrw " STR(CSR_MSTATUS) ", %[mstatus]"
- : [mstatus] "+&r"(__mstatus), [insn] "=&r"(val), [tmp] "=&r"(tmp)
- : [mprv] "r"(MSTATUS_MPRV | MSTATUS_MXR), [addr] "r"(__mepc),
- [rvc_mask] "r"(rvc_mask), [xlen_minus_16] "i"(__riscv_xlen - 16));
-#endif
- if (mstatus)
- *mstatus = __mstatus;
- return val;
-}
+#define DECLARE_UNPRIVILEGED_LOAD_FUNCTION(type) \
+ type load_##type(const type *addr);
+
+#define DECLARE_UNPRIVILEGED_STORE_FUNCTION(type) \
+ void store_##type(type *addr, type val);
+
+DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u8)
+DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u16)
+DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s8)
+DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s16)
+DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s32)
+DECLARE_UNPRIVILEGED_STORE_FUNCTION(u8)
+DECLARE_UNPRIVILEGED_STORE_FUNCTION(u16)
+DECLARE_UNPRIVILEGED_STORE_FUNCTION(u32)
+DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32)
+DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u64)
+DECLARE_UNPRIVILEGED_STORE_FUNCTION(u64)
+DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong)
+
+ulong get_insn(ulong mepc, ulong *mstatus);
#endif
diff --git a/lib/objects.mk b/lib/objects.mk
index 8eb860f..35a8bac 100644
--- a/lib/objects.mk
+++ b/lib/objects.mk
@@ -11,6 +11,7 @@ lib-objs-y += riscv_asm.o
lib-objs-y += riscv_atomic.o
lib-objs-y += riscv_hardfp.o
lib-objs-y += riscv_locks.o
+lib-objs-y += riscv_unpriv.o
lib-objs-y += sbi_console.o
lib-objs-y += sbi_ecall.o
diff --git a/lib/riscv_unpriv.c b/lib/riscv_unpriv.c
new file mode 100644
index 0000000..75d8f83
--- /dev/null
+++ b/lib/riscv_unpriv.c
@@ -0,0 +1,114 @@
+/*
+ * SPDX-License-Identifier: BSD-2-Clause
+ *
+ * Copyright (c) 2019 Western Digital Corporation or its affiliates.
+ *
+ * Authors:
+ * Anup Patel <anup.patel@wdc.com>
+ */
+
+#include <sbi/riscv_encoding.h>
+#include <sbi/riscv_unpriv.h>
+#include <sbi/sbi_bits.h>
+
+#define DEFINE_UNPRIVILEGED_LOAD_FUNCTION(type, insn) \
+ type load_##type(const type *addr) \
+ { \
+ register ulong __mstatus asm("a2"); \
+ type val; \
+ asm volatile( \
+ "csrrs %0, " STR(CSR_MSTATUS) ", %3\n" \
+ #insn " %1, %2\n" \
+ "csrw " STR(CSR_MSTATUS) ", %0" \
+ : "+&r"(__mstatus), "=&r"(val) \
+ : "m"(*addr), "r"(MSTATUS_MPRV)); \
+ return val; \
+ }
+
+#define DEFINE_UNPRIVILEGED_STORE_FUNCTION(type, insn) \
+ void store_##type(type *addr, type val) \
+ { \
+ register ulong __mstatus asm("a3"); \
+ asm volatile( \
+ "csrrs %0, " STR(CSR_MSTATUS) ", %3\n" \
+ #insn " %1, %2\n" \
+ "csrw " STR(CSR_MSTATUS) ", %0" \
+ : "+&r"(__mstatus) \
+ : "r"(val), "m"(*addr), "r"(MSTATUS_MPRV)); \
+ }
+
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u8, lbu)
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u16, lhu)
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(s8, lb)
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(s16, lh)
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(s32, lw)
+DEFINE_UNPRIVILEGED_STORE_FUNCTION(u8, sb)
+DEFINE_UNPRIVILEGED_STORE_FUNCTION(u16, sh)
+DEFINE_UNPRIVILEGED_STORE_FUNCTION(u32, sw)
+#if __riscv_xlen == 64
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u32, lwu)
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u64, ld)
+DEFINE_UNPRIVILEGED_STORE_FUNCTION(u64, sd)
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(ulong, ld)
+#else
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(u32, lw)
+DEFINE_UNPRIVILEGED_LOAD_FUNCTION(ulong, lw)
+
+u64 load_u64(const u64 *addr)
+{
+ return load_u32((u32 *)addr) + ((u64)load_u32((u32 *)addr + 1) << 32);
+}
+
+void store_u64(u64 *addr, u64 val)
+{
+ store_u32((u32 *)addr, val);
+ store_u32((u32 *)addr + 1, val >> 32);
+}
+#endif
+
+ulong get_insn(ulong mepc, ulong *mstatus)
+{
+ register ulong __mepc asm("a2") = mepc;
+ register ulong __mstatus asm("a3");
+ ulong val;
+#ifndef __riscv_compressed
+ asm("csrrs %[mstatus], " STR(CSR_MSTATUS) ", %[mprv]\n"
+#if __riscv_xlen == 64
+ STR(LWU) " %[insn], (%[addr])\n"
+#else
+ STR(LW) " %[insn], (%[addr])\n"
+#endif
+ "csrw " STR(CSR_MSTATUS) ", %[mstatus]"
+ : [mstatus] "+&r"(__mstatus), [insn] "=&r"(val)
+ : [mprv] "r"(MSTATUS_MPRV | MSTATUS_MXR), [addr] "r"(__mepc));
+#else
+ ulong rvc_mask = 3, tmp;
+ asm("csrrs %[mstatus], " STR(CSR_MSTATUS) ", %[mprv]\n"
+ "and %[tmp], %[addr], 2\n"
+ "bnez %[tmp], 1f\n"
+#if __riscv_xlen == 64
+ STR(LWU) " %[insn], (%[addr])\n"
+#else
+ STR(LW) " %[insn], (%[addr])\n"
+#endif
+ "and %[tmp], %[insn], %[rvc_mask]\n"
+ "beq %[tmp], %[rvc_mask], 2f\n"
+ "sll %[insn], %[insn], %[xlen_minus_16]\n"
+ "srl %[insn], %[insn], %[xlen_minus_16]\n"
+ "j 2f\n"
+ "1:\n"
+ "lhu %[insn], (%[addr])\n"
+ "and %[tmp], %[insn], %[rvc_mask]\n"
+ "bne %[tmp], %[rvc_mask], 2f\n"
+ "lhu %[tmp], 2(%[addr])\n"
+ "sll %[tmp], %[tmp], 16\n"
+ "add %[insn], %[insn], %[tmp]\n"
+ "2: csrw " STR(CSR_MSTATUS) ", %[mstatus]"
+ : [mstatus] "+&r"(__mstatus), [insn] "=&r"(val), [tmp] "=&r"(tmp)
+ : [mprv] "r"(MSTATUS_MPRV | MSTATUS_MXR), [addr] "r"(__mepc),
+ [rvc_mask] "r"(rvc_mask), [xlen_minus_16] "i"(__riscv_xlen - 16));
+#endif
+ if (mstatus)
+ *mstatus = __mstatus;
+ return val;
+}