summaryrefslogtreecommitdiff
path: root/include/sbi/sbi_unpriv.h
blob: 11ece0d4672f5501901674263d403962c3914f69 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
/*
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Western Digital Corporation or its affiliates.
 *
 * Authors:
 *   Anup Patel <anup.patel@wdc.com>
 */

#ifndef __SBI_UNPRIV_H__
#define __SBI_UNPRIV_H__

#include <sbi/riscv_encoding.h>
#include <sbi/sbi_bits.h>
#include <sbi/sbi_types.h>

/*
 * Generate an unprivileged load accessor load_<type>().
 *
 * The generated function reads *addr with MSTATUS_MPRV set, so the
 * M-mode load is translated and permission-checked as if it came from
 * the previous (S/U) privilege mode. mstatus is restored immediately
 * after the access.
 *
 * @addr: virtual address to load from (translated per MPP mode)
 * @mepc: trap PC of the instruction being emulated
 * Returns the loaded value.
 *
 * NOTE(review): __mepc/__mstatus are pinned to a2/a3 — presumably so
 * the M-mode trap handler can locate them if the access faults;
 * confirm against the trap handler implementation.
 *
 * The asm is 'volatile' because the csrrs/csrw pair has a global side
 * effect (toggling MPRV) that must execute exactly as written, even
 * if the compiler could otherwise CSE or reorder the access; this
 * also matches DECLARE_UNPRIVILEGED_STORE_FUNCTION below.
 */
#define DECLARE_UNPRIVILEGED_LOAD_FUNCTION(type, insn)			\
static inline type load_##type(const type *addr, ulong mepc)		\
{									\
	register ulong __mepc asm ("a2") = mepc;			\
	register ulong __mstatus asm ("a3");				\
	type val;							\
	asm volatile ("csrrs %0, mstatus, %3\n"				\
		#insn " %1, %2\n"					\
		"csrw mstatus, %0"					\
	: "+&r" (__mstatus), "=&r" (val)				\
	: "m" (*addr), "r" (MSTATUS_MPRV), "r" (__mepc));		\
	return val;							\
}

/*
 * Generate an unprivileged store accessor store_<type>().
 *
 * The generated function writes val to *addr with MSTATUS_MPRV set,
 * so the M-mode store is translated and permission-checked as if it
 * came from the previous (S/U) privilege mode. mstatus is restored
 * immediately after the access. The asm is volatile because the
 * csrrs/csrw pair toggles MPRV — a global side effect.
 *
 * @addr: virtual address to store to (translated per MPP mode)
 * @val:  value to store
 * @mepc: trap PC of the instruction being emulated
 *
 * NOTE(review): __mepc/__mstatus are pinned to a2/a3 — presumably so
 * the M-mode trap handler can locate them if the access faults;
 * confirm against the trap handler implementation.
 */
#define DECLARE_UNPRIVILEGED_STORE_FUNCTION(type, insn)			\
static inline void store_##type(type *addr, type val, ulong mepc)	\
{									\
	register ulong __mepc asm ("a2") = mepc;			\
	register ulong __mstatus asm ("a3");				\
	asm volatile ("csrrs %0, mstatus, %3\n"				\
		#insn " %1, %2\n"					\
		"csrw mstatus, %0"					\
	: "+&r" (__mstatus)						\
	: "r" (val), "m" (*addr), "r" (MSTATUS_MPRV), "r" (__mepc));	\
}

/* Instantiate width-specific unprivileged accessors from the macros
 * above; each expansion pairs a C type with the matching RISC-V
 * load/store mnemonic. */
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u8, lbu)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u16, lhu)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s8, lb)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s16, lh)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s32, lw)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u8, sb)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u16, sh)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u32, sw)
#if __riscv_xlen == 64
/* RV64: native 64-bit (ld/sd) and zero-extending 32-bit (lwu) ops. */
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lwu)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u64, ld)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u64, sd)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, ld)
#else
/* RV32: lw serves both u32 and ulong; 64-bit access is synthesized
 * from two 32-bit accesses by the helpers below. */
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lw)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, lw)

/*
 * RV32 fallback: emulate an unprivileged 64-bit load with two 32-bit
 * unprivileged loads (low 32 bits at addr, high 32 bits at addr + 4).
 *
 * NOTE: the combined load is not atomic — either half may trap or the
 * memory may change between the two accesses.
 *
 * @addr: virtual address to load from
 * @mepc: trap PC of the instruction being emulated
 * Returns the 64-bit value assembled from the two halves.
 */
static inline u64 load_u64(const u64 *addr, ulong mepc)
{
	/* Cast keeps the const qualifier of addr; the original
	 * (u32 *) cast discarded it. */
	u64 lo = load_u32((const u32 *)addr, mepc);
	u64 hi = load_u32((const u32 *)addr + 1, mepc);

	return lo | (hi << 32);
}

/*
 * RV32 fallback: emulate an unprivileged 64-bit store with two 32-bit
 * unprivileged stores — low 32 bits to addr first, then high 32 bits
 * to addr + 4. The combined store is not atomic.
 *
 * @addr: virtual address to store to
 * @val:  64-bit value to store
 * @mepc: trap PC of the instruction being emulated
 */
static inline void store_u64(u64 *addr, u64 val, ulong mepc)
{
	u32 *half = (u32 *)addr;

	store_u32(half, (u32)val, mepc);
	store_u32(half + 1, (u32)(val >> 32), mepc);
}
#endif

/*
 * Fetch the instruction at mepc using an unprivileged read.
 *
 * Sets MSTATUS_MPRV (access uses previous-privilege translation) and
 * MSTATUS_MXR (allows reads from execute-only pages) for the duration
 * of the fetch, then restores mstatus.
 *
 * @mepc:    trap PC of the instruction to read
 * @mstatus: out — receives the mstatus value observed while MPRV/MXR
 *           were set (the csrrs result)
 * Returns the raw instruction bits.
 *
 * NOTE(review): __mepc/__mstatus are pinned to a2/a3 — presumably so
 * the M-mode trap handler can locate them if the fetch faults;
 * confirm against the trap handler implementation.
 * NOTE(review): unlike the store accessor above, these asm statements
 * are not marked volatile even though they toggle MPRV/MXR — looks
 * like an oversight; confirm.
 */
static inline ulong get_insn(ulong mepc, ulong *mstatus)
{
	register ulong __mepc asm ("a2") = mepc;
	register ulong __mstatus asm ("a3");
	ulong val;
#ifndef __riscv_compressed
	/* No C extension: instructions are 32-bit and 4-byte aligned,
	 * so a single word load fetches the whole instruction. */
	asm ("csrrs %[mstatus], mstatus, %[mprv]\n"
#if __riscv_xlen == 64
		STR(LWU) " %[insn], (%[addr])\n"
#else
		STR(LW) " %[insn], (%[addr])\n"
#endif
		"csrw mstatus, %[mstatus]"
		: [mstatus] "+&r" (__mstatus), [insn] "=&r" (val)
		: [mprv] "r" (MSTATUS_MPRV | MSTATUS_MXR), [addr] "r" (__mepc));
#else
	/* C extension: instructions are 2-byte aligned and 16 or 32
	 * bits wide. An encoding is 32-bit iff its low two bits are
	 * 0b11 (rvc_mask). If mepc is 4-byte aligned, load a word and
	 * mask it down to 16 bits when it is a compressed encoding;
	 * otherwise load halfword-wise and, for a 32-bit encoding,
	 * fetch the upper halfword from mepc + 2 separately so the
	 * access never crosses the alignment it is entitled to. */
	ulong rvc_mask = 3, tmp;
	asm ("csrrs %[mstatus], mstatus, %[mprv]\n"
		"and %[tmp], %[addr], 2\n"
		"bnez %[tmp], 1f\n"
#if __riscv_xlen == 64
		STR(LWU) " %[insn], (%[addr])\n"
#else
		STR(LW) " %[insn], (%[addr])\n"
#endif
		"and %[tmp], %[insn], %[rvc_mask]\n"
		"beq %[tmp], %[rvc_mask], 2f\n"
		"sll %[insn], %[insn], %[xlen_minus_16]\n"
		"srl %[insn], %[insn], %[xlen_minus_16]\n"
		"j 2f\n"
		"1:\n"
		"lhu %[insn], (%[addr])\n"
		"and %[tmp], %[insn], %[rvc_mask]\n"
		"bne %[tmp], %[rvc_mask], 2f\n"
		"lhu %[tmp], 2(%[addr])\n"
		"sll %[tmp], %[tmp], 16\n"
		"add %[insn], %[insn], %[tmp]\n"
		"2: csrw mstatus, %[mstatus]"
	: [mstatus] "+&r" (__mstatus), [insn] "=&r" (val), [tmp] "=&r" (tmp)
	: [mprv] "r" (MSTATUS_MPRV | MSTATUS_MXR), [addr] "r" (__mepc),
	[rvc_mask] "r" (rvc_mask), [xlen_minus_16] "i" (__riscv_xlen - 16));
#endif
	/* Report the mstatus value captured inside the MPRV window. */
	*mstatus = __mstatus;
	return val;
}

#endif