path: root/arch/loongarch/include/asm/stackframe.h
blob: d9eafd3ee3d1e848675f54f033163ecc66e07d72
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/addrspace.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/asm-offsets.h>
#include <asm/loongarch.h>
#include <asm/thread_info.h>
#include <asm/unwind_hints.h>

/* Helpers that make the addition of CFI (call frame information) annotations a little easier. */
	.macro cfi_rel_offset reg offset=0 docfi=0
	.if \docfi
	.cfi_rel_offset \reg, \offset
	.endif
	.endm

	.macro cfi_st reg offset=0 docfi=0
	cfi_rel_offset \reg, \offset, \docfi
	LONG_S	\reg, sp, \offset
	.endm

	.macro cfi_restore reg offset=0 docfi=0
	.if \docfi
	.cfi_restore \reg
	.endif
	.endm

	.macro cfi_ld reg offset=0 docfi=0
	LONG_L	\reg, sp, \offset
	cfi_restore \reg \offset \docfi
	.endm
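
/*
 * Illustrative expansion (assuming docfi=1):
 *
 *	cfi_st	ra, PT_R1, 1
 *
 * becomes
 *
 *	.cfi_rel_offset ra, PT_R1
 *	LONG_S	ra, sp, PT_R1
 */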

/* Jump from the current (typically direct-mapped) address to the runtime virtual address in the cached DMW window. */
	.macro JUMP_VIRT_ADDR temp1 temp2
	li.d	\temp1, CACHE_BASE
	pcaddi	\temp2, 0
	bstrins.d  \temp1, \temp2, (DMW_PABITS - 1), 0
	jirl	zero, \temp1, 0xc
	.endm
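
/*
 * Illustrative walk-through (assuming DMW_PABITS is 48): pcaddi reads
 * the address of the pcaddi instruction itself, bstrins.d splices its
 * low 48 bits into CACHE_BASE, and the jirl offset of 0xc (three
 * 4-byte instructions past the pcaddi) lands on the instruction right
 * after the macro, now fetched through the cached window.
 */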

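/*
 * Stash t0/t1 in the kernel scratch CSRs so that early exception code
 * has two free temporaries before the full register save.
 */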
	.macro BACKUP_T0T1
	csrwr	t0, EXCEPTION_KS0
	csrwr	t1, EXCEPTION_KS1
	.endm

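/* Bring the original t0/t1 back from the scratch CSRs. */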
	.macro RELOAD_T0T1
	csrrd   t0, EXCEPTION_KS0
	csrrd   t1, EXCEPTION_KS1
	.endm

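/*
 * Save the temporary registers t0-t8 ($r12-$r20) to pt_regs; t0/t1
 * are first recovered from the scratch CSRs written by BACKUP_T0T1.
 */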
	.macro	SAVE_TEMP docfi=0
	RELOAD_T0T1
	cfi_st	t0, PT_R12, \docfi
	cfi_st	t1, PT_R13, \docfi
	cfi_st	t2, PT_R14, \docfi
	cfi_st	t3, PT_R15, \docfi
	cfi_st	t4, PT_R16, \docfi
	cfi_st	t5, PT_R17, \docfi
	cfi_st	t6, PT_R18, \docfi
	cfi_st	t7, PT_R19, \docfi
	cfi_st	t8, PT_R20, \docfi
	.endm

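/* Save the callee-saved (static) registers s0-s8 ($r23-$r31) to pt_regs. */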
	.macro	SAVE_STATIC docfi=0
	cfi_st	s0, PT_R23, \docfi
	cfi_st	s1, PT_R24, \docfi
	cfi_st	s2, PT_R25, \docfi
	cfi_st	s3, PT_R26, \docfi
	cfi_st	s4, PT_R27, \docfi
	cfi_st	s5, PT_R28, \docfi
	cfi_st	s6, PT_R29, \docfi
	cfi_st	s7, PT_R30, \docfi
	cfi_st	s8, PT_R31, \docfi
	.endm

/*
 * get_saved_sp fetches the kernel stack pointer for the current CPU
 * from the kernelsp array. It saves the old sp in t0 and loads the
 * new value into sp.
 */
	.macro	get_saved_sp docfi=0
	la_abs	  t1, kernelsp
#ifdef CONFIG_SMP
	csrrd	  t0, PERCPU_BASE_KS
	LONG_ADD  t1, t1, t0
#endif
	move	  t0, sp
	.if \docfi
	.cfi_register sp, t0
	.endif
	LONG_L	  sp, t1, 0
	.endm

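/*
 * Record \stackp in this CPU's kernelsp slot; on SMP, u0 holds the
 * per-CPU base used to index the array.
 */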
	.macro	set_saved_sp stackp temp temp2
	la.pcrel  \temp, kernelsp
#ifdef CONFIG_SMP
	LONG_ADD  \temp, \temp, u0
#endif
	LONG_S	  \stackp, \temp, 0
	.endm

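/*
 * Save everything the common exception path needs: switch to the
 * kernel stack if the exception came from user mode (PRMD.PPLV != 0),
 * then store the CSR context and the caller-saved GPRs into pt_regs.
 */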
	.macro	SAVE_SOME docfi=0
	csrrd	t1, LOONGARCH_CSR_PRMD
	andi	t1, t1, 0x3	/* extract pplv field */
	move	t0, sp
	beqz	t1, 8f
	/* Called from user mode, new stack. */
	get_saved_sp docfi=\docfi
8:
	PTR_ADDI sp, sp, -PT_SIZE
	.if \docfi
	.cfi_def_cfa sp, 0
	.endif
	cfi_st	t0, PT_R3, \docfi
	cfi_rel_offset  sp, PT_R3, \docfi
	LONG_S	zero, sp, PT_R0
	csrrd	t0, LOONGARCH_CSR_PRMD
	LONG_S	t0, sp, PT_PRMD
	csrrd	t0, LOONGARCH_CSR_CRMD
	LONG_S	t0, sp, PT_CRMD
	csrrd	t0, LOONGARCH_CSR_EUEN
	LONG_S  t0, sp, PT_EUEN
	csrrd	t0, LOONGARCH_CSR_ECFG
	LONG_S	t0, sp, PT_ECFG
	csrrd	t0, LOONGARCH_CSR_ESTAT
	PTR_S	t0, sp, PT_ESTAT
	cfi_st	ra, PT_R1, \docfi
	cfi_st	a0, PT_R4, \docfi
	cfi_st	a1, PT_R5, \docfi
	cfi_st	a2, PT_R6, \docfi
	cfi_st	a3, PT_R7, \docfi
	cfi_st	a4, PT_R8, \docfi
	cfi_st	a5, PT_R9, \docfi
	cfi_st	a6, PT_R10, \docfi
	cfi_st	a7, PT_R11, \docfi
	csrrd	ra, LOONGARCH_CSR_ERA
	LONG_S	ra, sp, PT_ERA
	.if \docfi
	.cfi_rel_offset ra, PT_ERA
	.endif
	cfi_st	tp, PT_R2, \docfi
	cfi_st	fp, PT_R22, \docfi

	/* Set thread_info (tp) and the per-CPU base (u0) if we're coming from user mode */
	csrrd	t0, LOONGARCH_CSR_PRMD
	andi	t0, t0, 0x3	/* extract pplv field */
	beqz	t0, 9f

	li.d	tp, ~_THREAD_MASK
	and	tp, tp, sp
	cfi_st  u0, PT_R21, \docfi
	csrrd	u0, PERCPU_BASE_KS
9:
#ifdef CONFIG_KGDB
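	/* KGDB relies on hardware watchpoints, so set CRMD.WE (watchpoint enable). */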
	li.w	t0, CSR_CRMD_WE
	csrxchg	t0, t0, LOONGARCH_CSR_CRMD
#endif
	UNWIND_HINT_REGS
	.endm

	.macro	SAVE_ALL docfi=0
	SAVE_SOME \docfi
	SAVE_TEMP \docfi
	SAVE_STATIC \docfi
	.endm

	.macro	RESTORE_TEMP docfi=0
	cfi_ld	t0, PT_R12, \docfi
	cfi_ld	t1, PT_R13, \docfi
	cfi_ld	t2, PT_R14, \docfi
	cfi_ld	t3, PT_R15, \docfi
	cfi_ld	t4, PT_R16, \docfi
	cfi_ld	t5, PT_R17, \docfi
	cfi_ld	t6, PT_R18, \docfi
	cfi_ld	t7, PT_R19, \docfi
	cfi_ld	t8, PT_R20, \docfi
	.endm

	.macro	RESTORE_STATIC docfi=0
	cfi_ld	s0, PT_R23, \docfi
	cfi_ld	s1, PT_R24, \docfi
	cfi_ld	s2, PT_R25, \docfi
	cfi_ld	s3, PT_R26, \docfi
	cfi_ld	s4, PT_R27, \docfi
	cfi_ld	s5, PT_R28, \docfi
	cfi_ld	s6, PT_R29, \docfi
	cfi_ld	s7, PT_R30, \docfi
	cfi_ld	s8, PT_R31, \docfi
	.endm

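/*
 * Restore the exception CSRs (ERA, PRMD) and the caller-saved GPRs;
 * u0 is only reloaded when returning to user mode.
 */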
	.macro	RESTORE_SOME docfi=0
	LONG_L	a0, sp, PT_PRMD
	andi    a0, a0, 0x3	/* extract pplv field */
	beqz    a0, 8f
	cfi_ld  u0, PT_R21, \docfi
8:
	LONG_L	a0, sp, PT_ERA
	csrwr	a0, LOONGARCH_CSR_ERA
	LONG_L	a0, sp, PT_PRMD
	csrwr	a0, LOONGARCH_CSR_PRMD
	cfi_ld	ra, PT_R1, \docfi
	cfi_ld	a0, PT_R4, \docfi
	cfi_ld	a1, PT_R5, \docfi
	cfi_ld	a2, PT_R6, \docfi
	cfi_ld	a3, PT_R7, \docfi
	cfi_ld	a4, PT_R8, \docfi
	cfi_ld	a5, PT_R9, \docfi
	cfi_ld	a6, PT_R10, \docfi
	cfi_ld	a7, PT_R11, \docfi
	cfi_ld	tp, PT_R2, \docfi
	cfi_ld	fp, PT_R22, \docfi
	.endm

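/*
 * Switch back to the saved stack pointer and return from the
 * exception: ertn restores CRMD from PRMD and resumes at ERA.
 */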
	.macro	RESTORE_SP_AND_RET docfi=0
	cfi_ld	sp, PT_R3, \docfi
	UNWIND_HINT_FUNC
	ertn
	.endm

	.macro	RESTORE_ALL_AND_RET docfi=0
	RESTORE_STATIC \docfi
	RESTORE_TEMP \docfi
	RESTORE_SOME \docfi
	RESTORE_SP_AND_RET \docfi
	.endm
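
/*
 * Typical use in an exception handler (an illustrative sketch, not a
 * handler from this tree; do_handler is a stand-in name):
 *
 *	BACKUP_T0T1
 *	SAVE_ALL
 *	move	a0, sp			# struct pt_regs *
 *	bl	do_handler
 *	RESTORE_ALL_AND_RET
 */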

#endif /* _ASM_STACKFRAME_H */