Diffstat (limited to 'arch/mips/lib/memset.S')
-rw-r--r--  arch/mips/lib/memset.S  146
1 file changed, 95 insertions(+), 51 deletions(-)
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index 0580194e7402..7b0e5462ca51 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -34,13 +34,27 @@
#define FILLPTRG t0
#endif
+#define LEGACY_MODE 1
+#define EVA_MODE 2
+
+/*
+ * No need to protect it with EVA #ifdefery. The generated block of code
+ * will never be assembled if EVA is not enabled.
+ */
+#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
+#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
+
#define EX(insn,reg,addr,handler) \
-9: insn reg, addr; \
+ .if \mode == LEGACY_MODE; \
+9: insn reg, addr; \
+ .else; \
+9: ___BUILD_EVA_INSN(insn, reg, addr); \
+ .endif; \
.section __ex_table,"a"; \
PTR 9b, handler; \
.previous
- .macro f_fill64 dst, offset, val, fixup
+ .macro f_fill64 dst, offset, val, fixup, mode
EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
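The EX() change above is the core of the EVA conversion: in LEGACY_MODE the macro emits the plain store, while in EVA_MODE the ___BUILD_EVA_INSN indirection pastes an 'e' suffix onto the mnemonic so the EVA flavour of the instruction is assembled instead. Roughly, an EX(LONG_S, a1, 0(a0), handler) site on a 32-bit kernel (where LONG_S is sw) expands along these lines; this is a simplified sketch, and the real __BUILD_EVA_INSN (asm/asm-eva.h) also copes with the microMIPS encodings:

        /* mode == LEGACY_MODE */
        9: sw a1, 0(a0)            /* ordinary kernel-mode store */
        .section __ex_table,"a"
        PTR 9b, handler            /* a fault at 9b resumes at handler */
        .previous

        /* mode == EVA_MODE */
        9: swe a1, 0(a0)           /* EVA store: hits the user address space */
        .section __ex_table,"a"
        PTR 9b, handler
        .previous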
@@ -63,34 +77,24 @@
#endif
.endm
-/*
- * memset(void *s, int c, size_t n)
- *
- * a0: start of area to clear
- * a1: char to fill with
- * a2: size of area to clear
- */
.set noreorder
.align 5
-LEAF(memset)
- beqz a1, 1f
- move v0, a0 /* result */
- andi a1, 0xff /* spread fillword */
- LONG_SLL t1, a1, 8
- or a1, t1
- LONG_SLL t1, a1, 16
-#if LONGSIZE == 8
- or a1, t1
- LONG_SLL t1, a1, 32
-#endif
- or a1, t1
-1:
+ /*
+ * Macro to generate the __bzero{,_user} symbol
+ * Arguments:
+ * mode: LEGACY_MODE or EVA_MODE
+ */
+ .macro __BUILD_BZERO mode
+ /* Initialize __memset if this is the first time we call this macro */
+ .ifnotdef __memset
+ .set __memset, 1
+ .hidden __memset /* Make sure it does not leak */
+ .endif
-FEXPORT(__bzero)
sltiu t0, a2, STORSIZE /* very small region? */
- bnez t0, .Lsmall_memset
- andi t0, a0, STORMASK /* aligned? */
+ bnez t0, .Lsmall_memset\@
+ andi t0, a0, STORMASK /* aligned? */
#ifdef CONFIG_CPU_MICROMIPS
move t8, a1 /* used by 'swp' instruction */
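Note the \@ suffix that every local label acquires inside the macro: \@ is the GNU assembler's macro-execution counter, so when __BUILD_BZERO is expanded twice (once per mode) each expansion receives its own unique set of labels instead of colliding. A minimal, hypothetical illustration of the mechanism:

        .macro bump reg            /* demo macro, not part of the patch */
        .Lbump\@:                  /* expands to .Lbump0, .Lbump1, ... */
        addiu \reg, 1
        .endm

        bump t0                    /* first expansion defines .Lbump0 */
        bump t0                    /* second expansion defines .Lbump1 */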
@@ -98,39 +102,39 @@ FEXPORT(__bzero)
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
beqz t0, 1f
- PTR_SUBU t0, STORSIZE /* alignment in bytes */
+ PTR_SUBU t0, STORSIZE /* alignment in bytes */
#else
.set noat
li AT, STORSIZE
beqz t0, 1f
- PTR_SUBU t0, AT /* alignment in bytes */
+ PTR_SUBU t0, AT /* alignment in bytes */
.set at
#endif
R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
- EX(LONG_S_L, a1, (a0), .Lfirst_fixup) /* make word/dword aligned */
+ EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
#endif
#ifdef __MIPSEL__
- EX(LONG_S_R, a1, (a0), .Lfirst_fixup) /* make word/dword aligned */
+ EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
#endif
PTR_SUBU a0, t0 /* long align ptr */
PTR_ADDU a2, t0 /* correct size */
1: ori t1, a2, 0x3f /* # of full blocks */
xori t1, 0x3f
- beqz t1, .Lmemset_partial /* no block to fill */
- andi t0, a2, 0x40-STORSIZE
+ beqz t1, .Lmemset_partial\@ /* no block to fill */
+ andi t0, a2, 0x40-STORSIZE
PTR_ADDU t1, a0 /* end address */
.set reorder
1: PTR_ADDIU a0, 64
R10KCBARRIER(0(ra))
- f_fill64 a0, -64, FILL64RG, .Lfwd_fixup
+ f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
bne t1, a0, 1b
.set noreorder
-.Lmemset_partial:
+.Lmemset_partial\@:
R10KCBARRIER(0(ra))
PTR_LA t1, 2f /* where to start */
#ifdef CONFIG_CPU_MICROMIPS
@@ -145,60 +149,100 @@ FEXPORT(__bzero)
.set at
#endif
jr t1
- PTR_ADDU a0, t0 /* dest ptr */
+ PTR_ADDU a0, t0 /* dest ptr */
.set push
.set noreorder
.set nomacro
- f_fill64 a0, -64, FILL64RG, .Lpartial_fixup /* ... but first do longs ... */
+ /* ... but first do longs ... */
+ f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2: .set pop
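The sequence from .Lmemset_partial\@ down to label 2 above is a Duff's-device-style computed jump: t0 holds the bytes still owed in the final, partial 64-byte block, and since each LONG_S emitted by f_fill64 is a 4-byte instruction that stores STORSIZE bytes, backing up from label 2 by t0 bytes (t0/2 on 64-bit, where one instruction stores 8 bytes; the microMIPS case differs again) lands on exactly the first store still needed. A minimal 12-byte analogue for a 32-bit CPU, assuming t0 is a multiple of 4 and at most 12:

        PTR_LA t1, 2f              /* address just past the stores */
        PTR_SUBU t1, t0            /* one 4-byte insn per 4 bytes to fill */
        jr t1
        PTR_ADDU a0, t0            /* delay slot: a0 = end of region */
        sw a1, -12(a0)             /* entered when t0 == 12 */
        sw a1, -8(a0)              /* entered when t0 >= 8 */
        sw a1, -4(a0)              /* entered when t0 >= 4 */
        2: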
andi a2, STORMASK /* At most one long to go */
beqz a2, 1f
- PTR_ADDU a0, a2 /* What's left */
+ PTR_ADDU a0, a2 /* What's left */
R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
- EX(LONG_S_R, a1, -1(a0), .Llast_fixup)
+ EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#endif
#ifdef __MIPSEL__
- EX(LONG_S_L, a1, -1(a0), .Llast_fixup)
+ EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
1: jr ra
- move a2, zero
+ move a2, zero
-.Lsmall_memset:
+.Lsmall_memset\@:
beqz a2, 2f
- PTR_ADDU t1, a0, a2
+ PTR_ADDU t1, a0, a2
1: PTR_ADDIU a0, 1 /* fill bytewise */
R10KCBARRIER(0(ra))
bne t1, a0, 1b
- sb a1, -1(a0)
+ sb a1, -1(a0)
2: jr ra /* done */
- move a2, zero
+ move a2, zero
+ .if __memset == 1
END(memset)
+ .set __memset, 0
+ .hidden __memset
+ .endif
-.Lfirst_fixup:
+.Lfirst_fixup\@:
jr ra
- nop
+ nop
-.Lfwd_fixup:
+.Lfwd_fixup\@:
PTR_L t0, TI_TASK($28)
andi a2, 0x3f
LONG_L t0, THREAD_BUADDR(t0)
LONG_ADDU a2, t1
jr ra
- LONG_SUBU a2, t0
+ LONG_SUBU a2, t0
-.Lpartial_fixup:
+.Lpartial_fixup\@:
PTR_L t0, TI_TASK($28)
andi a2, STORMASK
LONG_L t0, THREAD_BUADDR(t0)
LONG_ADDU a2, t1
jr ra
- LONG_SUBU a2, t0
+ LONG_SUBU a2, t0
-.Llast_fixup:
+.Llast_fixup\@:
jr ra
- andi v1, a2, STORMASK
+ andi v1, a2, STORMASK
+
+ .endm
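The fixup stubs above give __bzero its partial-failure contract: if a store faults, the MIPS fault handling path records the bad address in the thread's buaddr field and resumes at the fixup label named in the __ex_table entry, which must report how many bytes were left unset. .Lfwd_fixup\@, for example, reloads the faulting address via TI_TASK($28)/THREAD_BUADDR and computes a2 = (stray bytes beyond the full blocks) + (intended end of the block run, held in t1) - (faulting address). As a worked example, with t1 = 0x1040, 0x13 stray bytes beyond the blocks, and a fault at 0x1000, the caller sees 0x1040 + 0x13 - 0x1000 = 0x53 bytes not zeroed.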
+
+/*
+ * memset(void *s, int c, size_t n)
+ *
+ * a0: start of area to clear
+ * a1: char to fill with
+ * a2: size of area to clear
+ */
+
+LEAF(memset)
+ beqz a1, 1f
+ move v0, a0 /* result */
+
+ andi a1, 0xff /* spread fillword */
+ LONG_SLL t1, a1, 8
+ or a1, t1
+ LONG_SLL t1, a1, 16
+#if LONGSIZE == 8
+ or a1, t1
+ LONG_SLL t1, a1, 32
+#endif
+ or a1, t1
+1:
+#ifndef CONFIG_EVA
+FEXPORT(__bzero)
+#endif
+ __BUILD_BZERO LEGACY_MODE
+
+#ifdef CONFIG_EVA
+LEAF(__bzero)
+ __BUILD_BZERO EVA_MODE
+END(__bzero)
+#endif
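Two things happen in this final hunk. First, the relocated memset prologue replicates the fill byte across the whole register before either expansion runs; a worked trace for a1 = 0xdd on a 32-bit kernel (where LONG_SLL is sll, and the LONGSIZE == 8 branch simply repeats the trick for the upper 32 bits):

        andi a1, 0xff              /* a1 = 0x000000dd */
        sll t1, a1, 8              /* t1 = 0x0000dd00 */
        or a1, t1                  /* a1 = 0x0000dddd */
        sll t1, a1, 16             /* t1 = 0xdddd0000 */
        or a1, t1                  /* a1 = 0xdddddddd */

Second, the closing lines lay out the two builds: a non-EVA kernel simply exports __bzero as a second entry point falling through into the LEGACY_MODE expansion shared with memset, while an EVA kernel additionally assembles a separate EVA_MODE body under its own LEAF(__bzero), whose stores use the e-suffixed instructions and therefore reach the user address space.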