author		Heiko Carstens <heiko.carstens@de.ibm.com>	2019-10-18 14:23:16 +0300
committer	Vasily Gorbik <gor@linux.ibm.com>	2019-10-31 19:20:51 +0300
commit		cceb018377a123dd77d3239bbdbfbdf50f0d6a71 (patch)
tree		d3d2e8b4e82124d4f0107fe36685250169898025 /arch/s390/include/asm/alternative.h
parent		6a3035dac6506bc6da40e391803fba50bb2dce9d (diff)
download	linux-cceb018377a123dd77d3239bbdbfbdf50f0d6a71.tar.xz
s390/alternatives: make use of asm_inline
This is the s390 version of commit 40576e5e63ea ("x86: alternative.h: use
asm_inline for all alternative variants"). See commit eb111869301e
("compiler-types.h: add asm_inline definition") for more details.

With this change the compiler will no longer generate many out-of-line
versions of the three-instruction arch_spin_unlock() function.

Due to this, gcc seems to change a lot of other inline decisions, which
results in a net 6k text size growth according to bloat-o-meter (gcc 9.2
with defconfig). But that is still better than having many out-of-line
versions of arch_spin_unlock().

Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Vasily Gorbik <gor@linux.ibm.com>
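For background, here is a minimal, self-contained sketch of the inlining
problem the patch addresses; the function names and asm bodies below are
hypothetical placeholders (assembler comment lines), not kernel code.
GCC's inliner estimates the cost of an asm statement by counting the lines
of its template, so the multi-line text that the ALTERNATIVE() macros
expand to makes a tiny function look expensive to inline; "asm inline"
(gcc 9 and newer) tells the inliner to charge the statement the minimum
size instead.

/*
 * Hypothetical demo (not kernel code): the asm template below is five
 * lines long, so GCC's inliner charges spin_unlock_classic() roughly
 * five instructions and may decide against inlining its callers.
 * spin_unlock_asm_inline() has the identical template, but "asm inline"
 * (gcc >= 9) makes the inliner charge it the minimum size.
 */
static inline void spin_unlock_classic(void)
{
	asm volatile("# original instruction\n\t"
		     "# .pushsection .altinstructions\n\t"
		     "# ...patch-site records...\n\t"
		     "# .popsection\n\t"
		     "# end of patch site" ::: "memory");
}

static inline void spin_unlock_asm_inline(void)
{
	asm inline volatile("# original instruction\n\t"
			    "# .pushsection .altinstructions\n\t"
			    "# ...patch-site records...\n\t"
			    "# .popsection\n\t"
			    "# end of patch site" ::: "memory");
}

In the kernel, the asm_inline macro added by commit eb111869301e resolves
to "asm __inline" when the compiler supports the qualifier and falls back
to plain "asm" otherwise, so the change below is a no-op on older
compilers.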
Diffstat (limited to 'arch/s390/include/asm/alternative.h')
-rw-r--r--	arch/s390/include/asm/alternative.h	4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/arch/s390/include/asm/alternative.h b/arch/s390/include/asm/alternative.h
index c2cf7bcdef9b..1c8a38f762a3 100644
--- a/arch/s390/include/asm/alternative.h
+++ b/arch/s390/include/asm/alternative.h
@@ -139,10 +139,10 @@ void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
* without volatile and memory clobber.
*/
#define alternative(oldinstr, altinstr, facility) \
- asm volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
+ asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
#define alternative_2(oldinstr, altinstr1, facility1, altinstr2, facility2) \
- asm volatile(ALTERNATIVE_2(oldinstr, altinstr1, facility1, \
+ asm_inline volatile(ALTERNATIVE_2(oldinstr, altinstr1, facility1, \
altinstr2, facility2) ::: "memory")
#endif /* __ASSEMBLY__ */
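For illustration, a hedged sketch of a caller of the rewritten macro; the
function name, instruction encoding, and facility number are illustrative
stand-ins (modeled loosely on the s390 spinlock code), not part of this
patch:

/*
 * Hypothetical caller: execute a 4-byte nop unless facility bit 49 is
 * installed, in which case the alternative instruction is patched in
 * at boot.  Because alternative() now emits an asm_inline statement,
 * the long ALTERNATIVE() expansion no longer inflates the inliner's
 * size estimate for one-instruction functions like this.
 */
static inline void niai_hint(void)
{
	alternative("nop", ".long 0xb2fa0070", 49);	/* niai 7,0 */
}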