author     Sebastien Van Cauwenberghe <svancau@gmail.com>   2021-01-29 22:00:37 +0300
committer  Palmer Dabbelt <palmerdabbelt@google.com>        2021-02-03 05:36:29 +0300
commit     eefb5f3ab2e8e0b3ef5eba5c5a9f33457741300d (patch)
tree       9b8847093fab20cf2b6acd6635c521d23e0f8793 /arch/riscv
parent     f105ea9890f42137344f8c08548c895dc9294bd8 (diff)
riscv: Align on L1_CACHE_BYTES when STRICT_KERNEL_RWX
Allows the sections to be aligned on smaller boundaries and therefore
results in a smaller kernel image size.

Signed-off-by: Sebastien Van Cauwenberghe <svancau@gmail.com>
Reviewed-by: Atish Patra <atish.patra@wdc.com>
Signed-off-by: Palmer Dabbelt <palmerdabbelt@google.com>
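[Editor's note] To make the size argument concrete, the sketch below (illustrative only,
not part of the commit) compares the worst-case padding that a single ALIGN(SECTION_ALIGN)
boundary can insert under the STRICT_KERNEL_RWX alignment versus plain cache-line
alignment. It assumes L1_CACHE_BYTES is 64, the usual RISC-V value; real savings depend
on how many aligned boundaries the linker script emits and on the section sizes.

/*
 * Illustrative sketch only (not from the commit): worst-case padding
 * inserted at one ALIGN(SECTION_ALIGN) boundary in the kernel image.
 * Assumes a 64-byte L1_CACHE_BYTES.
 */
#include <stdio.h>

int main(void)
{
	unsigned long rwx_align   = 1UL << 21; /* 2 MiB, CONFIG_64BIT + STRICT_KERNEL_RWX */
	unsigned long cache_align = 64;        /* assumed L1_CACHE_BYTES */

	/* An ALIGN(n) boundary can waste at most n - 1 bytes of padding. */
	printf("worst-case pad with RWX alignment:        %lu bytes\n", rwx_align - 1);
	printf("worst-case pad with cache-line alignment: %lu bytes\n", cache_align - 1);
	return 0;
}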
Diffstat (limited to 'arch/riscv')
-rw-r--r--  arch/riscv/include/asm/set_memory.h | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/arch/riscv/include/asm/set_memory.h b/arch/riscv/include/asm/set_memory.h
index 211eb8244a45..8b80c80c7f1a 100644
--- a/arch/riscv/include/asm/set_memory.h
+++ b/arch/riscv/include/asm/set_memory.h
@@ -32,14 +32,14 @@ bool kernel_page_present(struct page *page);
 
 #endif /* __ASSEMBLY__ */
 
-#ifdef CONFIG_ARCH_HAS_STRICT_KERNEL_RWX
+#ifdef CONFIG_STRICT_KERNEL_RWX
 #ifdef CONFIG_64BIT
 #define SECTION_ALIGN (1 << 21)
 #else
 #define SECTION_ALIGN (1 << 22)
 #endif
-#else /* !CONFIG_ARCH_HAS_STRICT_KERNEL_RWX */
+#else /* !CONFIG_STRICT_KERNEL_RWX */
 #define SECTION_ALIGN L1_CACHE_BYTES
-#endif /* CONFIG_ARCH_HAS_STRICT_KERNEL_RWX */
+#endif /* CONFIG_STRICT_KERNEL_RWX */
 
 #endif /* _ASM_RISCV_SET_MEMORY_H */