Diffstat (limited to 'arch/powerpc/lib'):
 arch/powerpc/lib/code-patching.c  | 30
 arch/powerpc/lib/copypage_64.S    |  7
 arch/powerpc/lib/feature-fixups.c |  4
 arch/powerpc/lib/string_64.S      |  7
 4 files changed, 23 insertions(+), 25 deletions(-)
diff --git a/arch/powerpc/lib/code-patching.c b/arch/powerpc/lib/code-patching.c
index 6edf0697a526..ad0cf3108dd0 100644
--- a/arch/powerpc/lib/code-patching.c
+++ b/arch/powerpc/lib/code-patching.c
@@ -94,17 +94,20 @@ void __init poking_init(void)
static_branch_enable(&poking_init_done);
}
+static unsigned long get_patch_pfn(void *addr)
+{
+ if (IS_ENABLED(CONFIG_MODULES) && is_vmalloc_or_module_addr(addr))
+ return vmalloc_to_pfn(addr);
+ else
+ return __pa_symbol(addr) >> PAGE_SHIFT;
+}
+
/*
* This can be called for kernel text or a module.
*/
static int map_patch_area(void *addr, unsigned long text_poke_addr)
{
- unsigned long pfn;
-
- if (IS_ENABLED(CONFIG_MODULES) && is_vmalloc_or_module_addr(addr))
- pfn = vmalloc_to_pfn(addr);
- else
- pfn = __pa_symbol(addr) >> PAGE_SHIFT;
+ unsigned long pfn = get_patch_pfn(addr);
return map_kernel_page(text_poke_addr, (pfn << PAGE_SHIFT), PAGE_KERNEL);
}
@@ -149,17 +152,22 @@ static int __do_patch_instruction(u32 *addr, ppc_inst_t instr)
int err;
u32 *patch_addr;
unsigned long text_poke_addr;
+ pte_t *pte;
+ unsigned long pfn = get_patch_pfn(addr);
- text_poke_addr = (unsigned long)__this_cpu_read(text_poke_area)->addr;
+ text_poke_addr = (unsigned long)__this_cpu_read(text_poke_area)->addr & PAGE_MASK;
patch_addr = (u32 *)(text_poke_addr + offset_in_page(addr));
- err = map_patch_area(addr, text_poke_addr);
- if (err)
- return err;
+ pte = virt_to_kpte(text_poke_addr);
+ __set_pte_at(&init_mm, text_poke_addr, pte, pfn_pte(pfn, PAGE_KERNEL), 0);
+ /* See ptesync comment in radix__set_pte_at() */
+ if (radix_enabled())
+ asm volatile("ptesync": : :"memory");
err = __patch_instruction(addr, instr, patch_addr);
- unmap_patch_area(text_poke_addr);
+ pte_clear(&init_mm, text_poke_addr, pte);
+ flush_tlb_kernel_range(text_poke_addr, text_poke_addr + PAGE_SIZE);
return err;
}
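
A minimal C sketch of the resulting fast path, assuming the static helpers from the hunk above (get_patch_pfn(), __patch_instruction(), the per-CPU text_poke_area); the function name patch_one_insn is hypothetical and this is an illustration of the flow, not the literal upstream __do_patch_instruction():

/*
 * Illustrative sketch only, as if it lived inside code-patching.c next to
 * the static helpers it uses.
 */
static int patch_one_insn(u32 *addr, ppc_inst_t instr, unsigned long text_poke_addr)
{
	unsigned long pfn = get_patch_pfn(addr);	/* pfn of the text being patched */
	u32 *patch_addr = (u32 *)(text_poke_addr + offset_in_page(addr));
	pte_t *pte = virt_to_kpte(text_poke_addr);	/* PTE of the per-CPU poke page */
	int err;

	/* Map the target page by writing the PTE directly, no map_kernel_page(). */
	__set_pte_at(&init_mm, text_poke_addr, pte, pfn_pte(pfn, PAGE_KERNEL), 0);
	/* On radix, order the PTE update before the store through patch_addr. */
	if (radix_enabled())
		asm volatile("ptesync" : : : "memory");

	err = __patch_instruction(addr, instr, patch_addr);

	/* Tear the mapping down and flush only this single page. */
	pte_clear(&init_mm, text_poke_addr, pte);
	flush_tlb_kernel_range(text_poke_addr, text_poke_addr + PAGE_SIZE);
	return err;
}

Compared with the old map_patch_area()/unmap_patch_area() pair, this avoids a full page-table walk and a broadcast flush on every patch; only the one poke page is mapped and invalidated.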
diff --git a/arch/powerpc/lib/copypage_64.S b/arch/powerpc/lib/copypage_64.S
index d1091b5ee5da..6812cb19d04a 100644
--- a/arch/powerpc/lib/copypage_64.S
+++ b/arch/powerpc/lib/copypage_64.S
@@ -9,11 +9,6 @@
#include <asm/export.h>
#include <asm/feature-fixups.h>
- .section ".toc","aw"
-PPC64_CACHES:
- .tc ppc64_caches[TC],ppc64_caches
- .section ".text"
-
_GLOBAL_TOC(copy_page)
BEGIN_FTR_SECTION
lis r5,PAGE_SIZE@h
@@ -24,7 +19,7 @@ FTR_SECTION_ELSE
ALT_FTR_SECTION_END_IFCLR(CPU_FTR_VMX_COPY)
ori r5,r5,PAGE_SIZE@l
BEGIN_FTR_SECTION
- ld r10,PPC64_CACHES@toc(r2)
+ LOAD_REG_ADDR(r10, ppc64_caches)
lwz r11,DCACHEL1LOGBLOCKSIZE(r10) /* log2 of cache block size */
lwz r12,DCACHEL1BLOCKSIZE(r10) /* get cache block size */
li r9,0
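
Both assembly changes in this diff drop the hand-rolled ".toc" entry and load the address of ppc64_caches with LOAD_REG_ADDR instead. As a rough sketch (assuming the usual 64-bit, TOC-relative definition in asm/ppc_asm.h, not quoted verbatim here), the macro computes the address relative to the TOC pointer in r2, so no dedicated TOC slot is needed:

# Sketch of the expansion, for illustration only:
#	LOAD_REG_ADDR(r10, ppc64_caches)
# becomes roughly
	addis	r10, r2, ppc64_caches@toc@ha	# high part, relative to the TOC pointer in r2
	addi	r10, r10, ppc64_caches@toc@l	# low part: r10 = &ppc64_caches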
diff --git a/arch/powerpc/lib/feature-fixups.c b/arch/powerpc/lib/feature-fixups.c
index 993d3f31832a..31f40f544de5 100644
--- a/arch/powerpc/lib/feature-fixups.c
+++ b/arch/powerpc/lib/feature-fixups.c
@@ -550,7 +550,7 @@ void do_barrier_nospec_fixups(bool enable)
}
#endif /* CONFIG_PPC_BARRIER_NOSPEC */
-#ifdef CONFIG_PPC_FSL_BOOK3E
+#ifdef CONFIG_PPC_E500
void do_barrier_nospec_fixups_range(bool enable, void *fixup_start, void *fixup_end)
{
unsigned int instr[2], *dest;
@@ -602,7 +602,7 @@ void __init do_btb_flush_fixups(void)
for (; start < end; start += 2)
patch_btb_flush_section(start);
}
-#endif /* CONFIG_PPC_FSL_BOOK3E */
+#endif /* CONFIG_PPC_E500 */
void do_lwsync_fixups(unsigned long value, void *fixup_start, void *fixup_end)
{
diff --git a/arch/powerpc/lib/string_64.S b/arch/powerpc/lib/string_64.S
index 169872bc0892..df41ce06f86b 100644
--- a/arch/powerpc/lib/string_64.S
+++ b/arch/powerpc/lib/string_64.S
@@ -11,11 +11,6 @@
#include <asm/asm-offsets.h>
#include <asm/export.h>
- .section ".toc","aw"
-PPC64_CACHES:
- .tc ppc64_caches[TC],ppc64_caches
- .section ".text"
-
/**
* __arch_clear_user: - Zero a block of memory in user space, with less checking.
* @to: Destination address, in user space.
@@ -133,7 +128,7 @@ err1; stb r0,0(r3)
blr
.Llong_clear:
- ld r5,PPC64_CACHES@toc(r2)
+ LOAD_REG_ADDR(r5, ppc64_caches)
bf cr7*4+0,11f
err2; std r0,0(r3)