author      Pasha Tatashin <pasha.tatashin@soleen.com>    2021-09-30 16:31:04 +0200
committer   Will Deacon <will@kernel.org>                 2021-10-01 14:31:00 +0200
commit      3036ec599332cdfb406249270e50ad3f1a5c5940 (patch)
tree        1fed9bae0d590904516632e870a445e4fe5e6018
parent      arm64: kexec: skip relocation code for inplace kexec (diff)
download    linux-3036ec599332cdfb406249270e50ad3f1a5c5940.tar.xz
            linux-3036ec599332cdfb406249270e50ad3f1a5c5940.zip
arm64: kexec: Use dcache ops macros instead of open-coding
kexec does dcache maintenance when it re-writes all memory. Our
dcache_by_line_op macro depends on reading the sanitised DminLine
from memory. Kexec may have overwritten this, so it open-codes the
sequence.
dcache_by_line_op pulls in a whole set of macros: it uses
dcache_line_size, which uses read_ctr to fetch the sanitised DminLine.
Reading the DminLine is the first thing dcache_by_line_op does.
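
For reference, the macro chain described here looks roughly like this
around this kernel version (an abbreviated sketch, not the verbatim
source; the NVHE-hypervisor variant of read_ctr is omitted):

	/*
	 * read_ctr: the sanitised CTR_EL0 value. On systems with
	 * mismatched cache types the sanitised copy is loaded from
	 * memory (ldr_l), which is exactly what kexec cannot rely on
	 * once relocation has started overwriting memory.
	 */
	.macro	read_ctr, reg
alternative_if_not ARM64_MISMATCHED_CACHE_TYPE
	mrs	\reg, ctr_el0			// read CTR_EL0 directly
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
	.endm

	/* dcache_line_size: minimum D-cache line size, from CTR_EL0.DminLine */
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm	\tmp, \tmp, #16, #19	// DminLine is CTR_EL0[19:16]
	mov	\reg, #4		// DminLine counts 4-byte words
	lsl	\reg, \reg, \tmp	// line size in bytes: 4 << DminLine
	.endm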
Rename dcache_by_line_op to dcache_by_myline_op and take DminLine as
an argument. Kexec can now use the slightly smaller macro.
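
Illustratively, a caller that has already read the line size into a
register can now skip dcache_line_size entirely. This is what kexec
does in the relocate_kernel.S hunk below, where x15 is loaded earlier
in the function (via raw_dcache_line_size, which reads CTR_EL0
directly rather than the sanitised in-memory copy):

	/* x15 = D-cache line size, read before relocation starts */
	mov	x2, x13			/* start of destination page */
	add	x1, x2, #PAGE_SIZE	/* end of destination page */
	dcache_by_myline_op ivac, sy, x2, x1, x15, x20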
This makes upcoming changes to the dcache maintenance easier on
the eye.
Code generated by the existing callers is unchanged.
Suggested-by: James Morse <james.morse@arm.com>
Signed-off-by: Pasha Tatashin <pasha.tatashin@soleen.com>
Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Link: https://lore.kernel.org/r/20210930143113.1502553-7-pasha.tatashin@soleen.com
Signed-off-by: Will Deacon <will@kernel.org>
-rw-r--r--    arch/arm64/include/asm/assembler.h     | 30
-rw-r--r--    arch/arm64/kernel/relocate_kernel.S    | 13
2 files changed, 26 insertions, 17 deletions
diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index bfa58409a4d4..d5281f75a58d 100644
--- a/arch/arm64/include/asm/assembler.h
+++ b/arch/arm64/include/asm/assembler.h
@@ -405,19 +405,19 @@ alternative_endif
 
 /*
  * Macro to perform a data cache maintenance for the interval
- * [start, end)
+ * [start, end) with dcache line size explicitly provided.
  *
  * 	op:		operation passed to dc instruction
  * 	domain:		domain used in dsb instruciton
  * 	start:		starting virtual address of the region
  * 	end:		end virtual address of the region
+ * 	linesz:		dcache line size
  * 	fixup:		optional label to branch to on user fault
- * 	Corrupts:	start, end, tmp1, tmp2
+ * 	Corrupts:	start, end, tmp
  */
-	.macro dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
-	dcache_line_size \tmp1, \tmp2
-	sub	\tmp2, \tmp1, #1
-	bic	\start, \start, \tmp2
+	.macro dcache_by_myline_op op, domain, start, end, linesz, tmp, fixup
+	sub	\tmp, \linesz, #1
+	bic	\start, \start, \tmp
 .Ldcache_op\@:
 	.ifc	\op, cvau
 	__dcache_op_workaround_clean_cache \op, \start
@@ -436,7 +436,7 @@ alternative_endif
 	.endif
 	.endif
 	.endif
-	add	\start, \start, \tmp1
+	add	\start, \start, \linesz
 	cmp	\start, \end
 	b.lo	.Ldcache_op\@
 	dsb	\domain
@@ -445,6 +445,22 @@ alternative_endif
 	.endm
 
 /*
+ * Macro to perform a data cache maintenance for the interval
+ * [start, end)
+ *
+ * 	op:		operation passed to dc instruction
+ * 	domain:		domain used in dsb instruciton
+ * 	start:		starting virtual address of the region
+ * 	end:		end virtual address of the region
+ * 	fixup:		optional label to branch to on user fault
+ * 	Corrupts:	start, end, tmp1, tmp2
+ */
+	.macro dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
+	dcache_line_size \tmp1, \tmp2
+	dcache_by_myline_op \op, \domain, \start, \end, \tmp1, \tmp2, \fixup
+	.endm
+
+/*
  * Macro to perform an instruction cache maintenance for the interval
  * [start, end)
  *
diff --git a/arch/arm64/kernel/relocate_kernel.S b/arch/arm64/kernel/relocate_kernel.S
index 8058fabe0a76..8c43779e8cc6 100644
--- a/arch/arm64/kernel/relocate_kernel.S
+++ b/arch/arm64/kernel/relocate_kernel.S
@@ -41,16 +41,9 @@ SYM_CODE_START(arm64_relocate_new_kernel)
 	tbz	x16, IND_SOURCE_BIT, .Ltest_indirection
 
 	/* Invalidate dest page to PoC. */
-	mov	x2, x13
-	add	x20, x2, #PAGE_SIZE
-	sub	x1, x15, #1
-	bic	x2, x2, x1
-2:	dc	ivac, x2
-	add	x2, x2, x15
-	cmp	x2, x20
-	b.lo	2b
-	dsb	sy
-
+	mov	x2, x13
+	add	x1, x2, #PAGE_SIZE
+	dcache_by_myline_op ivac, sy, x2, x1, x15, x20
 	copy_page x13, x12, x1, x2, x3, x4, x5, x6, x7, x8
 	b	.Lnext
 .Ltest_indirection: