author		Nicholas Piggin <npiggin@gmail.com>	2019-06-22 15:15:11 +0200
committer	Michael Ellerman <mpe@ellerman.id.au>	2019-07-02 12:23:51 +0200
commit		4508a74a63154fea5c2d36d8ad064273c5ea9608
tree		d214f8c3d70fd1e47636e0805443a9080d259296 /arch/powerpc/include/asm/exception-64s.h
parent		powerpc/64s/exception: Remove unused SOFTEN_VALUE_0x980

powerpc/64s/exception: remove H concatenation for EXC_HV variants
Replace all instances of this with gas macros that test the hsrr
parameter and use the appropriate register names / labels.
No generated code change.
Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
[mpe: Remove extraneous 2nd check for 0xea0 in SOFTEN_TEST]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
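
The change swaps C-preprocessor token pasting for assembler-time selection: previously EXC_HV expanded to the literal token H, which was pasted into names such as SPRN_##h##SRR0 to form SPRN_HSRR0, whereas now EXC_HV/EXC_STD are the numbers 1/0 and a gas macro picks the register with .if. A minimal standalone sketch of the two styles (illustrative only, not code from the patch; DEMO_PROLOG is a made-up name):

	/* Old style: CPP token pasting, h must literally be "H" or empty */
	#define __DEMO_PROLOG(label, h)					\
		mfspr	r11,SPRN_##h##SRR0;	/* SRR0 or HSRR0 */	\
		mfspr	r12,SPRN_##h##SRR1;	/* SRR1 or HSRR1 */	\
		b	label

	/* New style: gas macro, hsrr is 0 (EXC_STD) or 1 (EXC_HV) */
	.macro DEMO_PROLOG label, hsrr
	.if \hsrr
	mfspr	r11,SPRN_HSRR0
	mfspr	r12,SPRN_HSRR1
	.else
	mfspr	r11,SPRN_SRR0
	mfspr	r12,SPRN_SRR1
	.endif
	b	\label
	.endm

For a given hsrr value both styles assemble to the same instructions, which is why the patch can claim no generated code change.
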
Diffstat (limited to 'arch/powerpc/include/asm/exception-64s.h')
-rw-r--r--	arch/powerpc/include/asm/exception-64s.h	332
1 file changed, 193 insertions, 139 deletions
diff --git a/arch/powerpc/include/asm/exception-64s.h b/arch/powerpc/include/asm/exception-64s.h
index b4f8b745ba01..eddd74cf36c3 100644
--- a/arch/powerpc/include/asm/exception-64s.h
+++ b/arch/powerpc/include/asm/exception-64s.h
@@ -67,6 +67,8 @@
  */
 #define EX_R3		EX_DAR
 
+#ifdef __ASSEMBLY__
+
 #define STF_ENTRY_BARRIER_SLOT						\
 	STF_ENTRY_BARRIER_FIXUP_SECTION;				\
 	nop;								\
@@ -148,38 +150,6 @@
 	hrfid;								\
 	b	hrfi_flush_fallback
 
-#ifdef CONFIG_RELOCATABLE
-#define __EXCEPTION_PROLOG_2_RELON(label, h)				\
-	mfspr	r11,SPRN_##h##SRR0;	/* save SRR0 */			\
-	LOAD_HANDLER(r12,label);					\
-	mtctr	r12;							\
-	mfspr	r12,SPRN_##h##SRR1;	/* and SRR1 */			\
-	li	r10,MSR_RI;						\
-	mtmsrd	r10,1;			/* Set RI (EE=0) */		\
-	bctr;
-#else
-/* If not relocatable, we can jump directly -- and save messing with LR */
-#define __EXCEPTION_PROLOG_2_RELON(label, h)				\
-	mfspr	r11,SPRN_##h##SRR0;	/* save SRR0 */			\
-	mfspr	r12,SPRN_##h##SRR1;	/* and SRR1 */			\
-	li	r10,MSR_RI;						\
-	mtmsrd	r10,1;			/* Set RI (EE=0) */		\
-	b	label;
-#endif
-#define EXCEPTION_PROLOG_2_RELON(label, h)				\
-	__EXCEPTION_PROLOG_2_RELON(label, h)
-
-/*
- * As EXCEPTION_PROLOG(), except we've already got relocation on so no need to
- * rfid.  Save LR in case we're CONFIG_RELOCATABLE, in which case
- * EXCEPTION_PROLOG_2_RELON will be using LR.
- */
-#define EXCEPTION_RELON_PROLOG(area, label, h, extra, vec)		\
-	SET_SCRATCH0(r13);		/* save r13 */			\
-	EXCEPTION_PROLOG_0(area);					\
-	EXCEPTION_PROLOG_1(area, extra, vec);				\
-	EXCEPTION_PROLOG_2_RELON(label, h)
-
 /*
  * We're short on space and time in the exception prolog, so we can't
  * use the normal LOAD_REG_IMMEDIATE macro to load the address of label.
@@ -204,9 +174,54 @@
 	ori	reg,reg,(ABS_ADDR(label))@l;				\
 	addis	reg,reg,(ABS_ADDR(label))@h
 
+#ifdef CONFIG_RELOCATABLE
+.macro EXCEPTION_PROLOG_2_RELON label, hsrr
+	.if \hsrr
+	mfspr	r11,SPRN_HSRR0	/* save HSRR0 */
+	.else
+	mfspr	r11,SPRN_SRR0	/* save SRR0 */
+	.endif
+	LOAD_HANDLER(r12, \label\())
+	mtctr	r12
+	.if \hsrr
+	mfspr	r12,SPRN_HSRR1	/* and HSRR1 */
+	.else
+	mfspr	r12,SPRN_SRR1	/* and HSRR1 */
+	.endif
+	li	r10,MSR_RI
+	mtmsrd	r10,1		/* Set RI (EE=0) */
+	bctr
+.endm
+#else
+/* If not relocatable, we can jump directly -- and save messing with LR */
+.macro EXCEPTION_PROLOG_2_RELON label, hsrr
+	.if \hsrr
+	mfspr	r11,SPRN_HSRR0	/* save HSRR0 */
+	mfspr	r12,SPRN_HSRR1	/* and HSRR1 */
+	.else
+	mfspr	r11,SPRN_SRR0	/* save SRR0 */
+	mfspr	r12,SPRN_SRR1	/* and SRR1 */
+	.endif
+	li	r10,MSR_RI
+	mtmsrd	r10,1		/* Set RI (EE=0) */
+	b	\label
+.endm
+#endif
+
+/*
+ * As EXCEPTION_PROLOG(), except we've already got relocation on so no need to
+ * rfid.  Save LR in case we're CONFIG_RELOCATABLE, in which case
+ * EXCEPTION_PROLOG_2_RELON will be using LR.
+ */
+#define EXCEPTION_RELON_PROLOG(area, label, hsrr, extra, vec)		\
+	SET_SCRATCH0(r13);		/* save r13 */			\
+	EXCEPTION_PROLOG_0(area);					\
+	EXCEPTION_PROLOG_1(area, extra, vec);				\
+	EXCEPTION_PROLOG_2_RELON label, hsrr
+
 /* Exception register prefixes */
-#define EXC_HV	H
-#define EXC_STD
+#define EXC_HV	1
+#define EXC_STD	0
 
 #if defined(CONFIG_RELOCATABLE)
 /*
@@ -308,43 +323,57 @@ END_FTR_SECTION_NESTED(ftr,ftr,943)
 #define EXCEPTION_PROLOG_1(area, extra, vec)				\
 	_EXCEPTION_PROLOG_1(area, extra, vec)
 
-#define __EXCEPTION_PROLOG_2(label, h)					\
-	ld	r10,PACAKMSR(r13);	/* get MSR value for kernel */	\
-	mfspr	r11,SPRN_##h##SRR0;	/* save SRR0 */			\
-	LOAD_HANDLER(r12,label);					\
-	mtspr	SPRN_##h##SRR0,r12;					\
-	mfspr	r12,SPRN_##h##SRR1;	/* and SRR1 */			\
-	mtspr	SPRN_##h##SRR1,r10;					\
-	h##RFI_TO_KERNEL;						\
+.macro EXCEPTION_PROLOG_2 label, hsrr
+	ld	r10,PACAKMSR(r13)	/* get MSR value for kernel */
+	.if \hsrr
+	mfspr	r11,SPRN_HSRR0		/* save HSRR0 */
+	.else
+	mfspr	r11,SPRN_SRR0		/* save SRR0 */
+	.endif
+	LOAD_HANDLER(r12,\label\())
+	.if \hsrr
+	mtspr	SPRN_HSRR0,r12
+	mfspr	r12,SPRN_HSRR1		/* and HSRR1 */
+	mtspr	SPRN_HSRR1,r10
+	HRFI_TO_KERNEL
+	.else
+	mtspr	SPRN_SRR0,r12
+	mfspr	r12,SPRN_SRR1		/* and SRR1 */
+	mtspr	SPRN_SRR1,r10
+	RFI_TO_KERNEL
+	.endif
 	b	.	/* prevent speculative execution */
-#define EXCEPTION_PROLOG_2(label, h)					\
-	__EXCEPTION_PROLOG_2(label, h)
+.endm
 
 /* _NORI variant keeps MSR_RI clear */
-#define __EXCEPTION_PROLOG_2_NORI(label, h)				\
-	ld	r10,PACAKMSR(r13);	/* get MSR value for kernel */	\
-	xori	r10,r10,MSR_RI;		/* Clear MSR_RI */		\
-	mfspr	r11,SPRN_##h##SRR0;	/* save SRR0 */			\
-	LOAD_HANDLER(r12,label);					\
-	mtspr	SPRN_##h##SRR0,r12;					\
-	mfspr	r12,SPRN_##h##SRR1;	/* and SRR1 */			\
-	mtspr	SPRN_##h##SRR1,r10;					\
-	h##RFI_TO_KERNEL;						\
+.macro EXCEPTION_PROLOG_2_NORI label, hsrr
+	ld	r10,PACAKMSR(r13)	/* get MSR value for kernel */
+	xori	r10,r10,MSR_RI		/* Clear MSR_RI */
+	.if \hsrr
+	mfspr	r11,SPRN_HSRR0		/* save HSRR0 */
+	.else
+	mfspr	r11,SPRN_SRR0		/* save SRR0 */
+	.endif
+	LOAD_HANDLER(r12,\label\())
+	.if \hsrr
+	mtspr	SPRN_HSRR0,r12
+	mfspr	r12,SPRN_HSRR1		/* and HSRR1 */
+	mtspr	SPRN_HSRR1,r10
+	HRFI_TO_KERNEL
+	.else
+	mtspr	SPRN_SRR0,r12
+	mfspr	r12,SPRN_SRR1		/* and SRR1 */
+	mtspr	SPRN_SRR1,r10
+	RFI_TO_KERNEL
+	.endif
 	b	.	/* prevent speculative execution */
-
-#define EXCEPTION_PROLOG_2_NORI(label, h)				\
-	__EXCEPTION_PROLOG_2_NORI(label, h)
+.endm
 
 #define EXCEPTION_PROLOG(area, label, h, extra, vec)			\
 	SET_SCRATCH0(r13);		/* save r13 */			\
 	EXCEPTION_PROLOG_0(area);					\
 	EXCEPTION_PROLOG_1(area, extra, vec);				\
-	EXCEPTION_PROLOG_2(label, h)
-
-#define __KVMTEST(h, n)							\
-	lbz	r10,HSTATE_IN_GUEST(r13);				\
-	cmpwi	r10,0;							\
-	bne	do_kvm_##h##n
+	EXCEPTION_PROLOG_2 label, h
 
 #ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE
 /*
@@ -413,52 +442,66 @@ END_FTR_SECTION_NESTED(ftr,ftr,943)
 #define EXCEPTION_PROLOG_NORI(area, label, h, extra, vec)		\
 	EXCEPTION_PROLOG_0(area);					\
 	EXCEPTION_PROLOG_1(area, extra, vec);				\
-	EXCEPTION_PROLOG_2_NORI(label, h)
-
-
-#define __KVM_HANDLER(area, h, n)					\
-	BEGIN_FTR_SECTION_NESTED(947)					\
-	ld	r10,area+EX_CFAR(r13);					\
-	std	r10,HSTATE_CFAR(r13);					\
-	END_FTR_SECTION_NESTED(CPU_FTR_CFAR,CPU_FTR_CFAR,947);		\
-	BEGIN_FTR_SECTION_NESTED(948)					\
-	ld	r10,area+EX_PPR(r13);					\
-	std	r10,HSTATE_PPR(r13);					\
-	END_FTR_SECTION_NESTED(CPU_FTR_HAS_PPR,CPU_FTR_HAS_PPR,948);	\
-	ld	r10,area+EX_R10(r13);					\
-	std	r12,HSTATE_SCRATCH0(r13);				\
-	sldi	r12,r9,32;						\
-	ori	r12,r12,(n);						\
-	/* This reloads r9 before branching to kvmppc_interrupt */	\
-	__BRANCH_TO_KVM_EXIT(area, kvmppc_interrupt)
-
-#define __KVM_HANDLER_SKIP(area, h, n)					\
-	cmpwi	r10,KVM_GUEST_MODE_SKIP;				\
-	beq	89f;							\
-	BEGIN_FTR_SECTION_NESTED(948)					\
-	ld	r10,area+EX_PPR(r13);					\
-	std	r10,HSTATE_PPR(r13);					\
-	END_FTR_SECTION_NESTED(CPU_FTR_HAS_PPR,CPU_FTR_HAS_PPR,948);	\
-	ld	r10,area+EX_R10(r13);					\
-	std	r12,HSTATE_SCRATCH0(r13);				\
-	sldi	r12,r9,32;						\
-	ori	r12,r12,(n);						\
-	/* This reloads r9 before branching to kvmppc_interrupt */	\
-	__BRANCH_TO_KVM_EXIT(area, kvmppc_interrupt);			\
-89:	mtocrf	0x80,r9;						\
-	ld	r9,area+EX_R9(r13);					\
-	ld	r10,area+EX_R10(r13);					\
-	b	kvmppc_skip_##h##interrupt
+	EXCEPTION_PROLOG_2_NORI label, h
 
 #ifdef CONFIG_KVM_BOOK3S_64_HANDLER
-#define KVMTEST(h, n)			__KVMTEST(h, n)
-#define KVM_HANDLER(area, h, n)		__KVM_HANDLER(area, h, n)
-#define KVM_HANDLER_SKIP(area, h, n)	__KVM_HANDLER_SKIP(area, h, n)
+.macro KVMTEST hsrr, n
+	lbz	r10,HSTATE_IN_GUEST(r13)
+	cmpwi	r10,0
+	.if \hsrr
+	bne	do_kvm_H\n
+	.else
+	bne	do_kvm_\n
+	.endif
+.endm
+
+.macro KVM_HANDLER area, hsrr, n
+	BEGIN_FTR_SECTION_NESTED(947)
+	ld	r10,\area+EX_CFAR(r13)
+	std	r10,HSTATE_CFAR(r13)
+	END_FTR_SECTION_NESTED(CPU_FTR_CFAR,CPU_FTR_CFAR,947)
+	BEGIN_FTR_SECTION_NESTED(948)
+	ld	r10,\area+EX_PPR(r13)
+	std	r10,HSTATE_PPR(r13)
+	END_FTR_SECTION_NESTED(CPU_FTR_HAS_PPR,CPU_FTR_HAS_PPR,948)
+	ld	r10,\area+EX_R10(r13)
+	std	r12,HSTATE_SCRATCH0(r13)
+	sldi	r12,r9,32
+	ori	r12,r12,(\n)
+	/* This reloads r9 before branching to kvmppc_interrupt */
+	__BRANCH_TO_KVM_EXIT(\area, kvmppc_interrupt)
+.endm
+
+.macro KVM_HANDLER_SKIP area, hsrr, n
+	cmpwi	r10,KVM_GUEST_MODE_SKIP
+	beq	89f
+	BEGIN_FTR_SECTION_NESTED(948)
+	ld	r10,\area+EX_PPR(r13)
+	std	r10,HSTATE_PPR(r13)
+	END_FTR_SECTION_NESTED(CPU_FTR_HAS_PPR,CPU_FTR_HAS_PPR,948)
+	ld	r10,\area+EX_R10(r13)
+	std	r12,HSTATE_SCRATCH0(r13)
+	sldi	r12,r9,32
+	ori	r12,r12,(\n)
+	/* This reloads r9 before branching to kvmppc_interrupt */
+	__BRANCH_TO_KVM_EXIT(\area, kvmppc_interrupt)
+89:	mtocrf	0x80,r9
+	ld	r9,\area+EX_R9(r13)
+	ld	r10,\area+EX_R10(r13)
+	.if \hsrr
+	b	kvmppc_skip_Hinterrupt
+	.else
+	b	kvmppc_skip_interrupt
+	.endif
+.endm
 #else
-#define KVMTEST(h, n)
-#define KVM_HANDLER(area, h, n)
-#define KVM_HANDLER_SKIP(area, h, n)
+.macro KVMTEST hsrr, n
+.endm
+.macro KVM_HANDLER area, hsrr, n
+.endm
+.macro KVM_HANDLER_SKIP area, hsrr, n
+.endm
 #endif
 
 #define NOTEST(n)
 
@@ -556,14 +599,14 @@ END_FTR_SECTION_NESTED(ftr,ftr,943)
 
 #define STD_EXCEPTION_OOL(vec, label)					\
 	EXCEPTION_PROLOG_1(PACA_EXGEN, KVMTEST_PR, vec);		\
-	EXCEPTION_PROLOG_2(label, EXC_STD)
+	EXCEPTION_PROLOG_2 label, EXC_STD
 
 #define STD_EXCEPTION_HV(loc, vec, label)				\
 	EXCEPTION_PROLOG(PACA_EXGEN, label, EXC_HV, KVMTEST_HV, vec)
 
 #define STD_EXCEPTION_HV_OOL(vec, label)				\
 	EXCEPTION_PROLOG_1(PACA_EXGEN, KVMTEST_HV, vec);		\
-	EXCEPTION_PROLOG_2(label, EXC_HV)
+	EXCEPTION_PROLOG_2 label, EXC_HV
 
 #define STD_RELON_EXCEPTION(loc, vec, label)				\
 	/* No guest interrupts come through here */			\
@@ -571,88 +614,97 @@ END_FTR_SECTION_NESTED(ftr,ftr,943)
 
 #define STD_RELON_EXCEPTION_OOL(vec, label)				\
 	EXCEPTION_PROLOG_1(PACA_EXGEN, NOTEST, vec);			\
-	EXCEPTION_PROLOG_2_RELON(label, EXC_STD)
+	EXCEPTION_PROLOG_2_RELON label, EXC_STD
 
 #define STD_RELON_EXCEPTION_HV(loc, vec, label)				\
 	EXCEPTION_RELON_PROLOG(PACA_EXGEN, label, EXC_HV, KVMTEST_HV, vec)
 
 #define STD_RELON_EXCEPTION_HV_OOL(vec, label)				\
 	EXCEPTION_PROLOG_1(PACA_EXGEN, KVMTEST_HV, vec);		\
-	EXCEPTION_PROLOG_2_RELON(label, EXC_HV)
-
-/* This associate vector numbers with bits in paca->irq_happened */
-#define SOFTEN_VALUE_0x500	PACA_IRQ_EE
-#define SOFTEN_VALUE_0x900	PACA_IRQ_DEC
-#define SOFTEN_VALUE_0xa00	PACA_IRQ_DBELL
-#define SOFTEN_VALUE_0xe80	PACA_IRQ_DBELL
-#define SOFTEN_VALUE_0xe60	PACA_IRQ_HMI
-#define SOFTEN_VALUE_0xea0	PACA_IRQ_EE
-#define SOFTEN_VALUE_0xf00	PACA_IRQ_PMI
-
-#define __SOFTEN_TEST(h, vec, bitmask)					\
-	lbz	r10,PACAIRQSOFTMASK(r13);				\
-	andi.	r10,r10,bitmask;					\
-	li	r10,SOFTEN_VALUE_##vec;					\
-	bne	masked_##h##interrupt
-
-#define _SOFTEN_TEST(h, vec, bitmask)	__SOFTEN_TEST(h, vec, bitmask)
+	EXCEPTION_PROLOG_2_RELON label, EXC_HV
+
+.macro SOFTEN_TEST hsrr, vec, bitmask
+	lbz	r10, PACAIRQSOFTMASK(r13)
+	andi.	r10, r10, \bitmask
+	/* This associates vector numbers with bits in paca->irq_happened */
+	.if \vec == 0x500 || \vec == 0xea0
+	li	r10, PACA_IRQ_EE
+	.elseif \vec == 0x900
+	li	r10, PACA_IRQ_DEC
+	.elseif \vec == 0xa00 || \vec == 0xe80
+	li	r10, PACA_IRQ_DBELL
+	.elseif \vec == 0xe60
+	li	r10, PACA_IRQ_HMI
+	.elseif \vec == 0xf00
+	li	r10, PACA_IRQ_PMI
+	.else
+	.abort "Bad maskable vector"
+	.endif
+
+
+	.if \hsrr
+	bne	masked_Hinterrupt
+	.else
+	bne	masked_interrupt
+	.endif
+.endm
 
 #define SOFTEN_TEST_PR(vec, bitmask)					\
-	KVMTEST(EXC_STD, vec);						\
-	_SOFTEN_TEST(EXC_STD, vec, bitmask)
+	KVMTEST EXC_STD, vec ;						\
+	SOFTEN_TEST EXC_STD, vec, bitmask
 
 #define SOFTEN_TEST_HV(vec, bitmask)					\
-	KVMTEST(EXC_HV, vec);						\
-	_SOFTEN_TEST(EXC_HV, vec, bitmask)
+	KVMTEST EXC_HV, vec ;						\
+	SOFTEN_TEST EXC_HV, vec, bitmask
 
 #define KVMTEST_PR(vec)							\
-	KVMTEST(EXC_STD, vec)
+	KVMTEST EXC_STD, vec
 
 #define KVMTEST_HV(vec)							\
-	KVMTEST(EXC_HV, vec)
+	KVMTEST EXC_HV, vec
 
-#define SOFTEN_NOTEST_PR(vec, bitmask)	_SOFTEN_TEST(EXC_STD, vec, bitmask)
-#define SOFTEN_NOTEST_HV(vec, bitmask)	_SOFTEN_TEST(EXC_HV, vec, bitmask)
+#define SOFTEN_NOTEST_PR(vec, bitmask)	SOFTEN_TEST EXC_STD, vec, bitmask
+#define SOFTEN_NOTEST_HV(vec, bitmask)	SOFTEN_TEST EXC_HV, vec, bitmask
 
 #define __MASKABLE_EXCEPTION(vec, label, h, extra, bitmask)		\
 	SET_SCRATCH0(r13);		/* save r13 */			\
 	EXCEPTION_PROLOG_0(PACA_EXGEN);					\
 	MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, extra, vec, bitmask);	\
-	EXCEPTION_PROLOG_2(label, h)
+	EXCEPTION_PROLOG_2 label, h
 
 #define MASKABLE_EXCEPTION(vec, label, bitmask)				\
 	__MASKABLE_EXCEPTION(vec, label, EXC_STD, SOFTEN_TEST_PR, bitmask)
 
 #define MASKABLE_EXCEPTION_OOL(vec, label, bitmask)			\
 	MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, SOFTEN_TEST_PR, vec, bitmask);\
-	EXCEPTION_PROLOG_2(label, EXC_STD)
+	EXCEPTION_PROLOG_2 label, EXC_STD
 
 #define MASKABLE_EXCEPTION_HV(vec, label, bitmask)			\
 	__MASKABLE_EXCEPTION(vec, label, EXC_HV, SOFTEN_TEST_HV, bitmask)
 
 #define MASKABLE_EXCEPTION_HV_OOL(vec, label, bitmask)			\
 	MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, SOFTEN_TEST_HV, vec, bitmask);\
-	EXCEPTION_PROLOG_2(label, EXC_HV)
+	EXCEPTION_PROLOG_2 label, EXC_HV
 
 #define __MASKABLE_RELON_EXCEPTION(vec, label, h, extra, bitmask)	\
 	SET_SCRATCH0(r13);		/* save r13 */			\
 	EXCEPTION_PROLOG_0(PACA_EXGEN);					\
 	MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, extra, vec, bitmask);	\
-	EXCEPTION_PROLOG_2_RELON(label, h)
+	EXCEPTION_PROLOG_2_RELON label, h
 
 #define MASKABLE_RELON_EXCEPTION(vec, label, bitmask)			\
 	__MASKABLE_RELON_EXCEPTION(vec, label, EXC_STD, SOFTEN_NOTEST_PR, bitmask)
 
 #define MASKABLE_RELON_EXCEPTION_OOL(vec, label, bitmask)		\
 	MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, SOFTEN_NOTEST_PR, vec, bitmask);\
-	EXCEPTION_PROLOG_2(label, EXC_STD)
+	EXCEPTION_PROLOG_2 label, EXC_STD
 
 #define MASKABLE_RELON_EXCEPTION_HV(vec, label, bitmask)		\
 	__MASKABLE_RELON_EXCEPTION(vec, label, EXC_HV, SOFTEN_TEST_HV, bitmask)
 
 #define MASKABLE_RELON_EXCEPTION_HV_OOL(vec, label, bitmask)		\
 	MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, SOFTEN_TEST_HV, vec, bitmask);\
-	EXCEPTION_PROLOG_2_RELON(label, EXC_HV)
+	EXCEPTION_PROLOG_2_RELON label, EXC_HV
 
 /*
  * Our exception common code can be passed various "additions"
@@ -731,4 +783,6 @@ END_FTR_SECTION_IFSET(CPU_FTR_CAN_NAP)
 #define FINISH_NAP
 #endif
 
+#endif /* __ASSEMBLY__ */
+
 #endif	/* _ASM_POWERPC_EXCEPTION_H */
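
One consequence of EXC_STD and EXC_HV becoming 0 and 1 is that gas can evaluate them as absolute expressions, so the same call-site syntax serves both interrupt levels and the register choice is made by .if rather than by name pasting. A hedged sketch of what a hypervisor-level call site roughly expands to under the new macros (the handler label demo_hv_handler is made up for illustration, and IRQS_DISABLED is an assumed kernel bitmask constant, not part of this patch; the 0xe80-to-PACA_IRQ_DBELL mapping comes from the SOFTEN_TEST table above):

	/* caller side -- EXC_HV is now just the constant 1 */
	SOFTEN_TEST	EXC_HV, 0xe80, IRQS_DISABLED
	EXCEPTION_PROLOG_2 demo_hv_handler, EXC_HV

	/* roughly what the assembler emits for hsrr=1, vec=0xe80 */
	lbz	r10,PACAIRQSOFTMASK(r13)
	andi.	r10,r10,IRQS_DISABLED
	li	r10,PACA_IRQ_DBELL	/* 0xe80 maps to the doorbell bit */
	bne	masked_Hinterrupt
	ld	r10,PACAKMSR(r13)
	mfspr	r11,SPRN_HSRR0
	LOAD_HANDLER(r12,demo_hv_handler)
	mtspr	SPRN_HSRR0,r12
	mfspr	r12,SPRN_HSRR1
	mtspr	SPRN_HSRR1,r10
	HRFI_TO_KERNEL
	b	.	/* prevent speculative execution */

With hsrr=0 the same call sites select SRR0/SRR1, RFI_TO_KERNEL and masked_interrupt instead, which is exactly what the old H-concatenation produced.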