Diffstat (limited to 'arch/riscv/include/asm/cpufeature.h')
-rw-r--r--   arch/riscv/include/asm/cpufeature.h   24
1 file changed, 13 insertions, 11 deletions
diff --git a/arch/riscv/include/asm/cpufeature.h b/arch/riscv/include/asm/cpufeature.h
index 6fec91845aa0..46061f5e9764 100644
--- a/arch/riscv/include/asm/cpufeature.h
+++ b/arch/riscv/include/asm/cpufeature.h
@@ -28,37 +28,39 @@ struct riscv_isainfo {
 
 DECLARE_PER_CPU(struct riscv_cpuinfo, riscv_cpuinfo);
 
-DECLARE_PER_CPU(long, misaligned_access_speed);
-
 /* Per-cpu ISA extensions. */
 extern struct riscv_isainfo hart_isa[NR_CPUS];
 
 void riscv_user_isa_enable(void);
 
-#ifdef CONFIG_RISCV_MISALIGNED
-bool unaligned_ctl_available(void);
+#if defined(CONFIG_RISCV_MISALIGNED)
 bool check_unaligned_access_emulated_all_cpus(void);
 void unaligned_emulation_finish(void);
+bool unaligned_ctl_available(void);
+DECLARE_PER_CPU(long, misaligned_access_speed);
 #else
 static inline bool unaligned_ctl_available(void)
 {
 	return false;
 }
-
-static inline bool check_unaligned_access_emulated(int cpu)
-{
-	return false;
-}
-
-static inline void unaligned_emulation_finish(void) {}
 #endif
 
+#if defined(CONFIG_RISCV_PROBE_UNALIGNED_ACCESS)
 DECLARE_STATIC_KEY_FALSE(fast_unaligned_access_speed_key);
 
 static __always_inline bool has_fast_unaligned_accesses(void)
 {
 	return static_branch_likely(&fast_unaligned_access_speed_key);
 }
+#else
+static __always_inline bool has_fast_unaligned_accesses(void)
+{
+	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS))
+		return true;
+	else
+		return false;
+}
+#endif
 
 unsigned long riscv_get_elf_hwcap(void);
 
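The has_fast_unaligned_accesses() helper shown above either consults the boot-time static branch (when CONFIG_RISCV_PROBE_UNALIGNED_ACCESS probes access speed) or answers at compile time via IS_ENABLED(). As a purely illustrative sketch of how a caller might consume it, here is a hypothetical example; copy_words_maybe_unaligned() is invented for this note and is not part of the patch.

/*
 * Illustrative only, not part of this commit: a hypothetical caller
 * that does a word-at-a-time copy when unaligned accesses are fast.
 */
#include <linux/types.h>
#include <asm/unaligned.h>
#include <asm/cpufeature.h>

static void copy_words_maybe_unaligned(u8 *dst, const u8 *src, size_t len)
{
	/* Word-sized copies only pay off when unaligned accesses are cheap. */
	if (has_fast_unaligned_accesses()) {
		while (len >= sizeof(unsigned long)) {
			put_unaligned(get_unaligned((const unsigned long *)src),
				      (unsigned long *)dst);
			dst += sizeof(unsigned long);
			src += sizeof(unsigned long);
			len -= sizeof(unsigned long);
		}
	}
	/* Byte-wise tail, and the whole copy when unaligned access is slow. */
	while (len--)
		*dst++ = *src++;
}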