They are now accessible through exec/memory.h instead, and we make sure
all variants are available for common or target-dependent code. To allow
this, we need to implement the address_space_ld/st{*}_cached helpers,
simply forwarding the calls to the _cached_slow variants.

Signed-off-by: Pierrick Bouvier <pierrick.bouvier@xxxxxxxxxx>
---
 include/exec/cpu-all.h              | 15 ------------
 include/exec/memory.h               | 36 +++++++++++++++++++++++++++++
 include/exec/memory_ldst_phys.h.inc |  5 +---
 3 files changed, 37 insertions(+), 19 deletions(-)

diff --git a/include/exec/cpu-all.h b/include/exec/cpu-all.h
index 17ea82518a0..1c2e18f492b 100644
--- a/include/exec/cpu-all.h
+++ b/include/exec/cpu-all.h
@@ -75,21 +75,6 @@ static inline void stl_phys_notdirty(AddressSpace *as, hwaddr addr, uint32_t val
                                MEMTXATTRS_UNSPECIFIED, NULL);
 }
 
-#define SUFFIX
-#define ARG1         as
-#define ARG1_DECL    AddressSpace *as
-#define TARGET_ENDIANNESS
-#include "exec/memory_ldst_phys.h.inc"
-
-/* Inline fast path for direct RAM access.  */
-#define ENDIANNESS
-#include "exec/memory_ldst_cached.h.inc"
-
-#define SUFFIX       _cached
-#define ARG1         cache
-#define ARG1_DECL    MemoryRegionCache *cache
-#define TARGET_ENDIANNESS
-#include "exec/memory_ldst_phys.h.inc"
 #endif
 
 /* page related stuff */
diff --git a/include/exec/memory.h b/include/exec/memory.h
index 78c4e0aec8d..7c20f36a312 100644
--- a/include/exec/memory.h
+++ b/include/exec/memory.h
@@ -2798,6 +2798,42 @@ static inline void address_space_stb_cached(MemoryRegionCache *cache,
     }
 }
 
+static inline uint16_t address_space_lduw_cached(MemoryRegionCache *cache,
+    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
+{
+    return address_space_lduw_cached_slow(cache, addr, attrs, result);
+}
+
+static inline void address_space_stw_cached(MemoryRegionCache *cache,
+    hwaddr addr, uint16_t val, MemTxAttrs attrs, MemTxResult *result)
+{
+    address_space_stw_cached_slow(cache, addr, val, attrs, result);
+}
+
+static inline uint32_t address_space_ldl_cached(MemoryRegionCache *cache,
+    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
+{
+    return address_space_ldl_cached_slow(cache, addr, attrs, result);
+}
+
+static inline void address_space_stl_cached(MemoryRegionCache *cache,
+    hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
+{
+    address_space_stl_cached_slow(cache, addr, val, attrs, result);
+}
+
+static inline uint64_t address_space_ldq_cached(MemoryRegionCache *cache,
+    hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
+{
+    return address_space_ldq_cached_slow(cache, addr, attrs, result);
+}
+
+static inline void address_space_stq_cached(MemoryRegionCache *cache,
+    hwaddr addr, uint64_t val, MemTxAttrs attrs, MemTxResult *result)
+{
+    address_space_stq_cached_slow(cache, addr, val, attrs, result);
+}
+
 #define ENDIANNESS _le
 #include "exec/memory_ldst_cached.h.inc"
 
diff --git a/include/exec/memory_ldst_phys.h.inc b/include/exec/memory_ldst_phys.h.inc
index ecd678610d1..db67de75251 100644
--- a/include/exec/memory_ldst_phys.h.inc
+++ b/include/exec/memory_ldst_phys.h.inc
@@ -19,7 +19,6 @@
  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
  */
 
-#ifdef TARGET_ENDIANNESS
 static inline uint16_t glue(lduw_phys, SUFFIX)(ARG1_DECL, hwaddr addr)
 {
     return glue(address_space_lduw, SUFFIX)(ARG1, addr,
@@ -55,7 +54,7 @@ static inline void glue(stq_phys, SUFFIX)(ARG1_DECL, hwaddr addr, uint64_t val)
     glue(address_space_stq, SUFFIX)(ARG1, addr, val,
                                     MEMTXATTRS_UNSPECIFIED, NULL);
 }
-#else
+
 static inline uint8_t glue(ldub_phys, SUFFIX)(ARG1_DECL, hwaddr addr)
 {
     return glue(address_space_ldub, SUFFIX)(ARG1, addr,
@@ -139,9 +138,7 @@ static inline void glue(stq_be_phys, SUFFIX)(ARG1_DECL, hwaddr addr, uint64_t va
     glue(address_space_stq_be, SUFFIX)(ARG1, addr, val,
                                        MEMTXATTRS_UNSPECIFIED, NULL);
 }
-#endif
 
 #undef ARG1_DECL
 #undef ARG1
 #undef SUFFIX
-#undef TARGET_ENDIANNESS
-- 
2.39.5
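
For context, a minimal, hypothetical sketch (not part of this patch) of how
target-independent code can now reach the fixed-size cached accessors through
exec/memory.h alone. The function name, the AddressSpace passed in, and the
error handling are illustrative assumptions, not code from this series.

/* Illustrative only. Assumes a valid AddressSpace and a guest-physical
 * address that can be cached for read/write access. */
#include "qemu/osdep.h"
#include "exec/memory.h"

static uint32_t bump_guest_counter(AddressSpace *as, hwaddr addr)
{
    MemoryRegionCache cache = MEMORY_REGION_CACHE_INVALID;
    uint32_t val = 0;

    /* Map 4 bytes of guest memory for read/write access. */
    if (address_space_cache_init(&cache, as, addr, sizeof(val), true) < 0) {
        return 0;
    }

    /* With this patch, these calls forward to the _cached_slow variants. */
    val = address_space_ldl_cached(&cache, 0, MEMTXATTRS_UNSPECIFIED, NULL);
    address_space_stl_cached(&cache, 0, val + 1, MEMTXATTRS_UNSPECIFIED, NULL);

    address_space_cache_destroy(&cache);
    return val;
}

Note that, as the cpu-all.h hunk above shows, the non-suffixed _cached helpers
no longer use the inline fast path for direct RAM access; they now always take
the _cached_slow route.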