The CPUTLBDescFast array lives within CPUNegativeOffsetState, which means the last element of the array has the offset from env with the smallest magnitude. That offset can be encoded into fewer bits when generating TCG fast-path memory references.
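For illustration, here is a minimal standalone sketch (not QEMU code) of the layout argument: a simplified stand-in for CPUNegativeOffsetState, with a placeholder NB_MMU_MODES value and placeholder CPUTLBDescFast members, printing the env-relative displacement of each element both with the direct index and with the inverted index used below.

/*
 * Standalone sketch only; the struct contents and NB_MMU_MODES value are
 * assumptions for illustration.  The negative-offset state is laid out
 * immediately before env, so each element's env-relative displacement is
 * negative and shrinks in magnitude toward the end of the array.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define NB_MMU_MODES 16                 /* placeholder value */

typedef struct CPUTLBDescFast {
    uintptr_t mask;                     /* placeholder members */
    uintptr_t table;
} CPUTLBDescFast;

typedef struct CPUNegativeOffsetState {
    struct {
        CPUTLBDescFast f[NB_MMU_MODES];
    } tlb;
} CPUNegativeOffsetState;

int main(void)
{
    CPUNegativeOffsetState neg;
    /* env begins immediately after the negative-offset state. */
    char *env = (char *)&neg + sizeof(neg);

    for (int i = 0; i < NB_MMU_MODES; i++) {
        ptrdiff_t direct   = (char *)&neg.tlb.f[i] - env;
        ptrdiff_t inverted = (char *)&neg.tlb.f[NB_MMU_MODES - 1 - i] - env;

        printf("mmu_idx %2d: direct %+td, inverted %+td\n",
               i, direct, inverted);
    }
    return 0;
}

With these placeholder sizes, a target that only uses mmu_idx 0..2 would see displacements of -256, -240 and -224 without the inversion, but -16, -32 and -48 with it, which is what lets the fast path use shorter immediate offsets.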
When we changed the NB_MMU_MODES to be a global constant, rather than a per-target value, we pessimized the code generated for targets which use only a few mmu indexes. By inverting the array index, we counteract that.

Signed-off-by: Richard Henderson <richard.hender...@linaro.org>
---
 include/hw/core/cpu.h | 6 +++++-
 tcg/tcg.c             | 3 ++-
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/include/hw/core/cpu.h b/include/hw/core/cpu.h
index bd835b07d5..85b1ab4022 100644
--- a/include/hw/core/cpu.h
+++ b/include/hw/core/cpu.h
@@ -596,7 +596,11 @@ static inline CPUArchState *cpu_env(CPUState *cpu)
 #ifdef CONFIG_TCG
 static inline CPUTLBDescFast *cpu_tlb_fast(CPUState *cpu, int mmu_idx)
 {
-    return &cpu->neg.tlb.f[mmu_idx];
+    /*
+     * Invert the index order of the CPUTLBDescFast array so that lower
+     * mmu_idx have negative offsets from env with smaller absolute values.
+     */
+    return &cpu->neg.tlb.f[NB_MMU_MODES - 1 - mmu_idx];
 }
 #endif
 
diff --git a/tcg/tcg.c b/tcg/tcg.c
index afac55a203..615675d185 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -425,7 +425,8 @@ static uintptr_t G_GNUC_UNUSED get_jmp_target_addr(TCGContext *s, int which)
 static int __attribute__((unused))
 tlb_mask_table_ofs(TCGContext *s, int which)
 {
-    return (offsetof(CPUNegativeOffsetState, tlb.f[which]) -
+    /* Invert the index order -- see cpu_tlb_fast. */
+    return (offsetof(CPUNegativeOffsetState, tlb.f[NB_MMU_MODES - 1 - which]) -
             sizeof(CPUNegativeOffsetState));
 }
 
-- 
2.43.0