tcg: Introduce tcg_use_softmmu

Begin disconnecting CONFIG_SOFTMMU from !CONFIG_USER_ONLY.
Introduce a variable which can be set at startup to select
one method or another for user-only.

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Author: Richard Henderson <richard.henderson@linaro.org>
Date:   2023-10-01 07:53:03 -07:00
parent cc3f99aac4
commit 397cabaae0
3 changed files with 19 additions and 12 deletions
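
The commit message introduces the knob but leaves the startup plumbing to later
patches.  A minimal sketch of how a user-only setup path could drive it,
assuming a hypothetical option handler (the function name and option are
illustrative, not part of this commit):

#include "tcg/tcg.h"

#ifdef CONFIG_USER_ONLY
/*
 * Hypothetical user-only startup hook: tcg_use_softmmu is a plain global,
 * so it only needs to be assigned before the first TB is translated.
 */
static void handle_guest_memory_option(bool want_softmmu)
{
    tcg_use_softmmu = want_softmmu;
}
#endif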


@@ -488,11 +488,9 @@ struct TCGContext {
     int nb_ops;
     TCGType addr_type;            /* TCG_TYPE_I32 or TCG_TYPE_I64 */
 
-#ifdef CONFIG_SOFTMMU
     int page_mask;
     uint8_t page_bits;
     uint8_t tlb_dyn_max_bits;
-#endif
     uint8_t insn_start_words;
     TCGBar guest_mo;
 
@@ -573,6 +571,12 @@ static inline bool temp_readonly(TCGTemp *ts)
     return ts->kind >= TEMP_FIXED;
 }
 
+#ifdef CONFIG_USER_ONLY
+extern bool tcg_use_softmmu;
+#else
+#define tcg_use_softmmu true
+#endif
+
 extern __thread TCGContext *tcg_ctx;
 extern const void *tcg_code_gen_epilogue;
 extern uintptr_t tcg_splitwx_diff;
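
The pair of definitions above is what lets the rest of the patch replace
preprocessor guards with ordinary conditionals: user-only builds get a real
global that can be set at startup, while system-mode builds see a macro that
expands to the literal 'true', so the compiler folds the test and discards the
user-only branch.  A minimal illustration of the pattern (the helper below is
not from the commit):

static bool uses_softmmu_tlb_lookup(void)
{
    if (tcg_use_softmmu) {
        return true;    /* softmmu TLB path */
    }
    return false;       /* user-only direct host-memory path */
}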


@@ -34,13 +34,13 @@
 
 static void check_max_alignment(unsigned a_bits)
 {
-#if defined(CONFIG_SOFTMMU)
     /*
      * The requested alignment cannot overlap the TLB flags.
      * FIXME: Must keep the count up-to-date with "exec/cpu-all.h".
      */
-    tcg_debug_assert(a_bits + 5 <= tcg_ctx->page_bits);
-#endif
+    if (tcg_use_softmmu) {
+        tcg_debug_assert(a_bits + 5 <= tcg_ctx->page_bits);
+    }
 }
 
 static MemOp tcg_canonicalize_memop(MemOp op, bool is64, bool st)
@@ -411,10 +411,11 @@ void tcg_gen_qemu_st_i64_chk(TCGv_i64 val, TCGTemp *addr, TCGArg idx,
  */
 static bool use_two_i64_for_i128(MemOp mop)
 {
-#ifdef CONFIG_SOFTMMU
     /* Two softmmu tlb lookups is larger than one function call. */
-    return false;
-#else
+    if (tcg_use_softmmu) {
+        return false;
+    }
+
     /*
      * For user-only, two 64-bit operations may well be smaller than a call.
      * Determine if that would be legal for the requested atomicity.
@@ -432,7 +433,6 @@ static bool use_two_i64_for_i128(MemOp mop)
     default:
         g_assert_not_reached();
     }
-#endif
 }
 
 static void canonicalize_memop_i128_as_i64(MemOp ret[2], MemOp orig)


@@ -226,6 +226,10 @@ static TCGAtomAlign atom_and_align_for_opc(TCGContext *s, MemOp opc,
                                            MemOp host_atom, bool allow_two_ops)
     __attribute__((unused));
 
+#ifdef CONFIG_USER_ONLY
+bool tcg_use_softmmu;
+#endif
+
 TCGContext tcg_init_ctx;
 __thread TCGContext *tcg_ctx;
 
@@ -404,13 +408,12 @@ static uintptr_t G_GNUC_UNUSED get_jmp_target_addr(TCGContext *s, int which)
     return (uintptr_t)tcg_splitwx_to_rx(&s->gen_tb->jmp_target_addr[which]);
 }
 
-#if defined(CONFIG_SOFTMMU) && !defined(CONFIG_TCG_INTERPRETER)
-static int tlb_mask_table_ofs(TCGContext *s, int which)
+static int __attribute__((unused))
+tlb_mask_table_ofs(TCGContext *s, int which)
 {
     return (offsetof(CPUNegativeOffsetState, tlb.f[which]) -
             sizeof(CPUNegativeOffsetState));
 }
-#endif
 
 /* Signal overflow, starting over with fewer guest insns. */
 static G_NORETURN
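
The tlb_mask_table_ofs() hunk above shows the companion pattern: once the
softmmu decision is made at run time, a helper that some configurations never
reference is kept compiling everywhere and merely marked unused, rather than
being hidden behind an #if.  A generic sketch of that pattern (not code from
the commit):

static int __attribute__((unused))
sometimes_unreferenced_helper(int x)
{
    /* Defined unconditionally; the attribute silences -Wunused-function
       in configurations where no call site survives. */
    return x + 1;
}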