tcg: Add qemu_ld_st_i32/64

Step two in the transition, adding the new ldst opcodes.  Keep the old
opcodes around until all backends support the new opcodes.

Signed-off-by: Richard Henderson <rth@twiddle.net>
This commit is contained in:
Richard Henderson 2013-09-04 08:11:05 -07:00
parent 6c5f4ead64
commit f713d6ad7b
14 changed files with 366 additions and 237 deletions

View File

@ -412,30 +412,25 @@ current TB was linked to this TB. Otherwise execute the next
instructions. Only indices 0 and 1 are valid and tcg_gen_goto_tb may be issued instructions. Only indices 0 and 1 are valid and tcg_gen_goto_tb may be issued
at most once with each slot index per TB. at most once with each slot index per TB.
* qemu_ld8u t0, t1, flags * qemu_ld_i32/i64 t0, t1, flags, memidx
qemu_ld8s t0, t1, flags * qemu_st_i32/i64 t0, t1, flags, memidx
qemu_ld16u t0, t1, flags
qemu_ld16s t0, t1, flags
qemu_ld32 t0, t1, flags
qemu_ld32u t0, t1, flags
qemu_ld32s t0, t1, flags
qemu_ld64 t0, t1, flags
Load data at the QEMU CPU address t1 into t0. t1 has the QEMU CPU address Load data at the guest address t1 into t0, or store data in t0 at guest
type. 'flags' contains the QEMU memory index (selects user or kernel access) address t1. The _i32/_i64 size applies to the size of the input/output
for example. register t0 only. The address t1 is always sized according to the guest,
and the width of the memory operation is controlled by flags.
Note that "qemu_ld32" implies a 32-bit result, while "qemu_ld32u" and Both t0 and t1 may be split into little-endian ordered pairs of registers
"qemu_ld32s" imply a 64-bit result appropriately extended from 32 bits. if dealing with 64-bit quantities on a 32-bit host.
* qemu_st8 t0, t1, flags The memidx selects the qemu tlb index to use (e.g. user or kernel access).
qemu_st16 t0, t1, flags The flags are the TCGMemOp bits, selecting the sign, width, and endianness
qemu_st32 t0, t1, flags of the memory access.
qemu_st64 t0, t1, flags
Store the data t0 at the QEMU CPU Address t1. t1 has the QEMU CPU For a 32-bit host, qemu_ld/st_i64 is guaranteed to only be used with a
address type. 'flags' contains the QEMU memory index (selects user or 64-bit memory access specified in flags.
kernel access) for example.
*********
Note 1: Some shortcuts are defined when the last operand is known to be Note 1: Some shortcuts are defined when the last operand is known to be
a constant (e.g. addi for add, movi for mov). a constant (e.g. addi for add, movi for mov).

View File

@ -96,6 +96,8 @@ enum {
TCG_AREG0 = TCG_REG_X19, TCG_AREG0 = TCG_REG_X19,
}; };
#define TCG_TARGET_HAS_new_ldst 0
static inline void flush_icache_range(uintptr_t start, uintptr_t stop) static inline void flush_icache_range(uintptr_t start, uintptr_t stop)
{ {
__builtin___clear_cache((char *)start, (char *)stop); __builtin___clear_cache((char *)start, (char *)stop);

View File

@ -85,6 +85,8 @@ extern bool use_idiv_instructions;
#define TCG_TARGET_HAS_div_i32 use_idiv_instructions #define TCG_TARGET_HAS_div_i32 use_idiv_instructions
#define TCG_TARGET_HAS_rem_i32 0 #define TCG_TARGET_HAS_rem_i32 0
#define TCG_TARGET_HAS_new_ldst 0
extern bool tcg_target_deposit_valid(int ofs, int len); extern bool tcg_target_deposit_valid(int ofs, int len);
#define TCG_TARGET_deposit_i32_valid tcg_target_deposit_valid #define TCG_TARGET_deposit_i32_valid tcg_target_deposit_valid

View File

@ -130,6 +130,8 @@ typedef enum {
#define TCG_TARGET_HAS_mulsh_i64 0 #define TCG_TARGET_HAS_mulsh_i64 0
#endif #endif
#define TCG_TARGET_HAS_new_ldst 0
#define TCG_TARGET_deposit_i32_valid(ofs, len) \ #define TCG_TARGET_deposit_i32_valid(ofs, len) \
(((ofs) == 0 && (len) == 8) || ((ofs) == 8 && (len) == 8) || \ (((ofs) == 0 && (len) == 8) || ((ofs) == 8 && (len) == 8) || \
((ofs) == 0 && (len) == 16)) ((ofs) == 0 && (len) == 16))

View File

@ -151,6 +151,8 @@ typedef enum {
#define TCG_TARGET_HAS_mulsh_i32 0 #define TCG_TARGET_HAS_mulsh_i32 0
#define TCG_TARGET_HAS_mulsh_i64 0 #define TCG_TARGET_HAS_mulsh_i64 0
#define TCG_TARGET_HAS_new_ldst 0
#define TCG_TARGET_deposit_i32_valid(ofs, len) ((len) <= 16) #define TCG_TARGET_deposit_i32_valid(ofs, len) ((len) <= 16)
#define TCG_TARGET_deposit_i64_valid(ofs, len) ((len) <= 16) #define TCG_TARGET_deposit_i64_valid(ofs, len) ((len) <= 16)

View File

@ -122,6 +122,8 @@ extern bool use_mips32r2_instructions;
#define TCG_TARGET_HAS_ext16s_i32 use_mips32r2_instructions #define TCG_TARGET_HAS_ext16s_i32 use_mips32r2_instructions
#define TCG_TARGET_HAS_rot_i32 use_mips32r2_instructions #define TCG_TARGET_HAS_rot_i32 use_mips32r2_instructions
#define TCG_TARGET_HAS_new_ldst 0
/* optional instructions automatically implemented */ /* optional instructions automatically implemented */
#define TCG_TARGET_HAS_neg_i32 0 /* sub rd, zero, rt */ #define TCG_TARGET_HAS_neg_i32 0 /* sub rd, zero, rt */
#define TCG_TARGET_HAS_ext8u_i32 0 /* andi rt, rs, 0xff */ #define TCG_TARGET_HAS_ext8u_i32 0 /* andi rt, rs, 0xff */

View File

@ -99,6 +99,8 @@ typedef enum {
#define TCG_TARGET_HAS_muluh_i32 0 #define TCG_TARGET_HAS_muluh_i32 0
#define TCG_TARGET_HAS_mulsh_i32 0 #define TCG_TARGET_HAS_mulsh_i32 0
#define TCG_TARGET_HAS_new_ldst 0
#define TCG_AREG0 TCG_REG_R27 #define TCG_AREG0 TCG_REG_R27
#define tcg_qemu_tb_exec(env, tb_ptr) \ #define tcg_qemu_tb_exec(env, tb_ptr) \

View File

@ -123,6 +123,8 @@ typedef enum {
#define TCG_TARGET_HAS_muluh_i64 1 #define TCG_TARGET_HAS_muluh_i64 1
#define TCG_TARGET_HAS_mulsh_i64 1 #define TCG_TARGET_HAS_mulsh_i64 1
#define TCG_TARGET_HAS_new_ldst 0
#define TCG_AREG0 TCG_REG_R27 #define TCG_AREG0 TCG_REG_R27
#define TCG_TARGET_EXTEND_ARGS 1 #define TCG_TARGET_EXTEND_ARGS 1

View File

@ -99,6 +99,8 @@ typedef enum TCGReg {
#define TCG_TARGET_HAS_muluh_i64 0 #define TCG_TARGET_HAS_muluh_i64 0
#define TCG_TARGET_HAS_mulsh_i64 0 #define TCG_TARGET_HAS_mulsh_i64 0
#define TCG_TARGET_HAS_new_ldst 0
extern bool tcg_target_deposit_valid(int ofs, int len); extern bool tcg_target_deposit_valid(int ofs, int len);
#define TCG_TARGET_deposit_i32_valid tcg_target_deposit_valid #define TCG_TARGET_deposit_i32_valid tcg_target_deposit_valid
#define TCG_TARGET_deposit_i64_valid tcg_target_deposit_valid #define TCG_TARGET_deposit_i64_valid tcg_target_deposit_valid

View File

@ -148,6 +148,8 @@ typedef enum {
#define TCG_TARGET_HAS_mulsh_i64 0 #define TCG_TARGET_HAS_mulsh_i64 0
#endif #endif
#define TCG_TARGET_HAS_new_ldst 0
#define TCG_AREG0 TCG_REG_I0 #define TCG_AREG0 TCG_REG_I0
static inline void flush_icache_range(uintptr_t start, uintptr_t stop) static inline void flush_icache_range(uintptr_t start, uintptr_t stop)

View File

@ -137,24 +137,6 @@ static inline void tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
*tcg_ctx.gen_opparam_ptr++ = offset; *tcg_ctx.gen_opparam_ptr++ = offset;
} }
/* Emit a qemu load/store opcode with a 64-bit value operand and a
   32-bit address operand, followed by the memory index.  Operand order
   in the param stream is: value, address, mem_index. */
static inline void tcg_gen_qemu_ldst_op_i64_i32(TCGOpcode opc, TCGv_i64 val,
TCGv_i32 addr, TCGArg mem_index)
{
*tcg_ctx.gen_opc_ptr++ = opc;
*tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
*tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(addr);
*tcg_ctx.gen_opparam_ptr++ = mem_index;
}
/* As tcg_gen_qemu_ldst_op_i64_i32, but with a 64-bit address operand.
   Operand order in the param stream is: value, address, mem_index. */
static inline void tcg_gen_qemu_ldst_op_i64_i64(TCGOpcode opc, TCGv_i64 val,
TCGv_i64 addr, TCGArg mem_index)
{
*tcg_ctx.gen_opc_ptr++ = opc;
*tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
*tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(addr);
*tcg_ctx.gen_opparam_ptr++ = mem_index;
}
static inline void tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2, static inline void tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
TCGv_i32 arg3, TCGv_i32 arg4) TCGv_i32 arg3, TCGv_i32 arg4)
{ {
@ -361,6 +343,21 @@ static inline void tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 arg1,
*tcg_ctx.gen_opparam_ptr++ = arg6; *tcg_ctx.gen_opparam_ptr++ = arg6;
} }
/* Append a 32-bit temporary as the next operand of the opcode
   currently being emitted. */
static inline void tcg_add_param_i32(TCGv_i32 val)
{
*tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
}
/* Append a 64-bit temporary as the next operand(s) of the opcode
   currently being emitted.  On a 32-bit host a 64-bit value occupies
   two param slots: low half first, then high half. */
static inline void tcg_add_param_i64(TCGv_i64 val)
{
#if TCG_TARGET_REG_BITS == 32
*tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_LOW(val));
*tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_HIGH(val));
#else
*tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
#endif
}
static inline void gen_set_label(int n) static inline void gen_set_label(int n)
{ {
tcg_gen_op1i(INDEX_op_set_label, n); tcg_gen_op1i(INDEX_op_set_label, n);
@ -2600,11 +2597,12 @@ static inline void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh,
#define tcg_global_mem_new tcg_global_mem_new_i32 #define tcg_global_mem_new tcg_global_mem_new_i32
#define tcg_temp_local_new() tcg_temp_local_new_i32() #define tcg_temp_local_new() tcg_temp_local_new_i32()
#define tcg_temp_free tcg_temp_free_i32 #define tcg_temp_free tcg_temp_free_i32
#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i32
#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i32
#define TCGV_UNUSED(x) TCGV_UNUSED_I32(x) #define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
#define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I32(x) #define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I32(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b) #define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
#define tcg_add_param_tl tcg_add_param_i32
#define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i32
#define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i32
#else #else
#define TCGv TCGv_i64 #define TCGv TCGv_i64
#define tcg_temp_new() tcg_temp_new_i64() #define tcg_temp_new() tcg_temp_new_i64()
@ -2612,11 +2610,12 @@ static inline void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh,
#define tcg_global_mem_new tcg_global_mem_new_i64 #define tcg_global_mem_new tcg_global_mem_new_i64
#define tcg_temp_local_new() tcg_temp_local_new_i64() #define tcg_temp_local_new() tcg_temp_local_new_i64()
#define tcg_temp_free tcg_temp_free_i64 #define tcg_temp_free tcg_temp_free_i64
#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i64
#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i64
#define TCGV_UNUSED(x) TCGV_UNUSED_I64(x) #define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
#define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I64(x) #define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I64(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b) #define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
#define tcg_add_param_tl tcg_add_param_i64
#define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i64
#define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i64
#endif #endif
/* debug info: write the PC of the corresponding QEMU CPU instruction */ /* debug info: write the PC of the corresponding QEMU CPU instruction */
@ -2648,197 +2647,67 @@ static inline void tcg_gen_goto_tb(unsigned idx)
tcg_gen_op1i(INDEX_op_goto_tb, idx); tcg_gen_op1i(INDEX_op_goto_tb, idx);
} }
#if TCG_TARGET_REG_BITS == 32
void tcg_gen_qemu_ld_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_st_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_ld_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_st_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);
static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index) static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_UB);
tcg_gen_op3i_i32(INDEX_op_qemu_ld8u, ret, addr, mem_index);
#else
tcg_gen_op4i_i32(INDEX_op_qemu_ld8u, TCGV_LOW(ret), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
} }
static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index) static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_SB);
tcg_gen_op3i_i32(INDEX_op_qemu_ld8s, ret, addr, mem_index);
#else
tcg_gen_op4i_i32(INDEX_op_qemu_ld8s, TCGV_LOW(ret), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
} }
static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index) static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TEUW);
tcg_gen_op3i_i32(INDEX_op_qemu_ld16u, ret, addr, mem_index);
#else
tcg_gen_op4i_i32(INDEX_op_qemu_ld16u, TCGV_LOW(ret), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
} }
static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index) static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TESW);
tcg_gen_op3i_i32(INDEX_op_qemu_ld16s, ret, addr, mem_index);
#else
tcg_gen_op4i_i32(INDEX_op_qemu_ld16s, TCGV_LOW(ret), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
} }
static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index) static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TEUL);
tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
} }
static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index) static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TESL);
tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
} }
static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index) static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_ld_i64(ret, addr, mem_index, MO_TEQ);
tcg_gen_op4i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret), addr, mem_index);
#else
tcg_gen_op5i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret),
TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
#endif
} }
static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index) static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_UB);
tcg_gen_op3i_i32(INDEX_op_qemu_st8, arg, addr, mem_index);
#else
tcg_gen_op4i_i32(INDEX_op_qemu_st8, TCGV_LOW(arg), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
#endif
} }
static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index) static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_TEUW);
tcg_gen_op3i_i32(INDEX_op_qemu_st16, arg, addr, mem_index);
#else
tcg_gen_op4i_i32(INDEX_op_qemu_st16, TCGV_LOW(arg), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
#endif
} }
static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index) static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_TEUL);
tcg_gen_op3i_i32(INDEX_op_qemu_st32, arg, addr, mem_index);
#else
tcg_gen_op4i_i32(INDEX_op_qemu_st32, TCGV_LOW(arg), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
#endif
} }
static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index) static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{ {
#if TARGET_LONG_BITS == 32 tcg_gen_qemu_st_i64(arg, addr, mem_index, MO_TEQ);
tcg_gen_op4i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg), addr,
mem_index);
#else
tcg_gen_op5i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg),
TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
#endif
} }
#define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
#define tcg_gen_discard_ptr(A) tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))
#else /* TCG_TARGET_REG_BITS == 32 */
/* 64-bit-host path: load an unsigned byte from guest address addr. */
static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{
tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8u, ret, addr, mem_index);
}
/* 64-bit-host path: load a sign-extended byte from guest address addr. */
static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{
tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8s, ret, addr, mem_index);
}
/* 64-bit-host path: load an unsigned 16-bit value from guest memory. */
static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{
tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16u, ret, addr, mem_index);
}
/* 64-bit-host path: load a sign-extended 16-bit value from guest memory. */
static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{
tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16s, ret, addr, mem_index);
}
/* 64-bit-host path: load a 32-bit value from guest memory.  When the
   guest is 32-bit the plain ld32 opcode suffices; otherwise the result
   must be zero-extended into the 64-bit target register (ld32u). */
static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32u, ret, addr, mem_index);
#endif
}
/* 64-bit-host path: load a 32-bit value from guest memory.  When the
   guest is 32-bit the plain ld32 opcode suffices; otherwise the result
   must be sign-extended into the 64-bit target register (ld32s). */
static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32s, ret, addr, mem_index);
#endif
}
/* 64-bit-host path: load a 64-bit value from guest memory. */
static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{
tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_ld64, ret, addr, mem_index);
}
/* 64-bit-host path: store the low byte of arg to guest address addr. */
static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{
tcg_gen_qemu_ldst_op(INDEX_op_qemu_st8, arg, addr, mem_index);
}
/* 64-bit-host path: store the low 16 bits of arg to guest memory. */
static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{
tcg_gen_qemu_ldst_op(INDEX_op_qemu_st16, arg, addr, mem_index);
}
/* 64-bit-host path: store the low 32 bits of arg to guest memory. */
static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{
tcg_gen_qemu_ldst_op(INDEX_op_qemu_st32, arg, addr, mem_index);
}
/* 64-bit-host path: store a 64-bit value to guest memory. */
static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{
tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_st64, arg, addr, mem_index);
}
#define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
#define tcg_gen_discard_ptr(A) tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))
#endif /* TCG_TARGET_REG_BITS != 32 */
#if TARGET_LONG_BITS == 64 #if TARGET_LONG_BITS == 64
#define tcg_gen_movi_tl tcg_gen_movi_i64 #define tcg_gen_movi_tl tcg_gen_movi_i64
#define tcg_gen_mov_tl tcg_gen_mov_i64 #define tcg_gen_mov_tl tcg_gen_mov_i64
@ -2997,17 +2866,25 @@ static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
#endif #endif
#if TCG_TARGET_REG_BITS == 32 #if TCG_TARGET_REG_BITS == 32
#define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), \ # define tcg_gen_ld_ptr(R, A, O) \
TCGV_PTR_TO_NAT(A), \ tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
TCGV_PTR_TO_NAT(B)) # define tcg_gen_discard_ptr(A) \
#define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), \ tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))
TCGV_PTR_TO_NAT(A), (B)) # define tcg_gen_add_ptr(R, A, B) \
#define tcg_gen_ext_i32_ptr(R, A) tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A)) tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
#else /* TCG_TARGET_REG_BITS == 32 */ # define tcg_gen_addi_ptr(R, A, B) \
#define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), \ tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
TCGV_PTR_TO_NAT(A), \ # define tcg_gen_ext_i32_ptr(R, A) \
TCGV_PTR_TO_NAT(B)) tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
#define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), \ #else
TCGV_PTR_TO_NAT(A), (B)) # define tcg_gen_ld_ptr(R, A, O) \
#define tcg_gen_ext_i32_ptr(R, A) tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A)) tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
#endif /* TCG_TARGET_REG_BITS != 32 */ # define tcg_gen_discard_ptr(A) \
tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))
# define tcg_gen_add_ptr(R, A, B) \
tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
# define tcg_gen_addi_ptr(R, A, B) \
tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
# define tcg_gen_ext_i32_ptr(R, A) \
tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
#endif /* TCG_TARGET_REG_BITS == 32 */

View File

@ -180,79 +180,107 @@ DEF(debug_insn_start, 0, 0, 1, TCG_OPF_NOT_PRESENT)
#endif #endif
DEF(exit_tb, 0, 0, 1, TCG_OPF_BB_END) DEF(exit_tb, 0, 0, 1, TCG_OPF_BB_END)
DEF(goto_tb, 0, 0, 1, TCG_OPF_BB_END) DEF(goto_tb, 0, 0, 1, TCG_OPF_BB_END)
/* Note: even if TARGET_LONG_BITS is not defined, the INDEX_op
constants must be defined */ #define IMPL_NEW_LDST \
(TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS \
| IMPL(TCG_TARGET_HAS_new_ldst))
#if TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
DEF(qemu_ld_i32, 1, 1, 2, IMPL_NEW_LDST)
DEF(qemu_st_i32, 0, 2, 2, IMPL_NEW_LDST)
# if TCG_TARGET_REG_BITS == 64
DEF(qemu_ld_i64, 1, 1, 2, IMPL_NEW_LDST | TCG_OPF_64BIT)
DEF(qemu_st_i64, 0, 2, 2, IMPL_NEW_LDST | TCG_OPF_64BIT)
# else
DEF(qemu_ld_i64, 2, 1, 2, IMPL_NEW_LDST | TCG_OPF_64BIT)
DEF(qemu_st_i64, 0, 3, 2, IMPL_NEW_LDST | TCG_OPF_64BIT)
# endif
#else
DEF(qemu_ld_i32, 1, 2, 2, IMPL_NEW_LDST)
DEF(qemu_st_i32, 0, 3, 2, IMPL_NEW_LDST)
DEF(qemu_ld_i64, 2, 2, 2, IMPL_NEW_LDST | TCG_OPF_64BIT)
DEF(qemu_st_i64, 0, 4, 2, IMPL_NEW_LDST | TCG_OPF_64BIT)
#endif
#undef IMPL_NEW_LDST
#define IMPL_OLD_LDST \
(TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS \
| IMPL(!TCG_TARGET_HAS_new_ldst))
#if TCG_TARGET_REG_BITS == 32 #if TCG_TARGET_REG_BITS == 32
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_ld8u, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld8u, 1, 1, 1, IMPL_OLD_LDST)
#else #else
DEF(qemu_ld8u, 1, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld8u, 1, 2, 1, IMPL_OLD_LDST)
#endif #endif
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_ld8s, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld8s, 1, 1, 1, IMPL_OLD_LDST)
#else #else
DEF(qemu_ld8s, 1, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld8s, 1, 2, 1, IMPL_OLD_LDST)
#endif #endif
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_ld16u, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld16u, 1, 1, 1, IMPL_OLD_LDST)
#else #else
DEF(qemu_ld16u, 1, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld16u, 1, 2, 1, IMPL_OLD_LDST)
#endif #endif
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_ld16s, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld16s, 1, 1, 1, IMPL_OLD_LDST)
#else #else
DEF(qemu_ld16s, 1, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld16s, 1, 2, 1, IMPL_OLD_LDST)
#endif #endif
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_ld32, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld32, 1, 1, 1, IMPL_OLD_LDST)
#else #else
DEF(qemu_ld32, 1, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld32, 1, 2, 1, IMPL_OLD_LDST)
#endif #endif
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_ld64, 2, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld64, 2, 1, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
#else #else
DEF(qemu_ld64, 2, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld64, 2, 2, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
#endif #endif
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_st8, 0, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st8, 0, 2, 1, IMPL_OLD_LDST)
#else #else
DEF(qemu_st8, 0, 3, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st8, 0, 3, 1, IMPL_OLD_LDST)
#endif #endif
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_st16, 0, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st16, 0, 2, 1, IMPL_OLD_LDST)
#else #else
DEF(qemu_st16, 0, 3, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st16, 0, 3, 1, IMPL_OLD_LDST)
#endif #endif
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_st32, 0, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st32, 0, 2, 1, IMPL_OLD_LDST)
#else #else
DEF(qemu_st32, 0, 3, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st32, 0, 3, 1, IMPL_OLD_LDST)
#endif #endif
#if TARGET_LONG_BITS == 32 #if TARGET_LONG_BITS == 32
DEF(qemu_st64, 0, 3, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st64, 0, 3, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
#else #else
DEF(qemu_st64, 0, 4, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st64, 0, 4, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
#endif #endif
#else /* TCG_TARGET_REG_BITS == 32 */ #else /* TCG_TARGET_REG_BITS == 32 */
DEF(qemu_ld8u, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld8u, 1, 1, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_ld8s, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld8s, 1, 1, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_ld16u, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld16u, 1, 1, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_ld16s, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld16s, 1, 1, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_ld32, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld32, 1, 1, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_ld32u, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld32u, 1, 1, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_ld32s, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld32s, 1, 1, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_ld64, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_ld64, 1, 1, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_st8, 0, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st8, 0, 2, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_st16, 0, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st16, 0, 2, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_st32, 0, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st32, 0, 2, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
DEF(qemu_st64, 0, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS) DEF(qemu_st64, 0, 2, 1, IMPL_OLD_LDST | TCG_OPF_64BIT)
#endif /* TCG_TARGET_REG_BITS != 32 */ #endif /* TCG_TARGET_REG_BITS != 32 */
#undef IMPL_OLD_LDST
#undef IMPL #undef IMPL
#undef IMPL64 #undef IMPL64
#undef DEF #undef DEF

209
tcg/tcg.c
View File

@ -811,6 +811,188 @@ void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
} }
#endif #endif
/* Reduce a TCGMemOp to canonical form for the requested access:
   strip flag bits that are meaningless for this size/direction so that
   equivalent operations compare equal.
   @op    memory-operation flags (size, sign, endianness)
   @is64  true if the value register is 64-bit
   @st    true for a store, false for a load
   Returns the canonicalized flags; aborts on a 64-bit access into a
   32-bit value register. */
static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
switch (op & MO_SIZE) {
case MO_8:
/* Byte accesses have no endianness; drop the bswap bit. */
op &= ~MO_BSWAP;
break;
case MO_16:
break;
case MO_32:
/* Sign-extension is meaningless when filling a 32-bit register. */
if (!is64) {
op &= ~MO_SIGN;
}
break;
case MO_64:
/* A 64-bit memory access cannot target a 32-bit value. */
if (!is64) {
tcg_abort();
}
break;
}
/* Stores never sign-extend. */
if (st) {
op &= ~MO_SIGN;
}
return op;
}
/* Map canonicalized (sign | size) flags to the legacy load opcode.
   The legacy opcodes are implicitly target-endian.  On 32-bit hosts a
   single ld32 covers both extensions (the result fills the register);
   64-bit hosts need distinct zero/sign-extending 32-bit loads. */
static const TCGOpcode old_ld_opc[8] = {
[MO_UB] = INDEX_op_qemu_ld8u,
[MO_SB] = INDEX_op_qemu_ld8s,
[MO_UW] = INDEX_op_qemu_ld16u,
[MO_SW] = INDEX_op_qemu_ld16s,
#if TCG_TARGET_REG_BITS == 32
[MO_UL] = INDEX_op_qemu_ld32,
[MO_SL] = INDEX_op_qemu_ld32,
#else
[MO_UL] = INDEX_op_qemu_ld32u,
[MO_SL] = INDEX_op_qemu_ld32s,
#endif
[MO_Q] = INDEX_op_qemu_ld64,
};
/* Map canonicalized size flags to the legacy (target-endian) store
   opcode.  Stores carry no sign bit, so the table is indexed by
   MO_SIZE only. */
static const TCGOpcode old_st_opc[4] = {
[MO_UB] = INDEX_op_qemu_st8,
[MO_UW] = INDEX_op_qemu_st16,
[MO_UL] = INDEX_op_qemu_st32,
[MO_Q] = INDEX_op_qemu_st64,
};
/* Emit a guest load into a 32-bit value register.
   @val    destination register
   @addr   guest virtual address
   @idx    qemu TLB/mmu index (e.g. user vs kernel access)
   @memop  size/sign/endianness flags
   Backends advertising TCG_TARGET_HAS_new_ldst get the single new
   opcode (params: val, addr, memop, idx); otherwise fall back to the
   size-specific legacy opcodes (params: val, addr, idx). */
void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
memop = tcg_canonicalize_memop(memop, 0, 0);
if (TCG_TARGET_HAS_new_ldst) {
*tcg_ctx.gen_opc_ptr++ = INDEX_op_qemu_ld_i32;
tcg_add_param_i32(val);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = memop;
*tcg_ctx.gen_opparam_ptr++ = idx;
return;
}
/* The old opcodes only support target-endian memory operations. */
assert((memop & MO_BSWAP) == MO_TE || (memop & MO_SIZE) == MO_8);
assert(old_ld_opc[memop & MO_SSIZE] != 0);
if (TCG_TARGET_REG_BITS == 32) {
*tcg_ctx.gen_opc_ptr++ = old_ld_opc[memop & MO_SSIZE];
tcg_add_param_i32(val);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = idx;
} else {
/* On a 64-bit host the legacy load produces a 64-bit result;
   load into a temp and truncate to the 32-bit destination. */
TCGv_i64 val64 = tcg_temp_new_i64();
*tcg_ctx.gen_opc_ptr++ = old_ld_opc[memop & MO_SSIZE];
tcg_add_param_i64(val64);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = idx;
tcg_gen_trunc_i64_i32(val, val64);
tcg_temp_free_i64(val64);
}
}
/* Emit a guest store of a 32-bit value register.
   @val    source register
   @addr   guest virtual address
   @idx    qemu TLB/mmu index
   @memop  size/endianness flags (sign is stripped for stores)
   Uses the new single opcode when the backend supports it; otherwise
   falls back to the size-specific legacy store opcodes. */
void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
memop = tcg_canonicalize_memop(memop, 0, 1);
if (TCG_TARGET_HAS_new_ldst) {
*tcg_ctx.gen_opc_ptr++ = INDEX_op_qemu_st_i32;
tcg_add_param_i32(val);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = memop;
*tcg_ctx.gen_opparam_ptr++ = idx;
return;
}
/* The old opcodes only support target-endian memory operations. */
assert((memop & MO_BSWAP) == MO_TE || (memop & MO_SIZE) == MO_8);
assert(old_st_opc[memop & MO_SIZE] != 0);
if (TCG_TARGET_REG_BITS == 32) {
*tcg_ctx.gen_opc_ptr++ = old_st_opc[memop & MO_SIZE];
tcg_add_param_i32(val);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = idx;
} else {
/* On a 64-bit host the legacy store takes a 64-bit source;
   widen the 32-bit value into a temp first. */
TCGv_i64 val64 = tcg_temp_new_i64();
tcg_gen_extu_i32_i64(val64, val);
*tcg_ctx.gen_opc_ptr++ = old_st_opc[memop & MO_SIZE];
tcg_add_param_i64(val64);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = idx;
tcg_temp_free_i64(val64);
}
}
/* Emit a guest load into a 64-bit value register.
   @val    destination register (a register pair on 32-bit hosts)
   @addr   guest virtual address
   @idx    qemu TLB/mmu index
   @memop  size/sign/endianness flags
   On a 32-bit host, sub-64-bit accesses are performed as a 32-bit load
   into the low half, with the high half filled by sign or zero
   extension as requested. */
void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
memop = tcg_canonicalize_memop(memop, 1, 0);
#if TCG_TARGET_REG_BITS == 32
if ((memop & MO_SIZE) < MO_64) {
tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
if (memop & MO_SIGN) {
tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
} else {
tcg_gen_movi_i32(TCGV_HIGH(val), 0);
}
return;
}
#endif
if (TCG_TARGET_HAS_new_ldst) {
*tcg_ctx.gen_opc_ptr++ = INDEX_op_qemu_ld_i64;
tcg_add_param_i64(val);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = memop;
*tcg_ctx.gen_opparam_ptr++ = idx;
return;
}
/* The old opcodes only support target-endian memory operations. */
assert((memop & MO_BSWAP) == MO_TE || (memop & MO_SIZE) == MO_8);
assert(old_ld_opc[memop & MO_SSIZE] != 0);
*tcg_ctx.gen_opc_ptr++ = old_ld_opc[memop & MO_SSIZE];
tcg_add_param_i64(val);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = idx;
}
/* Emit a guest store of a 64-bit value register.
   @val    source register (a register pair on 32-bit hosts)
   @addr   guest virtual address
   @idx    qemu TLB/mmu index
   @memop  size/endianness flags (sign is stripped for stores)
   On a 32-bit host, sub-64-bit stores write only the low half of the
   register pair via the 32-bit store path. */
void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
memop = tcg_canonicalize_memop(memop, 1, 1);
#if TCG_TARGET_REG_BITS == 32
if ((memop & MO_SIZE) < MO_64) {
tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
return;
}
#endif
if (TCG_TARGET_HAS_new_ldst) {
*tcg_ctx.gen_opc_ptr++ = INDEX_op_qemu_st_i64;
tcg_add_param_i64(val);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = memop;
*tcg_ctx.gen_opparam_ptr++ = idx;
return;
}
/* The old opcodes only support target-endian memory operations. */
assert((memop & MO_BSWAP) == MO_TE || (memop & MO_SIZE) == MO_8);
assert(old_st_opc[memop & MO_SIZE] != 0);
*tcg_ctx.gen_opc_ptr++ = old_st_opc[memop & MO_SIZE];
tcg_add_param_i64(val);
tcg_add_param_tl(addr);
*tcg_ctx.gen_opparam_ptr++ = idx;
}
static void tcg_reg_alloc_start(TCGContext *s) static void tcg_reg_alloc_start(TCGContext *s)
{ {
@ -893,6 +1075,22 @@ static const char * const cond_name[] =
[TCG_COND_GTU] = "gtu" [TCG_COND_GTU] = "gtu"
}; };
static const char * const ldst_name[] =
{
[MO_UB] = "ub",
[MO_SB] = "sb",
[MO_LEUW] = "leuw",
[MO_LESW] = "lesw",
[MO_LEUL] = "leul",
[MO_LESL] = "lesl",
[MO_LEQ] = "leq",
[MO_BEUW] = "beuw",
[MO_BESW] = "besw",
[MO_BEUL] = "beul",
[MO_BESL] = "besl",
[MO_BEQ] = "beq",
};
void tcg_dump_ops(TCGContext *s) void tcg_dump_ops(TCGContext *s)
{ {
const uint16_t *opc_ptr; const uint16_t *opc_ptr;
@ -1021,6 +1219,17 @@ void tcg_dump_ops(TCGContext *s)
} }
i = 1; i = 1;
break; break;
case INDEX_op_qemu_ld_i32:
case INDEX_op_qemu_st_i32:
case INDEX_op_qemu_ld_i64:
case INDEX_op_qemu_st_i64:
if (args[k] < ARRAY_SIZE(ldst_name) && ldst_name[args[k]]) {
qemu_log(",%s", ldst_name[args[k++]]);
} else {
qemu_log(",$0x%" TCG_PRIlx, args[k++]);
}
i = 1;
break;
default: default:
i = 0; i = 0;
break; break;

View File

@ -120,6 +120,8 @@
#define TCG_TARGET_HAS_mulsh_i64 0 #define TCG_TARGET_HAS_mulsh_i64 0
#endif /* TCG_TARGET_REG_BITS == 64 */ #endif /* TCG_TARGET_REG_BITS == 64 */
#define TCG_TARGET_HAS_new_ldst 0
/* Number of registers available. /* Number of registers available.
For 32 bit hosts, we need more than 8 registers (call arguments). */ For 32 bit hosts, we need more than 8 registers (call arguments). */
/* #define TCG_TARGET_NB_REGS 8 */ /* #define TCG_TARGET_NB_REGS 8 */