/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif
#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
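/*
 * A helper call passes at most MAX_OPC_PARAM_IARGS (6) input arguments.
 * On a 32-bit host a 64-bit argument occupies two registers, so the
 * helper is invoked with 12 register-sized parameters there, and the
 * 64-bit result comes back split across two registers as well (see
 * INDEX_op_call below).
 */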
__thread uintptr_t tci_tb_ptr;
static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}
#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#endif
#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
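/* For example, tci_uint64(0x00000001, 0xdeadbeef) == 0x00000001deadbeefULL. */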
/* Read constant byte from bytecode. */
static uint8_t tci_read_b(const uint8_t **tb_ptr)
{
    return *(tb_ptr[0]++);
}

/* Read register number from bytecode. */
static TCGReg tci_read_r(const uint8_t **tb_ptr)
{
    uint8_t regno = tci_read_b(tb_ptr);
    tci_assert(regno < TCG_TARGET_NB_REGS);
    return regno;
}
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_rval(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_rval(regs, tb_ptr);
    return tci_uint64(tci_read_rval(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    return tci_read_rval(regs, tb_ptr);
}
#endif
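/*
 * In the 32-bit variant above, the explicit "low" temporary forces the
 * low word to be consumed from the bytecode before the high word; C
 * leaves the evaluation order of nested call arguments unspecified.
 */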
/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_rval(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_rval(regs, tb_ptr) << 32;
#endif
    return taddr;
}
static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    return tci_read_i(tb_ptr);
}
/*
 * Load sets of arguments all at once.  The naming convention is:
 *   tci_args_<arguments>
 * where arguments is a sequence of
 *
 *   c = condition (TCGCond)
 *   i = immediate (uint32_t)
 *   I = immediate (tcg_target_ulong)
 *   l = label or pointer
 *   r = register
 *   s = signed ldst offset
 */
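/*
 * For example, tci_args_rrs() below decodes the "register, register,
 * signed offset" operand layout used by the ld/st opcodes, and
 * tci_args_rrcl() the "register, register, condition, label" layout
 * used by brcond.
 */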
static void tci_args_l(const uint8_t **tb_ptr, void **l0)
{
    *l0 = (void *)tci_read_label(tb_ptr);
}

static void tci_args_rr(const uint8_t **tb_ptr,
                        TCGReg *r0, TCGReg *r1)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
}

static void tci_args_ri(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i32(tb_ptr);
}
#if TCG_TARGET_REG_BITS == 64
static void tci_args_rI(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i(tb_ptr);
}
#endif
static void tci_args_rrr(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
}

static void tci_args_rrs(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *i2 = tci_read_s32(tb_ptr);
}

static void tci_args_rrcl(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGCond *c2, void **l3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *c2 = tci_read_b(tb_ptr);
    *l3 = (void *)tci_read_label(tb_ptr);
}

static void tci_args_rrrc(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *c3 = tci_read_b(tb_ptr);
}
#if TCG_TARGET_REG_BITS == 32
static void tci_args_rrrrcl(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGCond *c4, void **l5)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *c4 = tci_read_b(tb_ptr);
    *l5 = (void *)tci_read_label(tb_ptr);
}

static void tci_args_rrrrrc(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *r4 = tci_read_r(tb_ptr);
    *c5 = tci_read_b(tb_ptr);
}

static void tci_args_rrrrrr(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *r4 = tci_read_r(tb_ptr);
    *r5 = tci_read_r(tb_ptr);
}
#endif
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}
static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}
#define qemu_ld_ub \
    cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leuw \
    cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leul \
    cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leq \
    cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beuw \
    cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beul \
    cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beq \
    cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_b(X) \
    cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lew(X) \
    cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lel(X) \
    cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_leq(X) \
    cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bew(X) \
    cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bel(X) \
    cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_beq(X) \
    cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
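/*
 * These macros are only expanded inside tcg_qemu_tb_exec() below and
 * rely on its locals env, taddr and oi; the current bytecode pointer is
 * passed as the return address so that a faulting guest access can be
 * traced back to the right translation block.
 */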
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
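/*
 * On a 64-bit host, CASE_32_64(add) expands to
 *     case INDEX_op_add_i64: case INDEX_op_add_i32:
 * so a single handler body serves both operand widths; on a 32-bit
 * host only the _i32 label remains and CASE_64(x) expands to nothing.
 */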
/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
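    /*
     * Two registers have fixed roles for the whole run: TCG_AREG0
     * holds the CPU env pointer and TCG_REG_CALL_STACK points just
     * past tcg_temps[], the scratch area used as the TCG call stack
     * (it grows downwards).  tci_write_reg() asserts that generated
     * code never overwrites either of them.
     */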
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        TCGReg r0, r1, r2;
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        TCGReg r3, r4, r5;
        uint64_t T1, T2;
#endif
        TCGMemOpIdx oi;
        int32_t ofs;
        void *ptr;

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
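        /*
         * Each instruction is encoded as a one-byte TCGOpcode, a
         * one-byte total instruction size, and then its operands in
         * the order given by the tci_args_* helper that decodes them;
         * a brcond_i32, for instance, is laid out as
         *     opc, size, reg, reg, cond, label
         * exactly as consumed by tci_args_rrcl() above.
         */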
        case INDEX_op_call:
            tci_args_l(&tb_ptr, &ptr);
            tci_tb_ptr = (uintptr_t)tb_ptr;
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)ptr)(tci_read_reg(regs, TCG_REG_R0),
                                           tci_read_reg(regs, TCG_REG_R1),
                                           tci_read_reg(regs, TCG_REG_R2),
                                           tci_read_reg(regs, TCG_REG_R3),
                                           tci_read_reg(regs, TCG_REG_R4),
                                           tci_read_reg(regs, TCG_REG_R5),
                                           tci_read_reg(regs, TCG_REG_R6),
                                           tci_read_reg(regs, TCG_REG_R7),
                                           tci_read_reg(regs, TCG_REG_R8),
                                           tci_read_reg(regs, TCG_REG_R9),
                                           tci_read_reg(regs, TCG_REG_R10),
                                           tci_read_reg(regs, TCG_REG_R11));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)ptr)(tci_read_reg(regs, TCG_REG_R0),
                                           tci_read_reg(regs, TCG_REG_R1),
                                           tci_read_reg(regs, TCG_REG_R2),
                                           tci_read_reg(regs, TCG_REG_R3),
                                           tci_read_reg(regs, TCG_REG_R4),
                                           tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            tci_args_l(&tb_ptr, &ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = ptr;
            continue;
        case INDEX_op_setcond_i32:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            tci_args_rrrrrc(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &condition);
            T1 = tci_uint64(regs[r2], regs[r1]);
            T2 = tci_uint64(regs[r4], regs[r3]);
            regs[r0] = tci_compare64(T1, T2, condition);
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
            break;
#endif
        CASE_32_64(mov)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = regs[r1];
            break;
        case INDEX_op_tci_movi_i32:
            tci_args_ri(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;

            /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint8_t *)ptr;
            break;
        CASE_32_64(ld8s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int8_t *)ptr;
            break;
        CASE_32_64(ld16u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint16_t *)ptr;
            break;
        CASE_32_64(ld16s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int16_t *)ptr;
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint32_t *)ptr;
            break;
        CASE_32_64(st8)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint8_t *)ptr = regs[r0];
            break;
        CASE_32_64(st16)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint16_t *)ptr = regs[r0];
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint32_t *)ptr = regs[r0];
            break;
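            /*
             * The ld/st opcodes above dereference host addresses,
             * typically fields of env or the stack temporaries; guest
             * memory is reached only via the qemu_ld/qemu_st opcodes
             * near the end of the dispatch loop.
             */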

            /* Arithmetic operations (mixed 32/64 bit). */

        CASE_32_64(add)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2];
            break;
        CASE_32_64(sub)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2];
            break;
        CASE_32_64(mul)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] * regs[r2];
            break;
        CASE_32_64(and)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] & regs[r2];
            break;
        CASE_32_64(or)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] | regs[r2];
            break;
        CASE_32_64(xor)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] ^ regs[r2];
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_div_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
            break;
        case INDEX_op_divu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
            break;
        case INDEX_op_rem_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
            break;
        case INDEX_op_remu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
            break;
        case INDEX_op_shr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
            break;
        case INDEX_op_sar_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
            break;
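            /*
             * Shift counts are masked with "& 31" because shifting a
             * 32-bit value by 32 or more is undefined behaviour in C,
             * and TCG itself only defines shifts for counts 0..31.
             */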
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_rotr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror32(regs[r1], regs[r2] & 31);
            break;
#endif
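            /*
             * deposit_i32 inserts the low tmp8 bits of t2 into t1 at
             * bit position tmp16.  E.g. tmp16 = 8, tmp8 = 4 gives mask
             * tmp32 = 0x00000f00, replacing bits 8..11 of t1 with the
             * low four bits of t2.
             */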
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_rval(regs, &tb_ptr);
            t2 = tci_read_rval(regs, &tb_ptr);
            tmp16 = tci_read_b(&tb_ptr);
            tmp8 = tci_read_b(&tb_ptr);
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare32(regs[r0], regs[r1], condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 + T2);
            break;
        case INDEX_op_sub2_i32:
            tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 - T2);
            break;
        case INDEX_op_brcond2_i32:
            tci_args_rrrrcl(&tb_ptr, &r0, &r1, &r2, &r3, &condition, &ptr);
            T1 = tci_uint64(regs[r1], regs[r0]);
            T2 = tci_uint64(regs[r3], regs[r2]);
            if (tci_compare64(T1, T2, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_rval(regs, &tb_ptr);
            tmp64 = (uint32_t)tci_read_rval(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, (uint32_t)t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
        CASE_32_64(ext8s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
        CASE_32_64(ext16s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
        CASE_32_64(ext8u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
        CASE_32_64(ext16u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(bswap16)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap16(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
        CASE_32_64(bswap32)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap32(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
        CASE_32_64(not)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = ~regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
        CASE_32_64(neg)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = -regs[r1];
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_tci_movi_i64:
            tci_args_rI(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int32_t *)ptr;
            break;
        case INDEX_op_ld_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint64_t *)ptr;
            break;
        case INDEX_op_st_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint64_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_div_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
            break;
        case INDEX_op_divu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
            break;
        case INDEX_op_rem_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
            break;
        case INDEX_op_remu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] << (regs[r2] & 63);
            break;
        case INDEX_op_shr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] >> (regs[r2] & 63);
            break;
        case INDEX_op_sar_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_rotr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror64(regs[r1], regs[r2] & 63);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_rval(regs, &tb_ptr);
            t2 = tci_read_rval(regs, &tb_ptr);
            tmp16 = tci_read_b(&tb_ptr);
            tmp8 = tci_read_b(&tb_ptr);
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare64(regs[r0], regs[r1], condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int32_t)regs[r1];
            break;
        case INDEX_op_ext32u_i64:
        case INDEX_op_extu_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint32_t)regs[r1];
            break;
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap64(regs[r1]);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            tci_args_l(&tb_ptr, &ptr);
            return (uintptr_t)ptr;

        case INDEX_op_goto_tb:
            tci_args_l(&tb_ptr, &ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = *(void **)ptr;
            continue;
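            /*
             * The operand of goto_tb above is not the target itself
             * but the address of a jump-target slot; that slot is
             * patched when translation blocks are chained, so dispatch
             * follows re-linked TBs without rewriting the bytecode.
             */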
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_rval(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
}