OSDN Git Service

tcg/tci: Split out tci_args_rrrrrr
[qmiga/qemu.git] / tcg / tci.c
1 /*
2  * Tiny Code Interpreter for QEMU
3  *
4  * Copyright (c) 2009, 2011, 2016 Stefan Weil
5  *
6  * This program is free software: you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation, either version 2 of the License, or
9  * (at your option) any later version.
10  *
11  * This program is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14  * GNU General Public License for more details.
15  *
16  * You should have received a copy of the GNU General Public License
17  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
18  */
19
20 #include "qemu/osdep.h"
21
22 /* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
23  * Without assertions, the interpreter runs much faster. */
24 #if defined(CONFIG_DEBUG_TCG)
25 # define tci_assert(cond) assert(cond)
26 #else
27 # define tci_assert(cond) ((void)0)
28 #endif
29
30 #include "qemu-common.h"
31 #include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
32 #include "exec/cpu_ldst.h"
33 #include "tcg/tcg-op.h"
34 #include "qemu/compiler.h"
35
36 #if MAX_OPC_PARAM_IARGS != 6
37 # error Fix needed, number of supported input arguments changed!
38 #endif
39 #if TCG_TARGET_REG_BITS == 32
40 typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
41                                     tcg_target_ulong, tcg_target_ulong,
42                                     tcg_target_ulong, tcg_target_ulong,
43                                     tcg_target_ulong, tcg_target_ulong,
44                                     tcg_target_ulong, tcg_target_ulong,
45                                     tcg_target_ulong, tcg_target_ulong);
46 #else
47 typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
48                                     tcg_target_ulong, tcg_target_ulong,
49                                     tcg_target_ulong, tcg_target_ulong);
50 #endif
51
52 __thread uintptr_t tci_tb_ptr;
53
/* Return the current value of interpreter register 'index';
   bounds-checked only in CONFIG_DEBUG_TCG builds. */
static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}
59
/*
 * Store 'value' into interpreter register 'index'.
 * The env pointer register (TCG_AREG0) and the call-stack register are
 * never legal destinations; debug builds assert on them.
 */
static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}
68
#if TCG_TARGET_REG_BITS == 32
/* Store a 64-bit value into a register pair: low 32 bits into
   'low_index', high 32 bits into 'high_index'. */
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#endif
77
78 #if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    uint64_t result = (uint64_t)high << 32;

    result |= low;
    return result;
}
84 #endif
85
/* Fetch the next constant byte from the bytecode stream and advance. */
static uint8_t tci_read_b(const uint8_t **tb_ptr)
{
    const uint8_t *p = *tb_ptr;

    *tb_ptr = p + 1;
    return *p;
}
91
/* Read register number from bytecode.
   Register operands are encoded as a single byte. */
static TCGReg tci_read_r(const uint8_t **tb_ptr)
{
    uint8_t regno = tci_read_b(tb_ptr);
    tci_assert(regno < TCG_TARGET_NB_REGS);
    return regno;
}
99
100 /* Read constant (native size) from bytecode. */
101 static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
102 {
103     tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
104     *tb_ptr += sizeof(value);
105     return value;
106 }
107
/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value;

    /*
     * Immediates may be at any offset within the bytecode (register
     * operands are single bytes), so a direct *(const uint32_t *) load
     * can be misaligned — undefined behavior on strict-alignment hosts.
     * memcpy performs the same (host-endian) load safely.
     */
    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
115
/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value;

    /*
     * As with tci_read_i32: the operand may be misaligned within the
     * bytecode stream, so load it with memcpy instead of dereferencing
     * a cast pointer (undefined behavior on strict-alignment hosts).
     */
    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
123
124 /* Read indexed register (native size) from bytecode. */
125 static tcg_target_ulong
126 tci_read_rval(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
127 {
128     tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
129     *tb_ptr += 1;
130     return value;
131 }
132
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode.
   The low half is encoded first, the high half second. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_rval(regs, tb_ptr);
    return tci_uint64(tci_read_rval(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    return tci_read_rval(regs, tb_ptr);
}
#endif
149
/* Read indexed register(s) with target address from bytecode.
   When the guest address is wider than a host register, it is split
   across two register operands: low part first, then high part. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_rval(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_rval(regs, tb_ptr) << 32;
#endif
    return taddr;
}
160
/* Read a label (branch target / host pointer), encoded as a
   native-size immediate, from the bytecode. */
static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    return tci_read_i(tb_ptr);
}
165
166 /*
167  * Load sets of arguments all at once.  The naming convention is:
168  *   tci_args_<arguments>
169  * where arguments is a sequence of
170  *
171  *   c = condition (TCGCond)
172  *   i = immediate (uint32_t)
173  *   I = immediate (tcg_target_ulong)
174  *   l = label or pointer
175  *   r = register
176  *   s = signed ldst offset
177  */
178
/* Decode one label/pointer operand. */
static void tci_args_l(const uint8_t **tb_ptr, void **l0)
{
    *l0 = (void *)tci_read_label(tb_ptr);
}
183
/* Decode two register operands. */
static void tci_args_rr(const uint8_t **tb_ptr,
                        TCGReg *r0, TCGReg *r1)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
}
190
/* Decode a register operand followed by a 32-bit immediate.
   The immediate is read as uint32_t and zero-extended into *i1. */
static void tci_args_ri(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i32(tb_ptr);
}
197
#if TCG_TARGET_REG_BITS == 64
/* Decode a register operand followed by a native-size immediate. */
static void tci_args_rI(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i(tb_ptr);
}
#endif
206
207 static void tci_args_rrr(const uint8_t **tb_ptr,
208                          TCGReg *r0, TCGReg *r1, TCGReg *r2)
209 {
210     *r0 = tci_read_r(tb_ptr);
211     *r1 = tci_read_r(tb_ptr);
212     *r2 = tci_read_r(tb_ptr);
213 }
214
/* Decode two registers followed by a signed 32-bit load/store offset. */
static void tci_args_rrs(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *i2 = tci_read_s32(tb_ptr);
}
222
/* Decode two registers, a condition code, and a branch label. */
static void tci_args_rrcl(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGCond *c2, void **l3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *c2 = tci_read_b(tb_ptr);   /* the condition is encoded as one byte */
    *l3 = (void *)tci_read_label(tb_ptr);
}
231
/* Decode three registers followed by a condition code (setcond). */
static void tci_args_rrrc(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *c3 = tci_read_b(tb_ptr);   /* the condition is encoded as one byte */
}
240
241 #if TCG_TARGET_REG_BITS == 32
/* Decode four registers, a condition code, and a branch label (brcond2). */
static void tci_args_rrrrcl(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGCond *c4, void **l5)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *c4 = tci_read_b(tb_ptr);   /* the condition is encoded as one byte */
    *l5 = (void *)tci_read_label(tb_ptr);
}
252
253 static void tci_args_rrrrrc(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
254                             TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
255 {
256     *r0 = tci_read_r(tb_ptr);
257     *r1 = tci_read_r(tb_ptr);
258     *r2 = tci_read_r(tb_ptr);
259     *r3 = tci_read_r(tb_ptr);
260     *r4 = tci_read_r(tb_ptr);
261     *c5 = tci_read_b(tb_ptr);
262 }
263
264 static void tci_args_rrrrrr(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
265                             TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
266 {
267     *r0 = tci_read_r(tb_ptr);
268     *r1 = tci_read_r(tb_ptr);
269     *r2 = tci_read_r(tb_ptr);
270     *r3 = tci_read_r(tb_ptr);
271     *r4 = tci_read_r(tb_ptr);
272     *r5 = tci_read_r(tb_ptr);
273 }
274 #endif
275
276 static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
277 {
278     bool result = false;
279     int32_t i0 = u0;
280     int32_t i1 = u1;
281     switch (condition) {
282     case TCG_COND_EQ:
283         result = (u0 == u1);
284         break;
285     case TCG_COND_NE:
286         result = (u0 != u1);
287         break;
288     case TCG_COND_LT:
289         result = (i0 < i1);
290         break;
291     case TCG_COND_GE:
292         result = (i0 >= i1);
293         break;
294     case TCG_COND_LE:
295         result = (i0 <= i1);
296         break;
297     case TCG_COND_GT:
298         result = (i0 > i1);
299         break;
300     case TCG_COND_LTU:
301         result = (u0 < u1);
302         break;
303     case TCG_COND_GEU:
304         result = (u0 >= u1);
305         break;
306     case TCG_COND_LEU:
307         result = (u0 <= u1);
308         break;
309     case TCG_COND_GTU:
310         result = (u0 > u1);
311         break;
312     default:
313         g_assert_not_reached();
314     }
315     return result;
316 }
317
318 static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
319 {
320     bool result = false;
321     int64_t i0 = u0;
322     int64_t i1 = u1;
323     switch (condition) {
324     case TCG_COND_EQ:
325         result = (u0 == u1);
326         break;
327     case TCG_COND_NE:
328         result = (u0 != u1);
329         break;
330     case TCG_COND_LT:
331         result = (i0 < i1);
332         break;
333     case TCG_COND_GE:
334         result = (i0 >= i1);
335         break;
336     case TCG_COND_LE:
337         result = (i0 <= i1);
338         break;
339     case TCG_COND_GT:
340         result = (i0 > i1);
341         break;
342     case TCG_COND_LTU:
343         result = (u0 < u1);
344         break;
345     case TCG_COND_GEU:
346         result = (u0 >= u1);
347         break;
348     case TCG_COND_LEU:
349         result = (u0 <= u1);
350         break;
351     case TCG_COND_GTU:
352         result = (u0 > u1);
353         break;
354     default:
355         g_assert_not_reached();
356     }
357     return result;
358 }
359
360 #define qemu_ld_ub \
361     cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
362 #define qemu_ld_leuw \
363     cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
364 #define qemu_ld_leul \
365     cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
366 #define qemu_ld_leq \
367     cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
368 #define qemu_ld_beuw \
369     cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
370 #define qemu_ld_beul \
371     cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
372 #define qemu_ld_beq \
373     cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
374 #define qemu_st_b(X) \
375     cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
376 #define qemu_st_lew(X) \
377     cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
378 #define qemu_st_lel(X) \
379     cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
380 #define qemu_st_leq(X) \
381     cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
382 #define qemu_st_bew(X) \
383     cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
384 #define qemu_st_bel(X) \
385     cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
386 #define qemu_st_beq(X) \
387     cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
388
389 #if TCG_TARGET_REG_BITS == 64
390 # define CASE_32_64(x) \
391         case glue(glue(INDEX_op_, x), _i64): \
392         case glue(glue(INDEX_op_, x), _i32):
393 # define CASE_64(x) \
394         case glue(glue(INDEX_op_, x), _i64):
395 #else
396 # define CASE_32_64(x) \
397         case glue(glue(INDEX_op_, x), _i32):
398 # define CASE_64(x)
399 #endif
400
/*
 * Interpret pseudo code in tb.
 * @env: CPU architecture state (installed in TCG_AREG0 for the bytecode).
 * @v_tb_ptr: start of the TCI bytecode stream to execute.
 * Returns the value given to the exit_tb opcode.
 */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    /* Scratch space; the call-stack register points at its end
       (presumably the stack grows downward from there — confirm). */
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    /* Fetch/decode/execute loop: each iteration handles one opcode. */
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        /* op_size lets debug builds verify each opcode consumed exactly
           the number of bytes its encoder emitted. */
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        TCGReg r0, r1, r2;
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        TCGReg r3, r4, r5;
        uint64_t T1, T2;    /* 64-bit values assembled from register pairs */
#endif
        TCGMemOpIdx oi;
        int32_t ofs;
        void *ptr;

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            tci_args_l(&tb_ptr, &ptr);
            /* Publish the current bytecode position for the callee's use. */
            tci_tb_ptr = (uintptr_t)tb_ptr;
#if TCG_TARGET_REG_BITS == 32
            /* 32-bit host: helper arguments are passed in R0..R11 (64-bit
               values occupy register pairs); the 64-bit result is returned
               in R0 (low) / R1 (high). */
            tmp64 = ((helper_function)ptr)(tci_read_reg(regs, TCG_REG_R0),
                                           tci_read_reg(regs, TCG_REG_R1),
                                           tci_read_reg(regs, TCG_REG_R2),
                                           tci_read_reg(regs, TCG_REG_R3),
                                           tci_read_reg(regs, TCG_REG_R4),
                                           tci_read_reg(regs, TCG_REG_R5),
                                           tci_read_reg(regs, TCG_REG_R6),
                                           tci_read_reg(regs, TCG_REG_R7),
                                           tci_read_reg(regs, TCG_REG_R8),
                                           tci_read_reg(regs, TCG_REG_R9),
                                           tci_read_reg(regs, TCG_REG_R10),
                                           tci_read_reg(regs, TCG_REG_R11));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            /* 64-bit host: up to six register arguments, result in R0. */
            tmp64 = ((helper_function)ptr)(tci_read_reg(regs, TCG_REG_R0),
                                           tci_read_reg(regs, TCG_REG_R1),
                                           tci_read_reg(regs, TCG_REG_R2),
                                           tci_read_reg(regs, TCG_REG_R3),
                                           tci_read_reg(regs, TCG_REG_R4),
                                           tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            tci_args_l(&tb_ptr, &ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = ptr;
            continue;
        case INDEX_op_setcond_i32:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            /* Compare two 64-bit values held in register pairs (high:low). */
            tci_args_rrrrrc(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &condition);
            T1 = tci_uint64(regs[r2], regs[r1]);
            T2 = tci_uint64(regs[r4], regs[r3]);
            regs[r0] = tci_compare64(T1, T2, condition);
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
            break;
#endif
        CASE_32_64(mov)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = regs[r1];
            break;
        case INDEX_op_tci_movi_i32:
            tci_args_ri(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;

            /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint8_t *)ptr;
            break;
        CASE_32_64(ld8s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int8_t *)ptr;
            break;
        CASE_32_64(ld16u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint16_t *)ptr;
            break;
        CASE_32_64(ld16s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int16_t *)ptr;
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint32_t *)ptr;
            break;
        CASE_32_64(st8)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint8_t *)ptr = regs[r0];
            break;
        CASE_32_64(st16)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint16_t *)ptr = regs[r0];
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint32_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (mixed 32/64 bit). */

        CASE_32_64(add)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2];
            break;
        CASE_32_64(sub)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2];
            break;
        CASE_32_64(mul)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] * regs[r2];
            break;
        CASE_32_64(and)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] & regs[r2];
            break;
        CASE_32_64(or)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] | regs[r2];
            break;
        CASE_32_64(xor)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] ^ regs[r2];
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_div_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
            break;
        case INDEX_op_divu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
            break;
        case INDEX_op_rem_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
            break;
        case INDEX_op_remu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            /* Shift counts are masked to the operand width. */
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
            break;
        case INDEX_op_shr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
            break;
        case INDEX_op_sar_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_rotr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror32(regs[r1], regs[r2] & 31);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            /* Operands: dest reg (t0), base value reg (t1), insert value
               reg (t2), bit position (one byte), field length (one byte).
               tmp32 is the mask for the destination field. */
            t0 = *tb_ptr++;
            t1 = tci_read_rval(regs, &tb_ptr);
            t2 = tci_read_rval(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare32(regs[r0], regs[r1], condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            /* 64-bit add on register pairs: result into r1:r0,
               operands r3:r2 and r5:r4 (high:low). */
            tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 + T2);
            break;
        case INDEX_op_sub2_i32:
            tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 - T2);
            break;
        case INDEX_op_brcond2_i32:
            tci_args_rrrrcl(&tb_ptr, &r0, &r1, &r2, &r3, &condition, &ptr);
            T1 = tci_uint64(regs[r1], regs[r0]);
            T2 = tci_uint64(regs[r3], regs[r2]);
            if (tci_compare64(T1, T2, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            /* 32x32 -> 64 unsigned multiply; result into the t1:t0 pair. */
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_rval(regs, &tb_ptr);
            tmp64 = (uint32_t)tci_read_rval(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, (uint32_t)t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
        CASE_32_64(ext8s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
        CASE_32_64(ext16s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
        CASE_32_64(ext8u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
        CASE_32_64(ext16u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(bswap16)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap16(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
        CASE_32_64(bswap32)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap32(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
        CASE_32_64(not)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = ~regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
        CASE_32_64(neg)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = -regs[r1];
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_tci_movi_i64:
            tci_args_rI(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int32_t *)ptr;
            break;
        case INDEX_op_ld_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint64_t *)ptr;
            break;
        case INDEX_op_st_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint64_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_div_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
            break;
        case INDEX_op_divu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
            break;
        case INDEX_op_rem_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
            break;
        case INDEX_op_remu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] << (regs[r2] & 63);
            break;
        case INDEX_op_shr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] >> (regs[r2] & 63);
            break;
        case INDEX_op_sar_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_rotr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror64(regs[r1], regs[r2] & 63);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            /* Same operand layout as deposit_i32, with a 64-bit mask. */
            t0 = *tb_ptr++;
            t1 = tci_read_rval(regs, &tb_ptr);
            t2 = tci_read_rval(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare64(regs[r0], regs[r1], condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int32_t)regs[r1];
            break;
        case INDEX_op_ext32u_i64:
        case INDEX_op_extu_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint32_t)regs[r1];
            break;
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap64(regs[r1]);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            /* Leave the interpreter; the label operand is the value
               returned to the caller of tcg_qemu_tb_exec. */
            tci_args_l(&tb_ptr, &ptr);
            return (uintptr_t)ptr;

        case INDEX_op_goto_tb:
            /* Chain to another TB: the label operand is the address of a
               slot holding the destination bytecode pointer. */
            tci_args_l(&tb_ptr, &ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = *(void **)ptr;
            continue;

        case INDEX_op_qemu_ld_i32:
            /* t0 = destination register; taddr = guest address;
               oi packs the MemOp (size/sign/endianness) and mmu index. */
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            /* On 32-bit hosts the 64-bit result needs a second
               destination register (t1) for the high half. */
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            /* t0 = value to store; taddr = guest address. */
            t0 = tci_read_rval(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
        /* Each opcode must have consumed exactly op_size bytes. */
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
}