/* tcg/tcg-op.c */
1 /*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
25 #include "qemu/osdep.h"
26 #include "qemu-common.h"
27 #include "cpu.h"
28 #include "exec/exec-all.h"
29 #include "tcg.h"
30 #include "tcg-op.h"
31
32 /* Reduce the number of ifdefs below. This assumes that all uses of
33 TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
34 the compiler can eliminate. */
35 #if TCG_TARGET_REG_BITS == 64
36 extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
37 extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
38 #define TCGV_LOW TCGV_LOW_link_error
39 #define TCGV_HIGH TCGV_HIGH_link_error
40 #endif
41
42 /* Note that this is optimized for sequential allocation during translate.
43 Up to and including filling in the forward link immediately. We'll do
44 proper termination of the end of the list after we finish translation. */
45
/* Append one op to the op list.  @args is the index of the op's first
   argument in gen_opparam_buf.  Optimized for sequential allocation
   during translate: the forward link is filled in eagerly and the end
   of the list is properly terminated only after translation finishes. */
static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;
    int ni = oi + 1;
    int pi = oi - 1;

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_last_op_idx = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
63
/* Emit an op taking 1 argument: push the argument onto the parameter
   buffer, then emit the op referencing it. */
void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 1;
    ctx->gen_opparam_buf[pi] = a1;

    tcg_emit_op(ctx, opc, pi);
}
74
/* Emit an op taking 2 arguments. */
void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 2;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;

    tcg_emit_op(ctx, opc, pi);
}
86
/* Emit an op taking 3 arguments. */
void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 3;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;

    tcg_emit_op(ctx, opc, pi);
}
100
/* Emit an op taking 4 arguments. */
void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 4;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;

    tcg_emit_op(ctx, opc, pi);
}
115
/* Emit an op taking 5 arguments. */
void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 5;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;

    tcg_emit_op(ctx, opc, pi);
}
131
/* Emit an op taking 6 arguments. */
void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
                 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 6;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;
    ctx->gen_opparam_buf[pi + 5] = a6;

    tcg_emit_op(ctx, opc, pi);
}
148
149 /* 32 bit ops */
150
/* ret = arg1 + arg2 (constant).  Adding 0 folds to a move. */
void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
162
/* ret = arg1 (constant) - arg2.  0 - x uses the neg opcode directly
   when the backend has one. */
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32. */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}
174
/* ret = arg1 - arg2 (constant).  Subtracting 0 folds to a move. */
void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
186
/* ret = arg1 & arg2 (constant).  Masks of 0, all-ones, 0xff and 0xffff
   fold to movi/mov/ext8u/ext16u respectively. */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32. */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
216
/* ret = arg1 | arg2 (constant).  OR with -1 is movi -1; OR with 0 is
   a move. */
void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
230
/* ret = arg1 ^ arg2 (constant).  XOR with 0 is a move; XOR with -1 is
   a not when the backend has one. */
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32. */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
245
/* ret = arg1 << arg2 (constant, must be < 32).  Shift by 0 is a move. */
void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
257
/* ret = arg1 >> arg2 (logical, constant < 32).  Shift by 0 is a move. */
void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
269
/* ret = arg1 >> arg2 (arithmetic, constant < 32).  Shift by 0 is a move. */
void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
281
/* Branch to label l if arg1 <cond> arg2.  ALWAYS degenerates to an
   unconditional branch; NEVER emits nothing. */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}
290
/* Branch to label l if arg1 <cond> arg2 (constant). */
void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}
301
/* ret = (arg1 <cond> arg2) ? 1 : 0.  Trivial conditions fold to movi. */
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}
313
314 void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
315 TCGv_i32 arg1, int32_t arg2)
316 {
317 TCGv_i32 t0 = tcg_const_i32(arg2);
318 tcg_gen_setcond_i32(cond, ret, arg1, t0);
319 tcg_temp_free_i32(t0);
320 }
321
322 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
323 {
324 TCGv_i32 t0 = tcg_const_i32(arg2);
325 tcg_gen_mul_i32(ret, arg1, t0);
326 tcg_temp_free_i32(t0);
327 }
328
/* Signed 32-bit division.  Prefer the native div opcode, then the
   two-operand div2 form (high word pre-filled with the sign bits),
   then fall back to a helper call. */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
342
/* Signed 32-bit remainder.  Prefer rem, then synthesize from div as
   arg1 - (arg1 / arg2) * arg2, then div2, then a helper call. */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
362
/* Unsigned 32-bit division.  div2's high word is zero for unsigned. */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
376
/* Unsigned 32-bit remainder; fallback chain mirrors tcg_gen_rem_i32. */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
396
/* ret = arg1 & ~arg2; expand as not + and when there is no andc. */
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
408
/* ret = ~(arg1 ^ arg2); expand as xor + not when there is no eqv. */
void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
418
/* ret = ~(arg1 & arg2); expand as and + not when there is no nand. */
void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
428
/* ret = ~(arg1 | arg2); expand as or + not when there is no nor. */
void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
438
/* ret = arg1 | ~arg2; expand as not + or when there is no orc. */
void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
450
/* Rotate left by a variable amount; without a native rotate, combine
   (arg1 << arg2) | (arg1 >> (32 - arg2)). */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
468
/* Rotate left by a constant (< 32).  With a native rotate, reuse the
   variable form; otherwise combine constant shifts. */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
490
/* Rotate right by a variable amount; without a native rotate, combine
   (arg1 >> arg2) | (arg1 << (32 - arg2)). */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
508
509 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
510 {
511 tcg_debug_assert(arg2 < 32);
512 /* some cases can be optimized here */
513 if (arg2 == 0) {
514 tcg_gen_mov_i32(ret, arg1);
515 } else {
516 tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
517 }
518 }
519
/* Deposit the low @len bits of arg2 into arg1 at bit offset @ofs,
   leaving the other bits of arg1 intact.  Uses the backend's deposit
   opcode when the (ofs, len) pair is supported, otherwise masks and
   shifts by hand. */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches the top bit: the shift discards the excess. */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
553
/* ret = (c1 <cond> c2) ? v1 : v2.  Without a native movcond, build a
   mask from setcond (0 or -1 after negation) and blend v1/v2. */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
575
/* Double-word add: (rh:rl) = (ah:al) + (bh:bl).  Without a native
   add2, do the addition as a single 64-bit op and split the result. */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
592
/* Double-word subtract: (rh:rl) = (ah:al) - (bh:bl). */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
609
/* Unsigned widening multiply: (rh:rl) = arg1 * arg2.  The muluh path
   computes the low half into a temp so rl may alias an input. */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
631
/* Signed widening multiply: (rh:rl) = arg1 * arg2.  On 32-bit hosts
   without muls2/mulsh, start from the unsigned product and correct
   the high half for negative inputs; otherwise widen to 64 bits. */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
671
/* Sign-extend the low 8 bits; fall back to shift left/right pair. */
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}
681
/* Sign-extend the low 16 bits; fall back to shift left/right pair. */
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}
691
/* Zero-extend the low 8 bits; fall back to an AND mask. */
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}
700
/* Zero-extend the low 16 bits; fall back to an AND mask. */
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
709
/* Byte-swap the low 16 bits of arg into ret.
   Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        /* low byte into bits 15:8, byte 1 into bits 7:0. */
        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}
725
/* Byte-swap all 32 bits of arg into ret; the fallback assembles the
   four bytes one at a time with shifts and masks. */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
751
752 /* 64-bit ops */
753
754 #if TCG_TARGET_REG_BITS == 32
755 /* These are all inline for TCG_TARGET_REG_BITS == 64. */
756
/* Discard both 32-bit halves of a 64-bit value (32-bit host only). */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
762
/* Copy a 64-bit value as two 32-bit moves (32-bit host only). */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}
768
/* Load a 64-bit constant as two 32-bit movi ops (32-bit host only). */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
774
/* Load an unsigned byte into the low half, zero the high half. */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
780
781 void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
782 {
783 tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
784 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), 31);
785 }
786
/* Load an unsigned 16-bit value into the low half, zero the high half. */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
792
/* Load a signed 16-bit value and sign-extend into the high half. */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
798
/* Load an unsigned 32-bit value into the low half, zero the high half. */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
804
/* Load a signed 32-bit value and sign-extend into the high half. */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
810
/* Load a full 64-bit value as two 32-bit loads, in host byte order. */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
823
/* Store a full 64-bit value as two 32-bit stores, in host byte order. */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
834
/* 64-bit AND as two independent 32-bit ANDs. */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
840
/* 64-bit OR as two independent 32-bit ORs. */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
846
/* 64-bit XOR as two independent 32-bit XORs. */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
852
/* Variable 64-bit left shift on a 32-bit host: call out to a helper. */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
857
/* Variable 64-bit logical right shift on a 32-bit host: helper call. */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
862
/* Variable 64-bit arithmetic right shift on a 32-bit host: helper call. */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
867
/* 64-bit multiply on a 32-bit host: schoolbook method.  The unsigned
   widening product of the low halves gives the low 64 bits; the two
   cross products affect only the high 32 bits.  Result is built in a
   temp so ret may alias an input. */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */
889
/* ret = arg1 + arg2 (constant).  Adding 0 folds to a move. */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
901
/* ret = arg1 (constant) - arg2.  0 - x uses the neg opcode directly
   when the backend has one. */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64. */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}
913
/* ret = arg1 - arg2 (constant).  Subtracting 0 folds to a move. */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
925
/* ret = arg1 & arg2 (constant).  On a 32-bit host, split into two
   32-bit ANDs; otherwise fold masks of 0, all-ones, 0xff, 0xffff and
   0xffffffff to movi/mov/ext*u. */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64. */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
968
/* ret = arg1 | arg2 (constant); split per half on a 32-bit host. */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
987
/* ret = arg1 ^ arg2 (constant); split per half on a 32-bit host.
   XOR with 0 is a move; XOR with -1 is a not when available. */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64. */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1007
/* Constant 64-bit shift on a 32-bit host, expressed in 32-bit halves.
   @right selects right vs left shift, @arith selects arithmetic vs
   logical for right shifts.  Three cases: c == 0 (plain copy),
   c >= 32 (result comes entirely from the other half), and 0 < c < 32
   (bits cross the half boundary, so combine shifted pieces). */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            /* t0 holds the bits that cross from high into low. */
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1056
/* ret = arg1 << arg2 (constant < 64); split form on a 32-bit host. */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1070
/* ret = arg1 >> arg2 (logical, constant < 64); split on 32-bit host. */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1084
/* ret = arg1 >> arg2 (arithmetic, constant < 64); split on 32-bit host. */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1098
/* Emit a conditional branch to label L, taken when COND holds for
   ARG1 vs ARG2.  ALWAYS degenerates to an unconditional branch and
   NEVER emits nothing at all.  */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: compare the low/high halves with brcond2.  */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}
1114
1115 void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
1116 {
1117 if (cond == TCG_COND_ALWAYS) {
1118 tcg_gen_br(l);
1119 } else if (cond != TCG_COND_NEVER) {
1120 TCGv_i64 t0 = tcg_const_i64(arg2);
1121 tcg_gen_brcond_i64(cond, arg1, t0, l);
1122 tcg_temp_free_i64(t0);
1123 }
1124 }
1125
/* Set RET to 1 if COND holds for ARG1 vs ARG2, else 0.  The degenerate
   ALWAYS/NEVER conditions become constant moves.  */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* Compare the two halves; the boolean lands in the low word,
               so the high word of the 64-bit result is zeroed.  */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1144
1145 void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1146 TCGv_i64 arg1, int64_t arg2)
1147 {
1148 TCGv_i64 t0 = tcg_const_i64(arg2);
1149 tcg_gen_setcond_i64(cond, ret, arg1, t0);
1150 tcg_temp_free_i64(t0);
1151 }
1152
1153 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1154 {
1155 TCGv_i64 t0 = tcg_const_i64(arg2);
1156 tcg_gen_mul_i64(ret, arg1, t0);
1157 tcg_temp_free_i64(t0);
1158 }
1159
/* Emit RET = ARG1 / ARG2 (signed 64-bit division), using the best
   facility the host backend provides.  */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 takes a 128-bit dividend; sign-extend arg1 into the
           high half (t0) first.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No host support at all: call out to a helper.  */
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1173
/* Emit RET = ARG1 % ARG2 (signed 64-bit remainder), using the best
   facility the host backend provides.  */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 produces quotient and remainder; sign-extend arg1 into
           the high half (t0), and take the remainder output.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No host support at all: call out to a helper.  */
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1193
/* Emit RET = ARG1 / ARG2 (unsigned 64-bit division), using the best
   facility the host backend provides.  */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 takes a 128-bit dividend; the high half (t0) is zero
           for an unsigned 64-bit value.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No host support at all: call out to a helper.  */
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1207
/* Emit RET = ARG1 % ARG2 (unsigned 64-bit remainder), using the best
   facility the host backend provides.  */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 produces quotient and remainder; high half of the
           dividend (t0) is zero, and we take the remainder output.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No host support at all: call out to a helper.  */
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1227
/* Sign-extend the low 8 bits of ARG into the 64-bit RET.  */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word, then replicate its sign bit
           into the high word.  */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Fallback: shift left then arithmetic shift right.  */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1240
/* Sign-extend the low 16 bits of ARG into the 64-bit RET.  */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word, then replicate its sign bit
           into the high word.  */
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Fallback: shift left then arithmetic shift right.  */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1253
/* Sign-extend the low 32 bits of ARG into the 64-bit RET.  */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low word and fill the high word with its sign.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Fallback: shift left then arithmetic shift right.  */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1266
/* Zero-extend the low 8 bits of ARG into the 64-bit RET.  */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word; high word becomes zero.  */
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        /* Fallback: mask off everything above bit 7.  */
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1278
/* Zero-extend the low 16 bits of ARG into the 64-bit RET.  */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word; high word becomes zero.  */
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        /* Fallback: mask off everything above bit 15.  */
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1290
/* Zero-extend the low 32 bits of ARG into the 64-bit RET.  */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low word; high word becomes zero.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        /* Fallback: mask off everything above bit 31.  */
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1302
/* Byte-swap the low 16 bits of ARG into RET.
   Note: we assume the six high bytes are set to zero.  */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The swap happens entirely in the low word.  */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        /* t0 = low byte shifted up; ret = high byte shifted down;
           or them together.  */
        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}
1321
/* Byte-swap the low 32 bits of ARG into RET.
   Note: we assume the four high bytes are set to zero.  */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The swap happens entirely in the low word.  */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        /* Move each of the four bytes into its mirrored position,
           accumulating the result in t0.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        /* byte 0 -> byte 3 (the ext32u discards bits shifted past 31) */
        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        /* byte 1 -> byte 2 */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 2 -> byte 1 */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 3 -> byte 0 */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1352
/* Byte-swap all 64 bits of ARG into RET.  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half, then exchange the halves.  Temps are
           needed since RET may alias ARG.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        /* Move each of the eight bytes into its mirrored position,
           accumulating the result in t0.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* byte 0 -> byte 7 */
        tcg_gen_shli_i64(t0, arg, 56);

        /* byte 1 -> byte 6 */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 2 -> byte 5 */
        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 3 -> byte 4 */
        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 4 -> byte 3 */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 5 -> byte 2 */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 6 -> byte 1 */
        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 7 -> byte 0 */
        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1404
1405 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1406 {
1407 if (TCG_TARGET_REG_BITS == 32) {
1408 tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1409 tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1410 } else if (TCG_TARGET_HAS_not_i64) {
1411 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1412 } else {
1413 tcg_gen_xori_i64(ret, arg, -1);
1414 }
1415 }
1416
/* Emit RET = ARG1 & ~ARG2 (and-with-complement).  */
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        /* Fallback: complement into a temp first, since RET may
           alias ARG2.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1431
1432 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1433 {
1434 if (TCG_TARGET_REG_BITS == 32) {
1435 tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1436 tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1437 } else if (TCG_TARGET_HAS_eqv_i64) {
1438 tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1439 } else {
1440 tcg_gen_xor_i64(ret, arg1, arg2);
1441 tcg_gen_not_i64(ret, ret);
1442 }
1443 }
1444
1445 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1446 {
1447 if (TCG_TARGET_REG_BITS == 32) {
1448 tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1449 tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1450 } else if (TCG_TARGET_HAS_nand_i64) {
1451 tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1452 } else {
1453 tcg_gen_and_i64(ret, arg1, arg2);
1454 tcg_gen_not_i64(ret, ret);
1455 }
1456 }
1457
1458 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1459 {
1460 if (TCG_TARGET_REG_BITS == 32) {
1461 tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1462 tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1463 } else if (TCG_TARGET_HAS_nor_i64) {
1464 tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1465 } else {
1466 tcg_gen_or_i64(ret, arg1, arg2);
1467 tcg_gen_not_i64(ret, ret);
1468 }
1469 }
1470
/* Emit RET = ARG1 | ~ARG2 (or-with-complement).  */
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        /* Fallback: complement into a temp first, since RET may
           alias ARG2.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1485
/* Emit RET = ARG1 rotated left by ARG2 bits.  */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 << arg2) | (arg1 >> (64 - arg2)).  Temps
           keep arg1 intact in case RET aliases it.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1502
/* Emit RET = ARG1 rotated left by the constant ARG2 (0 <= arg2 < 64).  */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        /* Rotation by zero is a plain move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        /* Materialize the count and use the host rotate.  */
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Fallback: (arg1 << arg2) | (arg1 >> (64 - arg2)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1524
/* Emit RET = ARG1 rotated right by ARG2 bits.  */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 >> arg2) | (arg1 << (64 - arg2)).  Temps
           keep arg1 intact in case RET aliases it.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1541
1542 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1543 {
1544 tcg_debug_assert(arg2 < 64);
1545 /* some cases can be optimized here */
1546 if (arg2 == 0) {
1547 tcg_gen_mov_i64(ret, arg1);
1548 } else {
1549 tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
1550 }
1551 }
1552
/* Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
   storing the combined value in RET.  Bits of ARG1 outside the
   [ofs, ofs+len) field are preserved.  */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Full-width deposit simply replaces the value.  */
    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    /* Use the host deposit op when it accepts this ofs/len pair.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* When the field lies entirely within one 32-bit half, do a
           32-bit deposit there and copy the other half through.  */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    /* Generic fallback: mask, shift and merge.  len < 64 here, so the
       shift below is well-defined.  */
    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field reaches bit 63: the shift itself discards high bits,
           so no pre-masking is needed.  */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
1601
/* Emit RET = (C1 COND C2) ? V1 : V2 (64-bit conditional move).  */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Evaluate the 64-bit comparison into the 32-bit boolean t0.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half with a 32-bit movcond on t0 != 0.  */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Bit-mask selection: turn the 0/1 boolean into an
               all-zeros/all-ones mask, then blend each half as
               (v1 & mask) | (v2 & ~mask).  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Same bit-mask selection trick, at 64-bit width.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1649
/* Emit a 128-bit add: {RH,RL} = {AH,AL} + {BH,BL}, expressed as two
   64-bit halves with carry propagation.  */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: low half in t0; unsigned wraparound (t0 < al)
           yields the carry in t1.  RL is written last via a temp so
           it may alias any input.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1667
/* Emit a 128-bit subtract: {RH,RL} = {AH,AL} - {BH,BL}, expressed as
   two 64-bit halves with borrow propagation.  */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: low half in t0; (al < bl) unsigned yields the
           borrow in t1.  RL is written last via a temp so it may
           alias any input.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1685
/* Emit an unsigned 64x64->128 multiply: RL = low half, RH = high half
   of ARG1 * ARG2.  */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Separate low-mul and high-mul ops; stage the low half in a
           temp so RL may alias an input.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* High half via helper; low half is an ordinary multiply.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1704
/* Emit a signed 64x64->128 multiply: RL = low half, RH = high half
   of ARG1 * ARG2 treated as signed values.  */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Separate low-mul and signed-high-mul ops; stage the low half
           in a temp so RL may alias an input.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Derive the signed product from the unsigned one: for each
           negative input, subtract the other operand from the high
           half (standard signed-from-unsigned correction).  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* High half via helper; low half is an ordinary multiply.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1741
1742 /* Size changing operations. */
1743
/* Extract the low 32 bits of the 64-bit ARG into the 32-bit RET.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: the low half already exists as an i32 temp.  */
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* 64-bit host without the op: i32 and i64 temps share the
           register file, so reinterpret the i64 index as an i32.  */
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    }
}
1755
/* Extract the high 32 bits of the 64-bit ARG into the 32-bit RET.  */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: the high half already exists as an i32 temp.  */
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Shift the high half down, then reinterpret the i64 temp
           index as an i32 (temps share the register file).  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}
1770
/* Zero-extend the 32-bit ARG into the 64-bit RET.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy into the low half; zero the high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1781
/* Sign-extend the 32-bit ARG into the 64-bit RET.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy into the low half; fill the high half with the sign.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1792
/* Build the 64-bit DEST from two 32-bit halves: LOW in bits [31:0],
   HIGH in bits [63:32].  */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: the halves map directly onto the i64 temp.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
1818
1819 void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
1820 {
1821 if (TCG_TARGET_REG_BITS == 32) {
1822 tcg_gen_mov_i32(lo, TCGV_LOW(arg));
1823 tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
1824 } else {
1825 tcg_gen_extrl_i64_i32(lo, arg);
1826 tcg_gen_extrh_i64_i32(hi, arg);
1827 }
1828 }
1829
/* Split the 64-bit ARG into two 64-bit values: LO = zero-extended
   bits [31:0], HI = bits [63:32].  LO must not alias ARG.  */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
1835
1836 /* QEMU specific operations. */
1837
/* Emit a direct jump to chained-exit slot IDX of the current TB.  */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
1849
/* Normalize a memory operation descriptor: drop flags that are
   meaningless for the access size (byte swap on 8-bit accesses, sign
   extension into a 32-bit value for 32-bit accesses, any sign flag on
   stores), and reject 64-bit accesses on the 32-bit path.
   IS64 is true for the _i64 load/store entry points; ST is true for
   stores.  Returns the canonicalized op.  */
static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    switch (op & MO_SIZE) {
    case MO_8:
        /* A single byte has no endianness.  */
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            /* Extending a 32-bit value into 32 bits is a no-op.  */
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            /* A 64-bit access cannot target a 32-bit value.  */
            tcg_abort();
        }
        break;
    }
    if (st) {
        /* Stores never extend.  */
        op &= ~MO_SIGN;
    }
    return op;
}
1874
/* Emit a guest-memory load/store op OPC with a 32-bit data value.
   The opcode arity depends on how many host registers the guest
   address occupies.  */
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    /* Pack the memop and mmu index into a single op argument.  */
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit guest address split across two 32-bit host regs.  */
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        /* Mixed i32 value / i64 address: emit via raw arg indices.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}
1889
/* Emit a guest-memory load/store op OPC with a 64-bit data value.
   The opcode arity depends on how many host registers the value and
   the guest address each occupy.  */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    /* Pack the memop and mmu index into a single op argument.  */
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit value split across two 32-bit host regs.  */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        /* Mixed i64 value / i32 address: emit via raw arg indices.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* Both value and address split across 32-bit host regs.  */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
1909
/* Emit a guest load of at most 32 bits into VAL from guest address
   ADDR, using mmu index IDX and access descriptor MEMOP.  */
void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
}
1915
/* Emit a guest store of at most 32 bits from VAL to guest address
   ADDR, using mmu index IDX and access descriptor MEMOP.  */
void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 1);
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
}
1921
/* Emit a guest load of up to 64 bits into VAL from guest address
   ADDR, using mmu index IDX and access descriptor MEMOP.  */
void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* On a 32-bit host, a sub-64-bit load only needs the 32-bit
           path; then extend into the high half per MO_SIGN.  */
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}
1937
/* Emit a guest store of up to 64 bits from VAL to guest address
   ADDR, using mmu index IDX and access descriptor MEMOP.  */
void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* On a 32-bit host, a sub-64-bit store only needs the low
           half; the high half is irrelevant.  */
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}