/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "tcg.h"

int gen_new_label(void);

static inline void tcg_gen_op0(TCGOpcode opc)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
}

static inline void tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
}

static inline void tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
}

static inline void tcg_gen_op1i(TCGOpcode opc, TCGArg arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = arg1;
}

static inline void tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
}

static inline void tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
}

static inline void tcg_gen_op2i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGArg arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGArg arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2ii(TCGOpcode opc, TCGArg arg1, TCGArg arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = arg1;
    *tcg_ctx.gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
}

static inline void tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
}

static inline void
tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 arg1,
                 TCGv_i32 arg2, TCGArg arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
}

static inline void tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 arg1,
                                    TCGv_i64 arg2, TCGArg arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
}

static inline void tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                                       TCGv_ptr base, TCGArg offset)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_PTR(base);
    *tcg_ctx.gen_opparam_ptr++ = offset;
}

static inline void tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
                                       TCGv_ptr base, TCGArg offset)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_PTR(base);
    *tcg_ctx.gen_opparam_ptr++ = offset;
}

static inline void tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
}

static inline void tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
}

static inline void tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                     TCGArg arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                     TCGArg arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
}

static inline void tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
}

static inline void tcg_gen_op5i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGv_i32 arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGv_i64 arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 arg1,
                                     TCGv_i32 arg2, TCGv_i32 arg3,
                                     TCGArg arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 arg1,
                                     TCGv_i64 arg2, TCGv_i64 arg3,
                                     TCGArg arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5,
                                   TCGv_i32 arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg6);
}

static inline void tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5,
                                   TCGv_i64 arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg6);
}

static inline void tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGv_i32 arg4,
                                    TCGv_i32 arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGv_i64 arg4,
                                    TCGv_i64 arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 arg1,
                                     TCGv_i32 arg2, TCGv_i32 arg3,
                                     TCGv_i32 arg4, TCGArg arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 arg1,
                                     TCGv_i64 arg2, TCGv_i64 arg3,
                                     TCGv_i64 arg4, TCGArg arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_add_param_i32(TCGv_i32 val)
{
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
}

static inline void tcg_add_param_i64(TCGv_i64 val)
{
#if TCG_TARGET_REG_BITS == 32
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_LOW(val));
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_HIGH(val));
#else
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
#endif
}

static inline void gen_set_label(int n)
{
    tcg_gen_op1i(INDEX_op_set_label, n);
}

static inline void tcg_gen_br(int label)
{
    tcg_gen_op1i(INDEX_op_br, label);
}

static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (!TCGV_EQUAL_I32(ret, arg))
        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
}

static inline void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_op2i_i32(INDEX_op_movi_i32, ret, arg);
}

/* A version of dh_sizemask from def-helper.h that doesn't rely on
   preprocessor magic.  */
static inline int tcg_gen_sizemask(int n, int is_64bit, int is_signed)
{
    return (is_64bit << n*2) | (is_signed << (n*2 + 1));
}

/* helper calls */
static inline void tcg_gen_helperN(void *func, int flags, int sizemask,
                                   TCGArg ret, int nargs, TCGArg *args)
{
    TCGv_ptr fn;
    fn = tcg_const_ptr(func);
    tcg_gen_callN(&tcg_ctx, fn, flags, sizemask, ret,
                  nargs, args);
    tcg_temp_free_ptr(fn);
}

/* Note: Both tcg_gen_helper32() and tcg_gen_helper64() are currently
   reserved for helpers in tcg-runtime.c.
   These helpers all do not read
   globals and do not have side effects, hence the call to tcg_gen_callN()
   with TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS.  This may need
   to be adjusted if these functions start to be used with other helpers. */
static inline void tcg_gen_helper32(void *func, int sizemask, TCGv_i32 ret,
                                    TCGv_i32 a, TCGv_i32 b)
{
    TCGv_ptr fn;
    TCGArg args[2];
    fn = tcg_const_ptr(func);
    args[0] = GET_TCGV_I32(a);
    args[1] = GET_TCGV_I32(b);
    tcg_gen_callN(&tcg_ctx, fn,
                  TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS,
                  sizemask, GET_TCGV_I32(ret), 2, args);
    tcg_temp_free_ptr(fn);
}

static inline void tcg_gen_helper64(void *func, int sizemask, TCGv_i64 ret,
                                    TCGv_i64 a, TCGv_i64 b)
{
    TCGv_ptr fn;
    TCGArg args[2];
    fn = tcg_const_ptr(func);
    args[0] = GET_TCGV_I64(a);
    args[1] = GET_TCGV_I64(b);
    tcg_gen_callN(&tcg_ctx, fn,
                  TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS,
                  sizemask, GET_TCGV_I64(ret), 2, args);
    tcg_temp_free_ptr(fn);
}

/* 32 bit ops */

static inline void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}

static inline void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}

static inline void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}

static inline void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}

static inline void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}

static inline void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}

static inline void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg1);
    tcg_gen_sub_i32(ret, t0, arg2);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1,
                                      TCGv_i32 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_index);
    }
}

static inline void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1,
                                       int32_t arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, label_index);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                                       TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

static inline void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                                        TCGv_i32 arg1, int32_t arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_setcond_i32(cond, ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}

static inline void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32,
                        ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 0, 1);
        sizemask |= tcg_gen_sizemask(1, 0, 1);
        sizemask |= tcg_gen_sizemask(2, 0, 1);
        tcg_gen_helper32(tcg_helper_div_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 0, 1);
        sizemask |= tcg_gen_sizemask(1, 0, 1);
        sizemask |= tcg_gen_sizemask(2, 0, 1);
        tcg_gen_helper32(tcg_helper_rem_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 0, 0);
        sizemask |= tcg_gen_sizemask(1, 0, 0);
        sizemask |= tcg_gen_sizemask(2, 0, 0);
        tcg_gen_helper32(tcg_helper_divu_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 0, 0);
        sizemask |= tcg_gen_sizemask(1, 0, 0);
        sizemask |= tcg_gen_sizemask(2, 0, 0);
        tcg_gen_helper32(tcg_helper_remu_i32, sizemask, ret, arg1, arg2);
    }
}

#if TCG_TARGET_REG_BITS == 32
/* On 32-bit hosts a 64-bit value is represented as a pair of 32-bit
   halves, accessed with TCGV_LOW() and TCGV_HIGH().  */

static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (!TCGV_EQUAL_I64(ret, arg)) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    /* Sign-extend from the loaded low half, as ld16s_i64 does below.  */
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
    /* since arg2 and ret have different types, they cannot be the
       same temporary */
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                   tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_add2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to replace add2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);
}

static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_sub2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to replace sub2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);
}

static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

/* XXX: use generic code when basic block handling is OK or CPU
   specific code (x86) */
static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_shl_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
}

static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_shr_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
}

static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_sar_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
}

static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
                                      TCGv_i64 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                          TCGV_HIGH(arg2), cond, label_index);
    }
}

static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(TCGV_LOW(ret), 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(TCGV_LOW(ret), 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
    }
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_muluh_i32);
        tcg_gen_op3_i32(INDEX_op_mul_i32, TCGV_LOW(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_op3_i32(INDEX_op_muluh_i32, TCGV_HIGH(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
    }

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}

static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and unsigned.  */
    sizemask |= tcg_gen_sizemask(0, 1, 0);
    sizemask |= tcg_gen_sizemask(1, 1, 0);
    sizemask |= tcg_gen_sizemask(2, 1, 0);

    tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and unsigned.  */
    sizemask |= tcg_gen_sizemask(0, 1, 0);
    sizemask |= tcg_gen_sizemask(1, 1, 0);
    sizemask |= tcg_gen_sizemask(2, 1, 0);

    tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
}

#else

static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (!TCGV_EQUAL_I64(ret, arg))
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
}

static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_op2i_i64(INDEX_op_movi_i64, ret, arg);
}

static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
}

static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                   tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
}

static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
}

static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
}

static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
}

static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
}

static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
}

static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCGV_EQUAL_I64(arg1, arg2)) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
    }
}
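
/* As in the 32-bit variant, masks that correspond to zero-extensions are
   mapped to the ext8u/ext16u/ext32u opcodes below when the target
   provides them, avoiding a constant temporary.  */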
static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                    uint64_t arg2)
{
    TCGv_i64 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCGV_EQUAL_I64(arg1, arg2)) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
    }
}

static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCGV_EQUAL_I64(arg1, arg2)) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
    }
}

static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
                                      TCGv_i64 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond, label_index);
    }
}

static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
    }
}

static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
}

static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 1, 1);
        sizemask |= tcg_gen_sizemask(1, 1, 1);
        sizemask |= tcg_gen_sizemask(2, 1, 1);
        tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 1, 1);
        sizemask |= tcg_gen_sizemask(1, 1, 1);
        sizemask |= tcg_gen_sizemask(2, 1, 1);
        tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 1, 0);
        sizemask |= tcg_gen_sizemask(1, 1, 0);
        sizemask |= tcg_gen_sizemask(2, 1, 0);
        tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 1, 0);
        sizemask |= tcg_gen_sizemask(1, 1, 0);
        sizemask |= tcg_gen_sizemask(2, 1, 0);
        tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
    }
}
#endif /* TCG_TARGET_REG_BITS == 32 */

static inline void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg1);
    tcg_gen_sub_i64(ret, t0, arg2);
    tcg_temp_free_i64(t0);
}

static inline void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1,
                                       int64_t arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, label_index);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                                        TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

static inline void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}


/***************************************/
/* optional operations */

static inline void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}
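
/* Byte-swap a full 32-bit value.  When the target has no bswap32 opcode,
   the result is assembled from four shifted and masked byte fields.  */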
static inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

#if TCG_TARGET_REG_BITS == 32
static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(ret, TCGV_LOW(arg));
}

static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), arg);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

/* Note: we assume the six high bytes are set to zero */
static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
}

/* Note: we assume the four high bytes are set to zero */
static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
}

static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGv_i32 t0, t1;
    t0 = tcg_temp_new_i32();
    t1 = tcg_temp_new_i32();

    tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
    tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
    tcg_gen_mov_i32(TCGV_LOW(ret), t1);
    tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}
#else

static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the target supports move between 32 and 64 bit
   registers.  This will probably break MIPS64 targets.  */
static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
}

/* Note: we assume the target supports move between 32 and 64 bit
   registers */
static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
}

/* Note: we assume the target supports move between 32 and 64 bit
   registers */
static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
}

/* Note: we assume the six high bytes are set to zero */
static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {

static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

#endif

static inline void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_neg_i32) {
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_const_i32(0);
        tcg_gen_sub_i32(ret, t0, arg);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_neg_i64) {
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_const_i64(0);
        tcg_gen_sub_i64(ret, t0, arg);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_not_i32) {
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
    } else {
        tcg_gen_xori_i32(ret, arg, -1);
    }
}

static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
#else
    tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
#endif
}

static inline void tcg_gen_discard_i32(TCGv_i32 arg)
{
    tcg_gen_op1_i32(INDEX_op_discard, arg);
}

static inline void tcg_gen_discard_i64(TCGv_i64 arg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
#else
    tcg_gen_op1_i64(INDEX_op_discard, arg);
#endif
}

static inline void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
#else
    tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}
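
/* The neg, not and andc fallbacks above rely on simple two's complement
   identities: -x == 0 - x, ~x == x ^ -1 and andc(a, b) == a & ~b.  The
   plain C helper below restates the andc case; illustrative only. */
static inline uint64_t tcg_example_andc64(uint64_t a, uint64_t b)
{
    return a & ~b;      /* "and with complement", as the fallback emits */
}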

static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
#else
    tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}
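
/* Each fallback above derives the operation from a base op plus a
   complement: eqv(a, b) == ~(a ^ b), nand(a, b) == ~(a & b),
   nor(a, b) == ~(a | b) and orc(a, b) == a | ~b.  The plain C helper
   below restates the eqv case; illustrative only. */
static inline uint64_t tcg_example_eqv64(uint64_t a, uint64_t b)
{
    return ~(a ^ b);    /* a bit is set where a and b agree */
}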

static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

static inline void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}
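
/* For reference, a rotate by a non-zero constant c is built from two
   shifts, and a right rotate is just a left rotate by (width - c), exactly
   as in the fallbacks above.  Plain C restatement for the 32-bit case;
   illustrative only. */
static inline uint32_t tcg_example_rotl32(uint32_t x, unsigned c)
{
    /* Valid for 0 < c < 32, matching the cases the fallbacks handle. */
    return (x << c) | (x >> (32 - c));
}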

static inline void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1,
                                       TCGv_i32 arg2, unsigned int ofs,
                                       unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}

static inline void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                       TCGv_i64 arg2, unsigned int ofs,
                                       unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

#if TCG_TARGET_REG_BITS == 32
    if (ofs >= 32) {
        tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                            TCGV_LOW(arg2), ofs - 32, len);
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        return;
    }
    if (ofs + len <= 32) {
        tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                            TCGV_LOW(arg2), ofs, len);
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        return;
    }
#endif

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
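
/* For reference, the generic deposit fallback above computes
   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs) with
   mask = (1 << len) - 1, i.e. it replaces the len-bit field of arg1 at
   bit offset ofs with the low len bits of arg2.  Plain C restatement;
   illustrative only. */
static inline uint64_t tcg_example_deposit64(uint64_t arg1, uint64_t arg2,
                                             unsigned ofs, unsigned len)
{
    uint64_t mask = (len == 64 ? ~(uint64_t)0 : ((uint64_t)1 << len) - 1);

    return (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs);
}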

static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low,
                                          TCGv_i32 high)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(dest), low);
    tcg_gen_mov_i32(TCGV_HIGH(dest), high);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
#endif
}

static inline void tcg_gen_concat32_i64(TCGv_i64 dest, TCGv_i64 low,
                                        TCGv_i64 high)
{
    tcg_gen_deposit_i64(dest, low, high, 32, 32);
}

static inline void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_mov_i32(lo, TCGV_LOW(arg));
    tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
#else
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_trunc_i64_i32(lo, arg);
    tcg_gen_shri_i64(t0, arg, 32);
    tcg_gen_trunc_i64_i32(hi, t0);
    tcg_temp_free_i64(t0);
#endif
}

static inline void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

static inline void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret,
                                       TCGv_i32 c1, TCGv_i32 c2,
                                       TCGv_i32 v1, TCGv_i32 v2)
{
    if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 c1, TCGv_i64 c2,
                                       TCGv_i64 v1, TCGv_i64 v2)
{
#if TCG_TARGET_REG_BITS == 32
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                     TCGV_LOW(c1), TCGV_HIGH(c1),
                     TCGV_LOW(c2), TCGV_HIGH(c2), cond);

    if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_movi_i32(t1, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                            TCGV_LOW(v1), TCGV_LOW(v2));
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                            TCGV_HIGH(v1), TCGV_HIGH(v2));
    } else {
        tcg_gen_neg_i32(t0, t0);

        tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

        tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
#else
    if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
#endif
}
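
/* For reference, the movcond fallbacks above build a branchless select:
   setcond produces 0 or 1, negating that gives an all-zeroes or all-ones
   mask, and the result is (v1 & mask) | (v2 & ~mask).  Plain C
   restatement; illustrative only. */
static inline uint64_t tcg_example_movcond64(int cond_is_true,
                                             uint64_t v1, uint64_t v2)
{
    uint64_t mask = -(uint64_t)(cond_is_true != 0);

    return (v1 & mask) | (v2 & ~mask);
}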

static inline void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                                    TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
        /* Allow the optimizer room to replace add2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                                    TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
        /* Allow the optimizer room to replace sub2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh,
                                     TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
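
/* For reference, the final mulu2_i32 fallback above widens both operands
   to 64 bits, multiplies, and splits the product into its low and high
   halves.  Plain C restatement; illustrative only. */
static inline void tcg_example_mulu2_32(uint32_t *rl, uint32_t *rh,
                                        uint32_t a, uint32_t b)
{
    uint64_t p = (uint64_t)a * b;

    *rl = (uint32_t)p;
    *rh = (uint32_t)(p >> 32);
}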

static inline void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh,
                                     TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
        /* Allow the optimizer room to replace muls2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_mulu2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, t0, t1, arg1, arg2);
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                                    TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
        /* Allow the optimizer room to replace add2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                                    TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
        /* Allow the optimizer room to replace sub2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
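
/* For reference, the add2/sub2 fallbacks above detect the carry out of
   (or borrow into) the low word with an unsigned comparison: after
   lo = al + bl a carry occurred iff lo < al, and for subtraction a borrow
   occurred iff al < bl.  Plain C restatement of the add case;
   illustrative only. */
static inline void tcg_example_add2_64(uint64_t *rl, uint64_t *rh,
                                       uint64_t al, uint64_t ah,
                                       uint64_t bl, uint64_t bh)
{
    uint64_t lo = al + bl;

    *rh = ah + bh + (lo < al);   /* carry out of the low word */
    *rl = lo;
}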

static inline void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh,
                                     TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 1, 0);
        sizemask |= tcg_gen_sizemask(1, 1, 0);
        sizemask |= tcg_gen_sizemask(2, 1, 0);
        tcg_gen_mul_i64(t0, arg1, arg2);
        tcg_gen_helper64(tcg_helper_muluh_i64, sizemask, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh,
                                     TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
        /* Allow the optimizer room to replace muls2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64) {
        /* The signed adjustment below belongs to the signed multiply;
           it would be dead code inside mulu2 itself. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, t0, t1, arg1, arg2);
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 1, 1);
        sizemask |= tcg_gen_sizemask(1, 1, 1);
        sizemask |= tcg_gen_sizemask(2, 1, 1);
        tcg_gen_mul_i64(t0, arg1, arg2);
        tcg_gen_helper64(tcg_helper_mulsh_i64, sizemask, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
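
/* For reference, the mulu2-based muls2 fallback above uses the identity
   relating the signed and unsigned high parts of a product:
   signed_high(a, b) = unsigned_high(a, b) - (a < 0 ? b : 0) - (b < 0 ? a : 0).
   The sari/and/sub sequence computes exactly those two conditional
   subtractions.  Plain C restatement for 32-bit operands; illustrative
   only. */
static inline int32_t tcg_example_mulsh32(int32_t a, int32_t b)
{
    uint32_t uh = (uint32_t)(((uint64_t)(uint32_t)a * (uint32_t)b) >> 32);

    uh -= (a < 0 ? (uint32_t)b : 0);
    uh -= (b < 0 ? (uint32_t)a : 0);
    return (int32_t)uh;
}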

/***************************************/
/* QEMU specific operations.  Their type depends on the QEMU CPU
   type. */
#ifndef TARGET_LONG_BITS
#error must include QEMU headers
#endif

#if TARGET_LONG_BITS == 32
#define TCGv TCGv_i32
#define tcg_temp_new() tcg_temp_new_i32()
#define tcg_global_reg_new tcg_global_reg_new_i32
#define tcg_global_mem_new tcg_global_mem_new_i32
#define tcg_temp_local_new() tcg_temp_local_new_i32()
#define tcg_temp_free tcg_temp_free_i32
#define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
#define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I32(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
#define tcg_add_param_tl tcg_add_param_i32
#define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i32
#define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i32
#else
#define TCGv TCGv_i64
#define tcg_temp_new() tcg_temp_new_i64()
#define tcg_global_reg_new tcg_global_reg_new_i64
#define tcg_global_mem_new tcg_global_mem_new_i64
#define tcg_temp_local_new() tcg_temp_local_new_i64()
#define tcg_temp_free tcg_temp_free_i64
#define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
#define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I64(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
#define tcg_add_param_tl tcg_add_param_i64
#define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i64
#define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i64
#endif

/* debug info: write the PC of the corresponding QEMU CPU instruction */
static inline void tcg_gen_debug_insn_start(uint64_t pc)
{
    /* XXX: must really use a 32 bit size for TCGArg in all cases */
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    tcg_gen_op2ii(INDEX_op_debug_insn_start,
                  (uint32_t)(pc), (uint32_t)(pc >> 32));
#else
    tcg_gen_op1i(INDEX_op_debug_insn_start, pc);
#endif
}

static inline void tcg_gen_exit_tb(uintptr_t val)
{
    tcg_gen_op1i(INDEX_op_exit_tb, val);
}

static inline void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}


void tcg_gen_qemu_ld_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_st_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_ld_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_st_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);

static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_UB);
}

static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_SB);
}

static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TEUW);
}

static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TESW);
}

static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TEUL);
}

static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TESL);
}

static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_i64(ret, addr, mem_index, MO_TEQ);
}

static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_UB);
}

static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_TEUW);
}

static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_TEUL);
}

static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_st_i64(arg, addr, mem_index, MO_TEQ);
}
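
/* The wrappers above are what front ends normally use to emit guest
   memory accesses.  The sketch below shows the typical pattern of loading
   into a temporary and storing it elsewhere; the function name is made up
   for illustration and is not part of the TCG API. */
static inline void tcg_example_gen_copy_u16(TCGv dst_addr, TCGv src_addr,
                                            int mem_index)
{
    TCGv t = tcg_temp_new();

    tcg_gen_qemu_ld16u(t, src_addr, mem_index);  /* zero-extending 16-bit load */
    tcg_gen_qemu_st16(t, dst_addr, mem_index);   /* 16-bit store */
    tcg_temp_free(t);
}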

#if TARGET_LONG_BITS == 64
#define tcg_gen_movi_tl tcg_gen_movi_i64
#define tcg_gen_mov_tl tcg_gen_mov_i64
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
#define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
#define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
#define tcg_gen_ld_tl tcg_gen_ld_i64
#define tcg_gen_st8_tl tcg_gen_st8_i64
#define tcg_gen_st16_tl tcg_gen_st16_i64
#define tcg_gen_st32_tl tcg_gen_st32_i64
#define tcg_gen_st_tl tcg_gen_st_i64
#define tcg_gen_add_tl tcg_gen_add_i64
#define tcg_gen_addi_tl tcg_gen_addi_i64
#define tcg_gen_sub_tl tcg_gen_sub_i64
#define tcg_gen_neg_tl tcg_gen_neg_i64
#define tcg_gen_subfi_tl tcg_gen_subfi_i64
#define tcg_gen_subi_tl tcg_gen_subi_i64
#define tcg_gen_and_tl tcg_gen_and_i64
#define tcg_gen_andi_tl tcg_gen_andi_i64
#define tcg_gen_or_tl tcg_gen_or_i64
#define tcg_gen_ori_tl tcg_gen_ori_i64
#define tcg_gen_xor_tl tcg_gen_xor_i64
#define tcg_gen_xori_tl tcg_gen_xori_i64
#define tcg_gen_not_tl tcg_gen_not_i64
#define tcg_gen_shl_tl tcg_gen_shl_i64
#define tcg_gen_shli_tl tcg_gen_shli_i64
#define tcg_gen_shr_tl tcg_gen_shr_i64
#define tcg_gen_shri_tl tcg_gen_shri_i64
#define tcg_gen_sar_tl tcg_gen_sar_i64
#define tcg_gen_sari_tl tcg_gen_sari_i64
#define tcg_gen_brcond_tl tcg_gen_brcond_i64
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
#define tcg_gen_setcond_tl tcg_gen_setcond_i64
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i64
#define tcg_gen_mul_tl tcg_gen_mul_i64
#define tcg_gen_muli_tl tcg_gen_muli_i64
#define tcg_gen_div_tl tcg_gen_div_i64
#define tcg_gen_rem_tl tcg_gen_rem_i64
#define tcg_gen_divu_tl tcg_gen_divu_i64
#define tcg_gen_remu_tl tcg_gen_remu_i64
#define tcg_gen_discard_tl tcg_gen_discard_i64
#define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
#define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
#define tcg_gen_extu_i32_tl tcg_gen_extu_i32_i64
#define tcg_gen_ext_i32_tl tcg_gen_ext_i32_i64
#define tcg_gen_extu_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i64
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i64
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i64
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
#define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
#define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
#define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
#define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
#define tcg_gen_extr_i64_tl tcg_gen_extr32_i64
#define tcg_gen_andc_tl tcg_gen_andc_i64
#define tcg_gen_eqv_tl tcg_gen_eqv_i64
#define tcg_gen_nand_tl tcg_gen_nand_i64
#define tcg_gen_nor_tl tcg_gen_nor_i64
#define tcg_gen_orc_tl tcg_gen_orc_i64
#define tcg_gen_rotl_tl tcg_gen_rotl_i64
#define tcg_gen_rotli_tl tcg_gen_rotli_i64
#define tcg_gen_rotr_tl tcg_gen_rotr_i64
#define tcg_gen_rotri_tl tcg_gen_rotri_i64
#define tcg_gen_deposit_tl tcg_gen_deposit_i64
#define tcg_const_tl tcg_const_i64
#define tcg_const_local_tl tcg_const_local_i64
#define tcg_gen_movcond_tl tcg_gen_movcond_i64
#define tcg_gen_add2_tl tcg_gen_add2_i64
#define tcg_gen_sub2_tl tcg_gen_sub2_i64
#define tcg_gen_mulu2_tl tcg_gen_mulu2_i64
#define tcg_gen_muls2_tl tcg_gen_muls2_i64
#else
#define tcg_gen_movi_tl tcg_gen_movi_i32
#define tcg_gen_mov_tl tcg_gen_mov_i32
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
#define tcg_gen_ld32u_tl tcg_gen_ld_i32
#define tcg_gen_ld32s_tl tcg_gen_ld_i32
#define tcg_gen_ld_tl tcg_gen_ld_i32
#define tcg_gen_st8_tl tcg_gen_st8_i32
#define tcg_gen_st16_tl tcg_gen_st16_i32
#define tcg_gen_st32_tl tcg_gen_st_i32
#define tcg_gen_st_tl tcg_gen_st_i32
#define tcg_gen_add_tl tcg_gen_add_i32
#define tcg_gen_addi_tl tcg_gen_addi_i32
#define tcg_gen_sub_tl tcg_gen_sub_i32
#define tcg_gen_neg_tl tcg_gen_neg_i32
#define tcg_gen_subfi_tl tcg_gen_subfi_i32
#define tcg_gen_subi_tl tcg_gen_subi_i32
#define tcg_gen_and_tl tcg_gen_and_i32
#define tcg_gen_andi_tl tcg_gen_andi_i32
#define tcg_gen_or_tl tcg_gen_or_i32
#define tcg_gen_ori_tl tcg_gen_ori_i32
#define tcg_gen_xor_tl tcg_gen_xor_i32
#define tcg_gen_xori_tl tcg_gen_xori_i32
#define tcg_gen_not_tl tcg_gen_not_i32
#define tcg_gen_shl_tl tcg_gen_shl_i32
#define tcg_gen_shli_tl tcg_gen_shli_i32
#define tcg_gen_shr_tl tcg_gen_shr_i32
#define tcg_gen_shri_tl tcg_gen_shri_i32
#define tcg_gen_sar_tl tcg_gen_sar_i32
#define tcg_gen_sari_tl tcg_gen_sari_i32
#define tcg_gen_brcond_tl tcg_gen_brcond_i32
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
#define tcg_gen_setcond_tl tcg_gen_setcond_i32
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i32
#define tcg_gen_mul_tl tcg_gen_mul_i32
#define tcg_gen_muli_tl tcg_gen_muli_i32
#define tcg_gen_div_tl tcg_gen_div_i32
#define tcg_gen_rem_tl tcg_gen_rem_i32
#define tcg_gen_divu_tl tcg_gen_divu_i32
#define tcg_gen_remu_tl tcg_gen_remu_i32
#define tcg_gen_discard_tl tcg_gen_discard_i32
#define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
#define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
#define tcg_gen_extu_i32_tl tcg_gen_mov_i32
#define tcg_gen_ext_i32_tl tcg_gen_mov_i32
#define tcg_gen_extu_tl_i64 tcg_gen_extu_i32_i64
#define tcg_gen_ext_tl_i64 tcg_gen_ext_i32_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i32
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i32
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i32
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
#define tcg_gen_ext32u_tl tcg_gen_mov_i32
#define tcg_gen_ext32s_tl tcg_gen_mov_i32
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
#define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
#define tcg_gen_extr_tl_i64 tcg_gen_extr_i32_i64
#define tcg_gen_andc_tl tcg_gen_andc_i32
#define tcg_gen_eqv_tl tcg_gen_eqv_i32
#define tcg_gen_nand_tl tcg_gen_nand_i32
#define tcg_gen_nor_tl tcg_gen_nor_i32
#define tcg_gen_orc_tl tcg_gen_orc_i32
#define tcg_gen_rotl_tl tcg_gen_rotl_i32
#define tcg_gen_rotli_tl tcg_gen_rotli_i32
#define tcg_gen_rotr_tl tcg_gen_rotr_i32
#define tcg_gen_rotri_tl tcg_gen_rotri_i32
#define tcg_gen_deposit_tl tcg_gen_deposit_i32
#define tcg_const_tl tcg_const_i32
#define tcg_const_local_tl tcg_const_local_i32
#define tcg_gen_movcond_tl tcg_gen_movcond_i32
#define tcg_gen_add2_tl tcg_gen_add2_i32
#define tcg_gen_sub2_tl tcg_gen_sub2_i32
#define tcg_gen_mulu2_tl tcg_gen_mulu2_i32
#define tcg_gen_muls2_tl tcg_gen_muls2_i32
#endif

#if TCG_TARGET_REG_BITS == 32
# define tcg_gen_ld_ptr(R, A, O) \
    tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
# define tcg_gen_discard_ptr(A) \
    tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))
# define tcg_gen_add_ptr(R, A, B) \
    tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
# define tcg_gen_addi_ptr(R, A, B) \
    tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
# define tcg_gen_ext_i32_ptr(R, A) \
    tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
#else
# define tcg_gen_ld_ptr(R, A, O) \
    tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
# define tcg_gen_discard_ptr(A) \
    tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))
# define tcg_gen_add_ptr(R, A, B) \
    tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
# define tcg_gen_addi_ptr(R, A, B) \
    tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
# define tcg_gen_ext_i32_ptr(R, A) \
    tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
#endif /* TCG_TARGET_REG_BITS == 32 */
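
/* The *_tl aliases selected above let a front end emit target-long sized
   operations without caring whether TARGET_LONG_BITS is 32 or 64: the
   same source expands to the _i32 or _i64 generators as appropriate.  The
   helper below is a made-up illustration of that style and is not part of
   the TCG API. */
static inline void tcg_example_gen_align_down4(TCGv ret, TCGv addr)
{
    /* Clear the two low bits of a guest address (align down to 4 bytes). */
    tcg_gen_andi_tl(ret, addr, ~3);
}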