fp-impl.c.inc (40585B)
/*
 * translate-fp.c
 *
 * Standard FPU translation
 */

static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus(cpu_env);
}

static inline void gen_compute_fprf_float64(TCGv_i64 arg)
{
    gen_helper_compute_fprf_float64(cpu_env, arg);
    gen_helper_float_check_status(cpu_env);
}

#if defined(TARGET_PPC64)
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
    tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
    tcg_temp_free_i32(tmp);
}
#else
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
}
#endif

/*** Floating-Point arithmetic ***/
#define _GEN_FLOAT_ACB(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    TCGv_i64 t3; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    t3 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    get_fpr(t2, rB(ctx->opcode)); \
    gen_helper_f##name(t3, cpu_env, t0, t1, t2); \
    set_fpr(rD(ctx->opcode), t3); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t3); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
    tcg_temp_free_i64(t3); \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, 0x3F, op2, set_fprf, type); \
_GEN_FLOAT_ACB(name##s, 0x3B, op2, set_fprf, type);

#define _GEN_FLOAT_AB(name, op1, op2, inval, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rB(ctx->opcode)); \
    gen_helper_f##name(t2, cpu_env, t0, t1); \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, 0x3F, op2, inval, set_fprf, type); \
_GEN_FLOAT_AB(name##s, 0x3B, op2, inval, set_fprf, type);

#define _GEN_FLOAT_AC(name, op1, op2, inval, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    gen_helper_f##name(t2, cpu_env, t0, t1); \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, 0x3F, op2, inval, set_fprf, type); \
_GEN_FLOAT_AC(name##s, 0x3B, op2, inval, set_fprf, type);

#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_helper_compute_fprf_float64(cpu_env, t1); \
    } \
    gen_helper_float_check_status(cpu_env); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t1); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);

/* frsqrtes */
static void gen_frsqrtes(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_frsqrtes(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static bool trans_FSEL(DisasContext *ctx, arg_A *a)
{
    TCGv_i64 t0, t1, t2;

    REQUIRE_INSNS_FLAGS(ctx, FLOAT_FSEL);
    REQUIRE_FPU(ctx);

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();

    get_fpr(t0, a->fra);
    get_fpr(t1, a->frb);
    get_fpr(t2, a->frc);

    gen_helper_FSEL(t0, t0, t1, t2);
    set_fpr(a->frt, t0);
    if (a->rc) {
        gen_set_cr1_from_fpscr(ctx);
    }

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);

    return true;
}

/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* Optional: */

static bool do_helper_fsqrt(DisasContext *ctx, arg_A_tb *a,
                            void (*helper)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 t0, t1;

    REQUIRE_INSNS_FLAGS(ctx, FLOAT_FSQRT);
    REQUIRE_FPU(ctx);

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    gen_reset_fpstatus();
    get_fpr(t0, a->frb);
    helper(t1, cpu_env, t0);
    set_fpr(a->frt, t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(a->rc != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    return true;
}

TRANS(FSQRT, do_helper_fsqrt, gen_helper_FSQRT);
TRANS(FSQRTS, do_helper_fsqrt, gen_helper_FSQRTS);

/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
/* fctiw */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwu */
GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
/* fctiwz */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* fctiwuz */
GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
/* frsp */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
/* fcfid */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
/* fcfids */
GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
/* fcfidu */
GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fcfidus */
GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fctid */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
/* fctidu */
GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* fctidz */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
/* fctiduz */
GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);

/* frin */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);

static void gen_ftdiv(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_ftsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

/*** Floating-Point compare ***/

/* fcmpo */
static void gen_fcmpo(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpo(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcmpu */
static void gen_fcmpu(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpu(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor updates FPSCR */
static void gen_fabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_andi_i64(t1, t0, ~(1ULL << 63));
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor updates FPSCR */
static void gen_fmr(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor updates FPSCR */
static void gen_fnabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_ori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fneg */
/* XXX: beware that fneg never checks for NaNs nor updates FPSCR */
static void gen_fneg(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_xori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcpsgn: PowerPC 2.05 specification */
/* XXX: beware that fcpsgn never checks for NaNs nor updates FPSCR */
static void gen_fcpsgn(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 0, 63);
    set_fpr(rD(ctx->opcode), t2);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

static void gen_fmrgew(DisasContext *ctx)
{
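    /*
     * fmrgew (Floating Merge Even Word): keep the high word of FRA and
     * place the high word of FRB into the low word of the result.
     */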
    TCGv_i64 b0;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    b0 = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_shri_i64(b0, t0, 32);
    get_fpr(t0, rA(ctx->opcode));
    tcg_gen_deposit_i64(t1, t0, b0, 0, 32);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free_i64(b0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_fmrgow(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    get_fpr(t1, rA(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 32, 32);
    set_fpr(rD(ctx->opcode), t2);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

/*** Floating-Point status & ctrl register ***/

/* mcrfs */
static void gen_mcrfs(DisasContext *ctx)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmask;
    TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
    int bfa;
    int nibble;
    int shift;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bfa = crfS(ctx->opcode);
    nibble = 7 - bfa;
    shift = 4 * nibble;
    tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],
                     0xf);
    tcg_temp_free(tmp);
    tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
    /* Only the exception bits (including FX) should be cleared if read */
    tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
                     ~((0xF << shift) & FP_EX_CLEAR_BITS));
    /* FEX and VX need to be updated, so don't set fpscr directly */
    tmask = tcg_const_i32(1 << nibble);
    gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
    tcg_temp_free_i32(tmask);
    tcg_temp_free_i64(tnew_fpscr);
}

static TCGv_i64 place_from_fpscr(int rt, uint64_t mask)
{
    TCGv_i64 fpscr = tcg_temp_new_i64();
    TCGv_i64 fpscr_masked = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(fpscr, cpu_fpscr);
    tcg_gen_andi_i64(fpscr_masked, fpscr, mask);
    set_fpr(rt, fpscr_masked);

    tcg_temp_free_i64(fpscr_masked);

    return fpscr;
}

static void store_fpscr_masked(TCGv_i64 fpscr, uint64_t clear_mask,
                               TCGv_i64 set_mask, uint32_t store_mask)
{
    TCGv_i64 fpscr_masked = tcg_temp_new_i64();
    TCGv_i32 st_mask = tcg_constant_i32(store_mask);

    tcg_gen_andi_i64(fpscr_masked, fpscr, ~clear_mask);
    tcg_gen_or_i64(fpscr_masked, fpscr_masked, set_mask);
    gen_helper_store_fpscr(cpu_env, fpscr_masked, st_mask);

    tcg_temp_free_i64(fpscr_masked);
}

static bool trans_MFFS(DisasContext *ctx, arg_X_t_rc *a)
{
    TCGv_i64 fpscr;

    REQUIRE_FPU(ctx);

    gen_reset_fpstatus();
    fpscr = place_from_fpscr(a->rt, UINT64_MAX);
    if (a->rc) {
        gen_set_cr1_from_fpscr(ctx);
    }

    tcg_temp_free_i64(fpscr);

    return true;
}

static bool trans_MFFSCE(DisasContext *ctx, arg_X_t *a)
{
    TCGv_i64 fpscr;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_FPU(ctx);

    gen_reset_fpstatus();
    fpscr = place_from_fpscr(a->rt, UINT64_MAX);
    store_fpscr_masked(fpscr, FP_ENABLES, tcg_constant_i64(0),
                       0x0003);

    tcg_temp_free_i64(fpscr);

    return true;
}

static bool trans_MFFSCRN(DisasContext *ctx, arg_X_tb *a)
{
    TCGv_i64 t1, fpscr;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_FPU(ctx);

    t1 = tcg_temp_new_i64();
    get_fpr(t1, a->rb);
    tcg_gen_andi_i64(t1, t1, FP_RN);

    gen_reset_fpstatus();
    fpscr = place_from_fpscr(a->rt, FP_DRN | FP_ENABLES | FP_NI | FP_RN);
    store_fpscr_masked(fpscr, FP_RN, t1, 0x0001);

    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(fpscr);

    return true;
}

static bool trans_MFFSCDRN(DisasContext *ctx, arg_X_tb *a)
{
    TCGv_i64 t1, fpscr;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_FPU(ctx);

    t1 = tcg_temp_new_i64();
    get_fpr(t1, a->rb);
    tcg_gen_andi_i64(t1, t1, FP_DRN);

    gen_reset_fpstatus();
    fpscr = place_from_fpscr(a->rt, FP_DRN | FP_ENABLES | FP_NI | FP_RN);
    store_fpscr_masked(fpscr, FP_DRN, t1, 0x0100);

    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(fpscr);

    return true;
}

static bool trans_MFFSCRNI(DisasContext *ctx, arg_X_imm2 *a)
{
    TCGv_i64 t1, fpscr;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_FPU(ctx);

    t1 = tcg_temp_new_i64();
    tcg_gen_movi_i64(t1, a->imm);

    gen_reset_fpstatus();
    fpscr = place_from_fpscr(a->rt, FP_DRN | FP_ENABLES | FP_NI | FP_RN);
    store_fpscr_masked(fpscr, FP_RN, t1, 0x0001);

    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(fpscr);

    return true;
}

static bool trans_MFFSCDRNI(DisasContext *ctx, arg_X_imm3 *a)
{
    TCGv_i64 t1, fpscr;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_FPU(ctx);

    t1 = tcg_temp_new_i64();
    tcg_gen_movi_i64(t1, (uint64_t)a->imm << FPSCR_DRN0);

    gen_reset_fpstatus();
    fpscr = place_from_fpscr(a->rt, FP_DRN | FP_ENABLES | FP_NI | FP_RN);
    store_fpscr_masked(fpscr, FP_DRN, t1, 0x0100);

    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(fpscr);

    return true;
}

static bool trans_MFFSL(DisasContext *ctx, arg_X_t *a)
{
    TCGv_i64 fpscr;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_FPU(ctx);

    gen_reset_fpstatus();
    fpscr = place_from_fpscr(a->rt,
                             FP_DRN | FP_STATUS | FP_ENABLES | FP_NI | FP_RN);

    tcg_temp_free_i64(fpscr);

    return true;
}

/* mtfsb0 */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}

/* mtfsb1 */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
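        /* CR1 := FPSCR[FX, FEX, VX, OX] */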
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_fpscr_check_status(cpu_env);
}

/* mtfsf */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    TCGv_i64 t1;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_store_fpscr(cpu_env, t1, t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_fpscr_check_status(cpu_env);
    tcg_temp_free_i64(t1);
}

/* mtfsfi */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_fpscr_check_status(cpu_env);
}

static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    gen_helper_todouble(dest, tmp);
    tcg_temp_free_i32(tmp);
}

/* lfdepx (external PID lfdx) */
static void gen_lfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV(ctx);
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(t0, EA, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UQ));
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdp */
static void gen_lfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
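     * In little-endian mode the two doublewords of the FRT/FRT+1 pair are
     * therefore accessed in reverse order.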
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdpx */
static void gen_lfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfiwax */
static void gen_lfiwax(DisasContext *ctx)
{
    TCGv EA;
    TCGv t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32s(ctx, t0, EA);
    tcg_gen_ext_tl_i64(t1, t0);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfiwzx */
static void gen_lfiwzx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, t0, EA);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

#define GEN_STXF(name, stop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    gen_helper_tosingle(tmp, src);
    tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_temp_free_i32(tmp);
}

/* stfdepx (external PID stfdx) */
static void gen_stfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV(ctx);
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    get_fpr(t0, rD(ctx->opcode));
    tcg_gen_qemu_st_i64(t0, EA, PPC_TLB_EPID_STORE, DEF_MEMOP(MO_UQ));
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdp */
static void gen_stfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, EA, 0);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdpx */
static void gen_stfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* Optional: */
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_trunc_i64_tl(t0, arg1);
    gen_qemu_st32(ctx, t0, arg2);
    tcg_temp_free(t0);
}
/* stfiwx */
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);

/* Floating-point Load/Store Instructions */
static bool do_lsfpsd(DisasContext *ctx, int rt, int ra, TCGv displ,
                      bool update, bool store, bool single)
{
    TCGv ea;
    TCGv_i64 t0;
    REQUIRE_INSNS_FLAGS(ctx, FLOAT);
    REQUIRE_FPU(ctx);
    if (update && ra == 0) {
        gen_invalid(ctx);
        return true;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new_i64();
    ea = do_ea_calc(ctx, ra, displ);
    if (store) {
        get_fpr(t0, rt);
        if (single) {
            gen_qemu_st32fs(ctx, t0, ea);
        } else {
            gen_qemu_st64_i64(ctx, t0, ea);
        }
    } else {
        if (single) {
            gen_qemu_ld32fs(ctx, t0, ea);
        } else {
            gen_qemu_ld64_i64(ctx, t0, ea);
        }
        set_fpr(rt, t0);
    }
    if (update) {
        tcg_gen_mov_tl(cpu_gpr[ra], ea);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free(ea);
    return true;
}

static bool do_lsfp_D(DisasContext *ctx, arg_D *a, bool update, bool store,
                      bool single)
{
    return do_lsfpsd(ctx, a->rt, a->ra,
                     tcg_constant_tl(a->si), update, store,
                     single);
}

static bool do_lsfp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool update,
                          bool store, bool single)
{
    arg_D d;
    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }
    return do_lsfp_D(ctx, &d, update, store, single);
}

static bool do_lsfp_X(DisasContext *ctx, arg_X *a, bool update,
                      bool store, bool single)
{
    return do_lsfpsd(ctx, a->rt, a->ra, cpu_gpr[a->rb], update, store, single);
}

TRANS(LFS, do_lsfp_D, false, false, true)
TRANS(LFSU, do_lsfp_D, true, false, true)
TRANS(LFSX, do_lsfp_X, false, false, true)
TRANS(LFSUX, do_lsfp_X, true, false, true)
TRANS(PLFS, do_lsfp_PLS_D, false, false, true)

TRANS(LFD, do_lsfp_D, false, false, false)
TRANS(LFDU, do_lsfp_D, true, false, false)
TRANS(LFDX, do_lsfp_X, false, false, false)
TRANS(LFDUX, do_lsfp_X, true, false, false)
TRANS(PLFD, do_lsfp_PLS_D, false, false, false)

TRANS(STFS, do_lsfp_D, false, true, true)
TRANS(STFSU, do_lsfp_D, true, true, true)
TRANS(STFSX, do_lsfp_X, false, true, true)
TRANS(STFSUX, do_lsfp_X, true, true, true)
TRANS(PSTFS, do_lsfp_PLS_D, false, true, true)

TRANS(STFD, do_lsfp_D, false, true, false)
TRANS(STFDU, do_lsfp_D, true, true, false)
TRANS(STFDX, do_lsfp_X, false, true, false)
TRANS(STFDUX, do_lsfp_X, true, true, false)
TRANS(PSTFD, do_lsfp_PLS_D, false, true, false)

#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_B
#undef GEN_FLOAT_BS

#undef GEN_LDF
#undef GEN_LDUF
#undef GEN_LDUXF
#undef GEN_LDXF
#undef GEN_LDFS

#undef GEN_STF
#undef GEN_STUF
#undef GEN_STUXF
#undef GEN_STXF
#undef GEN_STFS