qemu

FORK: QEMU emulator
git clone https://git.neptards.moe/neptards/qemu.git

tcg-target.c.inc (54073B)


/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2018 SiFive, Inc
 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Based on i386/tcg-target.c and mips/tcg-target.c
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "../tcg-ldst.c.inc"
#include "../tcg-pool.c.inc"

#ifdef CONFIG_DEBUG_TCG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "zero",
    "ra",
    "sp",
    "gp",
    "tp",
    "t0",
    "t1",
    "t2",
    "s0",
    "s1",
    "a0",
    "a1",
    "a2",
    "a3",
    "a4",
    "a5",
    "a6",
    "a7",
    "s2",
    "s3",
    "s4",
    "s5",
    "s6",
    "s7",
    "s8",
    "s9",
    "s10",
    "s11",
    "t3",
    "t4",
    "t5",
    "t6"
};
#endif

static const int tcg_target_reg_alloc_order[] = {
    /* Call saved registers */
    /* TCG_REG_S0 reserved for TCG_AREG0 */
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_S8,
    TCG_REG_S9,
    TCG_REG_S10,
    TCG_REG_S11,

    /* Call clobbered registers */
    TCG_REG_T0,
    TCG_REG_T1,
    TCG_REG_T2,
    TCG_REG_T3,
    TCG_REG_T4,
    TCG_REG_T5,
    TCG_REG_T6,

    /* Argument registers */
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_A4,
    TCG_REG_A5,
    TCG_REG_A6,
    TCG_REG_A7,
};

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_A4,
    TCG_REG_A5,
    TCG_REG_A6,
    TCG_REG_A7,
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_A0,
    TCG_REG_A1,
};

#define TCG_CT_CONST_ZERO  0x100
#define TCG_CT_CONST_S12   0x200
#define TCG_CT_CONST_N12   0x400
#define TCG_CT_CONST_M12   0x800
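
/*
 * Constraint flags, as tested in tcg_target_const_match below:
 *   ZERO  the constant 0
 *   S12   a signed 12-bit immediate (the ADDI/ANDI/... range)
 *   N12   a value whose negation fits in a signed 12-bit immediate
 *   M12   a value where both val and -val fit in a signed 12-bit
 *         immediate (usable in either positive or negated form)
 */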

#define ALL_GENERAL_REGS      MAKE_64BIT_MASK(0, 32)
/*
 * For softmmu, we need to avoid conflicts with the first 5
 * argument registers to call the helper.  Some of these are
 * also used for the tlb lookup.
 */
#ifdef CONFIG_SOFTMMU
#define SOFTMMU_RESERVE_REGS  MAKE_64BIT_MASK(TCG_REG_A0, 5)
#else
#define SOFTMMU_RESERVE_REGS  0
#endif


static inline tcg_target_long sextreg(tcg_target_long val, int pos, int len)
{
    if (TCG_TARGET_REG_BITS == 32) {
        return sextract32(val, pos, len);
    } else {
        return sextract64(val, pos, len);
    }
}

/* test if a constant matches the constraint */
static bool tcg_target_const_match(int64_t val, TCGType type, int ct)
{
    if (ct & TCG_CT_CONST) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_ZERO) && val == 0) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_S12) && val == sextreg(val, 0, 12)) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_N12) && -val == sextreg(-val, 0, 12)) {
        return 1;
    }
    /*
     * Sign extended from 12 bits, +/- matching: [-0x7ff, 0x7ff].
     * The consumer may need the negative of the constant as well,
     * so both forms must be encodable (the old bound of 0xfff
     * admitted values that do not fit a signed 12-bit immediate).
     */
    if ((ct & TCG_CT_CONST_M12) && val >= -0x7ff && val <= 0x7ff) {
        return 1;
    }
    return 0;
}

/*
 * RISC-V Base ISA opcodes (IM)
 */

typedef enum {
    OPC_ADD = 0x33,
    OPC_ADDI = 0x13,
    OPC_AND = 0x7033,
    OPC_ANDI = 0x7013,
    OPC_AUIPC = 0x17,
    OPC_BEQ = 0x63,
    OPC_BGE = 0x5063,
    OPC_BGEU = 0x7063,
    OPC_BLT = 0x4063,
    OPC_BLTU = 0x6063,
    OPC_BNE = 0x1063,
    OPC_DIV = 0x2004033,
    OPC_DIVU = 0x2005033,
    OPC_JAL = 0x6f,
    OPC_JALR = 0x67,
    OPC_LB = 0x3,
    OPC_LBU = 0x4003,
    OPC_LD = 0x3003,
    OPC_LH = 0x1003,
    OPC_LHU = 0x5003,
    OPC_LUI = 0x37,
    OPC_LW = 0x2003,
    OPC_LWU = 0x6003,
    OPC_MUL = 0x2000033,
    OPC_MULH = 0x2001033,
    OPC_MULHSU = 0x2002033,
    OPC_MULHU = 0x2003033,
    OPC_OR = 0x6033,
    OPC_ORI = 0x6013,
    OPC_REM = 0x2006033,
    OPC_REMU = 0x2007033,
    OPC_SB = 0x23,
    OPC_SD = 0x3023,
    OPC_SH = 0x1023,
    OPC_SLL = 0x1033,
    OPC_SLLI = 0x1013,
    OPC_SLT = 0x2033,
    OPC_SLTI = 0x2013,
    OPC_SLTIU = 0x3013,
    OPC_SLTU = 0x3033,
    OPC_SRA = 0x40005033,
    OPC_SRAI = 0x40005013,
    OPC_SRL = 0x5033,
    OPC_SRLI = 0x5013,
    OPC_SUB = 0x40000033,
    OPC_SW = 0x2023,
    OPC_XOR = 0x4033,
    OPC_XORI = 0x4013,

#if TCG_TARGET_REG_BITS == 64
    OPC_ADDIW = 0x1b,
    OPC_ADDW = 0x3b,
    OPC_DIVUW = 0x200503b,
    OPC_DIVW = 0x200403b,
    OPC_MULW = 0x200003b,
    OPC_REMUW = 0x200703b,
    OPC_REMW = 0x200603b,
    OPC_SLLIW = 0x101b,
    OPC_SLLW = 0x103b,
    OPC_SRAIW = 0x4000501b,
    OPC_SRAW = 0x4000503b,
    OPC_SRLIW = 0x501b,
    OPC_SRLW = 0x503b,
    OPC_SUBW = 0x4000003b,
#else
    /* Simplify code throughout by defining aliases for RV32.  */
    OPC_ADDIW = OPC_ADDI,
    OPC_ADDW = OPC_ADD,
    OPC_DIVUW = OPC_DIVU,
    OPC_DIVW = OPC_DIV,
    OPC_MULW = OPC_MUL,
    OPC_REMUW = OPC_REMU,
    OPC_REMW = OPC_REM,
    OPC_SLLIW = OPC_SLLI,
    OPC_SLLW = OPC_SLL,
    OPC_SRAIW = OPC_SRAI,
    OPC_SRAW = OPC_SRA,
    OPC_SRLIW = OPC_SRLI,
    OPC_SRLW = OPC_SRL,
    OPC_SUBW = OPC_SUB,
#endif

    OPC_FENCE = 0x0000000f,
} RISCVInsn;

/*
 * RISC-V immediate and instruction encoders (excludes 16-bit RVC)
 */
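
/*
 * Common field positions in the base instruction formats: opcode in
 * insn[6:0], rd in insn[11:7], rs1 in insn[19:15], rs2 in insn[24:20].
 * funct3/funct7 are pre-folded into the RISCVInsn constants above, so
 * the encoders below only need to place registers and immediates.
 */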

/* Type-R */

static int32_t encode_r(RISCVInsn opc, TCGReg rd, TCGReg rs1, TCGReg rs2)
{
    return opc | (rd & 0x1f) << 7 | (rs1 & 0x1f) << 15 | (rs2 & 0x1f) << 20;
}

/* Type-I */

static int32_t encode_imm12(uint32_t imm)
{
    return (imm & 0xfff) << 20;
}

static int32_t encode_i(RISCVInsn opc, TCGReg rd, TCGReg rs1, uint32_t imm)
{
    return opc | (rd & 0x1f) << 7 | (rs1 & 0x1f) << 15 | encode_imm12(imm);
}

/* Type-S */

static int32_t encode_simm12(uint32_t imm)
{
    int32_t ret = 0;

    ret |= (imm & 0xFE0) << 20;
    ret |= (imm & 0x1F) << 7;

    return ret;
}

static int32_t encode_s(RISCVInsn opc, TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    return opc | (rs1 & 0x1f) << 15 | (rs2 & 0x1f) << 20 | encode_simm12(imm);
}

/* Type-SB */

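/*
 * B-type immediates are 13-bit signed, even branch offsets whose bits
 * are scattered across the instruction word:
 *   imm[12]   -> insn[31]
 *   imm[10:5] -> insn[30:25]
 *   imm[4:1]  -> insn[11:8]
 *   imm[11]   -> insn[7]
 */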
static int32_t encode_sbimm12(uint32_t imm)
{
    int32_t ret = 0;

    ret |= (imm & 0x1000) << 19;
    ret |= (imm & 0x7e0) << 20;
    ret |= (imm & 0x1e) << 7;
    ret |= (imm & 0x800) >> 4;

    return ret;
}

static int32_t encode_sb(RISCVInsn opc, TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    return opc | (rs1 & 0x1f) << 15 | (rs2 & 0x1f) << 20 | encode_sbimm12(imm);
}

/* Type-U */

static int32_t encode_uimm20(uint32_t imm)
{
    return imm & 0xfffff000;
}

static int32_t encode_u(RISCVInsn opc, TCGReg rd, uint32_t imm)
{
    return opc | (rd & 0x1f) << 7 | encode_uimm20(imm);
}

/* Type-UJ */

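/*
 * J-type immediates (JAL) are 21-bit signed, even offsets, scattered as
 *   imm[20]    -> insn[31]
 *   imm[10:1]  -> insn[30:21]
 *   imm[11]    -> insn[20]
 *   imm[19:12] -> insn[19:12]
 */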
static int32_t encode_ujimm20(uint32_t imm)
{
    int32_t ret = 0;

    ret |= (imm & 0x0007fe) << (21 - 1);
    ret |= (imm & 0x000800) << (20 - 11);
    ret |= (imm & 0x0ff000) << (12 - 12);
    ret |= (imm & 0x100000) << (31 - 20);

    return ret;
}

static int32_t encode_uj(RISCVInsn opc, TCGReg rd, uint32_t imm)
{
    return opc | (rd & 0x1f) << 7 | encode_ujimm20(imm);
}

/*
 * RISC-V instruction emitters
 */

static void tcg_out_opc_reg(TCGContext *s, RISCVInsn opc,
                            TCGReg rd, TCGReg rs1, TCGReg rs2)
{
    tcg_out32(s, encode_r(opc, rd, rs1, rs2));
}

static void tcg_out_opc_imm(TCGContext *s, RISCVInsn opc,
                            TCGReg rd, TCGReg rs1, TCGArg imm)
{
    tcg_out32(s, encode_i(opc, rd, rs1, imm));
}

static void tcg_out_opc_store(TCGContext *s, RISCVInsn opc,
                              TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    tcg_out32(s, encode_s(opc, rs1, rs2, imm));
}

static void tcg_out_opc_branch(TCGContext *s, RISCVInsn opc,
                               TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    tcg_out32(s, encode_sb(opc, rs1, rs2, imm));
}

static void tcg_out_opc_upper(TCGContext *s, RISCVInsn opc,
                              TCGReg rd, uint32_t imm)
{
    tcg_out32(s, encode_u(opc, rd, imm));
}

static void tcg_out_opc_jump(TCGContext *s, RISCVInsn opc,
                             TCGReg rd, uint32_t imm)
{
    tcg_out32(s, encode_uj(opc, rd, imm));
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    int i;
    for (i = 0; i < count; ++i) {
        p[i] = encode_i(OPC_ADDI, TCG_REG_ZERO, TCG_REG_ZERO, 0);
    }
}

/*
 * Relocations
 */

static bool reloc_sbimm12(tcg_insn_unit *src_rw, const tcg_insn_unit *target)
{
    const tcg_insn_unit *src_rx = tcg_splitwx_to_rx(src_rw);
    intptr_t offset = (intptr_t)target - (intptr_t)src_rx;

    tcg_debug_assert((offset & 1) == 0);
    if (offset == sextreg(offset, 0, 12)) {
        *src_rw |= encode_sbimm12(offset);
        return true;
    }

    return false;
}

static bool reloc_jimm20(tcg_insn_unit *src_rw, const tcg_insn_unit *target)
{
    const tcg_insn_unit *src_rx = tcg_splitwx_to_rx(src_rw);
    intptr_t offset = (intptr_t)target - (intptr_t)src_rx;

    tcg_debug_assert((offset & 1) == 0);
    if (offset == sextreg(offset, 0, 20)) {
        *src_rw |= encode_ujimm20(offset);
        return true;
    }

    return false;
}

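/*
 * Patch a U-type/I-type pair (AUIPC+ADDI, AUIPC+JALR, or AUIPC+LD) with
 * a 32-bit pc-relative offset.  The low part is the sign-extended
 * bottom 12 bits; because it may be negative, the high part is computed
 * as offset - lo so that the I-type sign extension cancels out and
 * hi + lo == offset exactly.
 */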
static bool reloc_call(tcg_insn_unit *src_rw, const tcg_insn_unit *target)
{
    const tcg_insn_unit *src_rx = tcg_splitwx_to_rx(src_rw);
    intptr_t offset = (intptr_t)target - (intptr_t)src_rx;
    int32_t lo = sextreg(offset, 0, 12);
    int32_t hi = offset - lo;

    if (offset == hi + lo) {
        src_rw[0] |= encode_uimm20(hi);
        src_rw[1] |= encode_imm12(lo);
        return true;
    }

    return false;
}

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    tcg_debug_assert(addend == 0);
    switch (type) {
    case R_RISCV_BRANCH:
        return reloc_sbimm12(code_ptr, (tcg_insn_unit *)value);
    case R_RISCV_JAL:
        return reloc_jimm20(code_ptr, (tcg_insn_unit *)value);
    case R_RISCV_CALL:
        return reloc_call(code_ptr, (tcg_insn_unit *)value);
    default:
        g_assert_not_reached();
    }
}

/*
 * TCG intrinsics
 */

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    if (ret == arg) {
        return true;
    }
    switch (type) {
    case TCG_TYPE_I32:
    case TCG_TYPE_I64:
        tcg_out_opc_imm(s, OPC_ADDI, ret, arg, 0);
        break;
    default:
        g_assert_not_reached();
    }
    return true;
}

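/*
 * Constant loading strategy, tried in order of decreasing cheapness:
 *   1. ADDI from zero for signed 12-bit values;
 *   2. LUI (+ ADDIW) for 32-bit values;
 *   3. AUIPC + ADDI when the value is within a 32-bit offset of the pc;
 *   4. LUI + one shift when the value is a 20-bit field plus trailing
 *      zeros, or a run of set bits at either end of the word;
 *   5. otherwise a 64-bit load from the constant pool.
 */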
static void tcg_out_movi(TCGContext *s, TCGType type, TCGReg rd,
                         tcg_target_long val)
{
    tcg_target_long lo, hi, tmp;
    int shift, ret;

    if (TCG_TARGET_REG_BITS == 64 && type == TCG_TYPE_I32) {
        val = (int32_t)val;
    }

    lo = sextreg(val, 0, 12);
    if (val == lo) {
        tcg_out_opc_imm(s, OPC_ADDI, rd, TCG_REG_ZERO, lo);
        return;
    }

    hi = val - lo;
    if (TCG_TARGET_REG_BITS == 32 || val == (int32_t)val) {
        tcg_out_opc_upper(s, OPC_LUI, rd, hi);
        if (lo != 0) {
            tcg_out_opc_imm(s, OPC_ADDIW, rd, rd, lo);
        }
        return;
    }

    /* We can only be here if TCG_TARGET_REG_BITS != 32 */
    tmp = tcg_pcrel_diff(s, (void *)val);
    if (tmp == (int32_t)tmp) {
        tcg_out_opc_upper(s, OPC_AUIPC, rd, 0);
        tcg_out_opc_imm(s, OPC_ADDI, rd, rd, 0);
        ret = reloc_call(s->code_ptr - 2, (const tcg_insn_unit *)val);
        tcg_debug_assert(ret == true);
        return;
    }

    /* Look for a single 20-bit section.  */
    shift = ctz64(val);
    tmp = val >> shift;
    if (tmp == sextreg(tmp, 0, 20)) {
        tcg_out_opc_upper(s, OPC_LUI, rd, tmp << 12);
        if (shift > 12) {
            tcg_out_opc_imm(s, OPC_SLLI, rd, rd, shift - 12);
        } else {
            tcg_out_opc_imm(s, OPC_SRAI, rd, rd, 12 - shift);
        }
        return;
    }

    /* Look for a few high zero bits, with lots of bits set in the middle.  */
    shift = clz64(val);
    tmp = val << shift;
    if (tmp == sextreg(tmp, 12, 20) << 12) {
        tcg_out_opc_upper(s, OPC_LUI, rd, tmp);
        tcg_out_opc_imm(s, OPC_SRLI, rd, rd, shift);
        return;
    } else if (tmp == sextreg(tmp, 0, 12)) {
        tcg_out_opc_imm(s, OPC_ADDI, rd, TCG_REG_ZERO, tmp);
        tcg_out_opc_imm(s, OPC_SRLI, rd, rd, shift);
        return;
    }

    /* Drop into the constant pool.  */
    new_pool_label(s, val, R_RISCV_CALL, s->code_ptr, 0);
    tcg_out_opc_upper(s, OPC_AUIPC, rd, 0);
    tcg_out_opc_imm(s, OPC_LD, rd, rd, 0);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_ANDI, ret, arg, 0xff);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 16);
    tcg_out_opc_imm(s, OPC_SRLIW, ret, ret, 16);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLI, ret, arg, 32);
    tcg_out_opc_imm(s, OPC_SRLI, ret, ret, 32);
}

static void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 24);
    tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 24);
}

static void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 16);
    tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_ADDIW, ret, arg, 0);
}

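/*
 * Load/store with a full pointer-sized offset.  If the offset does not
 * fit the signed 12-bit displacement, split it as offset = rest + imm12
 * and materialize rest (plus the base, if any) in TMP2, using a
 * pc-relative AUIPC when the access is absolute (base == zero) and the
 * target is within a 32-bit pc offset.
 */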
static void tcg_out_ldst(TCGContext *s, RISCVInsn opc, TCGReg data,
                         TCGReg addr, intptr_t offset)
{
    intptr_t imm12 = sextreg(offset, 0, 12);

    if (offset != imm12) {
        intptr_t diff = offset - (uintptr_t)s->code_ptr;

        if (addr == TCG_REG_ZERO && diff == (int32_t)diff) {
            imm12 = sextreg(diff, 0, 12);
            tcg_out_opc_upper(s, OPC_AUIPC, TCG_REG_TMP2, diff - imm12);
        } else {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP2, offset - imm12);
            if (addr != TCG_REG_ZERO) {
                tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP2, TCG_REG_TMP2, addr);
            }
        }
        addr = TCG_REG_TMP2;
    }

    switch (opc) {
    case OPC_SB:
    case OPC_SH:
    case OPC_SW:
    case OPC_SD:
        tcg_out_opc_store(s, opc, addr, data, imm12);
        break;
    case OPC_LB:
    case OPC_LBU:
    case OPC_LH:
    case OPC_LHU:
    case OPC_LW:
    case OPC_LWU:
    case OPC_LD:
        tcg_out_opc_imm(s, opc, data, addr, imm12);
        break;
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                       TCGReg arg1, intptr_t arg2)
{
    bool is32bit = (TCG_TARGET_REG_BITS == 32 || type == TCG_TYPE_I32);
    tcg_out_ldst(s, is32bit ? OPC_LW : OPC_LD, arg, arg1, arg2);
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                       TCGReg arg1, intptr_t arg2)
{
    bool is32bit = (TCG_TARGET_REG_BITS == 32 || type == TCG_TYPE_I32);
    tcg_out_ldst(s, is32bit ? OPC_SW : OPC_SD, arg, arg1, arg2);
}

static bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                        TCGReg base, intptr_t ofs)
{
    if (val == 0) {
        tcg_out_st(s, type, TCG_REG_ZERO, base, ofs);
        return true;
    }
    return false;
}

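/*
 * Double-word add/sub.  RISC-V has no carry flag, so the carry out of
 * the low half is recovered with an unsigned compare: for addition,
 * (rl < b) after the add; for subtraction, (al < b) before it.  The
 * high half is computed into TMP1 first so that the inputs may safely
 * overlap the outputs.
 */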
static void tcg_out_addsub2(TCGContext *s,
                            TCGReg rl, TCGReg rh,
                            TCGReg al, TCGReg ah,
                            TCGArg bl, TCGArg bh,
                            bool cbl, bool cbh, bool is_sub, bool is32bit)
{
    const RISCVInsn opc_add = is32bit ? OPC_ADDW : OPC_ADD;
    const RISCVInsn opc_addi = is32bit ? OPC_ADDIW : OPC_ADDI;
    const RISCVInsn opc_sub = is32bit ? OPC_SUBW : OPC_SUB;
    TCGReg th = TCG_REG_TMP1;

    /* If we have a negative constant such that negating it would
       make the high part zero, we can (usually) eliminate one insn.  */
    if (cbl && cbh && bh == -1 && bl != 0) {
        bl = -bl;
        bh = 0;
        is_sub = !is_sub;
    }

    /* By operating on the high part first, we get to use the final
       carry operation to move back from the temporary.  */
    if (!cbh) {
        tcg_out_opc_reg(s, (is_sub ? opc_sub : opc_add), th, ah, bh);
    } else if (bh != 0 || ah == rl) {
        tcg_out_opc_imm(s, opc_addi, th, ah, (is_sub ? -bh : bh));
    } else {
        th = ah;
    }

    /* Note that tcg optimization should eliminate the bl == 0 case.  */
    if (is_sub) {
        if (cbl) {
            tcg_out_opc_imm(s, OPC_SLTIU, TCG_REG_TMP0, al, bl);
            tcg_out_opc_imm(s, opc_addi, rl, al, -bl);
        } else {
            tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_TMP0, al, bl);
            tcg_out_opc_reg(s, opc_sub, rl, al, bl);
        }
        tcg_out_opc_reg(s, opc_sub, rh, th, TCG_REG_TMP0);
    } else {
        if (cbl) {
            tcg_out_opc_imm(s, opc_addi, rl, al, bl);
            tcg_out_opc_imm(s, OPC_SLTIU, TCG_REG_TMP0, rl, bl);
        } else if (rl == al && rl == bl) {
            /* Doubling: the carry out is the sign bit of the one input.
               Note this must be an R-type add; the previous use of
               opc_addi here mis-encoded the register as an immediate. */
            tcg_out_opc_imm(s, OPC_SLTI, TCG_REG_TMP0, al, 0);
            tcg_out_opc_reg(s, opc_add, rl, al, bl);
        } else {
            tcg_out_opc_reg(s, opc_add, rl, al, bl);
            tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_TMP0,
                            rl, (rl == bl ? al : bl));
        }
        tcg_out_opc_reg(s, opc_add, rh, th, TCG_REG_TMP0);
    }
}

static const struct {
    RISCVInsn op;
    bool swap;
} tcg_brcond_to_riscv[] = {
    [TCG_COND_EQ] =  { OPC_BEQ,  false },
    [TCG_COND_NE] =  { OPC_BNE,  false },
    [TCG_COND_LT] =  { OPC_BLT,  false },
    [TCG_COND_GE] =  { OPC_BGE,  false },
    [TCG_COND_LE] =  { OPC_BGE,  true  },
    [TCG_COND_GT] =  { OPC_BLT,  true  },
    [TCG_COND_LTU] = { OPC_BLTU, false },
    [TCG_COND_GEU] = { OPC_BGEU, false },
    [TCG_COND_LEU] = { OPC_BGEU, true  },
    [TCG_COND_GTU] = { OPC_BLTU, true  }
};

static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
                           TCGReg arg2, TCGLabel *l)
{
    RISCVInsn op = tcg_brcond_to_riscv[cond].op;

    tcg_debug_assert(op != 0);

    if (tcg_brcond_to_riscv[cond].swap) {
        TCGReg t = arg1;
        arg1 = arg2;
        arg2 = t;
    }

    tcg_out_reloc(s, s->code_ptr, R_RISCV_BRANCH, l, 0);
    tcg_out_opc_branch(s, op, arg1, arg2, 0);
}

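/*
 * Only SLT/SLTU exist as compare instructions, so EQ/NE are synthesized
 * via SUB plus a compare against zero, and GE/LE/GEU/LEU by computing
 * the inverse condition and flipping the result with XORI 1.
 */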
static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGReg arg1, TCGReg arg2)
{
    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_opc_reg(s, OPC_SUB, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
        break;
    case TCG_COND_NE:
        tcg_out_opc_reg(s, OPC_SUB, ret, arg1, arg2);
        tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        break;
    default:
        g_assert_not_reached();
        break;
    }
}

static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGReg al, TCGReg ah,
                            TCGReg bl, TCGReg bh, TCGLabel *l)
{
    /* todo */
    g_assert_not_reached();
}

static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
                             TCGReg al, TCGReg ah, TCGReg bl, TCGReg bh)
{
    /* todo */
    g_assert_not_reached();
}

static void tcg_out_call_int(TCGContext *s, const tcg_insn_unit *arg, bool tail)
{
    TCGReg link = tail ? TCG_REG_ZERO : TCG_REG_RA;
    ptrdiff_t offset = tcg_pcrel_diff(s, arg);
    int ret;

    tcg_debug_assert((offset & 1) == 0);
    if (offset == sextreg(offset, 0, 20)) {
        /* short jump: +/- 512 KiB (20-bit signed offset, single JAL) */
        tcg_out_opc_jump(s, OPC_JAL, link, offset);
    } else if (TCG_TARGET_REG_BITS == 32 || offset == (int32_t)offset) {
        /* long jump: 32-bit pc-relative offset, via AUIPC + JALR */
        tcg_out_opc_upper(s, OPC_AUIPC, TCG_REG_TMP0, 0);
        tcg_out_opc_imm(s, OPC_JALR, link, TCG_REG_TMP0, 0);
        ret = reloc_call(s->code_ptr - 2, arg);
        tcg_debug_assert(ret == true);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* far jump: 64-bit */
        tcg_target_long imm = sextreg((tcg_target_long)arg, 0, 12);
        tcg_target_long base = (tcg_target_long)arg - imm;
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP0, base);
        tcg_out_opc_imm(s, OPC_JALR, link, TCG_REG_TMP0, imm);
    } else {
        g_assert_not_reached();
    }
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
{
    tcg_out_call_int(s, arg, false);
}

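/*
 * FENCE encodes its predecessor set in insn[27:24] (PI,PO,PR,PW) and
 * its successor set in insn[23:20] (SI,SO,SR,SW).  The masks below
 * select the R/W bits matching each TCG memory-order requirement,
 * e.g. TCG_MO_LD_LD becomes "fence r,r" (PR|SR = bits 25 and 21).
 */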
static void tcg_out_mb(TCGContext *s, TCGArg a0)
{
    tcg_insn_unit insn = OPC_FENCE;

    if (a0 & TCG_MO_LD_LD) {
        insn |= 0x02200000;     /* fence r,r */
    }
    if (a0 & TCG_MO_ST_LD) {
        insn |= 0x01200000;     /* fence w,r */
    }
    if (a0 & TCG_MO_LD_ST) {
        insn |= 0x02100000;     /* fence r,w */
    }
    if (a0 & TCG_MO_ST_ST) {
        insn |= 0x01100000;     /* fence w,w; 0x02200000 (r,r) was wrong */
    }
    tcg_out32(s, insn);
}

/*
 * Load/store and TLB
 */

#if defined(CONFIG_SOFTMMU)
/* helper signature: helper_ret_ld_mmu(CPUState *env, target_ulong addr,
 *                                     MemOpIdx oi, uintptr_t ra)
 */
static void * const qemu_ld_helpers[MO_SSIZE + 1] = {
    [MO_UB] = helper_ret_ldub_mmu,
    [MO_SB] = helper_ret_ldsb_mmu,
#if HOST_BIG_ENDIAN
    [MO_UW] = helper_be_lduw_mmu,
    [MO_SW] = helper_be_ldsw_mmu,
    [MO_UL] = helper_be_ldul_mmu,
#if TCG_TARGET_REG_BITS == 64
    [MO_SL] = helper_be_ldsl_mmu,
#endif
    [MO_UQ] = helper_be_ldq_mmu,
#else
    [MO_UW] = helper_le_lduw_mmu,
    [MO_SW] = helper_le_ldsw_mmu,
    [MO_UL] = helper_le_ldul_mmu,
#if TCG_TARGET_REG_BITS == 64
    [MO_SL] = helper_le_ldsl_mmu,
#endif
    [MO_UQ] = helper_le_ldq_mmu,
#endif
};

/* helper signature: helper_ret_st_mmu(CPUState *env, target_ulong addr,
 *                                     uintxx_t val, MemOpIdx oi,
 *                                     uintptr_t ra)
 */
static void * const qemu_st_helpers[MO_SIZE + 1] = {
    [MO_8]   = helper_ret_stb_mmu,
#if HOST_BIG_ENDIAN
    [MO_16] = helper_be_stw_mmu,
    [MO_32] = helper_be_stl_mmu,
    [MO_64] = helper_be_stq_mmu,
#else
    [MO_16] = helper_le_stw_mmu,
    [MO_32] = helper_le_stl_mmu,
    [MO_64] = helper_le_stq_mmu,
#endif
};

/* We don't support oversize guests */
QEMU_BUILD_BUG_ON(TCG_TARGET_REG_BITS < TARGET_LONG_BITS);

/* We expect to use a 12-bit negative offset from ENV.  */
QEMU_BUILD_BUG_ON(TLB_MASK_TABLE_OFS(0) > 0);
QEMU_BUILD_BUG_ON(TLB_MASK_TABLE_OFS(0) < -(1 << 11));

static void tcg_out_goto(TCGContext *s, const tcg_insn_unit *target)
{
    tcg_out_opc_jump(s, OPC_JAL, TCG_REG_ZERO, 0);
    bool ok = reloc_jimm20(s->code_ptr - 1, target);
    tcg_debug_assert(ok);
}

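/*
 * Fast-path TLB lookup: shift the address down by the page bits (less
 * the entry bits), mask with CPUTLBDescFast.mask to index the table,
 * then load the comparator and addend from the entry and compare the
 * page (and alignment) bits of the address.  On mismatch, branch to
 * the slow path; on a hit, TMP0 = guest address + addend is the host
 * address for the access.
 */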
static void tcg_out_tlb_load(TCGContext *s, TCGReg addrl,
                             TCGReg addrh, MemOpIdx oi,
                             tcg_insn_unit **label_ptr, bool is_load)
{
    MemOp opc = get_memop(oi);
    unsigned s_bits = opc & MO_SIZE;
    unsigned a_bits = get_alignment_bits(opc);
    tcg_target_long compare_mask;
    int mem_index = get_mmuidx(oi);
    int fast_ofs = TLB_MASK_TABLE_OFS(mem_index);
    int mask_ofs = fast_ofs + offsetof(CPUTLBDescFast, mask);
    int table_ofs = fast_ofs + offsetof(CPUTLBDescFast, table);
    TCGReg mask_base = TCG_AREG0, table_base = TCG_AREG0;

    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP0, mask_base, mask_ofs);
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP1, table_base, table_ofs);

    tcg_out_opc_imm(s, OPC_SRLI, TCG_REG_TMP2, addrl,
                    TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_TMP2, TCG_REG_TMP2, TCG_REG_TMP0);
    tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP2, TCG_REG_TMP2, TCG_REG_TMP1);

    /* Load the tlb comparator and the addend.  */
    tcg_out_ld(s, TCG_TYPE_TL, TCG_REG_TMP0, TCG_REG_TMP2,
               is_load ? offsetof(CPUTLBEntry, addr_read)
               : offsetof(CPUTLBEntry, addr_write));
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP2, TCG_REG_TMP2,
               offsetof(CPUTLBEntry, addend));

    /* We don't support unaligned accesses. */
    if (a_bits < s_bits) {
        a_bits = s_bits;
    }
    /* Clear the non-page, non-alignment bits from the address.  */
    compare_mask = (tcg_target_long)TARGET_PAGE_MASK | ((1 << a_bits) - 1);
    if (compare_mask == sextreg(compare_mask, 0, 12)) {
        tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_TMP1, addrl, compare_mask);
    } else {
        tcg_out_movi(s, TCG_TYPE_TL, TCG_REG_TMP1, compare_mask);
        tcg_out_opc_reg(s, OPC_AND, TCG_REG_TMP1, TCG_REG_TMP1, addrl);
    }

    /* Compare masked address with the TLB entry. */
    label_ptr[0] = s->code_ptr;
    tcg_out_opc_branch(s, OPC_BNE, TCG_REG_TMP0, TCG_REG_TMP1, 0);

    /* TLB Hit - translate address using addend.  */
    if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
        tcg_out_ext32u(s, TCG_REG_TMP0, addrl);
        addrl = TCG_REG_TMP0;
    }
    tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP0, TCG_REG_TMP2, addrl);
}

static void add_qemu_ldst_label(TCGContext *s, int is_ld, MemOpIdx oi,
                                TCGType ext,
                                TCGReg datalo, TCGReg datahi,
                                TCGReg addrlo, TCGReg addrhi,
                                void *raddr, tcg_insn_unit **label_ptr)
{
    TCGLabelQemuLdst *label = new_ldst_label(s);

    label->is_ld = is_ld;
    label->oi = oi;
    label->type = ext;
    label->datalo_reg = datalo;
    label->datahi_reg = datahi;
    label->addrlo_reg = addrlo;
    label->addrhi_reg = addrhi;
    label->raddr = tcg_splitwx_to_rx(raddr);
    label->label_ptr[0] = label_ptr[0];
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    MemOpIdx oi = l->oi;
    MemOp opc = get_memop(oi);
    TCGReg a0 = tcg_target_call_iarg_regs[0];
    TCGReg a1 = tcg_target_call_iarg_regs[1];
    TCGReg a2 = tcg_target_call_iarg_regs[2];
    TCGReg a3 = tcg_target_call_iarg_regs[3];

    /* We don't support oversize guests */
    if (TCG_TARGET_REG_BITS < TARGET_LONG_BITS) {
        g_assert_not_reached();
    }

    /* resolve label address */
    if (!reloc_sbimm12(l->label_ptr[0], tcg_splitwx_to_rx(s->code_ptr))) {
        return false;
    }

    /* call load helper */
    tcg_out_mov(s, TCG_TYPE_PTR, a0, TCG_AREG0);
    tcg_out_mov(s, TCG_TYPE_PTR, a1, l->addrlo_reg);
    tcg_out_movi(s, TCG_TYPE_PTR, a2, oi);
    tcg_out_movi(s, TCG_TYPE_PTR, a3, (tcg_target_long)l->raddr);

    tcg_out_call(s, qemu_ld_helpers[opc & MO_SSIZE]);
    tcg_out_mov(s, (opc & MO_SIZE) == MO_64 ? TCG_TYPE_I64 : TCG_TYPE_I32,
                l->datalo_reg, a0);

    tcg_out_goto(s, l->raddr);
    return true;
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    MemOpIdx oi = l->oi;
    MemOp opc = get_memop(oi);
    MemOp s_bits = opc & MO_SIZE;
    TCGReg a0 = tcg_target_call_iarg_regs[0];
    TCGReg a1 = tcg_target_call_iarg_regs[1];
    TCGReg a2 = tcg_target_call_iarg_regs[2];
    TCGReg a3 = tcg_target_call_iarg_regs[3];
    TCGReg a4 = tcg_target_call_iarg_regs[4];

    /* We don't support oversize guests */
    if (TCG_TARGET_REG_BITS < TARGET_LONG_BITS) {
        g_assert_not_reached();
    }

    /* resolve label address */
    if (!reloc_sbimm12(l->label_ptr[0], tcg_splitwx_to_rx(s->code_ptr))) {
        return false;
    }

    /* call store helper */
    tcg_out_mov(s, TCG_TYPE_PTR, a0, TCG_AREG0);
    tcg_out_mov(s, TCG_TYPE_PTR, a1, l->addrlo_reg);
    tcg_out_mov(s, TCG_TYPE_PTR, a2, l->datalo_reg);
    switch (s_bits) {
    case MO_8:
        tcg_out_ext8u(s, a2, a2);
        break;
    case MO_16:
        tcg_out_ext16u(s, a2, a2);
        break;
    default:
        break;
    }
    tcg_out_movi(s, TCG_TYPE_PTR, a3, oi);
    tcg_out_movi(s, TCG_TYPE_PTR, a4, (tcg_target_long)l->raddr);

    tcg_out_call(s, qemu_st_helpers[opc & MO_SIZE]);

    tcg_out_goto(s, l->raddr);
    return true;
}
#else

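/*
 * User-mode only: enforce guest alignment by testing the low bits of
 * the address with ANDI and branching to a slow path that tail-calls
 * helper_unaligned_ld/st to report the fault.
 */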
static void tcg_out_test_alignment(TCGContext *s, bool is_ld, TCGReg addr_reg,
                                   unsigned a_bits)
{
    unsigned a_mask = (1 << a_bits) - 1;
    TCGLabelQemuLdst *l = new_ldst_label(s);

    l->is_ld = is_ld;
    l->addrlo_reg = addr_reg;

    /* We are expecting a_bits to max out at 7, so we can always use andi. */
    tcg_debug_assert(a_bits < 12);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_TMP1, addr_reg, a_mask);

    l->label_ptr[0] = s->code_ptr;
    tcg_out_opc_branch(s, OPC_BNE, TCG_REG_TMP1, TCG_REG_ZERO, 0);

    l->raddr = tcg_splitwx_to_rx(s->code_ptr);
}

static bool tcg_out_fail_alignment(TCGContext *s, TCGLabelQemuLdst *l)
{
    /* resolve label address */
    if (!reloc_sbimm12(l->label_ptr[0], tcg_splitwx_to_rx(s->code_ptr))) {
        return false;
    }

    tcg_out_mov(s, TCG_TYPE_TL, TCG_REG_A1, l->addrlo_reg);
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_A0, TCG_AREG0);

    /* tail call, with the return address back inline. */
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RA, (uintptr_t)l->raddr);
    tcg_out_call_int(s, (const void *)(l->is_ld ? helper_unaligned_ld
                                       : helper_unaligned_st), true);
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    return tcg_out_fail_alignment(s, l);
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    return tcg_out_fail_alignment(s, l);
}

#endif /* CONFIG_SOFTMMU */

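/*
 * Emit the actual load for a qemu_ld: base already holds the host
 * address, and width/signedness come from the MemOp.  On RV32, a
 * 64-bit load is two LWs, ordered so that base is not overwritten
 * before the second load.
 */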
static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg lo, TCGReg hi,
                                   TCGReg base, MemOp opc, bool is_64)
{
    /* Byte swapping is left to middle-end expansion. */
    tcg_debug_assert((opc & MO_BSWAP) == 0);

    switch (opc & (MO_SSIZE)) {
    case MO_UB:
        tcg_out_opc_imm(s, OPC_LBU, lo, base, 0);
        break;
    case MO_SB:
        tcg_out_opc_imm(s, OPC_LB, lo, base, 0);
        break;
    case MO_UW:
        tcg_out_opc_imm(s, OPC_LHU, lo, base, 0);
        break;
    case MO_SW:
        tcg_out_opc_imm(s, OPC_LH, lo, base, 0);
        break;
    case MO_UL:
        if (TCG_TARGET_REG_BITS == 64 && is_64) {
            tcg_out_opc_imm(s, OPC_LWU, lo, base, 0);
            break;
        }
        /* FALLTHRU */
    case MO_SL:
        tcg_out_opc_imm(s, OPC_LW, lo, base, 0);
        break;
    case MO_UQ:
        /* Prefer to load from offset 0 first, but allow for overlap.  */
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_opc_imm(s, OPC_LD, lo, base, 0);
        } else if (lo != base) {
            tcg_out_opc_imm(s, OPC_LW, lo, base, 0);
            tcg_out_opc_imm(s, OPC_LW, hi, base, 4);
        } else {
            tcg_out_opc_imm(s, OPC_LW, hi, base, 4);
            tcg_out_opc_imm(s, OPC_LW, lo, base, 0);
        }
        break;
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
{
    TCGReg addr_regl, addr_regh __attribute__((unused));
    TCGReg data_regl, data_regh;
    MemOpIdx oi;
    MemOp opc;
#if defined(CONFIG_SOFTMMU)
    tcg_insn_unit *label_ptr[1];
#else
    unsigned a_bits;
#endif
    TCGReg base = TCG_REG_TMP0;

    data_regl = *args++;
    data_regh = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0);
    addr_regl = *args++;
    addr_regh = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0);
    oi = *args++;
    opc = get_memop(oi);

#if defined(CONFIG_SOFTMMU)
    tcg_out_tlb_load(s, addr_regl, addr_regh, oi, label_ptr, 1);
    tcg_out_qemu_ld_direct(s, data_regl, data_regh, base, opc, is_64);
    add_qemu_ldst_label(s, 1, oi,
                        (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
                        data_regl, data_regh, addr_regl, addr_regh,
                        s->code_ptr, label_ptr);
#else
    if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
        tcg_out_ext32u(s, base, addr_regl);
        addr_regl = base;
    }
    a_bits = get_alignment_bits(opc);
    if (a_bits) {
        tcg_out_test_alignment(s, true, addr_regl, a_bits);
    }
    if (guest_base != 0) {
        tcg_out_opc_reg(s, OPC_ADD, base, TCG_GUEST_BASE_REG, addr_regl);
    } else {
        /* With guest_base == 0 the guest address is the host address;
           use it directly rather than leaving base (TMP0) uninitialized
           when the zero-extension above did not run.  */
        base = addr_regl;
    }
    tcg_out_qemu_ld_direct(s, data_regl, data_regh, base, opc, is_64);
#endif
}

static void tcg_out_qemu_st_direct(TCGContext *s, TCGReg lo, TCGReg hi,
                                   TCGReg base, MemOp opc)
{
    /* Byte swapping is left to middle-end expansion. */
    tcg_debug_assert((opc & MO_BSWAP) == 0);

    switch (opc & (MO_SSIZE)) {
    case MO_8:
        tcg_out_opc_store(s, OPC_SB, base, lo, 0);
        break;
    case MO_16:
        tcg_out_opc_store(s, OPC_SH, base, lo, 0);
        break;
    case MO_32:
        tcg_out_opc_store(s, OPC_SW, base, lo, 0);
        break;
    case MO_64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_opc_store(s, OPC_SD, base, lo, 0);
        } else {
            tcg_out_opc_store(s, OPC_SW, base, lo, 0);
            tcg_out_opc_store(s, OPC_SW, base, hi, 4);
        }
        break;
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
{
    TCGReg addr_regl, addr_regh __attribute__((unused));
    TCGReg data_regl, data_regh;
    MemOpIdx oi;
    MemOp opc;
#if defined(CONFIG_SOFTMMU)
    tcg_insn_unit *label_ptr[1];
#else
    unsigned a_bits;
#endif
    TCGReg base = TCG_REG_TMP0;

    data_regl = *args++;
    data_regh = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0);
    addr_regl = *args++;
    addr_regh = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0);
    oi = *args++;
    opc = get_memop(oi);

#if defined(CONFIG_SOFTMMU)
    tcg_out_tlb_load(s, addr_regl, addr_regh, oi, label_ptr, 0);
    tcg_out_qemu_st_direct(s, data_regl, data_regh, base, opc);
    add_qemu_ldst_label(s, 0, oi,
                        (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
                        data_regl, data_regh, addr_regl, addr_regh,
                        s->code_ptr, label_ptr);
#else
    if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
        tcg_out_ext32u(s, base, addr_regl);
        addr_regl = base;
    }
    a_bits = get_alignment_bits(opc);
    if (a_bits) {
        tcg_out_test_alignment(s, false, addr_regl, a_bits);
    }
    if (guest_base != 0) {
        tcg_out_opc_reg(s, OPC_ADD, base, TCG_GUEST_BASE_REG, addr_regl);
    } else {
        /* As in tcg_out_qemu_ld: avoid an uninitialized base register.  */
        base = addr_regl;
    }
   1261     tcg_out_qemu_st_direct(s, data_regl, data_regh, base, opc);
   1262 #endif
   1263 }
   1264 
   1265 static const tcg_insn_unit *tb_ret_addr;
   1266 
   1267 static void tcg_out_op(TCGContext *s, TCGOpcode opc,
   1268                        const TCGArg args[TCG_MAX_OP_ARGS],
   1269                        const int const_args[TCG_MAX_OP_ARGS])
   1270 {
   1271     TCGArg a0 = args[0];
   1272     TCGArg a1 = args[1];
   1273     TCGArg a2 = args[2];
   1274     int c2 = const_args[2];
   1275 
   1276     switch (opc) {
   1277     case INDEX_op_exit_tb:
   1278         /* Reuse the zeroing that exists for goto_ptr.  */
   1279         if (a0 == 0) {
   1280             tcg_out_call_int(s, tcg_code_gen_epilogue, true);
   1281         } else {
   1282             tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, a0);
   1283             tcg_out_call_int(s, tb_ret_addr, true);
   1284         }
   1285         break;
   1286 
   1287     case INDEX_op_goto_tb:
   1288         assert(s->tb_jmp_insn_offset == 0);
   1289         /* indirect jump method */
   1290         tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP0, TCG_REG_ZERO,
   1291                    (uintptr_t)(s->tb_jmp_target_addr + a0));
   1292         tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, TCG_REG_TMP0, 0);
   1293         set_jmp_reset_offset(s, a0);
   1294         break;
   1295 
   1296     case INDEX_op_goto_ptr:
   1297         tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, a0, 0);
   1298         break;
   1299 
   1300     case INDEX_op_br:
   1301         tcg_out_reloc(s, s->code_ptr, R_RISCV_JAL, arg_label(a0), 0);
   1302         tcg_out_opc_jump(s, OPC_JAL, TCG_REG_ZERO, 0);
   1303         break;
   1304 
   1305     case INDEX_op_ld8u_i32:
   1306     case INDEX_op_ld8u_i64:
   1307         tcg_out_ldst(s, OPC_LBU, a0, a1, a2);
   1308         break;
   1309     case INDEX_op_ld8s_i32:
   1310     case INDEX_op_ld8s_i64:
   1311         tcg_out_ldst(s, OPC_LB, a0, a1, a2);
   1312         break;
   1313     case INDEX_op_ld16u_i32:
   1314     case INDEX_op_ld16u_i64:
   1315         tcg_out_ldst(s, OPC_LHU, a0, a1, a2);
   1316         break;
   1317     case INDEX_op_ld16s_i32:
   1318     case INDEX_op_ld16s_i64:
   1319         tcg_out_ldst(s, OPC_LH, a0, a1, a2);
   1320         break;
   1321     case INDEX_op_ld32u_i64:
   1322         tcg_out_ldst(s, OPC_LWU, a0, a1, a2);
   1323         break;
   1324     case INDEX_op_ld_i32:
   1325     case INDEX_op_ld32s_i64:
   1326         tcg_out_ldst(s, OPC_LW, a0, a1, a2);
   1327         break;
   1328     case INDEX_op_ld_i64:
   1329         tcg_out_ldst(s, OPC_LD, a0, a1, a2);
   1330         break;
   1331 
   1332     case INDEX_op_st8_i32:
   1333     case INDEX_op_st8_i64:
   1334         tcg_out_ldst(s, OPC_SB, a0, a1, a2);
   1335         break;
   1336     case INDEX_op_st16_i32:
   1337     case INDEX_op_st16_i64:
   1338         tcg_out_ldst(s, OPC_SH, a0, a1, a2);
   1339         break;
   1340     case INDEX_op_st_i32:
   1341     case INDEX_op_st32_i64:
   1342         tcg_out_ldst(s, OPC_SW, a0, a1, a2);
   1343         break;
   1344     case INDEX_op_st_i64:
   1345         tcg_out_ldst(s, OPC_SD, a0, a1, a2);
   1346         break;
   1347 
   1348     case INDEX_op_add_i32:
   1349         if (c2) {
   1350             tcg_out_opc_imm(s, OPC_ADDIW, a0, a1, a2);
   1351         } else {
   1352             tcg_out_opc_reg(s, OPC_ADDW, a0, a1, a2);
   1353         }
   1354         break;
   1355     case INDEX_op_add_i64:
   1356         if (c2) {
   1357             tcg_out_opc_imm(s, OPC_ADDI, a0, a1, a2);
   1358         } else {
   1359             tcg_out_opc_reg(s, OPC_ADD, a0, a1, a2);
   1360         }
   1361         break;
   1362 
   1363     case INDEX_op_sub_i32:
   1364         if (c2) {
   1365             tcg_out_opc_imm(s, OPC_ADDIW, a0, a1, -a2);
   1366         } else {
   1367             tcg_out_opc_reg(s, OPC_SUBW, a0, a1, a2);
   1368         }
   1369         break;
   1370     case INDEX_op_sub_i64:
   1371         if (c2) {
   1372             tcg_out_opc_imm(s, OPC_ADDI, a0, a1, -a2);
   1373         } else {
   1374             tcg_out_opc_reg(s, OPC_SUB, a0, a1, a2);
   1375         }
   1376         break;
   1377 
   1378     case INDEX_op_and_i32:
   1379     case INDEX_op_and_i64:
   1380         if (c2) {
   1381             tcg_out_opc_imm(s, OPC_ANDI, a0, a1, a2);
   1382         } else {
   1383             tcg_out_opc_reg(s, OPC_AND, a0, a1, a2);
   1384         }
   1385         break;
   1386 
   1387     case INDEX_op_or_i32:
   1388     case INDEX_op_or_i64:
   1389         if (c2) {
   1390             tcg_out_opc_imm(s, OPC_ORI, a0, a1, a2);
   1391         } else {
   1392             tcg_out_opc_reg(s, OPC_OR, a0, a1, a2);
   1393         }
   1394         break;
   1395 
   1396     case INDEX_op_xor_i32:
   1397     case INDEX_op_xor_i64:
   1398         if (c2) {
   1399             tcg_out_opc_imm(s, OPC_XORI, a0, a1, a2);
   1400         } else {
   1401             tcg_out_opc_reg(s, OPC_XOR, a0, a1, a2);
   1402         }
   1403         break;
   1404 
   1405     case INDEX_op_not_i32:
   1406     case INDEX_op_not_i64:
   1407         tcg_out_opc_imm(s, OPC_XORI, a0, a1, -1);
   1408         break;
   1409 
   1410     case INDEX_op_neg_i32:
   1411         tcg_out_opc_reg(s, OPC_SUBW, a0, TCG_REG_ZERO, a1);
   1412         break;
   1413     case INDEX_op_neg_i64:
   1414         tcg_out_opc_reg(s, OPC_SUB, a0, TCG_REG_ZERO, a1);
   1415         break;
   1416 
   1417     case INDEX_op_mul_i32:
   1418         tcg_out_opc_reg(s, OPC_MULW, a0, a1, a2);
   1419         break;
   1420     case INDEX_op_mul_i64:
   1421         tcg_out_opc_reg(s, OPC_MUL, a0, a1, a2);
   1422         break;
   1423 
   1424     case INDEX_op_div_i32:
   1425         tcg_out_opc_reg(s, OPC_DIVW, a0, a1, a2);
   1426         break;
   1427     case INDEX_op_div_i64:
   1428         tcg_out_opc_reg(s, OPC_DIV, a0, a1, a2);
   1429         break;
   1430 
   1431     case INDEX_op_divu_i32:
   1432         tcg_out_opc_reg(s, OPC_DIVUW, a0, a1, a2);
   1433         break;
   1434     case INDEX_op_divu_i64:
   1435         tcg_out_opc_reg(s, OPC_DIVU, a0, a1, a2);
   1436         break;
   1437 
   1438     case INDEX_op_rem_i32:
   1439         tcg_out_opc_reg(s, OPC_REMW, a0, a1, a2);
   1440         break;
   1441     case INDEX_op_rem_i64:
   1442         tcg_out_opc_reg(s, OPC_REM, a0, a1, a2);
   1443         break;
   1444 
   1445     case INDEX_op_remu_i32:
   1446         tcg_out_opc_reg(s, OPC_REMUW, a0, a1, a2);
   1447         break;
   1448     case INDEX_op_remu_i64:
   1449         tcg_out_opc_reg(s, OPC_REMU, a0, a1, a2);
   1450         break;
   1451 
   1452     case INDEX_op_shl_i32:
   1453         if (c2) {
   1454             tcg_out_opc_imm(s, OPC_SLLIW, a0, a1, a2 & 0x1f);
   1455         } else {
   1456             tcg_out_opc_reg(s, OPC_SLLW, a0, a1, a2);
   1457         }
   1458         break;
   1459     case INDEX_op_shl_i64:
   1460         if (c2) {
   1461             tcg_out_opc_imm(s, OPC_SLLI, a0, a1, a2 & 0x3f);
   1462         } else {
   1463             tcg_out_opc_reg(s, OPC_SLL, a0, a1, a2);
   1464         }
   1465         break;
   1466 
   1467     case INDEX_op_shr_i32:
   1468         if (c2) {
   1469             tcg_out_opc_imm(s, OPC_SRLIW, a0, a1, a2 & 0x1f);
   1470         } else {
   1471             tcg_out_opc_reg(s, OPC_SRLW, a0, a1, a2);
   1472         }
   1473         break;
   1474     case INDEX_op_shr_i64:
   1475         if (c2) {
   1476             tcg_out_opc_imm(s, OPC_SRLI, a0, a1, a2 & 0x3f);
   1477         } else {
   1478             tcg_out_opc_reg(s, OPC_SRL, a0, a1, a2);
   1479         }
   1480         break;
   1481 
   1482     case INDEX_op_sar_i32:
   1483         if (c2) {
   1484             tcg_out_opc_imm(s, OPC_SRAIW, a0, a1, a2 & 0x1f);
   1485         } else {
   1486             tcg_out_opc_reg(s, OPC_SRAW, a0, a1, a2);
   1487         }
   1488         break;
   1489     case INDEX_op_sar_i64:
   1490         if (c2) {
   1491             tcg_out_opc_imm(s, OPC_SRAI, a0, a1, a2 & 0x3f);
   1492         } else {
   1493             tcg_out_opc_reg(s, OPC_SRA, a0, a1, a2);
   1494         }
   1495         break;
   1496 
   1497     case INDEX_op_add2_i32:
   1498         tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
   1499                         const_args[4], const_args[5], false, true);
   1500         break;
   1501     case INDEX_op_add2_i64:
   1502         tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
   1503                         const_args[4], const_args[5], false, false);
   1504         break;
   1505     case INDEX_op_sub2_i32:
   1506         tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
   1507                         const_args[4], const_args[5], true, true);
   1508         break;
   1509     case INDEX_op_sub2_i64:
   1510         tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
   1511                         const_args[4], const_args[5], true, false);
   1512         break;
   1513 
   1514     case INDEX_op_brcond_i32:
   1515     case INDEX_op_brcond_i64:
   1516         tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
   1517         break;
   1518     case INDEX_op_brcond2_i32:
   1519         tcg_out_brcond2(s, args[4], a0, a1, a2, args[3], arg_label(args[5]));
   1520         break;
   1521 
   1522     case INDEX_op_setcond_i32:
   1523     case INDEX_op_setcond_i64:
   1524         tcg_out_setcond(s, args[3], a0, a1, a2);
   1525         break;
   1526     case INDEX_op_setcond2_i32:
   1527         tcg_out_setcond2(s, args[5], a0, a1, a2, args[3], args[4]);
   1528         break;
   1529 
   1530     case INDEX_op_qemu_ld_i32:
   1531         tcg_out_qemu_ld(s, args, false);
   1532         break;
   1533     case INDEX_op_qemu_ld_i64:
   1534         tcg_out_qemu_ld(s, args, true);
   1535         break;
   1536     case INDEX_op_qemu_st_i32:
   1537         tcg_out_qemu_st(s, args, false);
   1538         break;
   1539     case INDEX_op_qemu_st_i64:
   1540         tcg_out_qemu_st(s, args, true);
   1541         break;
   1542 
   1543     case INDEX_op_ext8u_i32:
   1544     case INDEX_op_ext8u_i64:
   1545         tcg_out_ext8u(s, a0, a1);
   1546         break;
   1547 
   1548     case INDEX_op_ext16u_i32:
   1549     case INDEX_op_ext16u_i64:
   1550         tcg_out_ext16u(s, a0, a1);
   1551         break;
   1552 
   1553     case INDEX_op_ext32u_i64:
   1554     case INDEX_op_extu_i32_i64:
   1555         tcg_out_ext32u(s, a0, a1);
   1556         break;
   1557 
   1558     case INDEX_op_ext8s_i32:
   1559     case INDEX_op_ext8s_i64:
   1560         tcg_out_ext8s(s, a0, a1);
   1561         break;
   1562 
   1563     case INDEX_op_ext16s_i32:
   1564     case INDEX_op_ext16s_i64:
   1565         tcg_out_ext16s(s, a0, a1);
   1566         break;
   1567 
   1568     case INDEX_op_ext32s_i64:
   1569     case INDEX_op_extrl_i64_i32:
   1570     case INDEX_op_ext_i32_i64:
   1571         tcg_out_ext32s(s, a0, a1);
   1572         break;
   1573 
   1574     case INDEX_op_extrh_i64_i32:
   1575         tcg_out_opc_imm(s, OPC_SRAI, a0, a1, 32);
   1576         break;
   1577 
   1578     case INDEX_op_mulsh_i32:
   1579     case INDEX_op_mulsh_i64:
   1580         tcg_out_opc_reg(s, OPC_MULH, a0, a1, a2);
   1581         break;
   1582 
   1583     case INDEX_op_muluh_i32:
   1584     case INDEX_op_muluh_i64:
   1585         tcg_out_opc_reg(s, OPC_MULHU, a0, a1, a2);
   1586         break;
   1587 
   1588     case INDEX_op_mb:
   1589         tcg_out_mb(s, a0);
   1590         break;
   1591 
   1592     case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
   1593     case INDEX_op_mov_i64:
   1594     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
   1595     default:
   1596         g_assert_not_reached();
   1597     }
   1598 }
   1599 
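         /*
          * Map each opcode to its operand-constraint set.  C_Om_In(...)
          * names a set with m outputs and n inputs.  'i' (any constant)
          * is a core-TCG constraint; the rest come from this backend's
          * tcg-target-con-str.h: 'r' is any general register, 'L' a
          * general register not reserved by the softmmu slow path, 'Z'
          * additionally accepts the constant zero (satisfied with x0),
          * 'I' a signed 12-bit immediate, 'N' an immediate whose
          * negation fits in 12 bits, and 'M' one where the value and
          * its negation both fit.
          */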
   1600 static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
   1601 {
   1602     switch (op) {
   1603     case INDEX_op_goto_ptr:
   1604         return C_O0_I1(r);
   1605 
   1606     case INDEX_op_ld8u_i32:
   1607     case INDEX_op_ld8s_i32:
   1608     case INDEX_op_ld16u_i32:
   1609     case INDEX_op_ld16s_i32:
   1610     case INDEX_op_ld_i32:
   1611     case INDEX_op_not_i32:
   1612     case INDEX_op_neg_i32:
   1613     case INDEX_op_ld8u_i64:
   1614     case INDEX_op_ld8s_i64:
   1615     case INDEX_op_ld16u_i64:
   1616     case INDEX_op_ld16s_i64:
   1617     case INDEX_op_ld32s_i64:
   1618     case INDEX_op_ld32u_i64:
   1619     case INDEX_op_ld_i64:
   1620     case INDEX_op_not_i64:
   1621     case INDEX_op_neg_i64:
   1622     case INDEX_op_ext8u_i32:
   1623     case INDEX_op_ext8u_i64:
   1624     case INDEX_op_ext16u_i32:
   1625     case INDEX_op_ext16u_i64:
   1626     case INDEX_op_ext32u_i64:
   1627     case INDEX_op_extu_i32_i64:
   1628     case INDEX_op_ext8s_i32:
   1629     case INDEX_op_ext8s_i64:
   1630     case INDEX_op_ext16s_i32:
   1631     case INDEX_op_ext16s_i64:
   1632     case INDEX_op_ext32s_i64:
   1633     case INDEX_op_extrl_i64_i32:
   1634     case INDEX_op_extrh_i64_i32:
   1635     case INDEX_op_ext_i32_i64:
   1636         return C_O1_I1(r, r);
   1637 
   1638     case INDEX_op_st8_i32:
   1639     case INDEX_op_st16_i32:
   1640     case INDEX_op_st_i32:
   1641     case INDEX_op_st8_i64:
   1642     case INDEX_op_st16_i64:
   1643     case INDEX_op_st32_i64:
   1644     case INDEX_op_st_i64:
   1645         return C_O0_I2(rZ, r);
   1646 
   1647     case INDEX_op_add_i32:
   1648     case INDEX_op_and_i32:
   1649     case INDEX_op_or_i32:
   1650     case INDEX_op_xor_i32:
   1651     case INDEX_op_add_i64:
   1652     case INDEX_op_and_i64:
   1653     case INDEX_op_or_i64:
   1654     case INDEX_op_xor_i64:
   1655         return C_O1_I2(r, r, rI);
   1656 
   1657     case INDEX_op_sub_i32:
   1658     case INDEX_op_sub_i64:
   1659         return C_O1_I2(r, rZ, rN);
   1660 
   1661     case INDEX_op_mul_i32:
   1662     case INDEX_op_mulsh_i32:
   1663     case INDEX_op_muluh_i32:
   1664     case INDEX_op_div_i32:
   1665     case INDEX_op_divu_i32:
   1666     case INDEX_op_rem_i32:
   1667     case INDEX_op_remu_i32:
   1668     case INDEX_op_setcond_i32:
   1669     case INDEX_op_mul_i64:
   1670     case INDEX_op_mulsh_i64:
   1671     case INDEX_op_muluh_i64:
   1672     case INDEX_op_div_i64:
   1673     case INDEX_op_divu_i64:
   1674     case INDEX_op_rem_i64:
   1675     case INDEX_op_remu_i64:
   1676     case INDEX_op_setcond_i64:
   1677         return C_O1_I2(r, rZ, rZ);
   1678 
   1679     case INDEX_op_shl_i32:
   1680     case INDEX_op_shr_i32:
   1681     case INDEX_op_sar_i32:
   1682     case INDEX_op_shl_i64:
   1683     case INDEX_op_shr_i64:
   1684     case INDEX_op_sar_i64:
   1685         return C_O1_I2(r, r, ri);
   1686 
   1687     case INDEX_op_brcond_i32:
   1688     case INDEX_op_brcond_i64:
   1689         return C_O0_I2(rZ, rZ);
   1690 
   1691     case INDEX_op_add2_i32:
   1692     case INDEX_op_add2_i64:
   1693     case INDEX_op_sub2_i32:
   1694     case INDEX_op_sub2_i64:
   1695         return C_O2_I4(r, r, rZ, rZ, rM, rM);
   1696 
   1697     case INDEX_op_brcond2_i32:
   1698         return C_O0_I4(rZ, rZ, rZ, rZ);
   1699 
   1700     case INDEX_op_setcond2_i32:
   1701         return C_O1_I4(r, rZ, rZ, rZ, rZ);
   1702 
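             /*
              * Guest loads/stores: a 64-bit address or data value
              * occupies a register pair on a 32-bit host, so the arity
              * grows with TARGET_LONG_BITS; 'L' keeps operands clear of
              * the registers the softmmu slow path clobbers.
              */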
   1703     case INDEX_op_qemu_ld_i32:
   1704         return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
   1705                 ? C_O1_I1(r, L) : C_O1_I2(r, L, L));
   1706     case INDEX_op_qemu_st_i32:
   1707         return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
   1708                 ? C_O0_I2(LZ, L) : C_O0_I3(LZ, L, L));
   1709     case INDEX_op_qemu_ld_i64:
   1710         return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, L)
   1711                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, L)
   1712                : C_O2_I2(r, r, L, L));
   1713     case INDEX_op_qemu_st_i64:
   1714         return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(LZ, L)
   1715                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(LZ, LZ, L)
   1716                : C_O0_I4(LZ, LZ, L, L));
   1717 
   1718     default:
   1719         g_assert_not_reached();
   1720     }
   1721 }
   1722 
   1723 static const int tcg_target_callee_save_regs[] = {
   1724     TCG_REG_S0,       /* used for the global env (TCG_AREG0) */
   1725     TCG_REG_S1,
   1726     TCG_REG_S2,
   1727     TCG_REG_S3,
   1728     TCG_REG_S4,
   1729     TCG_REG_S5,
   1730     TCG_REG_S6,
   1731     TCG_REG_S7,
   1732     TCG_REG_S8,
   1733     TCG_REG_S9,
   1734     TCG_REG_S10,
   1735     TCG_REG_S11,
   1736     TCG_REG_RA,       /* should be last for ABI compliance */
   1737 };
   1738 
   1739 /* Stack frame parameters.  */
   1740 #define REG_SIZE   (TCG_TARGET_REG_BITS / 8)
   1741 #define SAVE_SIZE  ((int)ARRAY_SIZE(tcg_target_callee_save_regs) * REG_SIZE)
   1742 #define TEMP_SIZE  (CPU_TEMP_BUF_NLONGS * (int)sizeof(long))
   1743 #define FRAME_SIZE ((TCG_STATIC_CALL_ARGS_SIZE + TEMP_SIZE + SAVE_SIZE \
   1744                      + TCG_TARGET_STACK_ALIGN - 1) \
   1745                     & -TCG_TARGET_STACK_ALIGN)
   1746 #define SAVE_OFS   (TCG_STATIC_CALL_ARGS_SIZE + TEMP_SIZE)
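         /*
          * Illustrative numbers only (the real values depend on
          * TCG_STATIC_CALL_ARGS_SIZE and CPU_TEMP_BUF_NLONGS): with 128
          * bytes of static call-argument space and a 1024-byte temp
          * buffer on a 64-bit host, SAVE_SIZE is 13 * 8 = 104, SAVE_OFS
          * is 1152, and FRAME_SIZE rounds 1256 up to 1264 for 16-byte
          * stack alignment.
          */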
   1747 
   1748 /* We're expecting to be able to use an immediate for frame allocation.  */
   1749 QEMU_BUILD_BUG_ON(FRAME_SIZE > 0x7ff);
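         /*
          * 0x7ff is the largest positive value of ADDI's signed 12-bit
          * immediate, so a single instruction suffices to move sp by
          * FRAME_SIZE in the prologue and epilogue below.
          */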
   1750 
   1751 /* Generate global QEMU prologue and epilogue code */
   1752 static void tcg_target_qemu_prologue(TCGContext *s)
   1753 {
   1754     int i;
   1755 
   1756     tcg_set_frame(s, TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE, TEMP_SIZE);
   1757 
   1758     /* TB prologue */
   1759     tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_SP, TCG_REG_SP, -FRAME_SIZE);
   1760     for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
   1761         tcg_out_st(s, TCG_TYPE_REG, tcg_target_callee_save_regs[i],
   1762                    TCG_REG_SP, SAVE_OFS + i * REG_SIZE);
   1763     }
   1764 
   1765 #if !defined(CONFIG_SOFTMMU)
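             /* User-only mode: pin the guest address-space base in a reserved register. */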
   1766     tcg_out_movi(s, TCG_TYPE_PTR, TCG_GUEST_BASE_REG, guest_base);
   1767     tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
   1768 #endif
   1769 
   1770     /* Call generated code */
   1771     tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
   1772     tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, tcg_target_call_iarg_regs[1], 0);
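             /*
              * JALR with rd = zero discards the return address, so this
              * is a plain indirect jump into the translated block whose
              * address arrived in the second argument register; env, the
              * first argument, was moved into TCG_AREG0 just above.
              */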
   1773 
    1774     /* Return path for goto_ptr: set the return value to 0. */
   1775     tcg_code_gen_epilogue = tcg_splitwx_to_rx(s->code_ptr);
   1776     tcg_out_mov(s, TCG_TYPE_REG, TCG_REG_A0, TCG_REG_ZERO);
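             /* Fall through into the epilogue below, returning that 0. */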
   1777 
   1778     /* TB epilogue */
   1779     tb_ret_addr = tcg_splitwx_to_rx(s->code_ptr);
   1780     for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
   1781         tcg_out_ld(s, TCG_TYPE_REG, tcg_target_callee_save_regs[i],
   1782                    TCG_REG_SP, SAVE_OFS + i * REG_SIZE);
   1783     }
   1784 
   1785     tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_SP, TCG_REG_SP, FRAME_SIZE);
   1786     tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, TCG_REG_RA, 0);
   1787 }
   1788 
   1789 static void tcg_target_init(TCGContext *s)
   1790 {
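             /* Every general register may hold TCG values of either type. */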
   1791     tcg_target_available_regs[TCG_TYPE_I32] = 0xffffffff;
   1792     if (TCG_TARGET_REG_BITS == 64) {
   1793         tcg_target_available_regs[TCG_TYPE_I64] = 0xffffffff;
   1794     }
   1795 
   1796     tcg_target_call_clobber_regs = -1u;
   1797     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S0);
   1798     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S1);
   1799     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S2);
   1800     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S3);
   1801     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S4);
   1802     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S5);
   1803     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S6);
   1804     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S7);
   1805     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S8);
   1806     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S9);
   1807     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S10);
   1808     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S11);
   1809 
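             /*
              * Never allocate: the hard-wired zero register, the
              * backend's scratch temporaries, and sp/gp/tp, which belong
              * to the host ABI (stack, global and thread pointers).
              */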
   1810     s->reserved_regs = 0;
   1811     tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO);
   1812     tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP0);
   1813     tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP1);
   1814     tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP2);
   1815     tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);
   1816     tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP);
   1817     tcg_regset_set_reg(s->reserved_regs, TCG_REG_TP);
   1818 }
   1819 
   1820 typedef struct {
   1821     DebugFrameHeader h;
   1822     uint8_t fde_def_cfa[4];
   1823     uint8_t fde_reg_ofs[ARRAY_SIZE(tcg_target_callee_save_regs) * 2];
   1824 } DebugFrame;
   1825 
   1826 #define ELF_HOST_MACHINE EM_RISCV
   1827 
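         /*
          * Unwind description of the frame built by the prologue.  In
          * .debug_frame encoding, DW_CFA_def_cfa is opcode 12, followed
          * by a register number and a uleb128 offset; FRAME_SIZE is
          * emitted as a two-byte uleb128 (low seven bits with the
          * continuation bit set, then the remainder).  DW_CFA_offset is
          * (0x80 | regno) followed by a uleb128 offset scaled by
          * data_align, so "0x80 + 9, 12" records s1 (x9) saved at
          * CFA - 96 on a 64-bit host.  Unused trailing bytes of
          * fde_reg_ofs stay zero, which is DW_CFA_nop.
          */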
   1828 static const DebugFrame debug_frame = {
   1829     .h.cie.len = sizeof(DebugFrameCIE) - 4, /* length after .len member */
   1830     .h.cie.id = -1,
   1831     .h.cie.version = 1,
   1832     .h.cie.code_align = 1,
   1833     .h.cie.data_align = -(TCG_TARGET_REG_BITS / 8) & 0x7f, /* sleb128 */
   1834     .h.cie.return_column = TCG_REG_RA,
   1835 
   1836     /* Total FDE size does not include the "len" member.  */
   1837     .h.fde.len = sizeof(DebugFrame) - offsetof(DebugFrame, h.fde.cie_offset),
   1838 
   1839     .fde_def_cfa = {
   1840         12, TCG_REG_SP,                 /* DW_CFA_def_cfa sp, ... */
   1841         (FRAME_SIZE & 0x7f) | 0x80,     /* ... uleb128 FRAME_SIZE */
   1842         (FRAME_SIZE >> 7)
   1843     },
   1844     .fde_reg_ofs = {
   1845         0x80 + 9,  12,                  /* DW_CFA_offset, s1,  -96 */
   1846         0x80 + 18, 11,                  /* DW_CFA_offset, s2,  -88 */
   1847         0x80 + 19, 10,                  /* DW_CFA_offset, s3,  -80 */
   1848         0x80 + 20, 9,                   /* DW_CFA_offset, s4,  -72 */
   1849         0x80 + 21, 8,                   /* DW_CFA_offset, s5,  -64 */
   1850         0x80 + 22, 7,                   /* DW_CFA_offset, s6,  -56 */
   1851         0x80 + 23, 6,                   /* DW_CFA_offset, s7,  -48 */
   1852         0x80 + 24, 5,                   /* DW_CFA_offset, s8,  -40 */
   1853         0x80 + 25, 4,                   /* DW_CFA_offset, s9,  -32 */
   1854         0x80 + 26, 3,                   /* DW_CFA_offset, s10, -24 */
   1855         0x80 + 27, 2,                   /* DW_CFA_offset, s11, -16 */
    1856         0x80 + 1,  1,                   /* DW_CFA_offset, ra,  -8 */
   1857     }
   1858 };
   1859 
   1860 void tcg_register_jit(const void *buf, size_t buf_size)
   1861 {
   1862     tcg_register_jit_int(buf, buf_size, &debug_frame, sizeof(debug_frame));
   1863 }