duckstation

duckstation, archived from the revision just before upstream relicensed the project as proprietary software; this version is the libre one
git clone https://git.neptards.moe/u3shit/duckstation.git
Log | Files | Refs | README | LICENSE

cpu_newrec_compiler_aarch64.h (8385B)


      1 // SPDX-FileCopyrightText: 2023 Connor McLaughlin <stenzek@gmail.com>
      2 // SPDX-License-Identifier: (GPL-3.0 OR CC-BY-NC-ND-4.0)
      3 
      4 #pragma once
      5 #include "cpu_newrec_compiler.h"
      6 #include <memory>
      7 
      8 #ifdef CPU_ARCH_ARM64
      9 
     10 #include "vixl/aarch64/assembler-aarch64.h"
     11 
     12 namespace CPU::NewRec {
     13 
     14 class AArch64Compiler final : public Compiler
     15 {
     16 public:
     17   AArch64Compiler();
     18   ~AArch64Compiler() override;
     19 
     20 protected:
     21   const char* GetHostRegName(u32 reg) const override;
     22 
     23   const void* GetCurrentCodePointer() override;
     24 
     25   void LoadHostRegWithConstant(u32 reg, u32 val) override;
     26   void LoadHostRegFromCPUPointer(u32 reg, const void* ptr) override;
     27   void StoreConstantToCPUPointer(u32 val, const void* ptr) override;
     28   void StoreHostRegToCPUPointer(u32 reg, const void* ptr) override;
     29   void CopyHostReg(u32 dst, u32 src) override;
     30 
     31   void Reset(CodeCache::Block* block, u8* code_buffer, u32 code_buffer_space, u8* far_code_buffer,
     32              u32 far_code_space) override;
     33   void BeginBlock() override;
     34   void GenerateBlockProtectCheck(const u8* ram_ptr, const u8* shadow_ptr, u32 size) override;
     35   void GenerateICacheCheckAndUpdate() override;
     36   void GenerateCall(const void* func, s32 arg1reg = -1, s32 arg2reg = -1, s32 arg3reg = -1) override;
     37   void EndBlock(const std::optional<u32>& newpc, bool do_event_test) override;
     38   void EndBlockWithException(Exception excode) override;
     39   void EndAndLinkBlock(const std::optional<u32>& newpc, bool do_event_test, bool force_run_events);
     40   const void* EndCompile(u32* code_size, u32* far_code_size) override;
     41 
     42   void Flush(u32 flags) override;
     43 
     44   void Compile_Fallback() override;
     45 
     46   void CheckBranchTarget(const vixl::aarch64::Register& pcreg);
     47   void Compile_jr(CompileFlags cf) override;
     48   void Compile_jalr(CompileFlags cf) override;
     49   void Compile_bxx(CompileFlags cf, BranchCondition cond) override;
     50 
     51   void Compile_addi(CompileFlags cf, bool overflow);
     52   void Compile_addi(CompileFlags cf) override;
     53   void Compile_addiu(CompileFlags cf) override;
     54   void Compile_slti(CompileFlags cf, bool sign);
     55   void Compile_slti(CompileFlags cf) override;
     56   void Compile_sltiu(CompileFlags cf) override;
     57   void Compile_andi(CompileFlags cf) override;
     58   void Compile_ori(CompileFlags cf) override;
     59   void Compile_xori(CompileFlags cf) override;
     60 
     61   void Compile_shift(CompileFlags cf, void (vixl::aarch64::Assembler::*op)(const vixl::aarch64::Register&,
     62                                                                            const vixl::aarch64::Register&, unsigned));
     63   void Compile_sll(CompileFlags cf) override;
     64   void Compile_srl(CompileFlags cf) override;
     65   void Compile_sra(CompileFlags cf) override;
     66   void Compile_variable_shift(CompileFlags cf,
     67                               void (vixl::aarch64::Assembler::*op)(const vixl::aarch64::Register&,
     68                                                                    const vixl::aarch64::Register&,
     69                                                                    const vixl::aarch64::Register&),
     70                               void (vixl::aarch64::Assembler::*op_const)(const vixl::aarch64::Register&,
     71                                                                          const vixl::aarch64::Register&, unsigned));
     72   void Compile_sllv(CompileFlags cf) override;
     73   void Compile_srlv(CompileFlags cf) override;
     74   void Compile_srav(CompileFlags cf) override;
     75   void Compile_mult(CompileFlags cf, bool sign);
     76   void Compile_mult(CompileFlags cf) override;
     77   void Compile_multu(CompileFlags cf) override;
     78   void Compile_div(CompileFlags cf) override;
     79   void Compile_divu(CompileFlags cf) override;
     80   void TestOverflow(const vixl::aarch64::Register& result);
     81   void Compile_dst_op(CompileFlags cf,
     82                       void (vixl::aarch64::Assembler::*op)(const vixl::aarch64::Register&,
     83                                                            const vixl::aarch64::Register&,
     84                                                            const vixl::aarch64::Operand&),
     85                       bool commutative, bool logical, bool overflow);
     86   void Compile_add(CompileFlags cf) override;
     87   void Compile_addu(CompileFlags cf) override;
     88   void Compile_sub(CompileFlags cf) override;
     89   void Compile_subu(CompileFlags cf) override;
     90   void Compile_and(CompileFlags cf) override;
     91   void Compile_or(CompileFlags cf) override;
     92   void Compile_xor(CompileFlags cf) override;
     93   void Compile_nor(CompileFlags cf) override;
     94   void Compile_slt(CompileFlags cf, bool sign);
     95   void Compile_slt(CompileFlags cf) override;
     96   void Compile_sltu(CompileFlags cf) override;
     97 
     98   vixl::aarch64::Register
     99   ComputeLoadStoreAddressArg(CompileFlags cf, const std::optional<VirtualMemoryAddress>& address,
    100                              const std::optional<const vixl::aarch64::Register>& reg = std::nullopt);
    101   template<typename RegAllocFn>
    102   vixl::aarch64::Register GenerateLoad(const vixl::aarch64::Register& addr_reg, MemoryAccessSize size, bool sign,
    103                                        bool use_fastmem, const RegAllocFn& dst_reg_alloc);
    104   void GenerateStore(const vixl::aarch64::Register& addr_reg, const vixl::aarch64::Register& value_reg,
    105                      MemoryAccessSize size, bool use_fastmem);
    106   void Compile_lxx(CompileFlags cf, MemoryAccessSize size, bool sign, bool use_fastmem,
    107                    const std::optional<VirtualMemoryAddress>& address) override;
    108   void Compile_lwx(CompileFlags cf, MemoryAccessSize size, bool sign, bool use_fastmem,
    109                    const std::optional<VirtualMemoryAddress>& address) override;
    110   void Compile_lwc2(CompileFlags cf, MemoryAccessSize size, bool sign, bool use_fastmem,
    111                     const std::optional<VirtualMemoryAddress>& address) override;
    112   void Compile_sxx(CompileFlags cf, MemoryAccessSize size, bool sign, bool use_fastmem,
    113                    const std::optional<VirtualMemoryAddress>& address) override;
    114   void Compile_swx(CompileFlags cf, MemoryAccessSize size, bool sign, bool use_fastmem,
    115                    const std::optional<VirtualMemoryAddress>& address) override;
    116   void Compile_swc2(CompileFlags cf, MemoryAccessSize size, bool sign, bool use_fastmem,
    117                     const std::optional<VirtualMemoryAddress>& address) override;
    118 
    119   void TestInterrupts(const vixl::aarch64::Register& sr);
    120   void Compile_mtc0(CompileFlags cf) override;
    121   void Compile_rfe(CompileFlags cf) override;
    122 
    123   void Compile_mfc2(CompileFlags cf) override;
    124   void Compile_mtc2(CompileFlags cf) override;
    125   void Compile_cop2(CompileFlags cf) override;
    126 
    127   void GeneratePGXPCallWithMIPSRegs(const void* func, u32 arg1val, Reg arg2reg = Reg::count,
    128                                     Reg arg3reg = Reg::count) override;
    129 
    130 private:
    131   void EmitMov(const vixl::aarch64::Register& dst, u32 val);
    132   void EmitCall(const void* ptr, bool force_inline = false);
    133 
    134   vixl::aarch64::Operand armCheckAddSubConstant(s32 val);
    135   vixl::aarch64::Operand armCheckAddSubConstant(u32 val);
    136   vixl::aarch64::Operand armCheckCompareConstant(s32 val);
    137   vixl::aarch64::Operand armCheckLogicalConstant(u32 val);
    138 
    139   void SwitchToFarCode(bool emit_jump, vixl::aarch64::Condition cond = vixl::aarch64::Condition::al);
    140   void SwitchToFarCodeIfBitSet(const vixl::aarch64::Register& reg, u32 bit);
    141   void SwitchToFarCodeIfRegZeroOrNonZero(const vixl::aarch64::Register& reg, bool nonzero);
    142   void SwitchToNearCode(bool emit_jump, vixl::aarch64::Condition cond = vixl::aarch64::Condition::al);
    143 
    144   void AssertRegOrConstS(CompileFlags cf) const;
    145   void AssertRegOrConstT(CompileFlags cf) const;
    146   vixl::aarch64::MemOperand MipsPtr(Reg r) const;
    147   vixl::aarch64::Register CFGetRegD(CompileFlags cf) const;
    148   vixl::aarch64::Register CFGetRegS(CompileFlags cf) const;
    149   vixl::aarch64::Register CFGetRegT(CompileFlags cf) const;
    150   vixl::aarch64::Register CFGetRegLO(CompileFlags cf) const;
    151   vixl::aarch64::Register CFGetRegHI(CompileFlags cf) const;
    152 
    153   void MoveSToReg(const vixl::aarch64::Register& dst, CompileFlags cf);
    154   void MoveTToReg(const vixl::aarch64::Register& dst, CompileFlags cf);
    155   void MoveMIPSRegToReg(const vixl::aarch64::Register& dst, Reg reg);
    156 
    157   vixl::aarch64::Assembler m_emitter;
    158   vixl::aarch64::Assembler m_far_emitter;
    159   vixl::aarch64::Assembler* armAsm;
    160 
    161 #ifdef VIXL_DEBUG
    162   std::unique_ptr<vixl::CodeBufferCheckScope> m_emitter_check;
    163   std::unique_ptr<vixl::CodeBufferCheckScope> m_far_emitter_check;
    164 #endif
    165 };
    166 
    167 } // namespace CPU::NewRec
    168 
    169 #endif // CPU_ARCH_ARM64