polish up A64 to be ready to accept stuff (but NO-op)
Signed-off-by: lizzie <lizzie@eden-emu.dev>
@@ -30,8 +30,6 @@ struct A32JitState {
 class A32AddressSpace final {
 public:
     explicit A32AddressSpace(const A32::UserConfig& conf);
-    IR::Block GenerateIR(IR::LocationDescriptor) const;
-    CodePtr Get(IR::LocationDescriptor descriptor);
     CodePtr GetOrEmit(IR::LocationDescriptor descriptor);
     void ClearCache();
 private:
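With GenerateIR and Get dropped from the public surface, lookup and compilation funnel through GetOrEmit alone. A stand-alone sketch of that lookup-or-compile pattern (the map, key, and compile callback below are stand-ins for illustration, not dynarmic's internals):

#include <cstdint>
#include <unordered_map>

using CodePtr = const void*;

struct BlockCacheSketch {
    std::unordered_map<std::uint64_t, CodePtr> entries;

    // GetOrEmit shape: return a cached entry point, or compile and cache one.
    template<typename Compile>
    CodePtr GetOrEmit(std::uint64_t key, Compile&& compile) {
        if (auto const it = entries.find(key); it != entries.end())
            return it->second;            // hit: reuse previously emitted code
        CodePtr entry = compile(key);     // miss: translate, optimize, emit
        entries.insert_or_assign(key, entry);
        return entry;
    }
};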
@@ -26,26 +26,16 @@ A32AddressSpace::A32AddressSpace(const A32::UserConfig& conf)
 
 }
 
-IR::Block A32AddressSpace::GenerateIR(IR::LocationDescriptor descriptor) const {
-    IR::Block ir_block = A32::Translate(A32::LocationDescriptor{descriptor}, conf.callbacks, {conf.arch_version, conf.define_unpredictable_behaviour, conf.hook_hint_instructions});
-    Optimization::Optimize(ir_block, conf, {});
-    return ir_block;
-}
-
-CodePtr A32AddressSpace::Get(IR::LocationDescriptor descriptor) {
-    auto const it = block_entries.find(descriptor.Value());
-    return it != block_entries.end() ? it->second : nullptr;
-}
-
-CodePtr A32AddressSpace::GetOrEmit(IR::LocationDescriptor descriptor) {
-    if (CodePtr block_entry = Get(descriptor); block_entry != nullptr)
-        return block_entry;
-
-    IR::Block ir_block = GenerateIR(descriptor);
-    const EmittedBlockInfo block_info = Emit(std::move(ir_block));
-
-    block_infos.insert_or_assign(descriptor.Value(), block_info);
-    block_entries.insert_or_assign(descriptor.Value(), block_info.entry_point);
-    return block_info.entry_point;
-}
+CodePtr A32AddressSpace::GetOrEmit(IR::LocationDescriptor desc) {
+    if (auto const it = block_entries.find(desc.Value()); it != block_entries.end())
+        return it->second;
+
+    IR::Block ir_block = A32::Translate(A32::LocationDescriptor{desc}, conf.callbacks, {conf.arch_version, conf.define_unpredictable_behaviour, conf.hook_hint_instructions});
+    Optimization::Optimize(ir_block, conf, {});
+    const EmittedBlockInfo block_info = Emit(std::move(ir_block));
+
+    block_infos.insert_or_assign(desc.Value(), block_info);
+    block_entries.insert_or_assign(desc.Value(), block_info.entry_point);
+    return block_info.entry_point;
+}
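insert_or_assign rather than emplace matters here: after a cache invalidation the same descriptor is emitted again, and the fresh entry must overwrite the stale one. A minimal sketch of that interaction, with stand-in types:

#include <cstdint>
#include <unordered_map>

struct EntrySketch { const void* entry_point; };

struct CacheSketch {
    std::unordered_map<std::uint64_t, EntrySketch> block_infos;

    void Invalidate(std::uint64_t key) { block_infos.erase(key); }

    void Install(std::uint64_t key, EntrySketch e) {
        block_infos.insert_or_assign(key, e);  // overwrite is intended
    }
};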
@@ -75,19 +65,23 @@ using namespace Dynarmic::Backend::PPC64;
 
 struct Jit::Impl final {
     Impl(Jit* jit_interface, A32::UserConfig conf)
-        : jit_interface(jit_interface)
-        , conf(conf)
+        : conf(conf)
         , current_address_space(conf)
-        , core(conf) {}
+        , core(conf)
+        , jit_interface(jit_interface) {}
 
     HaltReason Run() {
-        HaltReason hr = core.Run(current_address_space, current_state, &halt_reason);
+        ASSERT(!is_executing);
+        is_executing = true;
+        HaltReason hr = core.Run(current_address_space, jit_state, &halt_reason);
+        is_executing = false;
         RequestCacheInvalidation();
         return hr;
     }
 
     HaltReason Step() {
-        RequestCacheInvalidation();
+        // HaltReason hr = core.Step(current_address_space, jit_state, &halt_reason);
+        // RequestCacheInvalidation();
         return HaltReason{};
     }
 
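Run() now brackets execution with an is_executing guard. A sketch of the guard pattern, assuming Run is only entered from one thread at a time (all types below are stand-ins):

#include <cassert>

struct CoreSketch {
    int Run() { return 0; }  // stands in for core.Run(...)
};

struct ImplSketch {
    CoreSketch core;
    bool is_executing = false;

    int Run() {
        assert(!is_executing);  // re-entering Run() is a bug
        is_executing = true;
        int hr = core.Run();    // execute emitted code until a halt
        is_executing = false;
        return hr;              // cache maintenance would follow here
    }
};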
@@ -104,7 +98,7 @@ struct Jit::Impl final {
     }
 
     void Reset() {
-        current_state = {};
+        jit_state = {};
     }
 
     void HaltExecution(HaltReason hr) {
@@ -116,39 +110,39 @@ struct Jit::Impl final {
     }
 
     std::array<u32, 16>& Regs() {
-        return current_state.regs;
+        return jit_state.regs;
     }
 
     const std::array<u32, 16>& Regs() const {
-        return current_state.regs;
+        return jit_state.regs;
     }
 
     std::array<u32, 64>& ExtRegs() {
-        return current_state.ext_regs;
+        return jit_state.ext_regs;
     }
 
     const std::array<u32, 64>& ExtRegs() const {
-        return current_state.ext_regs;
+        return jit_state.ext_regs;
     }
 
     u32 Cpsr() const {
-        return current_state.cpsr_nzcv;
+        return jit_state.cpsr_nzcv;
     }
 
     void SetCpsr(u32 value) {
-        current_state.cpsr_nzcv = value;
+        jit_state.cpsr_nzcv = value;
     }
 
     u32 Fpscr() const {
-        return current_state.fpscr;
+        return jit_state.fpscr;
     }
 
     void SetFpscr(u32 value) {
-        current_state.fpscr = value;
+        jit_state.fpscr = value;
     }
 
     void ClearExclusiveState() {
-        current_state.exclusive_state = false;
+        jit_state.exclusive_state = false;
     }
 
 private:
@@ -158,15 +152,17 @@ private:
         invalid_cache_ranges.clear();
     }
 
-    Jit* jit_interface;
     A32::UserConfig conf;
-    A32JitState current_state{};
+    A32JitState jit_state{};
     A32AddressSpace current_address_space;
     A32Core core;
-    std::mutex invalidation_mutex;
+    Jit* jit_interface;
+    volatile u32 halt_reason = 0;
     bool is_executing = false;
 
     boost::icl::interval_set<u32> invalid_cache_ranges;
     bool invalidate_entire_cache = false;
+    std::mutex invalidation_mutex;
 };
 
 Jit::Jit(UserConfig conf) : impl(std::make_unique<Impl>(this, conf)) {}
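The initializer-list reorder is not cosmetic: C++ constructs members in declaration order, regardless of the order the initializer list spells out, so listing jit_interface last matches the member list above and lets later members safely read earlier ones. Illustrative types only:

struct InitOrderSketch {
    int conf;            // declared (and therefore constructed) first
    int address_space;   // may depend on conf
    int core;            // may depend on conf
    int* jit_interface;  // constructed last

    InitOrderSketch(int* jit, int c)
        : conf(c)
        , address_space(conf)  // safe: conf is already initialized
        , core(conf)
        , jit_interface(jit) {}
};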
@@ -37,12 +37,10 @@ struct A64JitState {
 class A64AddressSpace final {
 public:
     explicit A64AddressSpace(const A64::UserConfig& conf);
-    CodePtr Get(IR::LocationDescriptor descriptor);
     CodePtr GetOrEmit(IR::LocationDescriptor descriptor);
     void ClearCache();
 private:
     friend class A64Core;
 
     void EmitPrelude();
     EmittedBlockInfo Emit(IR::Block ir_block);
     void Link(EmittedBlockInfo& block);
@@ -57,7 +55,6 @@ private:
 class A64Core final {
 public:
     explicit A64Core(const A64::UserConfig&) {}
 
     HaltReason Run(A64AddressSpace& process, A64JitState& thread_ctx, volatile u32* halt_reason) {
         const auto loc = thread_ctx.GetLocationDescriptor();
         const auto entry = process.GetOrEmit(loc);
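A64Core::Run resolves the current guest location to an entry point and calls into it. A stand-alone sketch of that dispatch step (the function-pointer entry and stub types are assumptions, not the backend's real calling convention):

#include <cstdint>

using EntryFn = int (*)();

static int StubBlock() { return 0; }  // stands in for emitted host code

struct AddressSpaceSketch {
    EntryFn GetOrEmit(std::uint64_t) { return &StubBlock; }
};

// Shape of the dispatch: map the guest location to an entry point, call it.
int RunOnce(AddressSpaceSketch& process, std::uint64_t loc) {
    EntryFn entry = process.GetOrEmit(loc);
    return entry();
}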
@@ -25,15 +25,9 @@ A64AddressSpace::A64AddressSpace(const A64::UserConfig& conf)
 
 }
 
-CodePtr A64AddressSpace::Get(IR::LocationDescriptor descriptor) {
-    if (auto const iter = block_entries.find(descriptor.Value()); iter != block_entries.end())
-        return iter->second;
-    return nullptr;
-}
-
 CodePtr A64AddressSpace::GetOrEmit(IR::LocationDescriptor desc) {
-    if (CodePtr block_entry = Get(desc))
-        return block_entry;
+    if (auto const it = block_entries.find(desc.Value()); it != block_entries.end())
+        return it->second;
 
     const auto get_code = [this](u64 vaddr) {
         return conf.callbacks->MemoryReadCode(vaddr);
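The get_code lambda adapts the user's memory callback to what the translator expects. A sketch of the shape, with the return type simplified to u32 (the real callback signature may differ):

#include <cstdint>
#include <functional>

struct CallbacksSketch {
    virtual std::uint32_t MemoryReadCode(std::uint64_t vaddr) = 0;
    virtual ~CallbacksSketch() = default;
};

std::function<std::uint32_t(std::uint64_t)> MakeGetCode(CallbacksSketch* cb) {
    // Same shape as the get_code lambda: capture the owner, forward the vaddr.
    return [cb](std::uint64_t vaddr) { return cb->MemoryReadCode(vaddr); };
}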
@@ -74,28 +68,23 @@ using namespace Dynarmic::Backend::PPC64;
 struct Jit::Impl final {
     Impl(Jit* jit_interface, A64::UserConfig conf)
         : conf(conf)
-        , emitter(conf) {}
+        , current_address_space(conf)
+        , core(conf)
+        , jit_interface(jit_interface) {}
 
     HaltReason Run() {
         ASSERT(!is_executing);
-        //PerformRequestedCacheInvalidation(HaltReason(Atomic::Load(&jit_state.halt_reason)));
         is_executing = true;
-        auto const current_loc = jit_state.GetLocationDescriptor();
-        const HaltReason hr = {};//block_of_code.RunCode(&jit_state, jit_state.GetOrEmit(current_loc));
-        //PerformRequestedCacheInvalidation(hr);
+        HaltReason hr = core.Run(current_address_space, jit_state, &halt_reason);
         is_executing = false;
+        RequestCacheInvalidation();
         return hr;
     }
 
     HaltReason Step() {
-        ASSERT(!is_executing);
-        // //PerformRequestedCacheInvalidation(HaltReason(Atomic::Load(&jit_state.halt_reason)));
-        // is_executing = true;
-        // //const HaltReason hr = block_of_code.StepCode(&jit_state, GetCurrentSingleStep());
-        // //PerformRequestedCacheInvalidation(hr);
-        // is_executing = false;
-        // return hr;
-        return {};
+        // HaltReason hr = core.Step(current_address_space, jit_state, &halt_reason);
+        // RequestCacheInvalidation();
+        return HaltReason{};
     }
 
     void ClearCache() {
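halt_reason is a volatile u32 written by other threads (via HaltExecution) and polled by the core loop. A sketch of the same handshake expressed with std::atomic, the more conventional way to get the needed ordering guarantees (names are stand-ins):

#include <atomic>
#include <cstdint>

struct HaltFlagSketch {
    std::atomic<std::uint32_t> reason{0};

    // Called from any thread: merge in a halt reason bit.
    void Halt(std::uint32_t r) { reason.fetch_or(r, std::memory_order_release); }

    // Polled by the core loop: consume and clear all pending reasons.
    std::uint32_t Poll() { return reason.exchange(0, std::memory_order_acquire); }
};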
@@ -225,13 +214,16 @@ private:
         invalid_cache_ranges.clear();
     }
 
-    const UserConfig conf;
-    A64JitState jit_state;
-    A64AddressSpace emitter;
-    Optimization::PolyfillOptions polyfill_options;
+    A64::UserConfig conf;
+    A64JitState jit_state{};
+    A64AddressSpace current_address_space;
+    A64Core core;
+    Jit* jit_interface;
+    volatile u32 halt_reason = 0;
+    bool is_executing = false;
 
     boost::icl::interval_set<u64> invalid_cache_ranges;
+    bool invalidate_entire_cache = false;
     std::mutex invalidation_mutex;
 };
@@ -124,6 +124,8 @@ EmittedBlockInfo EmitPPC64(powah::Context& code, IR::Block block, const EmitConf
         }
     }
 
+    // auto const cycles_to_add = block.CycleCount();
+    // Xticks
     for (size_t i = 0; i < gpr_order.size(); ++i)
         code.LD(powah::GPR{gpr_order[i]}, powah::R1, -(i * 8));
     code.BLR();
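The commented-out CycleCount/Xticks lines point at planned cycle accounting: each block would charge its guest-cycle cost against a tick budget at block exit. A hedged host-side sketch of that idea (all names below are assumptions, not the commit's):

#include <cstdint>

struct TickStateSketch {
    std::int64_t ticks_remaining;
};

// Host-side equivalent of what the emitted epilogue would do per block:
// subtract the block's guest-cycle cost and stop when the budget runs out.
inline bool ConsumeCycles(TickStateSketch& st, std::uint64_t cycles_to_add) {
    st.ticks_remaining -= static_cast<std::int64_t>(cycles_to_add);
    return st.ticks_remaining > 0;  // false: return to dispatcher / halt
}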
@@ -104,7 +104,7 @@ void EmitIR<IR::Opcode::A32GetVector>(powah::Context&, EmitContext&, IR::Inst*)
 
 template<>
 void EmitIR<IR::Opcode::A32SetRegister>(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
-    powah::GPR const value = ctx.reg_alloc.UseGpr(args[1]);
+    powah::GPR const value = ctx.reg_alloc.UseGpr(inst->GetArg(1));
     if (inst->GetArg(0).GetType() == IR::Type::A32Reg) {
         powah::GPR const addr = ctx.reg_alloc.ScratchGpr();
         code.ADDI(addr, PPC64::RJIT, A32::RegNumber(inst->GetArg(0).GetA32RegRef()) * sizeof(u32));
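A32SetRegister computes the address of the guest register slot inside A32JitState (base register RJIT plus a constant offset) and stores the value through it. Plain-C++ equivalent of that store, with a stand-in state struct:

#include <array>
#include <cstddef>
#include <cstdint>

struct A32StateSketch {
    std::array<std::uint32_t, 16> regs;  // guest R0..R15
};

// RJIT holds &state; code.ADDI forms &state->regs[n]; the store goes through it.
inline void SetRegisterSketch(A32StateSketch* state, std::size_t n, std::uint32_t value) {
    state->regs[n] = value;
}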
@@ -133,7 +133,7 @@ uint64_t f(jit *p, uint64_t a, uint64_t b) {
 static powah::GPR EmitConditionalSelectX(powah::Context& code, EmitContext& ctx, IR::Inst* inst) {
     powah::GPR const nzcv = ctx.reg_alloc.ScratchGpr();
     powah::GPR const then_ = ctx.reg_alloc.UseGpr(inst->GetArg(1));
-    powah::GPR const else_ = ctx.reg_alloc.UseGpr(args[2]);
+    powah::GPR const else_ = ctx.reg_alloc.UseGpr(inst->GetArg(2));
     switch (inst->GetArg(0).GetCond()) {
     case IR::Cond::EQ: // Z == 1
         code.LD(nzcv, PPC64::RJIT, offsetof(A32JitState, cpsr_nzcv));
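EmitConditionalSelectX loads cpsr_nzcv and selects between then_ and else_ based on the guest condition. Scalar equivalent for Cond::EQ (treating bit 30 as Z follows the ARM NZCV layout; the backend's exact packing of cpsr_nzcv is an assumption here):

#include <cstdint>

inline std::uint64_t CondSelectEQSketch(std::uint32_t cpsr_nzcv,
                                        std::uint64_t then_, std::uint64_t else_) {
    const bool z = (cpsr_nzcv >> 30) & 1;  // Z flag
    return z ? then_ : else_;              // Cond::EQ means Z == 1
}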
@@ -170,7 +170,6 @@ powah::GPR RegAlloc::UseGpr(IR::Value arg) {
         }
         return reg;
     } else {
-        ASSERT(arg.allocated && "undefined (non-imm) arg");
         auto const loc = ValueLocation(arg.GetInst());
         ASSERT(loc && HostLocIsGpr(*loc));
         return std::get<powah::GPR>(HostLocToReg(*loc));
@@ -184,8 +183,6 @@ void RegAlloc::DefineValue(IR::Inst* inst, powah::GPR const gpr) noexcept {
 
 void RegAlloc::DefineValue(IR::Inst* inst, IR::Value arg) noexcept {
     ASSERT(!ValueLocation(inst) && "inst has already been defined");
-    ASSERT(!arg.allocated);
-    arg.allocated = true;
     if (arg.IsImmediate()) {
         HostLoc const loc{u8(ScratchGpr().index)};
         ValueInfo(loc).values.push_back(inst);
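With the arg.allocated bookkeeping gone, the single-definition invariant rests on the ValueLocation assert alone. A simplified sketch of that invariant, with locations reduced to a plain map (stand-in types, not the allocator's real HostLoc machinery):

#include <cassert>
#include <cstdint>
#include <optional>
#include <unordered_map>

struct RegAllocSketch {
    std::unordered_map<const void*, std::uint8_t> locs;  // inst -> host GPR index

    std::optional<std::uint8_t> ValueLocation(const void* inst) const {
        if (auto const it = locs.find(inst); it != locs.end())
            return it->second;
        return std::nullopt;
    }

    void DefineValue(const void* inst, std::uint8_t gpr) {
        assert(!ValueLocation(inst) && "inst has already been defined");
        locs.emplace(inst, gpr);
    }
};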
@@ -47,11 +47,7 @@ struct HostLocInfo final {
 
 class RegAlloc {
 public:
-    using ArgumentInfo = std::array<Argument, IR::max_arg_count>;
-
     explicit RegAlloc(powah::Context& code) : code{code} {}
 
-    ArgumentInfo GetArgumentInfo(IR::Inst* inst);
-    bool IsValueLive(IR::Inst* inst) const;
     void DefineAsExisting(IR::Inst* inst, IR::Value arg);