glasm: Implement storage atomic ops
This commit is contained in:
parent ad61b47f80
commit 80813b1d14
@@ -149,6 +149,18 @@ void EmitInst(EmitContext& ctx, IR::Inst* inst) {
    }
    throw LogicError("Invalid opcode {}", inst->GetOpcode());
}

void SetupOptions(std::string& header, Info info) {
    if (info.uses_int64_bit_atomics) {
        header += "OPTION NV_shader_atomic_int64;";
    }
    if (info.uses_atomic_f32_add) {
        header += "OPTION NV_shader_atomic_float;";
    }
    if (info.uses_atomic_f16x2_add || info.uses_atomic_f16x2_min || info.uses_atomic_f16x2_max) {
        header += "OPTION NV_shader_atomic_fp16_vector;";
    }
}
} // Anonymous namespace

std::string EmitGLASM(const Profile&, IR::Program& program, Bindings&) {
@@ -160,6 +172,7 @@ std::string EmitGLASM(const Profile&, IR::Program& program, Bindings&) {
    }
    std::string header = "!!NVcp5.0\n"
                         "OPTION NV_internal;";
    SetupOptions(header, program.info);
    switch (program.stage) {
    case Stage::Compute:
        header += fmt::format("GROUP_SIZE {} {} {};", program.workgroup_size[0],
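Note (not part of the diff): a minimal standalone sketch of how the OPTION directives appended by SetupOptions compose with the "!!NVcp5.0" prologue and the GROUP_SIZE line for a compute program. The two Info flags and the 8x8x1 workgroup size are assumed example values, not taken from the commit.

#include <string>
#include <fmt/format.h>

int main() {
    std::string header = "!!NVcp5.0\n"
                         "OPTION NV_internal;";
    // Assumed example flags; the real code reads these from program.info
    const bool uses_int64_bit_atomics = true;
    const bool uses_atomic_f32_add = true;
    if (uses_int64_bit_atomics) {
        header += "OPTION NV_shader_atomic_int64;";
    }
    if (uses_atomic_f32_add) {
        header += "OPTION NV_shader_atomic_float;";
    }
    header += fmt::format("GROUP_SIZE {} {} {};", 8, 8, 1); // assumed workgroup size
    fmt::print("{}\n", header);
    // Prints:
    // !!NVcp5.0
    // OPTION NV_internal;OPTION NV_shader_atomic_int64;OPTION NV_shader_atomic_float;GROUP_SIZE 8 8 1;
}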
@@ -0,0 +1,291 @@
// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include "shader_recompiler/backend/glasm/emit_context.h"
#include "shader_recompiler/backend/glasm/emit_glasm_instructions.h"
#include "shader_recompiler/frontend/ir/value.h"

namespace Shader::Backend::GLASM {
namespace {
void StorageOp(EmitContext& ctx, const IR::Value& binding, ScalarU32 offset,
               std::string_view then_expr, std::string_view else_expr = {}) {
    // Operate on bindless SSBO, call the expression with bounds checking
    // address = c[binding].xy
    // length = c[binding].z
    const u32 sb_binding{binding.U32()};
    ctx.Add("PK64.U DC,c[{}];"           // pointer = address
            "CVT.U64.U32 DC.z,{};"       // offset = uint64_t(offset)
            "ADD.U64 DC.x,DC.x,DC.z;"    // pointer += offset
            "SLT.U.CC RC.x,{},c[{}].z;", // cc = offset < length
            sb_binding, offset, offset, sb_binding);
    if (else_expr.empty()) {
        ctx.Add("IF NE.x;{}ENDIF;", then_expr);
    } else {
        ctx.Add("IF NE.x;{}ELSE;{}ENDIF;", then_expr, else_expr);
    }
}

template <typename ValueType>
void Atom(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding, ScalarU32 offset,
          ValueType value, std::string_view operation, std::string_view size) {
    const Register ret{ctx.reg_alloc.Define(inst)};
    StorageOp(ctx, binding, offset,
              fmt::format("ATOM.{}.{} {},{},DC.x;", operation, size, ret, value));
}
} // namespace
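For reference only (not part of the commit): a standalone sketch of the GLASM text that StorageOp plus Atom produce for a 32-bit ATOM.ADD, assuming binding 0 and placeholder register names R0 (offset), R1 (result), R2 (value); in the real backend these come from EmitContext and its register allocator.

#include <string>
#include <fmt/format.h>

int main() {
    const int sb_binding = 0;    // assumed SSBO descriptor: c[0].xy = address, c[0].z = length
    const char* offset = "R0";   // assumed register holding the byte offset
    const char* ret = "R1";      // assumed destination register
    const char* value = "R2";    // assumed register holding the operand
    const std::string then_expr = fmt::format("ATOM.ADD.U32 {},{},DC.x;", ret, value);
    std::string code = fmt::format("PK64.U DC,c[{}];"           // pointer = address
                                   "CVT.U64.U32 DC.z,{};"       // offset = uint64_t(offset)
                                   "ADD.U64 DC.x,DC.x,DC.z;"    // pointer += offset
                                   "SLT.U.CC RC.x,{},c[{}].z;", // cc = offset < length
                                   sb_binding, offset, offset, sb_binding);
    code += fmt::format("IF NE.x;{}ENDIF;", then_expr); // bounds-checked atomic
    fmt::print("{}\n", code);
    // Prints:
    // PK64.U DC,c[0];CVT.U64.U32 DC.z,R0;ADD.U64 DC.x,DC.x,DC.z;SLT.U.CC RC.x,R0,c[0].z;IF NE.x;ATOM.ADD.U32 R1,R2,DC.x;ENDIF;
}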

void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarU32 value) {
    Atom(ctx, inst, binding, offset, value, "ADD", "U32");
}

void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarS32 value) {
    Atom(ctx, inst, binding, offset, value, "MIN", "S32");
}

void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarU32 value) {
    Atom(ctx, inst, binding, offset, value, "MIN", "U32");
}

void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarS32 value) {
    Atom(ctx, inst, binding, offset, value, "MAX", "S32");
}

void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarU32 value) {
    Atom(ctx, inst, binding, offset, value, "MAX", "U32");
}

void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, ScalarU32 value) {
    Atom(ctx, inst, binding, offset, value, "IWRAP", "U32");
}

void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, ScalarU32 value) {
    Atom(ctx, inst, binding, offset, value, "DWRAP", "U32");
}

void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, ScalarU32 value) {
    Atom(ctx, inst, binding, offset, value, "AND", "U32");
}

void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                           ScalarU32 offset, ScalarU32 value) {
    Atom(ctx, inst, binding, offset, value, "OR", "U32");
}

void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, ScalarU32 value) {
    Atom(ctx, inst, binding, offset, value, "XOR", "U32");
}

void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                 ScalarU32 offset, ScalarU32 value) {
    Atom(ctx, inst, binding, offset, value, "EXCH", "U32");
}

void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "ADD", "U64");
}

void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "MIN", "S64");
}

void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "MIN", "U64");
}

void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "MAX", "S64");
}

void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "MAX", "U64");
}

void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "AND", "U64");
}

void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                           ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "OR", "U64");
}

void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "XOR", "U64");
}

void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                 ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "EXCH", "U64");
}

void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarF32 value) {
    Atom(ctx, inst, binding, offset, value, "ADD", "F32");
}

void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "ADD", "F16x2");
}

void EmitStorageAtomicAddF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
                               [[maybe_unused]] const IR::Value& binding,
                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
    throw NotImplementedException("GLASM instruction");
}

void EmitStorageAtomicMinF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "MIN", "F16x2");
}

void EmitStorageAtomicMinF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
                               [[maybe_unused]] const IR::Value& binding,
                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
    throw NotImplementedException("GLASM instruction");
}

void EmitStorageAtomicMaxF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               ScalarU32 offset, Register value) {
    Atom(ctx, inst, binding, offset, value, "MAX", "F16x2");
}

void EmitStorageAtomicMaxF32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
                               [[maybe_unused]] const IR::Value& binding,
                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicIAdd32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicSMin32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicUMin32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicSMax32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicUMax32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicInc32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicDec32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicAnd32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicOr32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicXor32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicExchange32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicIAdd64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicSMin64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicUMin64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicSMax64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicUMax64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicInc64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicDec64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicAnd64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicOr64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicXor64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicExchange64(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicAddF32(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicAddF16x2(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicAddF32x2(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicMinF16x2(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicMinF32x2(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicMaxF16x2(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}

void EmitGlobalAtomicMaxF32x2(EmitContext&) {
    throw NotImplementedException("GLASM instruction");
}
} // namespace Shader::Backend::GLASM

@@ -356,60 +356,60 @@ void EmitSharedAtomicOr32(EmitContext& ctx, ScalarU32 pointer_offset, ScalarU32
void EmitSharedAtomicXor32(EmitContext& ctx, ScalarU32 pointer_offset, ScalarU32 value);
void EmitSharedAtomicExchange32(EmitContext& ctx, ScalarU32 pointer_offset, ScalarU32 value);
void EmitSharedAtomicExchange64(EmitContext& ctx, ScalarU32 pointer_offset, Register value);
void EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarU32 value);
void EmitStorageAtomicSMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarS32 value);
void EmitStorageAtomicUMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarU32 value);
void EmitStorageAtomicSMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarS32 value);
void EmitStorageAtomicUMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarU32 value);
void EmitStorageAtomicInc32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            ScalarU32 value);
void EmitStorageAtomicDec32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            ScalarU32 value);
void EmitStorageAtomicAnd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            ScalarU32 value);
void EmitStorageAtomicOr32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           ScalarU32 value);
void EmitStorageAtomicXor32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            ScalarU32 value);
void EmitStorageAtomicExchange32(EmitContext& ctx, const IR::Value& binding,
                                 const IR::Value& offset, ScalarU32 value);
void EmitStorageAtomicIAdd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value);
void EmitStorageAtomicSMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value);
void EmitStorageAtomicUMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value);
void EmitStorageAtomicSMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value);
void EmitStorageAtomicUMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value);
void EmitStorageAtomicAnd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Register value);
void EmitStorageAtomicOr64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Register value);
void EmitStorageAtomicXor64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Register value);
void EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding,
                                 const IR::Value& offset, Register value);
void EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarF32 value);
void EmitStorageAtomicAddF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value);
void EmitStorageAtomicAddF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value);
void EmitStorageAtomicMinF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value);
void EmitStorageAtomicMinF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value);
void EmitStorageAtomicMaxF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value);
void EmitStorageAtomicMaxF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value);
void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarU32 value);
void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarS32 value);
void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarU32 value);
void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarS32 value);
void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarU32 value);
void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, ScalarU32 value);
void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, ScalarU32 value);
void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, ScalarU32 value);
void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                           ScalarU32 offset, ScalarU32 value);
void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, ScalarU32 value);
void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                 ScalarU32 offset, ScalarU32 value);
void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value);
void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value);
void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value);
void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value);
void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, Register value);
void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, Register value);
void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                           ScalarU32 offset, Register value);
void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            ScalarU32 offset, Register value);
void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                 ScalarU32 offset, Register value);
void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             ScalarU32 offset, ScalarF32 value);
void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               ScalarU32 offset, Register value);
void EmitStorageAtomicAddF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               ScalarU32 offset, Register value);
void EmitStorageAtomicMinF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               ScalarU32 offset, Register value);
void EmitStorageAtomicMinF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               ScalarU32 offset, Register value);
void EmitStorageAtomicMaxF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               ScalarU32 offset, Register value);
void EmitStorageAtomicMaxF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               ScalarU32 offset, Register value);
void EmitGlobalAtomicIAdd32(EmitContext& ctx);
void EmitGlobalAtomicSMin32(EmitContext& ctx);
void EmitGlobalAtomicUMin32(EmitContext& ctx);

@@ -321,257 +321,6 @@ void EmitSharedAtomicExchange64(EmitContext& ctx, ScalarU32 pointer_offset, Regi
    NotImplemented();
}

void EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarU32 value) {
    NotImplemented();
}

void EmitStorageAtomicSMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarS32 value) {
    NotImplemented();
}

void EmitStorageAtomicUMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarU32 value) {
    NotImplemented();
}

void EmitStorageAtomicSMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarS32 value) {
    NotImplemented();
}

void EmitStorageAtomicUMax32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarU32 value) {
    NotImplemented();
}

void EmitStorageAtomicInc32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            ScalarU32 value) {
    NotImplemented();
}

void EmitStorageAtomicDec32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            ScalarU32 value) {
    NotImplemented();
}

void EmitStorageAtomicAnd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            ScalarU32 value) {
    NotImplemented();
}

void EmitStorageAtomicOr32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           ScalarU32 value) {
    NotImplemented();
}

void EmitStorageAtomicXor32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            ScalarU32 value) {
    NotImplemented();
}

void EmitStorageAtomicExchange32(EmitContext& ctx, const IR::Value& binding,
                                 const IR::Value& offset, ScalarU32 value) {
    NotImplemented();
}

void EmitStorageAtomicIAdd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value) {
    NotImplemented();
}

void EmitStorageAtomicSMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value) {
    NotImplemented();
}

void EmitStorageAtomicUMin64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value) {
    NotImplemented();
}

void EmitStorageAtomicSMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value) {
    NotImplemented();
}

void EmitStorageAtomicUMax64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             Register value) {
    NotImplemented();
}

void EmitStorageAtomicAnd64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Register value) {
    NotImplemented();
}

void EmitStorageAtomicOr64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                           Register value) {
    NotImplemented();
}

void EmitStorageAtomicXor64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Register value) {
    NotImplemented();
}

void EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding,
                                 const IR::Value& offset, Register value) {
    NotImplemented();
}

void EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                             ScalarF32 value) {
    NotImplemented();
}

void EmitStorageAtomicAddF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value) {
    NotImplemented();
}

void EmitStorageAtomicAddF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value) {
    NotImplemented();
}

void EmitStorageAtomicMinF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value) {
    NotImplemented();
}

void EmitStorageAtomicMinF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value) {
    NotImplemented();
}

void EmitStorageAtomicMaxF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value) {
    NotImplemented();
}

void EmitStorageAtomicMaxF32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                               Register value) {
    NotImplemented();
}

void EmitGlobalAtomicIAdd32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicSMin32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicUMin32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicSMax32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicUMax32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicInc32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicDec32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicAnd32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicOr32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicXor32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicExchange32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicIAdd64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicSMin64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicUMin64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicSMax64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicUMax64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicInc64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicDec64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicAnd64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicOr64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicXor64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicExchange64(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicAddF32(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicAddF16x2(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicAddF32x2(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicMinF16x2(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicMinF32x2(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicMaxF16x2(EmitContext& ctx) {
    NotImplemented();
}

void EmitGlobalAtomicMaxF32x2(EmitContext& ctx) {
    NotImplemented();
}

void EmitLogicalOr(EmitContext& ctx, IR::Inst& inst, ScalarS32 a, ScalarS32 b) {
    ctx.Add("OR.S {},{},{};", inst, a, b);
}