// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.
|
#include <string_view>
|
|
|
|
|
|
|
|
#include "shader_recompiler/backend/glsl/emit_context.h"
|
2021-05-29 06:09:29 +00:00
|
|
|
#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
|
2021-05-24 22:35:37 +00:00
|
|
|
#include "shader_recompiler/frontend/ir/value.h"
|
|
|
|
|
|
|
|
namespace Shader::Backend::GLSL {
|
|
|
|
namespace {
|
2021-06-04 02:25:06 +00:00
|
|
|
// Generic compare-and-swap loop used to emulate atomics GLSL has no native builtin for.
// Expanded via fmt with (mem, ret, mem, func, mem, value, ret) as:
//   for (;;){uint old=<mem>;<ret>=atomicCompSwap(<mem>,old,<func>(<mem>,<value>));
//            if(<ret>==old){break;}}
// i.e. retry until the word was not modified between the read and the swap.
constexpr char cas_loop[]{
    "for (;;){{uint old={};{}=atomicCompSwap({},old,{}({},{}));if({}==old){{break;}}}}"};
|
2021-05-24 22:35:37 +00:00
|
|
|
|
2021-05-30 18:31:59 +00:00
|
|
|
// Emits a CAS loop applying the GLSL helper `function` to a shared memory word and
// binds the loop's result to `inst`'s variable.
void SharedCasFunction(EmitContext& ctx, IR::Inst& inst, std::string_view offset,
                       std::string_view value, std::string_view function) {
    // The emulated atomic produces a 32-bit unsigned result.
    const auto result{ctx.var_alloc.Define(inst, GlslVarType::U32)};
    // `offset` is in bytes; >>2 converts it to a u32 element index into smem.
    const std::string target{fmt::format("smem[{}>>2]", offset)};
    ctx.Add(cas_loop, target, result, target, function, target, value, result);
}
|
|
|
|
|
|
|
|
// Emits a CAS loop applying the GLSL helper `function` to a storage buffer word and
// binds the loop's result to `inst`'s variable.
void SsboCasFunction(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                     const IR::Value& offset, std::string_view value, std::string_view function) {
    // The emulated atomic produces a 32-bit unsigned result.
    const auto result{ctx.var_alloc.Define(inst, GlslVarType::U32)};
    // SSBO is addressed as u32 words: byte offset converted with >>2.
    const std::string target{fmt::format("{}_ssbo{}[{}>>2]", ctx.stage_name, binding.U32(),
                                         ctx.var_alloc.Consume(offset))};
    ctx.Add(cas_loop, target, result, target, function, target, value, result);
}
|
|
|
|
|
2021-05-30 18:31:59 +00:00
|
|
|
// Float variant of SsboCasFunction: runs the CAS loop on the raw u32 word, then
// converts the result to float with utof for the instruction's value.
void SsboCasFunctionF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                        const IR::Value& offset, std::string_view value,
                        std::string_view function) {
    // SSBO is addressed as u32 words: byte offset converted with >>2.
    const std::string target{fmt::format("{}_ssbo{}[{}>>2]", ctx.stage_name, binding.U32(),
                                         ctx.var_alloc.Consume(offset))};
    // Intermediate u32 holding the raw CAS result.
    const auto raw_result{ctx.var_alloc.Define(inst, GlslVarType::U32)};
    ctx.Add(cas_loop, target, raw_result, target, function, target, value, raw_result);
    // Reinterpret the raw bits as the float result callers expect.
    ctx.AddF32("{}=utof({});", inst, raw_result);
}
|
2021-06-04 00:57:52 +00:00
|
|
|
} // Anonymous namespace
|
2021-05-24 22:35:37 +00:00
|
|
|
|
2021-05-30 18:31:59 +00:00
|
|
|
// Shared memory 32-bit add, mapped directly to the native GLSL atomicAdd builtin.
// pointer_offset is a byte offset; >>2 converts it to a u32 index into smem.
void EmitSharedAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                            std::string_view value) {
    ctx.AddU32("{}=atomicAdd(smem[{}>>2],{});", inst, pointer_offset, value);
}
|
|
|
|
|
|
|
|
// Shared memory signed 32-bit min. Emulated through the CasMinS32 helper inside a CAS
// loop; the operand is reinterpreted as uint so it can be stored in the u32 CAS word.
void EmitSharedAtomicSMin32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                            std::string_view value) {
    SharedCasFunction(ctx, inst, pointer_offset, fmt::format("uint({})", value), "CasMinS32");
}
|
|
|
|
|
|
|
|
// Shared memory unsigned 32-bit min, mapped directly to the native GLSL atomicMin.
void EmitSharedAtomicUMin32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                            std::string_view value) {
    ctx.AddU32("{}=atomicMin(smem[{}>>2],{});", inst, pointer_offset, value);
}
|
|
|
|
|
|
|
|
// Shared memory signed 32-bit max. Emulated through the CasMaxS32 helper inside a CAS
// loop; the operand is reinterpreted as uint so it can be stored in the u32 CAS word.
void EmitSharedAtomicSMax32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                            std::string_view value) {
    SharedCasFunction(ctx, inst, pointer_offset, fmt::format("uint({})", value), "CasMaxS32");
}
|
|
|
|
|
|
|
|
// Shared memory unsigned 32-bit max, mapped directly to the native GLSL atomicMax.
void EmitSharedAtomicUMax32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                            std::string_view value) {
    ctx.AddU32("{}=atomicMax(smem[{}>>2],{});", inst, pointer_offset, value);
}

// Wrapped increment (atomicInc semantics); no native GLSL builtin, emulated via CAS.
void EmitSharedAtomicInc32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                           std::string_view value) {
    SharedCasFunction(ctx, inst, pointer_offset, value, "CasIncrement");
}

// Wrapped decrement (atomicDec semantics); no native GLSL builtin, emulated via CAS.
void EmitSharedAtomicDec32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                           std::string_view value) {
    SharedCasFunction(ctx, inst, pointer_offset, value, "CasDecrement");
}

// Shared memory bitwise AND, mapped directly to the native GLSL atomicAnd.
void EmitSharedAtomicAnd32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                           std::string_view value) {
    ctx.AddU32("{}=atomicAnd(smem[{}>>2],{});", inst, pointer_offset, value);
}

// Shared memory bitwise OR, mapped directly to the native GLSL atomicOr.
void EmitSharedAtomicOr32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                          std::string_view value) {
    ctx.AddU32("{}=atomicOr(smem[{}>>2],{});", inst, pointer_offset, value);
}

// Shared memory bitwise XOR, mapped directly to the native GLSL atomicXor.
void EmitSharedAtomicXor32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                           std::string_view value) {
    ctx.AddU32("{}=atomicXor(smem[{}>>2],{});", inst, pointer_offset, value);
}

// Shared memory 32-bit exchange, mapped directly to the native GLSL atomicExchange.
void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                                std::string_view value) {
    ctx.AddU32("{}=atomicExchange(smem[{}>>2],{});", inst, pointer_offset, value);
}

// 64-bit exchange: GLSL has no 64-bit shared atomics, so this is a NON-atomic
// read-then-write of the two u32 halves (low word at offset, high word at offset+4).
void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                                std::string_view value) {
    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
    // Return the previous 64-bit value packed from the two 32-bit words.
    ctx.AddU64("{}=packUint2x32(uvec2(smem[{}>>2],smem[({}+4)>>2]));", inst, pointer_offset,
               pointer_offset);
    // Store the new value's two halves.
    ctx.Add("smem[{}>>2]=unpackUint2x32({}).x;smem[({}+4)>>2]=unpackUint2x32({}).y;",
            pointer_offset, value, pointer_offset, value);
}
|
|
|
|
|
2021-05-24 22:35:37 +00:00
|
|
|
// Storage buffer 32-bit add, mapped directly to the native GLSL atomicAdd.
// The SSBO is addressed as u32 words: byte offset converted with >>2.
void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    ctx.AddU32("{}=atomicAdd({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(),
               ctx.var_alloc.Consume(offset), value);
}
|
|
|
|
|
|
|
|
// Storage buffer signed 32-bit min, emulated via a CAS loop calling CasMinS32;
// the operand is reinterpreted as uint for storage in the u32 CAS word.
void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, fmt::format("uint({})", value), "CasMinS32");
}
|
|
|
|
|
|
|
|
// Storage buffer unsigned 32-bit min, mapped directly to the native GLSL atomicMin.
void EmitStorageAtomicUMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    ctx.AddU32("{}=atomicMin({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(),
               ctx.var_alloc.Consume(offset), value);
}
|
|
|
|
|
|
|
|
// Storage buffer signed 32-bit max, emulated via a CAS loop calling CasMaxS32;
// the operand is reinterpreted as uint for storage in the u32 CAS word.
void EmitStorageAtomicSMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, fmt::format("uint({})", value), "CasMaxS32");
}
|
|
|
|
|
|
|
|
// Storage buffer unsigned 32-bit max, mapped directly to the native GLSL atomicMax.
void EmitStorageAtomicUMax32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    ctx.AddU32("{}=atomicMax({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(),
               ctx.var_alloc.Consume(offset), value);
}

// Wrapped increment (atomicInc semantics); no native GLSL builtin, emulated via CAS.
void EmitStorageAtomicInc32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, value, "CasIncrement");
}

// Wrapped decrement (atomicDec semantics); no native GLSL builtin, emulated via CAS.
void EmitStorageAtomicDec32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, value, "CasDecrement");
}

// Storage buffer bitwise AND, mapped directly to the native GLSL atomicAnd.
void EmitStorageAtomicAnd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            const IR::Value& offset, std::string_view value) {
    ctx.AddU32("{}=atomicAnd({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(),
               ctx.var_alloc.Consume(offset), value);
}

// Storage buffer bitwise OR, mapped directly to the native GLSL atomicOr.
void EmitStorageAtomicOr32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                           const IR::Value& offset, std::string_view value) {
    ctx.AddU32("{}=atomicOr({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(),
               ctx.var_alloc.Consume(offset), value);
}

// Storage buffer bitwise XOR, mapped directly to the native GLSL atomicXor.
void EmitStorageAtomicXor32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            const IR::Value& offset, std::string_view value) {
    ctx.AddU32("{}=atomicXor({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(),
               ctx.var_alloc.Consume(offset), value);
}

// Storage buffer 32-bit exchange, mapped directly to the native GLSL atomicExchange.
void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                 const IR::Value& offset, std::string_view value) {
    ctx.AddU32("{}=atomicExchange({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(),
               ctx.var_alloc.Consume(offset), value);
}
|
|
|
|
|
|
|
|
// 64-bit storage atomics: GLSL here has no native 64-bit atomic support, so each op
// below either falls back to a NON-atomic read-modify-write of the two u32 halves
// (low word at index offset>>2, high word at (offset>>2)+1) or composes two 32-bit
// atomics per half — which is not atomic across the full 64-bit value.

// Non-atomic 64-bit add: returns the old packed value, then adds per 32-bit half.
// NOTE(review): the per-half += does not propagate carry from low to high word.
void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
    ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
               ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
               binding.U32(), ctx.var_alloc.Consume(offset));
    ctx.Add("{}_ssbo{}[{}>>2]+=unpackUint2x32({}).x;{}_ssbo{}[({}>>2)+1]+=unpackUint2x32({}).y;",
            ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, ctx.stage_name,
            binding.U32(), ctx.var_alloc.Consume(offset), value);
}

// Non-atomic signed 64-bit min: returns the old value, then takes a per-half signed min.
void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
    ctx.AddU64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
               ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
               binding.U32(), ctx.var_alloc.Consume(offset));
    ctx.Add("for(int i=0;i<2;++i){{ "
            "{}_ssbo{}[({}>>2)+i]=uint(min(int({}_ssbo{}[({}>>2)+i]),unpackInt2x32(int64_t({}))[i])"
            ");}}",
            ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
            binding.U32(), ctx.var_alloc.Consume(offset), value);
}

// Non-atomic unsigned 64-bit min: returns the old value, then takes a per-half min.
void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
    ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
               ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
               binding.U32(), ctx.var_alloc.Consume(offset));
    ctx.Add("for(int i=0;i<2;++i){{ "
            "{}_ssbo{}[({}>>2)+i]=min({}_ssbo{}[({}>>2)+i],unpackUint2x32(uint64_t({}))[i]);}}",
            ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
            binding.U32(), ctx.var_alloc.Consume(offset), value);
}

// Non-atomic signed 64-bit max: returns the old value, then takes a per-half signed max.
void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
    ctx.AddU64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
               ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
               binding.U32(), ctx.var_alloc.Consume(offset));
    ctx.Add("for(int i=0;i<2;++i){{ "
            "{}_ssbo{}[({}>>2)+i]=uint(max(int({}_ssbo{}[({}>>2)+i]),unpackInt2x32(int64_t({}))[i])"
            ");}}",
            ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
            binding.U32(), ctx.var_alloc.Consume(offset), value);
}

// Non-atomic unsigned 64-bit max: returns the old value, then takes a per-half max.
void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
    ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
               ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
               binding.U32(), ctx.var_alloc.Consume(offset));
    ctx.Add("for(int "
            "i=0;i<2;++i){{{}_ssbo{}[({}>>2)+i]=max({}_ssbo{}[({}>>2)+i],unpackUint2x32(uint64_t({}"
            "))[i]);}}",
            ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
            binding.U32(), ctx.var_alloc.Consume(offset), value);
}

// 64-bit AND composed from two 32-bit atomicAnd calls (each half atomic on its own,
// but the pair is not atomic as a 64-bit unit).
void EmitStorageAtomicAnd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            const IR::Value& offset, std::string_view value) {
    ctx.AddU64(
        "{}=packUint2x32(uvec2(atomicAnd({}_ssbo{}[{}>>2],unpackUint2x32({}).x),atomicAnd({}_"
        "ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));",
        inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, ctx.stage_name,
        binding.U32(), ctx.var_alloc.Consume(offset), value);
}

// 64-bit OR composed from two 32-bit atomicOr calls.
void EmitStorageAtomicOr64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                           const IR::Value& offset, std::string_view value) {
    ctx.AddU64("{}=packUint2x32(uvec2(atomicOr({}_ssbo{}[{}>>2],unpackUint2x32({}).x),atomicOr({}_"
               "ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));",
               inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value,
               ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value);
}

// 64-bit XOR composed from two 32-bit atomicXor calls.
void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                            const IR::Value& offset, std::string_view value) {
    ctx.AddU64(
        "{}=packUint2x32(uvec2(atomicXor({}_ssbo{}[{}>>2],unpackUint2x32({}).x),atomicXor({}_"
        "ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));",
        inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value, ctx.stage_name,
        binding.U32(), ctx.var_alloc.Consume(offset), value);
}

// 64-bit exchange composed from two 32-bit atomicExchange calls.
void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                 const IR::Value& offset, std::string_view value) {
    ctx.AddU64("{}=packUint2x32(uvec2(atomicExchange({}_ssbo{}[{}>>2],unpackUint2x32({}).x),"
               "atomicExchange({}_ssbo{}[({}>>2)+1],unpackUint2x32({}).y)));",
               inst, ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value,
               ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value);
}
|
|
|
|
|
|
|
|
// Floating-point and packed-float storage atomics: GLSL has no native builtins for
// these, so each is emulated with a CAS loop dispatching to a CasFloat* GLSL helper.

// 32-bit float add; result is converted back to float by SsboCasFunctionF32.
void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                             const IR::Value& offset, std::string_view value) {
    SsboCasFunctionF32(ctx, inst, binding, offset, value, "CasFloatAdd");
}

// Packed 2x16-bit float add on a single u32 word.
void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatAdd16x2");
}

// 2x32-bit float add variant.
void EmitStorageAtomicAddF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatAdd32x2");
}

// Packed 2x16-bit float min on a single u32 word.
void EmitStorageAtomicMinF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatMin16x2");
}

// 2x32-bit float min variant.
void EmitStorageAtomicMinF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatMin32x2");
}

// Packed 2x16-bit float max on a single u32 word.
void EmitStorageAtomicMaxF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatMax16x2");
}

// 2x32-bit float max variant.
void EmitStorageAtomicMaxF32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                               const IR::Value& offset, std::string_view value) {
    SsboCasFunction(ctx, inst, binding, offset, value, "CasFloatMax32x2");
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicIAdd32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicSMin32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicUMin32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicSMax32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicUMax32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicInc32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicDec32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicAnd32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicOr32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicXor32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicExchange32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicIAdd64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicSMin64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicUMin64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicSMax64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicUMax64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicInc64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicDec64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicAnd64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicOr64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicXor64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicExchange64(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicAddF32(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicAddF16x2(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicAddF32x2(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicMinF16x2(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicMinF32x2(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicMaxF16x2(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
|
|
|
|
void EmitGlobalAtomicMaxF32x2(EmitContext&) {
|
|
|
|
throw NotImplementedException("GLSL Instrucion");
|
|
|
|
}
|
|
|
|
} // namespace Shader::Backend::GLSL
|