GLSL: Return correct sign for OpArrayLength.

.length() returns int, not uint ...
Hans-Kristian Arntzen 2019-05-07 19:02:32 +02:00
parent e9da5ed631
commit b6f8a20624
3 changed files with 4 additions and 3 deletions
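
For context: in GLSL, .length() on a runtime-sized SSBO array returns int, while the result type of SPIR-V OpArrayLength is always a 32-bit unsigned integer, so the emitted expression needs an explicit cast to carry the correct sign. A minimal hand-written sketch of the pattern (a hypothetical shader, not one of the files in this commit):

    #version 450
    layout(local_size_x = 1) in;

    layout(binding = 0, std430) buffer SSBO
    {
        uint size;
        float v[];
    } ssbo;

    void main()
    {
        // .length() is int-typed in GLSL; the explicit uint() cast
        // restores the unsigned result type of SPIR-V OpArrayLength.
        ssbo.size = uint(ssbo.v.length());
    }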

@@ -9,6 +9,6 @@ layout(binding = 1, std140) buffer SSBO
 void main()
 {
-    _11.size = uint(int(_11.v.length()));
+    _11.size = uint(int(uint(_11.v.length())));
 }

@@ -9,6 +9,6 @@ layout(binding = 1, std140) buffer SSBO
 void main()
 {
-    _11.size = uint(int(_11.v.length()));
+    _11.size = uint(int(uint(_11.v.length())));
 }

@@ -7566,7 +7566,8 @@ void CompilerGLSL::emit_instruction(const Instruction &instruction)
 		uint32_t result_type = ops[0];
 		uint32_t id = ops[1];
 		auto e = access_chain_internal(ops[2], &ops[3], length - 3, ACCESS_CHAIN_INDEX_IS_LITERAL_BIT, nullptr);
-		set<SPIRExpression>(id, e + ".length()", result_type, true);
+		set<SPIRExpression>(id, join(type_to_glsl(get<SPIRType>(result_type)), "(", e, ".length())"), result_type,
+		                    true);
 		break;
 	}
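
With this change the array-length expression is wrapped in type_to_glsl() of its SPIR-V result type, so for a uint result the compiler now emits, roughly (a sketch, reusing the _11 naming from the reference output above):

    // before: the expression was int-typed in GLSL despite the
    // uint result type of OpArrayLength in SPIR-V
    _11.v.length()
    // after: explicitly cast, so the sign matches the SPIR-V type
    uint(_11.v.length())

The uint(int(uint(...))) chain in the updated reference shaders is this correctly signed inner expression with the shader's own int/uint conversions layered on top, as before.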