fix the attrib format for ints

Author: David Marcec
Date:   2018-08-05 11:29:21 +10:00
parent 2b06301dbf
commit 6d1e30e041


@@ -169,8 +169,13 @@ std::pair<u8*, GLintptr> RasterizerOpenGL::SetupVertexArrays(u8* array_ptr,
         ASSERT(buffer.IsEnabled());

         glEnableVertexAttribArray(index);
-        glVertexAttribFormat(index, attrib.ComponentCount(), MaxwellToGL::VertexType(attrib),
-                             attrib.IsNormalized() ? GL_TRUE : GL_FALSE, attrib.offset);
+        if (attrib.type == Tegra::Engines::Maxwell3D::Regs::VertexAttribute::Type::SignedInt ||
+            attrib.type == Tegra::Engines::Maxwell3D::Regs::VertexAttribute::Type::UnsignedInt)
+            glVertexAttribIFormat(index, attrib.ComponentCount(), MaxwellToGL::VertexType(attrib),
+                                  attrib.offset);
+        else
+            glVertexAttribFormat(index, attrib.ComponentCount(), MaxwellToGL::VertexType(attrib),
+                                 attrib.IsNormalized() ? GL_TRUE : GL_FALSE, attrib.offset);
         glVertexAttribBinding(index, attrib.buffer);
     }
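For context on why the branch is needed: glVertexAttribFormat feeds the attribute to the shader as a float (optionally normalized), while glVertexAttribIFormat keeps the data as integers so it matches int/ivec/uvec inputs. Below is a minimal standalone sketch of the same dispatch pattern, not yuzu code; the AttribDesc struct, AttribKind enum, and SetupAttrib function are hypothetical names, and the GL loader header is an assumption.

// Sketch only: choose the integer or float vertex attribute format
// based on a hypothetical attribute description.
#include <glad/glad.h> // assumption: any loader exposing GL 4.3+ entry points

enum class AttribKind { Float, SignedInt, UnsignedInt };

struct AttribDesc {
    GLuint index;          // attribute location
    GLint components;      // 1..4
    GLenum gl_type;        // e.g. GL_FLOAT, GL_INT, GL_UNSIGNED_BYTE
    GLboolean normalized;  // only meaningful on the float path
    GLuint offset;         // relative offset within the vertex
    GLuint buffer_binding; // vertex buffer binding index
    AttribKind kind;
};

void SetupAttrib(const AttribDesc& a) {
    glEnableVertexAttribArray(a.index);
    if (a.kind == AttribKind::SignedInt || a.kind == AttribKind::UnsignedInt) {
        // Integer attributes must use the I-variant, otherwise the values
        // are converted to float before reaching the shader.
        glVertexAttribIFormat(a.index, a.components, a.gl_type, a.offset);
    } else {
        glVertexAttribFormat(a.index, a.components, a.gl_type, a.normalized, a.offset);
    }
    glVertexAttribBinding(a.index, a.buffer_binding);
}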