From 9de452bba5b592402ced6f20fbdc6d0b5c075416 Mon Sep 17 00:00:00 2001
From: Rhys Weatherley
Date: Thu, 24 Jun 2010 11:23:57 +1000
Subject: Normalize integers when calling glVertexAttribPointer()

When QGLShaderProgram::setAttributeArray() is used with a type like
GL_UNSIGNED_BYTE, it is normally going to be a value that should be
normalized to the range 0..1.  But the function wasn't normalizing,
which led to errors in programs that used per-vertex colors with
the 4ub representation.

Reviewed-by: Sarah Smith
---
 src/opengl/qglshaderprogram.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/opengl/qglshaderprogram.cpp b/src/opengl/qglshaderprogram.cpp
index 83b4b21..c7689b8 100644
--- a/src/opengl/qglshaderprogram.cpp
+++ b/src/opengl/qglshaderprogram.cpp
@@ -1490,7 +1490,7 @@ void QGLShaderProgram::setAttributeArray
     Q_D(QGLShaderProgram);
     Q_UNUSED(d);
     if (location != -1) {
-        glVertexAttribPointer(location, tupleSize, type, GL_FALSE,
+        glVertexAttribPointer(location, tupleSize, type, GL_TRUE,
                               stride, values);
     }
 }
@@ -1634,7 +1634,7 @@ void QGLShaderProgram::setAttributeBuffer
     Q_D(QGLShaderProgram);
     Q_UNUSED(d);
     if (location != -1) {
-        glVertexAttribPointer(location, tupleSize, type, GL_FALSE, stride,
+        glVertexAttribPointer(location, tupleSize, type, GL_TRUE, stride,
                               reinterpret_cast<const void *>(offset));
     }
 }
-- 
cgit v0.12
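
For context, a minimal sketch of the case this commit fixes: per-vertex
colors packed as four unsigned bytes per vertex (the "4ub" representation)
fed to a shader attribute through QGLShaderProgram::setAttributeArray().
The attribute name "color", the function setQuadColors(), and the vertex
data are illustrative assumptions, not part of the patch.

    #include <QGLShaderProgram>

    // Assumes program->bind() has already been called and the linked
    // vertex shader declares an attribute named "color".
    void setQuadColors(QGLShaderProgram *program)
    {
        // Four vertices, RGBA, one byte per channel: the 4ub layout.
        static const GLubyte colors[4 * 4] = {
            255,   0,   0, 255,   // red
              0, 255,   0, 255,   // green
              0,   0, 255, 255,   // blue
            255, 255, 255, 255    // white
        };

        program->enableAttributeArray("color");

        // With this patch, setAttributeArray() passes GL_TRUE for the
        // "normalized" argument of glVertexAttribPointer(), so each byte
        // in 0..255 reaches the vertex shader as a float in 0.0..1.0.
        // Before the fix, the shader saw raw values up to 255.0.
        program->setAttributeArray("color", GL_UNSIGNED_BYTE, colors, 4);
    }

Passing GL_TRUE here matches how fixed-function color arrays treat
GL_UNSIGNED_BYTE data (glColorPointer() normalizes integer colors
implicitly), which is why 4ub color data is expected to arrive in the
shader as floats in the 0..1 range.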