All the tutorials seem to indicate that I am doing things correctly: the vertex shader works, but it fails to recognize any input changes made from the main program through glUniform1f. I check glGetError after each line and there are no errors. I check glGetShaderiv and glGetShaderInfoLog and there are no issues. I am testing with OpenGL version 2.1 (unknown profile, but assuming the core profile) as reported by SDL.
#if defined(__WINDOWS__) || defined(_WIN32) || defined(_WIN64) || defined(__WIN32__) || defined(__TOS_WIN__)\
|| defined(__CYGWIN__)
/* Compiling for Windows */
#ifndef __WINDOWS__
#define __WINDOWS__
#endif
#include <windows.h>
#endif/* Predefined Windows macros */
#include <SDL2/SDL.h>
#include <GL/GL.h>
#include <stdlib.h>
#include <stdio.h>
#include <error.h>
// GL entry points declared by hand; return types not verified
void glGenBuffers();
void glBindBuffer();
void glBufferData();
unsigned int glCreateShader();
void glShaderSource();
void glCompileShader();
void glGetShaderiv();
void glGetShaderInfoLog();
unsigned int glCreateProgram();
void glAttachShader();
void glLinkProgram();
void glGetProgramiv();
void glGetProgramInfoLog();
void glVertexAttribPointer();
void glEnableVertexAttribArray();
void glUseProgram();
void glDeleteShader();
void glGenVertexArrays();
void glBindVertexArray();
GLint glGetUniformLocation();
void glUniform1f();
void glDeleteProgram();
void glDeleteBuffers();
int fixSDLconsole() {
FILE *console = freopen("stdout.txt", "a",stdout);
if (console == NULL) {return errno;}
console = freopen("stdout.txt", "a",stderr);
if (console == NULL) {return errno;}
return 0;
}
void printGLVersionNumber() {
int majorVersion;
int minorVersion;
int profile;
SDL_GL_GetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, &majorVersion);
SDL_GL_GetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, &minorVersion);
SDL_GL_GetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, &profile);
fprintf(stderr,"GL version %d.%d ",majorVersion,minorVersion);
switch (profile) {
case SDL_GL_CONTEXT_PROFILE_CORE: fprintf(stderr,"core (%d)\n",profile);break;
case SDL_GL_CONTEXT_PROFILE_COMPATIBILITY: fprintf(stderr,"compatibility (%d)\n",profile);break;
case SDL_GL_CONTEXT_PROFILE_ES: fprintf(stderr,"E.S. (%d)\n",profile);break;
default: fprintf(stderr, "unknown profile: %d\n",profile);break;
}
return;
}
#define checkGlError(label) {int error = glGetError();if (error != GL_NO_ERROR) {error_at_line(0,0,__FILE__,__LINE__,"error=%d", error);goto label;}}
int main(int argc, char **argv) {
SDL_Window *window = NULL;
SDL_GLContext context = NULL;
GLuint verticesGlIds[] = {0,0};
GLuint vertexShaderGlId = 0;
GLuint shaderProgramGlId = 0;
if (fixSDLconsole()) {
return errno;
}
if (SDL_Init(SDL_INIT_VIDEO) != 0) {
error_at_line(1,0,__FILE__,__LINE__,"Unable to initialize SDL: %s",SDL_GetError());
goto error;
}
printGLVersionNumber();
window = SDL_CreateWindow("Window Title",SDL_WINDOWPOS_UNDEFINED,SDL_WINDOWPOS_UNDEFINED,640,640,SDL_WINDOW_OPENGL);
if (window == NULL) {
error_at_line(0,0,__FILE__,__LINE__,"Could not create window: %s", SDL_GetError());
goto error;
}
context = SDL_GL_CreateContext(window);
if (context == NULL) {
error_at_line(0,0,__FILE__,__LINE__,"Could not create OpenGL context: %s", SDL_GetError());
goto error;
}
glViewport(0,0,640,640);checkGlError(error);
glClearColor(.9f,.9f,.9f,1.f);checkGlError(error);
glEnableClientState(GL_VERTEX_ARRAY);checkGlError(error);
glEnableClientState(GL_COLOR_ARRAY);checkGlError(error);
float vertices[] = {
-.5f,0.f,0.f,
0.f,.5f,0.f,
0.f,-.5f,0.f,
0.f,.5f,0.f,
.5f,.5f,0.f,
0.f,0.f,0.f
};
float colors[] = {
1.f,0.f,0.f,//red
.5f,0.f,0.f,//red
0.f,1.f,0.f,//green
0.f,.5f,0.f,//green
0.f,0.f,1.f,//blue
0.f,0.f,.5f//blue
};
glGenBuffers(2, &verticesGlIds);checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, verticesGlIds[0]);checkGlError(error);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, verticesGlIds[1]);checkGlError(error);
glBufferData(GL_ARRAY_BUFFER,sizeof(colors),colors, GL_STATIC_DRAW);checkGlError(error);
char *vertexShader =
"#version 120\n"\
"attribute vec3 aPos;\n"\
"uniform float i;\n"\
"void main() {\n"\
"gl_FrontColor=gl_Color;\n"\
"gl_Position = vec4(aPos.x+i/2,aPos.y,aPos.z,1.0);\n"\
"}\n";
vertexShaderGlId = glCreateShader(GL_VERTEX_SHADER);checkGlError(error);
if (vertexShaderGlId == 0) {error_at_line(0,0,__FILE__,__LINE__,"vertex shader could not be created");goto error;}
glShaderSource(vertexShaderGlId, 1, &vertexShader, NULL);checkGlError(error);
glCompileShader(vertexShaderGlId);checkGlError(error);
{
GLint success;
glGetShaderiv(vertexShaderGlId, GL_COMPILE_STATUS, &success);checkGlError(error);
if (success == GL_FALSE) {
char infoLog[512];
glGetShaderInfoLog(vertexShaderGlId, 512, NULL, infoLog);checkGlError(error);
error_at_line(0,0,__FILE__,__LINE__,"Vertex Shader problem: %s", infoLog);
goto error;
}
}
shaderProgramGlId = glCreateProgram();checkGlError(error);
if (shaderProgramGlId == 0) {error_at_line(0,0,__FILE__,__LINE__,"shader program could not be created");goto error;}
glAttachShader(shaderProgramGlId, vertexShaderGlId);checkGlError(error);
glLinkProgram(shaderProgramGlId);checkGlError(error);
{
int success;
glGetProgramiv(shaderProgramGlId, GL_LINK_STATUS, &success);checkGlError(error);
if (!success) {
char infoLog[512];
glGetProgramInfoLog(shaderProgramGlId, 512, NULL, infoLog);checkGlError(error);
error_at_line(0,0,__FILE__,__LINE__,"Shader program problem: %s", infoLog);
}
}
glDeleteShader(vertexShaderGlId);checkGlError(error);
GLint iLocation = glGetUniformLocation(shaderProgramGlId, "i");checkGlError(error);
if (iLocation == -1) {error_at_line(0,0,__FILE__,__LINE__,"uniform i not found in shader");goto error;}
error_at_line(0,0,__FILE__,__LINE__,"iLocation: %d", iLocation);
for (int frame = 0; frame < 100; ++frame) {
glClear(GL_COLOR_BUFFER_BIT);checkGlError(error);
glUseProgram(shaderProgramGlId);checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, verticesGlIds[0]); checkGlError(error);
glVertexPointer(3,GL_FLOAT,0,0); checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, verticesGlIds[1]); checkGlError(error);
glColorPointer(3,GL_FLOAT,0,0); checkGlError(error);
glUniform1f(iLocation, (float) (frame%2)); checkGlError(error);
glDrawArrays(GL_TRIANGLES, 0,sizeof(vertices)/sizeof(float)/3); checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, 0); checkGlError(error);
SDL_GL_SwapWindow(window);
SDL_Delay(100);
}
glDeleteProgram(shaderProgramGlId);
glDeleteShader(vertexShaderGlId);
glDeleteBuffers(sizeof(verticesGlIds)/sizeof(GLuint), verticesGlIds);
SDL_GL_DeleteContext(context);
SDL_Delay(3000);
SDL_DestroyWindow(window);
SDL_Quit();
return EXIT_SUCCESS;
error:
glDeleteProgram(shaderProgramGlId);
glDeleteShader(vertexShaderGlId);
glDeleteBuffers(sizeof(verticesGlIds)/sizeof(GLuint), verticesGlIds);
if (context != NULL) SDL_GL_DeleteContext(context);
if (window != NULL) SDL_DestroyWindow(window);
SDL_Quit();
return EXIT_FAILURE;
}
#if defined(__WINDOWS__)
int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nShowCmd) {
char *argv[1] = {(char *) 0};
return main(0, argv);
}
#endif
Note that I am not familiar with OpenGL's extension-function loading issues or with routines such as SDL's SDL_GL_LoadLibrary and SDL_GL_GetProcAddress; I just manually define the function signatures at the top of the file and import GL through the linker. I don't expect this to be an issue, but it is the only thing I am aware of that I haven't looked into and that may be causing my problems.
So you declare the function like this:
void glUniform1f();
By omitting the parameter list you leave the compiler with no prototype to check the call against, so the default argument promotions are applied to every argument. For most other GL functions this happens to work, because their arguments are integer types anyway and an int still arrives as an int. For glUniform1f, however, it means the float you pass is promoted to double at the call site, while the function itself reads a 4-byte GLfloat, so the bit pattern it picks up is not the value you intended. The actual prototype for glUniform1f is:
void glUniform1f(GLint location, GLfloat value);
Note that I am not familiar with OpenGL's extension-function loading issues or with routines such as SDL's SDL_GL_LoadLibrary and SDL_GL_GetProcAddress; I just manually define the function signatures at the top of the file and import GL through the linker.
You shouldn't do this. The GL functions you try to access might not even be exported by the library at all. If you do not want to manually deal with loading every function pointer, you can use one of the existing OpenGL loaders.
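For example, here is a minimal sketch (not part of the question's code) of fetching a single entry point through SDL_GL_GetProcAddress after the context has been created; the hand-written pointer typedef stands in for what a loader or glext.h would normally provide:
/* on Windows, include <windows.h> before <GL/GL.h>, as in the question */
#include <SDL2/SDL.h>
#include <GL/GL.h>
#include <stdio.h>

/* pointer type matching the real prototype of glUniform1f */
typedef void (APIENTRY *PFN_glUniform1f)(GLint location, GLfloat value);
static PFN_glUniform1f p_glUniform1f = NULL;

/* call this once SDL_GL_CreateContext() has succeeded */
int loadGlFunctions(void)
{
    p_glUniform1f = (PFN_glUniform1f) SDL_GL_GetProcAddress("glUniform1f");
    if (p_glUniform1f == NULL) {
        fprintf(stderr, "glUniform1f is not available\n");
        return -1;
    }
    return 0;
}

/* usage in the render loop: p_glUniform1f(iLocation, 1.0f);
 * the compiler now knows the second argument really is a GLfloat */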
gl_Position expects clip-space coordinates: for a point vec4(x, y, z, w), the visible volume is the region where each of x, y and z lies in the range [-w, w].
If any of x, y or z falls outside [-w, w], the vertex is clipped.
Before the fragment shader runs, the GPU automatically converts the coordinates to NDC space as x/w, y/w, z/w, 1 (the so-called perspective division).
Your GLSL code gl_Position = vec4(aPos.x+i/2, aPos.y, aPos.z, 1.0); uses the uniform i.
You update it with glUniform1f(iLocation, (float) (frame%2));.
The first issue is frame%2: only 0 or 1 ever reaches the GPU, so with your current vertex data the pair of triangles only ever appears in two positions.
The second issue is that frame is a value 0 <= frame < 100. So if you passed frame instead of frame%2, then for most values aPos.x + i/2 would fall outside the clip volume, and you would only see the triangles of the first couple of frames, or parts of them.
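As a rough sketch (keeping the rest of the draw loop as it is), one way to animate the offset while staying inside the clip volume is to derive i from the frame counter so that aPos.x + i/2 never leaves [-1, 1]; aPos.x itself only ranges over [-0.5, 0.5]:
#include <math.h>   /* for sinf(); some toolchains also need -lm */

/* hypothetical helper: keeping i inside [-1, 1] keeps aPos.x + i/2 inside [-1, 1] */
static float frameToOffset(int frame)
{
    return sinf((float) frame * 0.1f);   /* always in [-1, 1] */
}

/* inside the draw loop, replacing the frame%2 line:
 *   glUniform1f(iLocation, frameToOffset(frame));
 */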
For future readers
Don't code tired. It turns you into a wee bit of a dink.
My problem was that I had two separate outputs that appeared to collide with one another. One was beyond a return statement, so it was alarming that the code was being executed at all. However, it had not occurred to me to use Visual Studio's search function to check every other file to see if there was another reason I was getting output. To my knowledge at the time, there was only a single call to the LoadGLSLFromFile function, when there were actually two.
The result was extreme frustration on my end, because I was positive that there were no other calls to that function, and that somehow there must be optimizations to my code that were either messing up the order of execution, or that the return statement was not functioning as it should (for whatever reason).
If you are going to post a question, I urge you to re-evaluate what is deemed as "relevant" code. You are here because you're not seeing something that others can, after all.
If you read the comments of this, you will see that StoryTeller was trying to get it through to me that what I was assuming was happening was indeed not possible, and that my methods of debugging were incorrect. StoryTeller, if you're reading this, I do sincerely apologize.
===============================
EDIT 2: Note that the issue I am having is that the function is not returning when I told it to. It continues execution beyond the return, inside the same function, until it hits a second one.
I am really lost as to why this is occurring, but this is my code. Usually what I do is check for critical failures, and if there is a serious issue, I return a known "error" value. However, the return is being ignored in multiple places and the code is executing in the wrong order (see the second image for the wrong order).
EDIT:
My problem is that the code is executing beyond the return statements.
#include "Loader.h"
#include <stdio.h>
#include <stdlib.h>
#include <direct.h>
#include <string.h>
#include <GL/glew.h>
#include <GL/GL.h>
#include <GL/GLU.h>
#include <GLFW/glfw3.h>
static const GLchar * VSource[] = {
"#version 450 core\n"
"layout (location = 0) in vec4 offset;\n"
"layout (location = 1) in vec4 color;\n"
"out VS_OUT {\n"
" vec4 color;"
"} vs_out;\n"
"void main(void)\n"
"{\n"
" const vec4 vertices[3] = vec4[3](vec4(0.25, -0.25, 0.5, 1.0),\n"
" vec4(-0.25, -0.25, 0.5, 1.0),\n"
" vec4(0.25, 0.25, 0.5, 1.0));\n"
" gl_Position = vertices[gl_VertexID] + offset;\n"
" vs_out.color = color;"
"}\n"
};
static const GLchar* FSource[] = {
"#version 450 core\n"
"in VS_OUT {\n"
" vec4 color;\n"
"} fs_in;\n"
"out vec4 color;\n"
"void main(void)\n"
"{\n"
" color = fs_in.color;\n"
"}\n"
};
static const GLchar* TControlSource[] = {
"#version 450 core\n"
"layout (vertices = 3) out;\n"
"void main(void) {\n"
" if(gl_InvocationID == 0) {\n"
" gl_TessLevelInner[0] = 5.0;\n"
" gl_TessLevelOuter[0] = 5.0;\n"
" gl_TessLevelOuter[1] = 5.0;\n"
" gl_TessLevelOuter[2] = 5.0;\n"
" }\n"
" gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
"}"
};
GLuint LoadAllShaders() {
LoadGLSLFromFile("glsl", GL_VERTEX_SHADER);
GLuint VShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(VShader, 1, VSource, NULL);
glCompileShader(VShader);
LogCompileStatus(VShader, "VShader");
GLuint FShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(FShader, 1, FSource, NULL);
glCompileShader(FShader);
LogCompileStatus(FShader, "FShader");
GLuint TShader = glCreateShader(GL_TESS_CONTROL_SHADER);
glShaderSource(TShader, 1, TControlSource, NULL);
glCompileShader(TShader);
LogCompileStatus(TShader, "Tessellation Shader");
GLuint Program = glCreateProgram();
glAttachShader(Program, VShader);
glAttachShader(Program, FShader);
glAttachShader(Program, TShader);
glLinkProgram(Program);
glDeleteShader(VShader);
glDeleteShader(FShader);
glDeleteShader(TShader);
return Program;
}
void LogCompileStatus(GLuint Shader, char* ShaderName) {
// Checking compile status of VShader
if (ShaderName == NULL || sizeof(ShaderName) == 0)
ShaderName = ("Unnamed Shader with ID: %i" + (char)&Shader);
GLuint ShaderSuccess = GL_FALSE;
glGetShaderiv(Shader, GL_COMPILE_STATUS, &ShaderSuccess);
if (ShaderSuccess == GL_TRUE)
printf("%s successfully compiled\n", ShaderName);
else {
GLint LogLength;
glGetShaderiv(Shader, GL_INFO_LOG_LENGTH, &LogLength);
char* buffer = (char*)malloc(LogLength);
glGetShaderInfoLog(Shader, LogLength, NULL, buffer);
printf("%s failed to compile.\n%s\n", ShaderName, buffer);
free(buffer);
}
}
GLuint LoadGLSLFromFile(char* location, GLenum ShaderType) {
if (sizeof(location) < 6 || strstr(location, ".glsl") == NULL) {
fprintf(stderr, "Attempted to load invalid file.\n");
return 0;
}
switch (ShaderType) {
case(GL_COMPUTE_SHADER):
case(GL_VERTEX_SHADER):
case(GL_TESS_CONTROL_SHADER):
case(GL_TESS_EVALUATION_SHADER):
case(GL_GEOMETRY_SHADER):
case(GL_FRAGMENT_SHADER):
break;
default:
fprintf(stderr, "Invalid Shadertype\n");
break;
}
FILE* shaderFile = fopen(location, "r");
if (shaderFile == NULL) {
fprintf(stderr, "Wurbulgurb\n");
return 0;
}
size_t bufferSize = fseek(shaderFile, 0, SEEK_END);
printf("File Buffer size: %i\n", (int)bufferSize);
fclose(shaderFile);
/*
glShaderSource(shd, 1, VSource, NULL);
glCompileShader(shd);
LogCompileStatus(shd, "VShader");
*/
return 1;
}
The main file:
#include <stdio.h>
#include <stdlib.h>
#include <GL\glew.h>
#include <GL\GL.h>
#include <GL\GLU.h>
#include <GL\wglew.h>
#include <GLFW\glfw3.h>
#include "Loader.h"
#define CL_BUFFER (GLfloat[4]) { 0.3f, 0.3f, 0.3f, 1.0f }
int main(void);
void err(int error, const char * msg);
void keypress(GLFWwindow *window, int key, int scancode, int action, int mods);
static GLfloat attrib[] = { 0.0f, 0.0f, 0.5f, 1.0f };
int main(void) {
if (!glfwInit())
return -1;
GLFWwindow *window = glfwCreateWindow(1024, 768, "Fididdler", NULL, NULL);
glfwWindowHint(GLFW_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_VERSION_MINOR, 5);
glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
if (window == NULL)
return -1;
LoadGLSLFromFile("./Condoms.glsl", GL_VERTEX_SHADER);
glfwSetErrorCallback(err);
glfwSetKeyCallback(window, keypress);
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
return -1;
printf("GL Version String: %s \n", glGetString(GL_VERSION));
GLuint RProg = LoadAllShaders();
GLuint VAO;
glCreateVertexArrays(1, &VAO);
glBindVertexArray(VAO);
printf("Loading complete");
while (!glfwWindowShouldClose(window)) {
glClearBufferfv(GL_COLOR, 0, CL_BUFFER);
glUseProgram(RProg);
glVertexAttrib4fv(0, attrib);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* End drawing logic*/
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &VAO);
glDeleteProgram(RProg);
glfwTerminate();
return 1;
}
void err(int error, const char * msg) {
printf("Error: %s", msg);
}
void keypress(GLFWwindow *window, int key, int scancode, int action, int mods) {
if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS) {
glfwSetWindowShouldClose(window, GLFW_TRUE);
}
switch (key) {
case(GLFW_KEY_W):
attrib[1] += 0.01f;
printf("W pressed");
break;
case(GLFW_KEY_S):
attrib[1] -= 0.01f;
printf("S pressed");
break;
}
switch (key) {
case(GLFW_KEY_A):
attrib[0] -= 0.01f;
printf("A pressed");
break;
case(GLFW_KEY_D):
attrib[0] += 0.01f;
printf("D pressed");
break;
}
}
This is the console output for LoadGLSLFromFile("TessControl.glsl", GL_VERTEX_SHADER). Note: line 3 should not happen, as there was a return statement prior to it.
This is the console output for LoadGLSLFromFile("glsl", GL_VERTEX_SHADER). Note: line 3 should not happen, as there was a return statement prior to it.
There's a return statement specifically blocking further execution, and yet it still executes. I am quite puzzled with this behavior because as far as I am aware, return means "immediately go back to the line that called you".
Any and all assistance would be greatly appreciated.
You say that the return value is being ignored. That is exactly what your code is doing:
GLuint LoadAllShaders() {
// return value is ignored
LoadGLSLFromFile("glsl", GL_VERTEX_SHADER);
GLuint VShader = glCreateShader(GL_VERTEX_SHADER);
Because you don't check the return value, there is nothing to prevent your code from continuing.
Perhaps you're confusing return with exit(). The return statement returns a value from a function so that the calling function can decide how to proceed. The exit function causes the program to terminate, with the passed in value being the return value of your program.
If you want your program to quit, you should use exit. If not, then you need to check the return value in the above code and act accordingly.
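Here is a minimal standalone illustration (not the poster's code) of the difference: a return only ends the function it appears in, and nothing else stops unless the caller inspects the value or the program calls exit:
#include <stdio.h>
#include <stdlib.h>

static int mightFail(void)
{
    return 0;                 /* execution of THIS function ends here */
}

int main(void)
{
    mightFail();              /* value discarded: main simply keeps going */

    if (mightFail() == 0) {   /* checking the value is what stops the flow */
        fprintf(stderr, "mightFail reported an error\n");
        exit(EXIT_FAILURE);   /* exit() is what actually ends the program */
    }

    puts("never reached");
    return 0;
}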
EDIT:
This is why a MCVE is so important.
Now that you've posted your main code, the issue is clear: you're calling LoadGLSLFromFile twice. You first call it directly from main. Then main calls LoadAllShaders, which calls LoadGLSLFromFile again.
So "Wurbulgurb" prints on the first call, while either "Attempted to load invalid file" or "File Buffer size" is printed on the second call.
I'm trying to create a shader for a screen-filling quad, but I can't seem to pass a uniform float to the shader.
In the following example I'm initializing GLUT, creating/compiling and linking the shaders, passing a uniform int and a uniform float to the shader, and reading them back to check. The int asdf works fine; the float qwer behaves weirdly.
If I set the value of qwer to 1.3, the uniform ends up as -2.0; if I set it to 1.2, the uniform ends up as 2.0.
#include <stdio.h>
#include <string.h>
#include <GL/glut.h>
#include <unistd.h>
int gw = 640, gh = 360;
void drawScene(){
//creating a screen filling quad
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0f); glVertex2i(-1, 1);
glTexCoord2f(1.0, 0.0f); glVertex2i(1, 1);
glTexCoord2f(1.0, 1.0f); glVertex2i(1, -1);
glTexCoord2f(0.0, 1.0f); glVertex2i(-1, -1);
glEnd();
glutSwapBuffers();
}
void update(int value){
glutPostRedisplay();
glutTimerFunc(1000 / 30, update, 0);
}
int main(int argc, char** argv){
//shader source code
char *fraShdrStr = "\n\
uniform int asdf;\
uniform float qwer;\
void main(){\n\
vec2 p = gl_TexCoord[0].xy;\n\
gl_FragColor=vec4(p.x,qwer,float(asdf),1.0);\n\
}";
char *verShdrStr = "\n\
void main(){\n\
gl_Position=gl_ModelViewProjectionMatrix*gl_Vertex;\n\
gl_TexCoord[0]=gl_MultiTexCoord0;\n\
}";
size_t verShdrLen, fraShdrLen;
char errorBuffer[1024];
int errorLength;
int program, verShdr, fraShdr;
verShdrLen = strlen(verShdrStr);
fraShdrLen = strlen(fraShdrStr);
//initializing glut
glutInit(&argc, argv);
glutInitWindowSize(gw, gh);
glutCreateWindow("");
//creating, compiling and linking shaders
verShdr = glCreateShader(GL_VERTEX_SHADER);
fraShdr = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(verShdr, 1, &verShdrStr, &verShdrLen);
glShaderSource(fraShdr, 1, &fraShdrStr, &fraShdrLen);
glCompileShader(verShdr);
glGetShaderInfoLog(verShdr, 1024, &errorLength, errorBuffer);
if(errorLength) printf("Vertex Shader Error:\n%s\n", errorBuffer);
glCompileShader(fraShdr);
glGetShaderInfoLog(fraShdr, 1024, &errorLength, errorBuffer);
if(errorLength) printf("Fragmen Shader Error:\n%s\n", errorBuffer);
program = glCreateProgram();
glAttachShader(program, verShdr);
glAttachShader(program, fraShdr);
glLinkProgram(program);
glGetProgramInfoLog(program, 1024, &errorLength, errorBuffer);
if(errorLength) printf("Linking Error:\n%s\n", errorBuffer);
glUseProgram(program);
//initializing variables to pass as uniform
int asdf = 9;
int asdf2;
float qwer = 1.0;
float qwer2;
//setting the uniform values
glUniform1i(glGetUniformLocation(program, "asdf"), asdf);
glGetUniformiv(program, glGetUniformLocation(program, "asdf"), &asdf2);
printf("%d\n", asdf2);
glUniform1f(glGetUniformLocation(program, "qwer"), qwer);
glGetUniformfv(program, glGetUniformLocation(program, "qwer"), &qwer2);
printf("%f\n", qwer2);
glutDisplayFunc(drawScene);
glutTimerFunc(1000/30, update, 0);
glutMainLoop();
}
You are misunderstanding the whole picture. Since OpenGL 3.0, using glBegin/glEnd to draw is deprecated. Instead you should use an approach based on so-called vertex arrays. Check out this question for an example code snippet.
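As a minimal sketch (assuming a compatibility context such as the one GLUT creates, where client-side vertex arrays are still available), the screen-filling quad from the question could be drawn without glBegin/glEnd like this; moving the data into a buffer object would be the next step:
#include <GL/glut.h>

/* the quad's four corners and their texture coordinates, matching the question */
static const GLfloat quadVerts[] = {
    -1.f,  1.f,   1.f,  1.f,   1.f, -1.f,  -1.f, -1.f
};
static const GLfloat quadTexCoords[] = {
    0.f, 0.f,   1.f, 0.f,   1.f, 1.f,   0.f, 1.f
};

void drawScreenQuad(void)
{
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glVertexPointer(2, GL_FLOAT, 0, quadVerts);
    glTexCoordPointer(2, GL_FLOAT, 0, quadTexCoords);
    glDrawArrays(GL_QUADS, 0, 4);        /* GL_TRIANGLE_FAN also works here */
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    glDisableClientState(GL_VERTEX_ARRAY);
}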
I wrote a program in C++ using Windows Forms. I use two forms. The first form only contains a button; when it is pressed, a second form opens. This form contains a panel in which a simple OpenGL simulation is played (rotated with the help of a timer provided by Windows Forms). The second form can be closed and opened again by pressing the button in the first form. The more often this is done, the slower the 3D OpenGL object rotates, and after doing it about 6 times the object starts to flicker wildly. I think this happens because the OpenGL object I construct is not destroyed properly, so that at a certain point the memory is full (in a more complicated version of the project it was flickering between the current 3D object and a 3D object that should have been destroyed after its window was closed).
Here is a video of the problem.
Here is the OpenGL code:
namespace OpenGLForm
{
public ref class COpenGL: public System::Windows::Forms::NativeWindow
{
public:
// Position/Orientation of 3D Mesh in Mesh-Viewer Window
float meshPos_x;
float meshPos_y;
float meshPos_z;
float meshOri_x;
float meshOri_y;
float meshOri_z;
COpenGL(System::Windows::Forms::Panel ^ parentForm, GLsizei iWidth, GLsizei iHeight)
{
// initialize all parameter / set pointers (for pointers of type MAT)
meshPos_x = 0.0;
meshPos_y = 0.0;
meshPos_z = -2.0;
meshOri_x = -63.0;
meshOri_y = 0.0;
meshOri_z = 0.0;
CreateParams^ cp = gcnew CreateParams;
m_hDC = GetDC((HWND)parentForm->Handle.ToPointer());
System::String^ filename = "C:/Advantech/Desktop/Const.txt";
System::IO::StreamWriter^ csvWriter = gcnew System::IO::StreamWriter(filename, false, System::Text::Encoding::UTF8);
csvWriter->Write("Const");
csvWriter->Close();
if(m_hDC)
{
MySetPixelFormat(m_hDC);
ReSizeGLScene(iWidth, iHeight);
InitGL();
}
}
//custom function for transformations
System::Void Transform(float xTrans, float yTrans, float zTrans, float xRot, float yRot, float zRot)
{
//translate object
glTranslatef(xTrans, yTrans, zTrans);
//rotate along x-axis
glRotatef(xRot,1.0f,0.0f,0.0f);
//rotate along y-axis
glRotatef(yRot,0.0f,1.0f,0.0f);
//rotate along z-axis
glRotatef(zRot,0.0f,0.0f,1.0f);
}
System::Void Render(System::Void)
{
// Initial Settings
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear screen and depth buffer
glLoadIdentity();
meshOri_z = meshOri_z + 1;
// Set position and orientation of 3D mesh
Transform( meshPos_x,
meshPos_y,
meshPos_z,
meshOri_x,
meshOri_y,
meshOri_z );
glScalef( 0.05, 0.05, 0.05 );
int meshSize = 200;
// create 3D mesh Toplayer
for (int x = 1; x < meshSize; x++) {
for (int z = 1; z < meshSize; z++) {
glBegin(GL_QUADS);
int dm = 1;
glColor3f(dm,dm,dm);
glVertex3f( x, z, dm );
glVertex3f( (x+1), z, dm );
glVertex3f( (x+1), (z+1), dm );
glVertex3f( x, (z+1), dm );
glEnd();
}
}
}
System::Void SwapOpenGLBuffers(System::Void)
{
SwapBuffers(m_hDC) ;
}
private:
HDC m_hDC;
HGLRC m_hglrc;
protected:
~COpenGL(System::Void)
{
System::String^ filename = "C:/Advantech/Desktop/Dest.txt";
System::IO::StreamWriter^ csvWriter = gcnew System::IO::StreamWriter(filename, false, System::Text::Encoding::UTF8);
csvWriter->Write("Dest");
csvWriter->Close();
wglDeleteContext(m_hglrc);
DeleteDC(m_hDC);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
this->DestroyHandle();
}
GLint MySetPixelFormat(HDC hdc)
{
static PIXELFORMATDESCRIPTOR pfd= // pfd Tells Windows How We Want Things To Be
{
sizeof(PIXELFORMATDESCRIPTOR), // Size Of This Pixel Format Descriptor
1, // Version Number
PFD_DRAW_TO_WINDOW | // Format Must Support Window
PFD_SUPPORT_OPENGL | // Format Must Support OpenGL
PFD_DOUBLEBUFFER, // Must Support Double Buffering
PFD_TYPE_RGBA, // Request An RGBA Format
16, // Select Our Color Depth
0, 0, 0, 0, 0, 0, // Color Bits Ignored
0, // No Alpha Buffer
0, // Shift Bit Ignored
0, // No Accumulation Buffer
0, 0, 0, 0, // Accumulation Bits Ignored
16, // 16Bit Z-Buffer (Depth Buffer)
0, // No Stencil Buffer
0, // No Auxiliary Buffer
PFD_MAIN_PLANE, // Main Drawing Layer
0, // Reserved
0, 0, 0 // Layer Masks Ignored
};
GLint iPixelFormat;
// get the device context's best, available pixel format match
if((iPixelFormat = ChoosePixelFormat(hdc, &pfd)) == 0)
{
MessageBox::Show("ChoosePixelFormat Failed");
return 0;
}
// make that match the device context's current pixel format
if(SetPixelFormat(hdc, iPixelFormat, &pfd) == FALSE)
{
MessageBox::Show("SetPixelFormat Failed");
return 0;
}
if((m_hglrc = wglCreateContext(m_hDC)) == NULL)
{
MessageBox::Show("wglCreateContext Failed");
return 0;
}
if((wglMakeCurrent(m_hDC, m_hglrc)) == NULL)
{
MessageBox::Show("wglMakeCurrent Failed");
return 0;
}
return 1;
}
bool InitGL(GLvoid) // All setup for opengl goes here
{
glShadeModel(GL_SMOOTH); // Enable smooth shading
glClearColor(0.0f, 0.0f, 0.0f, 0.5f); // Black background
glClearDepth(1.0f); // Depth buffer setup
glEnable(GL_DEPTH_TEST); // Enables depth testing
glDepthFunc(GL_LEQUAL); // The type of depth testing to do
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST); // Really nice perspective calculations
return TRUE; // Initialisation went ok
}
GLvoid ReSizeGLScene(GLsizei width, GLsizei height) // Resize and initialise the gl window
{
if (height==0) // Prevent A Divide By Zero By
{
height=1; // Making Height Equal One
}
glViewport(0,0,width,height); // Reset The Current Viewport
glMatrixMode(GL_PROJECTION); // Select The Projection Matrix
glLoadIdentity(); // Reset The Projection Matrix
// Calculate The Aspect Ratio Of The Window
gluPerspective(45.0f,(GLfloat)width/(GLfloat)height,0.1f,100.0f);
glMatrixMode(GL_MODELVIEW); // Select The Modelview Matrix
glLoadIdentity(); // Reset The Modelview Matrix
}
System::Void detectBlack(){
}
};
}
The first form only contains a button to open the second form:
private: System::Void button2_Click(System::Object^ sender, System::EventArgs^ e) {
Form1^ freaker = gcnew Form1();
freaker->ShowDialog();
}
and here is the code of the second form:
private: System::Void Form1_Load(System::Object^ sender, System::EventArgs^ e) {
}
void Form1_Closing( Object^ /*sender*/, System::ComponentModel::CancelEventArgs^ e )
{
this->OpenGL->~COpenGL();
}
//Time tick for play button option
private: System::Void timer1_Tick(System::Object^ sender, System::EventArgs^ e){
UNREFERENCED_PARAMETER(sender);
UNREFERENCED_PARAMETER(e);
OpenGL->Render();
OpenGL->SwapOpenGLBuffers();
}
After one week of searching and trying things out, here is the answer:
I found a class for destroying OpenGL (wgl) objects on Windows. This class alone did not solve the problem: I had to call all the commands, then swap buffers by calling SwapBuffers(m_hDC), and then call all the commands again. Later on I realized that it is enough to just set g_hDC = NULL; and g_hWnd = NULL;. This is how my destructor looks now:
~COpenGL(System::Void){
if (g_bFullscreen) // Are We In Fullscreen Mode?
{
ChangeDisplaySettings(NULL,0); // If So Switch Back To The Desktop
ShowCursor(TRUE); // Show Mouse Pointer
}
if (g_hRC) // Do We Have A Rendering Context?
{
if (!wglMakeCurrent(NULL,NULL)) // Are We Able To Release The DC And RC Contexts?
{
MessageBox(NULL,TEXT("Release Of DC And RC Failed."),TEXT("SHUTDOWN ERROR"),MB_OK | MB_ICONINFORMATION);
}
if (!wglDeleteContext(g_hRC)) // Are We Able To Delete The RC?
{
MessageBox(NULL,TEXT("Release Rendering Context Failed."),TEXT("SHUTDOWN ERROR"),MB_OK | MB_ICONINFORMATION);
}
g_hRC=NULL; // Set RC To NULL
}
if (g_hDC && !ReleaseDC(g_hWnd,g_hDC)) // Are We Able To Release The DC
{
MessageBox(NULL,TEXT("Release Device Context Failed."),TEXT("SHUTDOWN ERROR"),MB_OK | MB_ICONINFORMATION);
g_hDC=NULL; // Set DC To NULL
}
if (g_hWnd && !DestroyWindow(g_hWnd)) // Are We Able To Destroy The Window?
{
MessageBox(NULL,TEXT("Could Not Release hWnd."),TEXT("SHUTDOWN ERROR"),MB_OK | MB_ICONINFORMATION);
g_hWnd=NULL; // Set hWnd To NULL
}
if (!UnregisterClass(TEXT("OpenGL"),g_hInstance)) // Are We Able To Unregister Class
{
MessageBox(NULL,TEXT("Could Not Unregister Class."),TEXT("SHUTDOWN ERROR"),MB_OK | MB_ICONINFORMATION);
g_hInstance=NULL; // Set hInstance To NULL
}
SwapBuffers(g_hDC);
g_hDC=NULL;
g_hWnd=NULL;
}
Problem: I would like to work with vertex colors delivered by Maya in my CgFX shader. It should be a trivial problem, but I have had no luck so far.
Attempt: Below I have written up a super simple shader which should just display the raw vertex colors. The result is pure white, though.
Details: Internally the shader works fine; if I set the vertex color to red in the VS it comes through just fine, so the issue is getting the value from Maya. I also tried both COLOR and COLOR0.
Any help or guidance appreciated.
// string Category = "Effects\\Cg\\BRDF";
// string keywords = "texture";
string description = "Pure vertex color";
//////////////////////////////////////////////////////////
// untweakables //////////////////////////////////////////
//////////////////////////////////////////////////////////
float4x4 WorldViewProjXf : WorldViewProjection < string UIWidget="none";>;
/****************************************************/
/********** SAMPLERS ********************************/
/****************************************************/
texture ColorTex : Diffuse
<
string ResourceName = "default_color.dds";
string ResourceType = "2D";
>;
sampler2D ColorSampler = sampler_state
{
Texture = <ColorTex>;
MagFilter = Linear;
MinFilter = LinearMipmapLinear;
};
/****************************************************/
/********** CG SHADER FUNCTIONS *********************/
/****************************************************/
/**************************************/
/***** SHARED STRUCT ******************/
/**** Data from app vertex buffer *****/
/**** for all passes *****/
/**************************************/
struct appData {
float3 Position : POSITION;
float4 VertColor: COLOR;
};
/****************************************/
/****************************************/
// vertex->fragment registers used for this pass only
struct outVertexData {
float4 HPosition : POSITION;
float4 vertColor : TEXCOORD1;
};
/****************************************/
/****************************************/
outVertexData textureVS(appData IN)
{
outVertexData OUT;
OUT.vertColor = IN.VertColor;
//OUT.vertColor = float4(1.0f,0.0f,0.0f,0.0f);
float4 Po = float4(IN.Position.xyz,1.0);
OUT.HPosition = mul(WorldViewProjXf, Po);
return OUT;
}
float4 texturePS(outVertexData IN) : COLOR
{
return IN.vertColor;
}
/****************************************************/
/********** TECHNIQUES ******************************/
/****************************************************/
technique main {
pass p0 {
VertexProgram = compile arbvp1 textureVS();
DepthTestEnable = true;
DepthMask = true;
CullFaceEnable = false;
FragmentProgram = compile arbfp1 texturePS();
}
}
/***************************** eof ***/
I had the same problem. Try setting the color in the Vertex Data attribute section of the shader to 'color:colorSet1'; that solved it for me.