I would like some entry level information about the OpenGI library which converts a 3D mesh to a 2D texture map.
http://opengi.sourceforge.net/doc/index.html
At this point I have implemented an example on a specific mesh and it seems to work perfectly. I would like to process this image further and I need to export it as a BMP file.
I have imported a mesh and created:
texture with the parameterized geometry
texture with the normals
At this point I am confused about how to handle them. They are stored as OpenGL textures.
Here's the source code:
// Allocate an empty RGBA32F texture and hand it to OpenGI as the target
// image for the parameterization sampling pass.
glBindTexture(GL_TEXTURE_2D, uiTex[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
// NULL data pointer: storage only — res x res texels, 4 float channels.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, res, res, 0, GL_RGBA, GL_FLOAT, NULL);
giGenImages(3, gim);
giBindImage(gim[0]);
// Use the GL texture above as the backing store of the bound GI image.
giImageGLTextureData(res, res, 4, GL_FLOAT, uiTex[0]);
// Route mesh attribute 0 into image gim[0].
giAttribImage(0, gim[0]);
giAttribSamplerParameteri(0, GI_SAMPLING_MODE, GI_SAMPLE_DEFAULT);
// Rasterize the parameterized mesh attributes into the attached image(s);
// the result ends up inside the GL texture uiTex[0].
giSample();
How can I export the texture?
Related
I'm new to OpenGL/GLES. I got an Incomplete Missing Attachment error when generating a framebuffer from an EGLImageKHR with the code below:
GLuint texture;
GLuint framebuffer;
// Wrap a native pixmap in an EGLImage so it can back a GL texture.
EGLImageKHR image = eglCreateImageKHR(display,
EGL_NO_CONTEXT,
EGL_NATIVE_PIXMAP_KHR,
(EGLClientBuffer)&pixmap,
NULL);
assert(image != EGL_NO_IMAGE_KHR);
glGenTextures(1, &texture);
// NOTE(review): this generates a *texture* name for `framebuffer`; it
// should almost certainly be glGenFramebuffers(1, &framebuffer).
glGenTextures(1, &framebuffer);
// NOTE(review): no external-OES texture has been bound yet, so these
// parameter calls affect whatever texture (if any) is currently bound,
// not the one created above.
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
// NOTE(review): `textureId` is not declared in this snippet; the texture
// generated above is named `texture` — presumably a typo.
glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
// Attach the EGLImage as the storage of the bound external texture.
glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glFramebufferTexture2D(GL_FRAMEBUFFER,
GL_COLOR_ATTACHMENT0,
GL_TEXTURE_EXTERNAL_OES,
texture,
0);
glCheckFramebufferStatus(GL_FRAMEBUFFER);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
// The texture keeps a reference to the image's storage; the EGLImage
// handle itself can be destroyed once attached.
eglDestroyImageKHR(display,image);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
I got GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT when using:
glFramebufferTexture2D(GL_FRAMEBUFFER,
GL_COLOR_ATTACHMENT0,
GL_TEXTURE_EXTERNAL_OES,
texture,
0);
and GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT when changing texture to GL_TEXTURE_2D:
glFramebufferTexture2D(GL_FRAMEBUFFER,
GL_COLOR_ATTACHMENT0,
GL_TEXTURE_2D,
texture,
0);
The image and texture is correct as I can display correctly. I don't know what I'm missing here.
I just found the answer, as it is explained in Raspi forum:
Can't render to render buffer
We have to use GL_TEXTURE_2D in this function:
glFramebufferTexture2D(GL_FRAMEBUFFER,
GL_COLOR_ATTACHMENT0,
GL_TEXTURE_2D,
texture,
0);
and have to create an empty texture GL_TEXTURE_2D to bind our framebuffer to that texture before rendering. GL_TEXTURE_EXTERNAL_OES and GL_TEXTURE_2D are different textures, cannot mix them together.
I'm knocking my head on this code since two days ago. It seems that there's no error of any sort creating buffers or textures, but the texture doesn't show.
Here is my code for the texture load:
// Load a BMP from disk and upload it as an RGB8 2D texture.
struct image2d texImage = loadBMPImage(filePath);
glActiveTexture(GL_TEXTURE0);
glGenTextures(1, &(result.external->texID));
// NOTE(review): these parameter calls happen *before* the new texture is
// bound, so they apply to whatever texture was previously bound (or none);
// the new texture keeps the default GL_NEAREST_MIPMAP_LINEAR min filter,
// which makes it mipmap-incomplete — this is the bug discussed below.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, result.external->texID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, texImage.width, texImage.height, 0, GL_RGB, GL_UNSIGNED_BYTE, texImage.pixels);
// Pixel data has been copied into the GL texture; the CPU copy can go.
free(texImage.pixels);
The image2d structure is this one
// CPU-side image container: tightly packed 8-bit RGB pixel data.
struct image2d{
unsigned int width, height;  // dimensions in pixels
unsigned char* pixels;       // heap-allocated; caller frees after upload
};
Yes, I'm enabling GL_TEXTURE_2D via glEnable()
Then my mesh is drawn with this code
/*
 * Draw an indexed mesh.
 *
 * m          - mesh whose VAO/VBO/IBO handles live in m.external
 * renderType - GL primitive type (e.g. GL_TRIANGLES)
 *
 * Fix: the renderType parameter was previously ignored and GL_TRIANGLES
 * was hard-coded in glDrawElements. The only visible caller (RenderGame)
 * passes GL_TRIANGLES, so honoring the parameter is backward-compatible.
 */
void MeshDraw(Mesh m, int renderType)
{
    glBindVertexArray(m.external->vao);
    glBindBuffer(GL_ARRAY_BUFFER, m.external->vbo);

    /* attribute 0: position, 3 floats at byte offset 0 */
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, VERTEX_SIZE*4, 0);
    /* attribute 1: texcoord, 2 floats at byte offset 12 (after 3 floats) */
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, VERTEX_SIZE*4, (void*)12);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m.external->ibo);
    /* sizeFc is the face count; 3 indices per triangle face */
    glDrawElements(renderType, m.external->sizeFc * 3, GL_UNSIGNED_SHORT, 0);

    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
}
And finally here is my vertex shader
#version 430 core
// Vertex shader: transform positions, pass UVs and raw positions through.
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 inTexCoord;
uniform mat4 transform;  // projected model transform (set per frame)
out vec2 texCoord;       // interpolated UV for the fragment stage
out vec3 outPos;         // untransformed position (used for debug coloring)
void main(void)
{
outPos = position;
gl_Position = transform * vec4(position, 1.0);
texCoord = inTexCoord;
}
And here is my fragment shader
#version 430 core
// Fragment shader: sample the bound 2D texture at the interpolated UV.
out vec4 drawColor;
in vec2 texCoord;
in vec3 outPos;
uniform sampler2D sampler;  // texture unit index, set via glUniform1i
void main(void)
{
drawColor = texture(sampler, texCoord);
// Debug alternative: visualize object-space position as color.
//drawColor = vec4(clamp(outPos, 0.0, 1.0), 1.0);
}
If you need to look at the whole project I'm posting it here
I'll appreciate any kind of help :)
Additional code (which is also in download if anyone wants to see it)
/* One-time GL state setup; called right after context + GLEW creation. */
void initOpenGL()
{
printf("OpenGL version: %s\n",glGetString(GL_VERSION));
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
/* Winding/culling: clockwise faces are front-facing, back faces culled. */
glFrontFace(GL_CW);
glCullFace(GL_BACK);
glEnable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
/* Only meaningful for the fixed-function pipeline; harmless with shaders. */
glEnable(GL_TEXTURE_2D);
glEnable(GL_FRAMEBUFFER_SRGB);
return;
}
This is called right after making the context and initializing glew.
/* Per-frame render: clear, bind shader + texture, draw mesh, present. */
void RenderGame(Game g)
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
ShaderBind(g.external->sh);
/* Upload the current projected model transform to the bound shader. */
setUniformmat4(g.external->sh, "transform", TransformGetProjectedTransformation(g.external->transf));
TextureBind(g.external->texture);
MeshDraw(g.external->msh, GL_TRIANGLES);
glFlush();
glfwSwapBuffers(WindowGetHandler(g.external->window));
return;
}
And this is my render method.
Your texture is not mipmap-complete, but you are still using the default GL_NEAREST_MIPMAP_LINEAR minification filter, so sampling the texture will fail.
You try to set it to GL_NEAREST, but this sequence of operations is wrong:
glGenTextures(1, &(result.external->texID));
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, result.external->texID);
In the GL, the texture sampler state is part of the texture object itself (there are also separate sampler objects available nowadays which override that state, but you don't seem to use them either), and glTexParameteri() affects the currently bound texture object at the time of the call. I don't know if some texture is bound at that time, or none at all - but certainly, the new texture is not, so it will stick with the initial default of GL_NEAREST_MIPMAP_LINEAR...
SOLUTION It seems that #peppe was right all the time. Just to be meticulous I set the sampler to 0 with the setUniform call and it worked. The problem is that it didn't work as expected, and it was because the function that loads the bitmap file was wrong. Now it works like a charm :) Thank you guys!
I'm trying to write some basic shaders to map a ppm file to my shapes. Unfortunately, instead of a nice multicoloured texture (I'm using a stone brick pattern), I get a solid shade of dark purple.
Here's my code:
Init:
printf("Using %d: Texture shading\n", shaderType);
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &textName);
int w, h;
// glmReadPPM returns the decoded RGB pixel data and fills in w/h.
texture = glmReadPPM("brick.ppm", &w, &h);
// NOTE(review): the new texture `textName` is not bound yet, so every
// parameter/env/image call below targets whatever texture is currently
// bound (the default texture 0) — bind textName right after glGenTextures.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT );
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
printf("W%dH%d\n", w, h);
// NOTE(review): internalformat "3" is the legacy component count — prefer
// an explicit sized format such as GL_RGB8.
glTexImage2D(GL_TEXTURE_2D, 0, 3, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, texture);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textName);
programID = LoadShaders("text.vert", "text.frag");
Render:
glClearColor( 0.6f, 0.85f, 1.0f, 1.0f );
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glMatrixMode (GL_MODELVIEW);
glLoadIdentity();
/*Unrelated code here*/
glUseProgram(programID);
varloc = glGetUniformLocation(programID,"texture1");
// NOTE(review): a sampler uniform takes the texture *unit* index
// (0 for GL_TEXTURE0), not the texture object name `textName`.
glUniform1i(varloc, textName);
glLightfv(GL_LIGHT0, GL_SPOT_CUTOFF, &cutOff);
// Fixed-function camera setup (deprecated pipeline, per the author).
gluLookAt (posx, posy, zoom,
lookx,looky,0,
0,1,0);
glRotatef(anglex,0.0f,1.0f,0.0f);
glRotatef(angley,1.0f,0.0f,0.0f);
renderTriangles(); //Renders mountains from a list using intermediate mode
// Yes, I know it's deprecated
glutSwapBuffers();
glui->sync_live();
glUseProgram(0);
Vertex Shader:
varying vec2 uv;
void main() {
uv = vec2(gl_MultiTexCoord0.st);
gl_Position = ftransform();
}
Fragment Shader:
uniform sampler2D texture1;
varying vec2 uv;
void main() {
gl_FragColor = texture2D(texture1, uv);
}
Does anyone see any problems here? I can't seem to figure it out.
I tried with a basic white and red 2x2 float texture, but again, I got one colour. It was a light red.
If you're getting a single colour for the whole object, there might be something wrong with the texture coordinates. I would try looking at them and see if they're correct. You can do that by modifying your fragment shader like this:
gl_FragColor = vec3(uv.xy, 0);
If your whole image is still rendered using one colour, there is something wrong with the way you're sending texture coordinates across. You're using some deprecated functionality (immediate mode, gl_MultiTexCoord0), maybe it's not working together as you would expect:
"Keep in mind that for GLSL 1.30, you should define your own vertex attribute." http://www.opengl.org/wiki/GLSL_:_common_mistakes
It looks like you are binding the texture after you have all of the other texture functions. You should put the call to glBindTexture right after the call to glGenTextures because you have to bind a texture before you can upload the image into it. The other problem is that instead of setting the uniform variable for your sampler to textName in the call to glUniform1i(varloc, textName) you should set it to 0 because that variable represents the active texture unit and you used glActiveTexture(GL_TEXTURE0);
I am using this answer to embed my image in the .exe:
Embedding resources in executable using GCC
Here are the relevant bits of my code:
GLuint grass_DTexture;
extern char binary_grass_D_bmp_start[];
extern char binary_grass_D_bmp_size[];
// Load an embedded BMP (linker-generated symbols) into a GL texture via SOIL.
// NOTE(review): sizeof(*imageData) is the size of a char* (4/8 bytes), not
// the size of the embedded image — the real length is
// binary_grass_D_bmp_size (i.e. end - start), so SOIL only ever sees a
// truncated buffer here.
short loadTexture(char *imageData[], GLuint *texture) {
*texture = SOIL_load_OGL_texture_from_memory(imageData, sizeof(*imageData), SOIL_LOAD_AUTO, SOIL_CREATE_NEW_ID, SOIL_FLAG_INVERT_Y | SOIL_FLAG_COMPRESS_TO_DXT);
// NOTE(review): no glBindTexture(*texture) before these calls, so the wrap
// modes apply to whichever texture happens to be bound at the time.
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
return 1;
}
loadTexture(&binary_grass_D_bmp_start, &grass_DTexture);
However the texture does not load and I just get a whitish grey surface.
EDIT: I know this was not my original question, however I am having difficulty using mipmaps. I modified my loadTexture function to this:
// Second attempt: pre-create a texture, set its parameters, then load the
// embedded image with mipmaps via SOIL.
short loadTexture(char *imageData[], GLuint *texture, int width, int height) {
glEnable(GL_TEXTURE_2D);
glGenTextures(1, texture);
glBindTexture(GL_TEXTURE_2D, *texture);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
// NOTE(review): SOIL_CREATE_NEW_ID makes SOIL allocate *another* texture
// id, overwriting the one generated above — whose parameters are then
// lost. Also sizeof(*imageData) is still just the pointer size, not the
// embedded image's byte count; this mismatch matches the reported crash
// inside SOIL_load_OGL_texture_from_memory.
*texture = SOIL_load_OGL_texture_from_memory(imageData, sizeof(*imageData), SOIL_LOAD_AUTO, SOIL_CREATE_NEW_ID, SOIL_FLAG_INVERT_Y | SOIL_FLAG_COMPRESS_TO_DXT | SOIL_FLAG_MIPMAPS);
//gluBuild2DMipmaps(GL_TEXTURE_2D, 3, width, height, GL_RGB, GL_UNSIGNED_BYTE, imageData);
//glTexStorage2D(GL_TEXTURE_2D, 3, GL_RGBA8, width, height);
//glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_BGRA, GL_UNSIGNED_BYTE, imageData);
//glGenerateMipmap(GL_TEXTURE_2D);
glDisable(GL_TEXTURE_2D);
return 1;
}
This time, the exe crashes as soon as it is run. I am also greeted with the following warning, which could be the cause of this (probably missing an * or an & somewhere):
init.c:76:2: warning: passing argument 1 of 'loadTexture' from incompatible poin
ter type [enabled by default]
loadTexture.h:1:7: note: expected 'char **' but argument is of type 'char (*)[]'
I have managed to narrow the crash to the SOIL_load_OGL_texture_from_memory call.
GL_TEXTURE_MIN_FILTER defaults to GL_NEAREST_MIPMAP_LINEAR​.
Upload some mipmaps or switch to GL_NEAREST or GL_LINEAR.
I am trying to write depth to a texture. I would like to have the linear depth, so I tried using I tried using a R16F texture. I defined a texture like this:
// Allocate a half-float single-channel (R16F) texture for linear depth,
// attach it as the FBO's color target, and size the depth renderbuffer.
// Requires EXT_color_buffer_half_float for renderability on GLES.
glTexImage2D(GL_TEXTURE_2D, 0, GL_R16F_EXT, g_bufferWidth, g_bufferHeight, 0,
GL_RED_EXT, GL_HALF_FLOAT_OES, NULL);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
GL_TEXTURE_2D, g_texture, 0);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16,
g_bufferWidth, g_bufferHeight);
But when debugging on Xcode by using frame capture on an iPhone5, I get an Unknown texture in the color buffer, and nothing is written to the depth buffer.
I've also tried just creating a depth texture:
// Alternative: a depth texture attached directly as the FBO depth
// attachment (needs the OES_depth_texture extension on GLES).
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, g_bufferWidth, g_bufferHeight, 0, GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, NULL);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, g_texture, 0);
But in this case also nothing seems to get written in the depth buffer.
The only way I can get things rendered to the depth buffer seems by defining the first texture as RGBA32...
Aren't the EXT_color_buffer_half_float and depth extensions active in iOS6??