I'm trying to display text using SDL_ttf, OpenGL, and shaders. This is my code.
Vertex shader
#version 330
layout (location = 0) in vec2 AttribVertex;
layout (location = 1) in vec2 AttribCoordUV;
out vec2 CoordUV;
uniform mat4 UniformProjection;
uniform mat4 UniformModelView;
void main(){
    mat4 MVP = UniformProjection * UniformModelView;
    gl_Position = MVP * vec4(AttribVertex,1.0,1.0);
    CoordUV = AttribCoordUV;
}
Fragment shader
#version 330
uniform sampler2D UniformTexture;
in vec2 CoordUV;
out vec4 FragColor;
void main(){
    FragColor = texture(UniformTexture,CoordUV);
}
TextRender function
void TextRender(string Text){
    TTF_Font *Font = TTF_OpenFont("assets/Fonts/Roboto-Bold.ttf",72);
    if(Font == NULL){
        cout << "true type font not found" << endl;
        return;
    }
    SDL_Color Color;
    Color.r = 255;
    Color.g = 255;
    Color.b = 255;
    Color.a = 255;
    SDL_Surface *Surface = TTF_RenderText_Blended(Font,Text.c_str(),Color);
    if(Surface == NULL){
        cout << "Cannot create surface!" << endl;
        TTF_CloseFont(Font);
        return;
    }
    GLfloat vertice[8] = {-(GLfloat)Surface->w/2.0f, (GLfloat)Surface->h/2.0f,
                           (GLfloat)Surface->w/2.0f, (GLfloat)Surface->h/2.0f,
                          -(GLfloat)Surface->w/2.0f,-(GLfloat)Surface->h/2.0f,
                           (GLfloat)Surface->w/2.0f,-(GLfloat)Surface->h/2.0f};
    GLfloat coordtex[8] = {0.0f,0.0f,
                           1.0f,0.0f,
                           0.0f,1.0f,
                           1.0f,1.0f};
    GLfloat buffer[16] = {vertice[0],vertice[1],coordtex[0],coordtex[1],
                          vertice[2],vertice[3],coordtex[2],coordtex[3],
                          vertice[4],vertice[5],coordtex[4],coordtex[5],
                          vertice[6],vertice[7],coordtex[6],coordtex[7]};
    GLuint vbo,vao;
    glGenVertexArrays(1,&vao);
    glGenBuffers(1,&vbo);
    glBindVertexArray(vao);
    glBindBuffer(GL_ARRAY_BUFFER,vbo);
    glBufferData(GL_ARRAY_BUFFER,sizeof(buffer),buffer,GL_STATIC_DRAW);
    glVertexAttribPointer(attribtVRT,2,GL_FLOAT,GL_FALSE,4 * sizeof(GLfloat),(void*)0);
    glEnableVertexAttribArray(attribtVRT);
    glVertexAttribPointer(attribtCUV,2,GL_FLOAT,GL_FALSE,4 * sizeof(GLfloat),(void*)(2*sizeof(GLfloat)));
    glEnableVertexAttribArray(attribtCUV);
    glBindBuffer(GL_ARRAY_BUFFER,0);
    glBindVertexArray(0);
    GLuint Texture;
    glGenTextures(1,&Texture);
    glBindTexture(GL_TEXTURE_2D,Texture);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA,Surface->w,Surface->h,0,GL_RGBA,GL_UNSIGNED_BYTE,Surface->pixels);
    glBindTexture(GL_TEXTURE_2D,0);
    glm::mat4 Model = glm::translate(glm::mat4(1.0f),glm::vec3(100.0f,100.0f,-0.1f));
    glm::mat4 ModelView = mxGlobal.Matriz(VIEW) * Model;
    glm::mat4 Projection = mxGlobal.Matriz(PROYECTION);
    glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA);
    glEnable(GL_BLEND);
    Shader->Use(true);
    glUniformMatrix4fv(uniformPRY,1,GL_FALSE,&Projection[0][0]);
    glUniformMatrix4fv(uniformMVW,1,GL_FALSE,&ModelView[0][0]);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D,Texture);
    glUniform1i(uniformTEX,0);
    glBindVertexArray(vao);
    glDrawArrays(GL_TRIANGLE_STRIP,0,4);
    glBindVertexArray(0);
    glBindTexture(GL_TEXTURE_2D,0);
    Shader->Use(false);
    glDeleteTextures(1,&Texture);
    glDeleteVertexArrays(1,&vao);
    glDeleteBuffers(1,&vbo);
    glDisable(GL_BLEND);
    TTF_CloseFont(Font);
    SDL_FreeSurface(Surface);
}
Main.cpp
#include <SDL2/SDL.h>
#include <SDL2/SDL_opengl.h>
#include <SDL2/SDL_opengl_glext.h>
#include <SDL2/SDL_ttf.h>
#include <iostream>
using namespace std;
int main(){
    SDL_GLContext ContextOGL;
    SDL_Window *Window;
    SDL_Renderer *Render;
    SDL_Event Event;
    if(SDL_Init(SDL_INIT_EVERYTHING) < 0){
        cout << "Unable to start SDL!" << endl;
        return false;
    }
    Window = SDL_CreateWindow("Project",SDL_WINDOWPOS_CENTERED,SDL_WINDOWPOS_CENTERED,1280,720,SDL_WINDOW_FULLSCREEN_DESKTOP | SDL_WINDOW_OPENGL | SDL_WINDOW_BORDERLESS);
    if(TTF_Init()){
        SDL_DestroyWindow(Window);
        return -1;
    }
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION,3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION,3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK,SDL_GL_CONTEXT_PROFILE_CORE);
    glHint(GL_PERSPECTIVE_CORRECTION_HINT,GL_NICEST);
    SDL_GL_SetAttribute(SDL_GL_RED_SIZE,8);
    SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE,8);
    SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE,8);
    SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE,8);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE,24);
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER,1);
    ContextOGL = SDL_GL_CreateContext(Window);
    SDL_GL_SetSwapInterval(1);
    GLfloat r = 255;
    GLfloat g = 255;
    GLfloat b = 255;
    glDepthFunc(GL_LEQUAL);
    glEnable(GL_DEPTH_TEST);
    glClearDepthf(1.0f);
    glClearColor(r/255.0f,g/255.0f,b/255.0f,1.0f);
    bool open = true;
    while(open){
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        TextRender("Hello World!");
        while(SDL_PollEvent(&Event) != 0){
            if(Event.type == SDL_KEYDOWN){
                if(Event.key.keysym.sym == SDLK_ESCAPE){
                    open = false;
                }
            }
            if(Event.type == SDL_QUIT){
                open = false;
            }
        }
        SDL_GL_SwapWindow(Window);
    }
    SDL_GL_DeleteContext(ContextOGL);
    SDL_DestroyWindow(Window);
    TTF_Quit();
    SDL_Quit();
    return 0;
}
But when I run my program, I get this...
I have tried changing the parameters of the glTexImage2D call (GL_RGBA, GL_BGRA, etc.), for example:
glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA,Surface->w,Surface->h,0,GL_RGBA,GL_UNSIGNED_BYTE,Surface->pixels);
or
glTexImage2D(GL_TEXTURE_2D,0,GL_BGRA,Surface->w,Surface->h,0,GL_RGBA,GL_UNSIGNED_BYTE,Surface->pixels);
or
glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA,Surface->w,Surface->h,0,GL_BGRA,GL_UNSIGNED_BYTE,Surface->pixels);
or
glTexImage2D(GL_TEXTURE_2D,0,GL_BGRA,Surface->w,Surface->h,0,GL_BGRA,GL_UNSIGNED_BYTE,Surface->pixels);
And I've even used code like this to pick the format from the surface:
GLuint colours = image->format->BytesPerPixel;
GLuint externalFormat, internalFormat;
SDL_PixelFormat *format = image->format;
if (colours == 4) {
    if (image->format->Rmask == 0x000000ff)
        externalFormat = GL_RGBA;
    else
        externalFormat = GL_BGRA;
}
else {
    // no alpha
    if (image->format->Rmask == 0x000000ff)
        externalFormat = GL_RGB;
    else
        externalFormat = GL_BGR;
}
internalFormat = (colours == 4) ? GL_RGBA : GL_RGB;
I have also used
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
But none of this seems to work.
My question is, what am I doing wrong?
TTF_RenderText_Blended returns an ARGB surface, therefore:

glTexImage2D(target, level, internal_format, width, height, 0, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, image);

This tells OpenGL to interpret the image as BGRA pixels packed into 32-bit unsigned integers (8 bits per channel, 4 * 8 = 32 bits) and to read the components in reverse order on the fly (hence the _REV), so that the surface's ARGB data ends up in a layout OpenGL can work with.
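For reference, here is a minimal sketch of what the texture-upload part of TextRender could look like with that pixel type. Surface is the result of TTF_RenderText_Blended from the question; the GL_UNPACK_ROW_LENGTH lines are an extra assumption on my part and only matter if the surface rows are padded (Surface->pitch != Surface->w * 4).

GLuint Texture;
glGenTextures(1, &Texture);
glBindTexture(GL_TEXTURE_2D, Texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Assumed safety net: ARGB8888 is 4 bytes per pixel, so pitch / 4 is the real row length in pixels.
glPixelStorei(GL_UNPACK_ROW_LENGTH, Surface->pitch / 4);
// The key change: GL_BGRA + GL_UNSIGNED_INT_8_8_8_8_REV matches the ARGB8888 layout of the surface.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, Surface->w, Surface->h, 0,
             GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, Surface->pixels);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0); // restore the default
glBindTexture(GL_TEXTURE_2D, 0);

The rest of TextRender should be able to stay as it is; only the pixel format and type passed to glTexImage2D need to change.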