I mixed Cocoa, GLUT and OpenGL frameworks to draw a teapot.
I usually do this in plain C, but I need to mix Cocoa buttons and other controls with OpenGL. This is the corresponding C code:
#import <OpenGL/OpenGL.h>
#import <GLUT/GLUT.h>
#import <math.h>
#include "utility.h"
GLuint width=640, height=480;
GLfloat angle=0.0;
void init()
{
glEnable(GL_DEPTH_TEST);
glViewport(-500, -500, 1000, 1000);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45, width/(float)height, 1, 1000);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0, 0, -100, 0, 0, 0, 0, 1, 0);
// Lights
glShadeModel(GL_FLAT);
glLightfv(GL_LIGHT0, GL_AMBIENT, (const GLfloat[]) {0,0,0} );
glLightfv(GL_LIGHT0, GL_DIFFUSE, (const GLfloat[]) {1,1,0} );
glLightfv(GL_LIGHT0, GL_SPECULAR, (const GLfloat[]) {0.5,0.5,0} );
glMaterialfv(GL_FRONT, GL_AMBIENT, (const GLfloat[]) {1,0,0} );
glMaterialfv(GL_FRONT, GL_DIFFUSE, (const GLfloat[]) {1,0.25,0} );
glMaterialfv(GL_FRONT, GL_SPECULAR, (const GLfloat[]) {1,0.75,0} );
glMaterialfv(GL_FRONT, GL_SHININESS, (const GLfloat[]) {1,1,0} );
glEnable(GL_LIGHT0);
glEnable(GL_LIGHTING);
}
void display()
{
glClearColor(BLACK);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glPushMatrix();
glRotatef(angle, 0, 1, 0);
glutSolidTeapot(10);
glPopMatrix();
glutSwapBuffers();
}
void keyboard(unsigned char key, int x, int y)
{
if(key=='+')
{
angle+=5.0;
}
else if(key=='-')
{
angle-=5.0;
}
else
{
return;
}
makeRound(&angle);
glutPostRedisplay();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGB | GLUT_DEPTH | GLUT_DOUBLE);
glutInitWindowPosition(100, 100);
glutInitWindowSize(width, height);
glutCreateWindow(argv[0]);
glutDisplayFunc(display);
glutKeyboardFunc(keyboard);
init();
glutMainLoop();
return 0;
}
And this is the result:
Then I subclass an NSOpenGLView and do the same as in the C code, just with different sizes; the materials and lights are the same:
@implementation MyView
@synthesize angle;
- (id)initWithFrame:(NSRect)frame
{
self = [super initWithFrame:frame];
if (self)
{
}
return self;
}
- (void) prepareOpenGL
{
NSOpenGLContext* context= self.openGLContext;
GLfloat width= self.bounds.size.width;
GLfloat height= self.bounds.size.height;
[context makeCurrentContext];
glEnable(GL_DEPTH_TEST);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45, width/height, 1, 1000);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0, 0, -100, 0, 0, 0, 0, 1, 0);
// Lighting
glShadeModel(GL_FLAT);
glLightfv(GL_LIGHT0, GL_AMBIENT, (const GLfloat[]) {0,0,0} );
glLightfv(GL_LIGHT0, GL_DIFFUSE, (const GLfloat[]) {1,1,0} );
glLightfv(GL_LIGHT0, GL_SPECULAR, (const GLfloat[]) {0.5,0.5,0} );
glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT, (const GLfloat[]) {1,0,0} );
glMaterialfv(GL_FRONT_AND_BACK, GL_DIFFUSE, (const GLfloat[]) {1,0.25,0} );
glMaterialfv(GL_FRONT_AND_BACK, GL_SPECULAR, (const GLfloat[]) {1,0.75,0} );
glMaterialfv(GL_FRONT_AND_BACK, GL_SHININESS, (const GLfloat[]) {1,1,0} );
glEnable(GL_LIGHT0);
glEnable(GL_LIGHTING);
}
- (void)drawRect:(NSRect)dirtyRect
{
glClearColor(0, 0, 0, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glPushMatrix();
glRotatef(angle, 0, 1, 0);
glutSolidTeapot(20);
glPopMatrix();
glFlush();
}
#pragma mark - Actions
- (IBAction) increaseAngle :(id)sender
{
angle+=5.0;
if(angle>360.0)
{
angle-=360.0;
}
[self setNeedsDisplay: YES];
}
- (IBAction) decreaseAngle:(id)sender
{
angle-=5.0;
if(angle<0.0)
{
angle+=360.0;
}
[self setNeedsDisplay: YES];
}
@end
And this is the result:
That's pretty ugly, and considering that I used the same materials and lights, I think I'm doing something wrong in the version that mixes Cocoa and OpenGL.
Make sure you request a depth buffer when you create your GL context. You may not get one by default.
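For an NSOpenGLView that means supplying a pixel format that asks for depth bits. A minimal sketch, assuming the view is created in code (the attribute values and the frame variable are illustrative):
NSOpenGLPixelFormatAttribute attrs[] = {
    NSOpenGLPFADoubleBuffer,
    NSOpenGLPFADepthSize, 24, // explicitly request a 24-bit depth buffer
    0
};
NSOpenGLPixelFormat *pf = [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs];
MyView *view = [[MyView alloc] initWithFrame:frame pixelFormat:pf];
If the view is loaded from a nib instead, the same attributes can be returned from an overridden +defaultPixelFormat.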
I'm trying to draw something using OpenGL with VAO and VBO objects. To do that, I just subclassed NSOpenGLView. By default OpenGL 2.1 was used, so I set a pixel format, wrote the code, and resolved all the reported errors. The app now runs, but nothing is drawn in the window; even glClearColor has no effect. Please help me find and solve the problem. My OS is macOS 10.12 with OpenGL 4.1.
MyOpenGLView.m, part 1:
#import "MyOpenGLView.h"
#include <OpenGL/gl3.h>
#include "error.h"
static const char *vertexShaderSource =
"#version 410\n"
"in vec3 posAttr;\n"
"in vec3 colAttr;\n"
"out vec3 col;\n"
"void main() {\n"
" col = colAttr;\n"
" gl_Position.xyz = posAttr;\n"
" gl_Position.w = 1.0;\n"
" col = colAttr;\n"
"}\n";
static const char *fragmentShaderSource =
"#version 410\n"
"in vec3 col;\n"
"out vec4 color;\n"
"void main() {\n"
" color.rgb = col;\n"
" color.a = 1.0;\n"
//" color = vec4(1,1,1,1);\n"
"}\n";
// Shader properties
GLuint m_posAttr;
GLuint m_colAttr;
GLuint program;
// Arrays of positions and colors
float fTriangle[9];
float fTriangleColor[9];
// Low-level VBOs and VBAs
GLuint uiVBO[2];
GLuint uiVAO[1];
GLuint makeShader(GLenum type, const char *source) {
GLuint shader;
shader = glCreateShader(type);
GetError();
glShaderSource(shader, 1, &source, NULL);
GetError();
glCompileShader(shader);
GetError();
#if defined(DEBUG)
GLint logLength;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
GetError();
if (logLength > 0) {
GLchar *log = malloc((size_t)logLength);
glGetShaderInfoLog(shader, logLength, &logLength, log);
GetError();
NSLog(#"Shader compilation failed with error:\n%s", log);
free(log);
}
#endif
GLint status;
glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
GetError();
if (0 == status) {
glDeleteShader(shader);
GetError();
NSLog(#"Shader compilation failed from code!");
assert(0);
}
return shader;
}
MyOpenGLView.m, part 2:
//// General stuff
@implementation MyOpenGLView
- (void)awakeFromNib {
// Positions and colors of figures
fTriangle[0] = -0.4f; fTriangle[1] = 0.1f; fTriangle[2] = 0.0f;
fTriangle[3] = 0.4f; fTriangle[4] = 0.1f; fTriangle[5] = 0.0f;
fTriangle[6] = 0.0f; fTriangle[7] = 0.7f; fTriangle[8] = 0.0f;
fTriangleColor[0] = 1.0f; fTriangleColor[1] = 0.0f; fTriangleColor[2] = 0.0f;
fTriangleColor[3] = 0.0f; fTriangleColor[4] = 1.0f; fTriangleColor[5] = 0.0f;
fTriangleColor[6] = 0.0f; fTriangleColor[7] = 0.0f; fTriangleColor[8] = 1.0f;
NSOpenGLPixelFormatAttribute attrs[] = {
NSOpenGLPFADoubleBuffer,
NSOpenGLPFADepthSize, 24,
NSOpenGLPFAOpenGLProfile,
NSOpenGLProfileVersion4_1Core,
0
};
NSOpenGLPixelFormat *pf = [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs];
if (!pf) {
NSLog(#"No OpenGL pixel format");
}
NSOpenGLContext* context = [[NSOpenGLContext alloc] initWithFormat:pf shareContext:nil];
[self setPixelFormat:pf];
[self setOpenGLContext:context];
NSLog(#"%s", glGetString(GL_VERSION));
GetError();
GLuint vertexShader = makeShader(GL_VERTEX_SHADER, vertexShaderSource);
GLuint fragmentShader = makeShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
program = glCreateProgram();
GetError();
glAttachShader(program, vertexShader);
GetError();
glAttachShader(program, fragmentShader);
GetError();
glLinkProgram(program);
GetError();
GLint status;
glGetProgramiv(program, GL_LINK_STATUS, &status);
GetError();
if (0 == status) {
NSLog(#"Failed to link shader program");
assert( 0 );
}
m_posAttr = glGetAttribLocation(program, "posAttr");
GetError();
m_colAttr = glGetAttribLocation(program, "colAttr");
GetError();
glGenVertexArrays(1, &uiVAO[0]);
glGenBuffers(2, &uiVBO[0]);
GetError();
}
- (void)drawRect:(NSRect)dirtyRect {
[super drawRect:dirtyRect];
glClearColor(0, 0, 0, 0);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
GetError();
glBindVertexArray(uiVAO[0]);
GetError();
glBindBuffer(GL_ARRAY_BUFFER, uiVBO[0]);
glBufferData(GL_ARRAY_BUFFER, 9*sizeof(float), fTriangle, GL_STATIC_DRAW);
glEnableVertexAttribArray(m_posAttr);
GetError();
glVertexAttribPointer(m_posAttr, 3, GL_FLOAT, GL_FALSE, 0, 0);
GetError();
glBindBuffer(GL_ARRAY_BUFFER, uiVBO[1]);
glBufferData(GL_ARRAY_BUFFER, 9*sizeof(float), fTriangleColor, GL_STATIC_DRAW);
glEnableVertexAttribArray(m_colAttr);
GetError();
glVertexAttribPointer(m_colAttr, 3, GL_FLOAT, GL_FALSE, 0, 0);
GetError();
glDrawArrays(GL_TRIANGLES, 0, 3);
GetError();
glDisableVertexAttribArray(m_posAttr);
glDisableVertexAttribArray(m_colAttr);
glFlush();
GetError();
}
- (void)dealloc {
glDeleteProgram(program);
glDeleteBuffers(2, uiVBO);
glDeleteVertexArrays(1, uiVAO);
GetError();
}
@end
error.h:
//// Prints OpenGL errors
#ifndef __ERROR_H__
#define __ERROR_H__
#include <stdlib.h>
#include <assert.h>
#ifdef DEBUG
#define GetError( )\
{\
for ( GLenum Error = glGetError( ); ( GL_NO_ERROR != Error ); Error = glGetError( ) )\
{\
switch ( Error )\
{\
case GL_INVALID_ENUM: printf( "\n%s\n\n", "GL_INVALID_ENUM" ); assert( 0 ); break;\
case GL_INVALID_VALUE: printf( "\n%s\n\n", "GL_INVALID_VALUE" ); assert( 0 ); break;\
case GL_INVALID_OPERATION: printf( "\n%s\n\n", "GL_INVALID_OPERATION" ); assert( 0 ); break;\
case GL_OUT_OF_MEMORY: printf( "\n%s\n\n", "GL_OUT_OF_MEMORY" ); assert( 0 ); break;\
default: break;\
}\
}\
}
#else
#define GetError( )
#endif /* DEBUG */
#endif /* __ERROR_H__ */
After a lot of effort and code modifications I figured it out: the only real problem was the NSOpenGLPFADoubleBuffer option in the NSOpenGLPixelFormatAttribute array. After commenting out that option I got the desired output.
To my mind, the reason is that I had two output buffers. The drawing code was executed a single time: the first buffer was painted and then the buffers were swapped, so the second, still empty buffer was the one displayed.
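An alternative sketch, assuming you want to keep NSOpenGLPFADoubleBuffer: finish drawRect: by asking the view's context to swap buffers instead of calling glFlush, so the freshly painted back buffer becomes visible on every redraw:
- (void)drawRect:(NSRect)dirtyRect {
    [super drawRect:dirtyRect];
    // ... draw as before ...
    [[self openGLContext] flushBuffer]; // swaps front/back buffers for a double-buffered pixel format
}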
I found this C++ code here. It draws a simple rectangle and works great.
#define GLEW_STATIC
#include <iostream>
#include <glew.h>
#include <freeglut.h>
const GLchar* vertexSource =
"#version 150 core\n"
"in vec2 position;"
"in vec3 color;"
"out vec3 Color;"
"void main() {"
" Color = color;"
" gl_Position = vec4(position, 0.0, 1.0);"
"}";
const GLchar* fragmentSource =
"#version 150 core\n"
"in vec3 Color;"
"out vec4 outColor;"
"void main() {"
" outColor = vec4(Color, 1.0);"
"}";
void Display()
{
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glFlush();
glutSwapBuffers();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_MULTISAMPLE);
glutInitWindowSize(800, 600);
glutInitContextVersion(4, 4);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow("OpenGL");
glewExperimental = GL_TRUE;
if (GLEW_OK != glewInit())
exit(1);
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLuint vbo;
glGenBuffers(1, &vbo);
GLfloat vertices[] = {
-0.5f, 0.5f, 1.0f, 0.0f, 0.0f, // Top-left
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, // Top-right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, // Bottom-right
-0.5f, -0.5f, 1.0f, 1.0f, 1.0f // Bottom-left
};
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
GLuint ebo;
glGenBuffers(1, &ebo);
GLuint elements[] = {
0, 1, 2,
2, 3, 0
};
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(elements), elements, GL_STATIC_DRAW);
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexSource, NULL);
glCompileShader(vertexShader);
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
glCompileShader(fragmentShader);
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glBindFragDataLocation(shaderProgram, 0, "outColor");
glLinkProgram(shaderProgram);
glUseProgram(shaderProgram);
GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
glEnableVertexAttribArray(posAttrib);
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), 0);
GLint colAttrib = glGetAttribLocation(shaderProgram, "color");
glEnableVertexAttribArray(colAttrib);
glVertexAttribPointer(colAttrib, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (void*)(2 * sizeof(GLfloat)));
glutDisplayFunc(Display);
glutMainLoop();
glDeleteProgram(shaderProgram);
glDeleteShader(fragmentShader);
glDeleteShader(vertexShader);
glDeleteBuffers(1, &ebo);
glDeleteBuffers(1, &vbo);
glDeleteVertexArrays(1, &vao);
}
So I decided to rewrite it in C# with the OpenTK framework:
using OpenTK;
using OpenTK.Graphics.OpenGL;
using System;
using System.IO;
namespace OpenTKTest
{
public class Game : GameWindow
{
private Int32 vao;
private Int32 vbo;
private Int32 ebo;
private Int32 vertexShader;
private Int32 fragmentShader;
private Int32 shaderProgram;
public Game() : base()
{
}
[STAThread]
public static void Main()
{
var window = new Game();
window.Run(120, 120);
window.Dispose();
}
protected override void OnLoad(EventArgs e)
{
GL.GenVertexArrays(1, out vao);
GL.BindVertexArray(vao);
GL.GenBuffers(1, out vbo);
var vertices = new Single[]
{
-0.5f, 0.5f, 1.0f, 0.0f, 0.0f, // Top-left
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, // Top-right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, // Bottom-right
-0.5f, -0.5f, 1.0f, 1.0f, 1.0f // Bottom-left
};
GL.BindBuffer(BufferTarget.ArrayBuffer, vbo);
GL.BufferData(BufferTarget.ArrayBuffer, new IntPtr(sizeof(Single) * vertices.Length), vertices, BufferUsageHint.StaticDraw);
GL.GenBuffers(1, out ebo);
var elements = new Single[]
{
0, 1, 2,
2, 3, 0
};
GL.BindBuffer(BufferTarget.ElementArrayBuffer, ebo);
GL.BufferData(BufferTarget.ElementArrayBuffer, new IntPtr(sizeof(UInt32) * elements.Length), elements, BufferUsageHint.StaticDraw);
vertexShader = GL.CreateShader(ShaderType.VertexShader);
GL.ShaderSource(vertexShader, File.ReadAllText("vs.glsl"));
GL.CompileShader(vertexShader);
Console.WriteLine(GL.GetShaderInfoLog(vertexShader));
fragmentShader = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(fragmentShader, File.ReadAllText("fs.glsl"));
GL.CompileShader(fragmentShader);
Console.WriteLine(GL.GetShaderInfoLog(fragmentShader));
shaderProgram = GL.CreateProgram();
GL.AttachShader(shaderProgram, vertexShader);
GL.AttachShader(shaderProgram, fragmentShader);
GL.BindFragDataLocation(shaderProgram, 0, "outColor");
GL.LinkProgram(shaderProgram);
GL.UseProgram(shaderProgram);
var posAttrib = GL.GetAttribLocation(shaderProgram, "position");
GL.EnableVertexAttribArray(posAttrib);
GL.VertexAttribPointer(posAttrib, 2, VertexAttribPointerType.Float, false, 5 * sizeof(Single), IntPtr.Zero);
var colAttrib = GL.GetAttribLocation(shaderProgram, "color");
GL.EnableVertexAttribArray(colAttrib);
GL.VertexAttribPointer(colAttrib, 3, VertexAttribPointerType.Float, false, 5 * sizeof(Single), new IntPtr(2 * sizeof(Single)));
GL.Viewport(0, 0, Width, Height);
}
protected override void OnRenderFrame(FrameEventArgs e)
{
GL.ClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.DrawElements(PrimitiveType.Triangles, 6, DrawElementsType.UnsignedInt, IntPtr.Zero);
GL.Flush();
SwapBuffers();
}
protected override void OnClosing(System.ComponentModel.CancelEventArgs e)
{
GL.DeleteProgram(shaderProgram);
GL.DeleteShader(fragmentShader);
GL.DeleteShader(vertexShader);
GL.DeleteBuffers(1, ref ebo);
GL.DeleteBuffers(1, ref vbo);
GL.DeleteVertexArrays(1, ref vao);
}
}
}
Vertex shader:
#version 150 core
in vec2 position;
in vec3 color;
out vec3 Color;
void main()
{
Color = color;
gl_Position = vec4(position, 0.0, 1.0);
}
Fragment shader:
#version 150 core
in vec3 Color;
out vec4 outColor;
void main()
{
outColor = vec4(Color, 1.0);
}
Both sources are almost identical, but in the C# version the GL.DrawElements function is never called (I found this out while debugging with gDEBugger from gremedy).
Why doesn't this code work in OpenTK?
First of all, I'm French, so sorry for my English.
I'm new to OpenGL ES and I'm trying to draw a simple triangle with these vertices:
typedef struct {
float Position[3];
float Color[4];
} Vertex;
const Vertex Vertices[] = {
{{0.0, 1.0, -2.0}, {1, 0, 0, 1}},
{{1.0, 0.0, -2.0}, {1, 0, 0, 1}},
{{-1.0, 0.0, -2.0}, {1, 0, 0, 1}},
};
and these indices:
const GLubyte Indices[] = {
0,1,2
};
But the triangle is not showing up... If I change the indices to 0,2,3, it displays a triangle with vertices 0 and 2 plus a black vertex {{0,0,0},{0,0,0,1}}, even though there is no 4th vertex... I don't understand at all; could someone explain it to me?
Here's my view code:
//
// EAGLView.m
// OpenGlintro
//
// Created by Arnaud Miguet on 01/12/12.
// Copyright (c) 2012 Tap‘n'Develop. All rights reserved.
//
#import "EAGLView.h"
@implementation EAGLView
+ (Class) layerClass {
return [CAEAGLLayer class];
}
- (void)setupVBOs {
GLuint vertexBuffer;
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
GLuint indexBuffer;
glGenBuffers(1, &indexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(Indices), Indices, GL_STATIC_DRAW);
}
- (id)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
// Initialization code
CAEAGLLayer *EAGLLayer = (CAEAGLLayer *) super.layer;
EAGLLayer.opaque = YES;
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!context || ![EAGLContext setCurrentContext:context]) {
[self release];
return nil;
}
GLuint framebuffer , renderbuffer;
glGenBuffers(1, &framebuffer);
glGenBuffers(1, &renderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glBindRenderbuffer(GL_RENDERBUFFER, renderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:EAGLLayer];
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderbuffer);
glViewport(10, 0, CGRectGetWidth(frame), CGRectGetHeight(frame));
[self compileShaders];
[self setupVBOs];
[self render];
}
return self;
}
- (void)render {
glClearColor(0, 104.0/255.0, 55.0/255.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
// 1
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
// 2
glVertexAttribPointer(_positionSlot, 3, GL_FLOAT, GL_FALSE,
sizeof(Vertex), 0);
glVertexAttribPointer(_colorSlot, 4, GL_FLOAT, GL_FALSE,
sizeof(Vertex), (GLvoid*) (sizeof(float) * 3));
// 3
glDrawElements(GL_TRIANGLES, sizeof(Indices),
GL_UNSIGNED_BYTE, 0);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
- (GLuint)compileShader:(NSString*)shaderName withType:(GLenum)shaderType {
// 1
NSString* shaderPath = [[NSBundle mainBundle] pathForResource:shaderName
ofType:#"glsl"];
NSError* error;
NSString* shaderString = [NSString stringWithContentsOfFile:shaderPath
encoding:NSUTF8StringEncoding error:&error];
if (!shaderString) {
NSLog(#"Error loading shader: %#", error.localizedDescription);
exit(1);
}
// 2
GLuint shaderHandle = glCreateShader(shaderType);
// 3
const char * shaderStringUTF8 = [shaderString UTF8String];
int shaderStringLength = [shaderString length];
glShaderSource(shaderHandle, 1, &shaderStringUTF8, &shaderStringLength);
// 4
glCompileShader(shaderHandle);
// 5
GLint compileSuccess;
glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compileSuccess);
if (compileSuccess == GL_FALSE) {
GLchar messages[256];
glGetShaderInfoLog(shaderHandle, sizeof(messages), 0, &messages[0]);
NSString *messageString = [NSString stringWithUTF8String:messages];
NSLog(#"%#", messageString);
exit(1);
}
return shaderHandle;
}
- (void)compileShaders {
// 1
GLuint vertexShader = [self compileShader:@"SimpleVertex"
withType:GL_VERTEX_SHADER];
GLuint fragmentShader = [self compileShader:@"SimpleFragment"
withType:GL_FRAGMENT_SHADER];
// 2
GLuint programHandle = glCreateProgram();
glAttachShader(programHandle, vertexShader);
glAttachShader(programHandle, fragmentShader);
glLinkProgram(programHandle);
// 3
GLint linkSuccess;
glGetProgramiv(programHandle, GL_LINK_STATUS, &linkSuccess);
if (linkSuccess == GL_FALSE) {
GLchar messages[256];
glGetProgramInfoLog(programHandle, sizeof(messages), 0, &messages[0]);
NSString *messageString = [NSString stringWithUTF8String:messages];
NSLog(#"%#", messageString);
exit(1);
}
// 4
glUseProgram(programHandle);
// 5
_positionSlot = glGetAttribLocation(programHandle, "Position");
_colorSlot = glGetAttribLocation(programHandle, "SourceColor");
glEnableVertexAttribArray(_positionSlot);
glEnableVertexAttribArray(_colorSlot);
}
typedef struct {
float Position[3];
float Color[4];
} Vertex;
const Vertex Vertices[] = {
{{0.0, 1.0, -2.0}, {1, 0, 0, 1}},
{{1.0, 0.0, -2.0}, {1, 0, 0, 1}},
{{-1.0, 0.0, -2.0}, {1, 0, 0, 1}},
};
const GLubyte Indices[] = {
0,2,3
};
@end
Add a vertex array before you bind the buffers.
GLuint vao;
glGenVertexArray(1, &vao);
glBindVertexArray(vao);
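Note that in an OpenGL ES 2.0 context like the one in this question, the core VAO entry points are not available; the equivalent calls come from the OES_vertex_array_object extension (assuming the device supports it, which iOS devices running ES 2.0 do):
#import <OpenGLES/ES2/glext.h>
GLuint vao;
glGenVertexArraysOES(1, &vao);
glBindVertexArrayOES(vao);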
First of all, sorry for my English.
My problem is this:
I'm following a tutorial to learn OpenGL ES, and I must draw a triangle in the middle of the screen. In the image they show, the triangle isn't touching the edges of the screen (I think that's because of the z coordinates of the vertices).
The vertices they tell us to create have these coordinates:
(0.0, 1.0, -3.0)
(1.0, 0.0, -3.0)
(-1.0, 0.0, -3.0)
The tutorial is not up to date, so I wrote my own code:
//
// EAGLView.m
// OpenGlintro
//
// Created by Arnaud Miguet on 01/12/12.
// Copyright (c) 2012 Tap‘n'Develop. All rights reserved.
//
#import "EAGLView.h"
@implementation EAGLView
+ (Class) layerClass {
return [CAEAGLLayer class];
}
- (void)setupVBOs {
GLuint vertexBuffer;
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
GLuint indexBuffer;
glGenBuffers(1, &indexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(Indices), Indices, GL_STATIC_DRAW);
}
- (id)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
// Initialization code
CAEAGLLayer *EAGLLayer = (CAEAGLLayer *) super.layer;
EAGLLayer.opaque = YES;
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!context || ![EAGLContext setCurrentContext:context]) {
[self release];
return nil;
}
glEnable (GL_DEPTH_TEST);
GLuint framebuffer , renderbuffer;
glGenBuffers(1, &framebuffer);
glGenBuffers(1, &renderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glBindRenderbuffer(GL_RENDERBUFFER, renderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:EAGLLayer];
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderbuffer);
glViewport(10, 0, CGRectGetWidth(frame), CGRectGetHeight(frame));
[self compileShaders];
[self setupVBOs];
[self render];
}
return self;
}
- (void)render {
glClearColor(0.7, 0.7, 0.7, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// 1
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
// 2
glVertexAttribPointer(_positionSlot, 3, GL_FLOAT, GL_FALSE,
sizeof(Vertex), 0);
glVertexAttribPointer(_colorSlot, 4, GL_FLOAT, GL_FALSE,
sizeof(Vertex), (GLvoid*) (sizeof(float) * 3));
// 3
glDrawElements(GL_TRIANGLES, sizeof(Indices)/sizeof(Indices[0]),GL_UNSIGNED_BYTE, 0);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
- (GLuint)compileShader:(NSString*)shaderName withType:(GLenum)shaderType {
// 1
NSString* shaderPath = [[NSBundle mainBundle] pathForResource:shaderName
ofType:#"glsl"];
NSError* error;
NSString* shaderString = [NSString stringWithContentsOfFile:shaderPath
encoding:NSUTF8StringEncoding error:&error];
if (!shaderString) {
NSLog(#"Error loading shader: %#", error.localizedDescription);
exit(1);
}
// 2
GLuint shaderHandle = glCreateShader(shaderType);
// 3
const char * shaderStringUTF8 = [shaderString UTF8String];
int shaderStringLength = [shaderString length];
glShaderSource(shaderHandle, 1, &shaderStringUTF8, &shaderStringLength);
// 4
glCompileShader(shaderHandle);
// 5
GLint compileSuccess;
glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compileSuccess);
if (compileSuccess == GL_FALSE) {
GLchar messages[256];
glGetShaderInfoLog(shaderHandle, sizeof(messages), 0, &messages[0]);
NSString *messageString = [NSString stringWithUTF8String:messages];
NSLog(#"%#", messageString);
exit(1);
}
return shaderHandle;
}
- (void)compileShaders {
// 1
GLuint vertexShader = [self compileShader:@"SimpleVertex"
withType:GL_VERTEX_SHADER];
GLuint fragmentShader = [self compileShader:@"SimpleFragment"
withType:GL_FRAGMENT_SHADER];
// 2
GLuint programHandle = glCreateProgram();
glAttachShader(programHandle, vertexShader);
glAttachShader(programHandle, fragmentShader);
glLinkProgram(programHandle);
// 3
GLint linkSuccess;
glGetProgramiv(programHandle, GL_LINK_STATUS, &linkSuccess);
if (linkSuccess == GL_FALSE) {
GLchar messages[256];
glGetProgramInfoLog(programHandle, sizeof(messages), 0, &messages[0]);
NSString *messageString = [NSString stringWithUTF8String:messages];
NSLog(#"%#", messageString);
exit(1);
}
// 4
glUseProgram(programHandle);
// 5
_positionSlot = glGetAttribLocation(programHandle, "Position");
_colorSlot = glGetAttribLocation(programHandle, "SourceColor");
glEnableVertexAttribArray(_positionSlot);
glEnableVertexAttribArray(_colorSlot);
}
typedef struct {
float Position[3];
float Color[4];
} Vertex;
const Vertex Vertices[] = {
{{0.0, 1.0, -2}, {1, 0, 0, 1}},
{{1.0, 0.0, -2}, {1, 0, 0, 1}},
{{-1.0, 0.0, 0}, {1, 0, 0, 1}}
};
const GLubyte Indices[] = {
0,1,2
};
/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
// Drawing code
}
*/
@end
But the vertices don't appear because of the background drawn at the z = -1 coordinate... Can someone help me? I can't find any solution for this...
At first glance, it seems your render function is only being called once, from your initialization code. It should instead be called from a method that updates continuously, such as drawRect:.
OpenGL ES has a very steep learning curve, and on iOS you can help yourself a lot by using the GLKit framework. It saves you all those calls to set up the different types of buffers and handles most of the boilerplate draw/update code for you (a minimal sketch follows below).
This tutorial is a very good place to start:
http://www.raywenderlich.com/5223/beginning-opengl-es-2-0-with-glkit-part-1
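As a rough illustration of how little boilerplate GLKit needs, here is a minimal sketch; the class name MyViewController is hypothetical and the drawing body is left to you:
#import <GLKit/GLKit.h>
@interface MyViewController : GLKViewController
@end
@implementation MyViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    GLKView *view = (GLKView *)self.view;
    view.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    view.drawableDepthFormat = GLKViewDrawableDepthFormat24; // depth buffer managed for you
    [EAGLContext setCurrentContext:view.context];
}
// Called automatically every frame; no manual framebuffer/renderbuffer setup needed.
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
    glClearColor(0.7, 0.7, 0.7, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // issue draw calls here
}
@end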
I am trying to get my texture to show up in a basic OpenGL view (subclass of UIView), but no matter which texture I use, it shows up black. The code for my view is as follows:
@implementation SymGLView
typedef struct {
float Position[3];
float Color[4];
float TexCoord[2];
} Vertex;
const Vertex Vertices[] = {
{{1, -1, 0}, {1, 0, 0, 1}, {0, 0}},
{{1, 1, 0}, {0, 1, 0, 1}, {0, 1}},
{{-1, 1, 0}, {0, 0, 1, 1}, {1, 1}},
{{-1, -1, 0}, {0, 0, 0, 1}, {1, 0}}
};
const GLubyte Indices[] = {
0, 1, 2,
2, 3, 0
};
+ (Class)layerClass {
return [CAEAGLLayer class];
}
- (void)setupLayer {
_eaglLayer = (CAEAGLLayer*) self.layer;
_eaglLayer.opaque = YES;
}
- (void)setupContext {
EAGLRenderingAPI api = kEAGLRenderingAPIOpenGLES2;
_context = [[EAGLContext alloc] initWithAPI:api];
if (!_context) {
NSLog(#"Failed to initialize OpenGLES 2.0 context");
exit(1);
}
if (![EAGLContext setCurrentContext:_context]) {
NSLog(#"Failed to set current OpenGL context");
exit(1);
}
}
- (void)setupRenderBuffer {
glGenRenderbuffers(1, &_colorRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
[_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:_eaglLayer];
}
- (void)setupDepthBuffer {
glGenRenderbuffers(1, &_depthRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderBuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, self.frame.size.width, self.frame.size.height);
}
- (void)setupFrameBuffer {
GLuint framebuffer;
glGenFramebuffers(1, &framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorRenderBuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthRenderBuffer);
}
- (GLuint)compileShader:(NSString*)shaderName withType:(GLenum)shaderType {
NSString* shaderPath = [[NSBundle mainBundle] pathForResource:shaderName ofType:@"glsl"];
NSError* error;
NSString* shaderString = [NSString stringWithContentsOfFile:shaderPath encoding:NSUTF8StringEncoding error:&error];
if (!shaderString) {
NSLog(#"Error loading shader: %#", error.localizedDescription);
exit(1);
}
GLuint shaderHandle = glCreateShader(shaderType);
const char * shaderStringUTF8 = [shaderString UTF8String];
int shaderStringLength = [shaderString length];
glShaderSource(shaderHandle, 1, &shaderStringUTF8, &shaderStringLength);
glCompileShader(shaderHandle);
GLint compileSuccess;
glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compileSuccess);
if (compileSuccess == GL_FALSE) {
GLchar messages[256];
glGetShaderInfoLog(shaderHandle, sizeof(messages), 0, &messages[0]);
NSString *messageString = [NSString stringWithUTF8String:messages];
NSLog(#"%#", messageString);
exit(1);
}
return shaderHandle;
}
- (void)compileShaders {
GLuint vertexShader = [self compileShader:@"SimpleVertex" withType:GL_VERTEX_SHADER];
GLuint fragmentShader = [self compileShader:@"SimpleFragment" withType:GL_FRAGMENT_SHADER];
GLuint programHandle = glCreateProgram();
glAttachShader(programHandle, vertexShader);
glAttachShader(programHandle, fragmentShader);
glLinkProgram(programHandle);
GLint linkSuccess;
glGetProgramiv(programHandle, GL_LINK_STATUS, &linkSuccess);
if (linkSuccess == GL_FALSE) {
GLchar messages[256];
glGetProgramInfoLog(programHandle, sizeof(messages), 0, &messages[0]);
NSString *messageString = [NSString stringWithUTF8String:messages];
NSLog(#"%#", messageString);
exit(1);
}
glUseProgram(programHandle);
_positionSlot = glGetAttribLocation(programHandle, "Position");
_colorSlot = glGetAttribLocation(programHandle, "SourceColor");
glEnableVertexAttribArray(_positionSlot);
glEnableVertexAttribArray(_colorSlot);
_modelViewUniform = glGetUniformLocation(programHandle, "Modelview");
_texCoordSlot = glGetAttribLocation(programHandle, "TexCoordIn");
glEnableVertexAttribArray(_texCoordSlot);
_textureUniform = glGetUniformLocation(programHandle, "Texture");
}
- (void)setupVBOs {
GLuint vertexBuffer;
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
GLuint indexBuffer;
glGenBuffers(1, &indexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(Indices), Indices, GL_STATIC_DRAW);
}
- (void)render {
glClearColor(0, 104.0/255.0, 55.0/255.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
glVertexAttribPointer(_positionSlot, 3, GL_FLOAT, GL_FALSE,
sizeof(Vertex), 0);
glVertexAttribPointer(_colorSlot, 4, GL_FLOAT, GL_FALSE,
sizeof(Vertex), (GLvoid*) (sizeof(float) * 3));
glVertexAttribPointer(_texCoordSlot, 2, GL_FLOAT, GL_FALSE,
sizeof(Vertex), (GLvoid*) (sizeof(float) * 7));
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _floorTexture);
glUniform1i(_textureUniform, 0);
glDrawElements(GL_TRIANGLES, sizeof(Indices)/sizeof(Indices[0]),
GL_UNSIGNED_BYTE, 0);
[_context presentRenderbuffer:GL_RENDERBUFFER];
}
- (GLuint)setupTexture:(NSString *)fileName {
CGImageRef spriteImage = [UIImage imageNamed:fileName].CGImage;
if (!spriteImage) {
NSLog(#"Failed to load image %#", fileName);
exit(1);
}
size_t width = CGImageGetWidth(spriteImage);
size_t height = CGImageGetHeight(spriteImage);
GLubyte * spriteData = (GLubyte *) calloc(width*height*4, sizeof(GLubyte));
CGContextRef spriteContext = CGBitmapContextCreate(spriteData, width, height, 8, width*4,
CGImageGetColorSpace(spriteImage), kCGImageAlphaPremultipliedLast);
CGContextDrawImage(spriteContext, CGRectMake(0, 0, width, height), spriteImage);
CGContextRelease(spriteContext);
GLuint texName;
glGenTextures(1, &texName);
glBindTexture(GL_TEXTURE_2D, texName);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, spriteData);
free(spriteData);
return texName;
}
- (id)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
[self setupLayer];
[self setupContext];
[self setupDepthBuffer];
[self setupRenderBuffer];
[self setupFrameBuffer];
[self compileShaders];
[self setupVBOs];
[self render];
}
_floorTexture = [self setupTexture:@"tile_floor.png"];
return self;
}
@end
Vertex shader:
attribute vec4 Position;
attribute vec2 TexCoordIn;
varying vec2 TexCoordOut;
void main(void) {
gl_Position = Position;
TexCoordOut = TexCoordIn;
}
Fragment shader:
varying lowp vec2 TexCoordOut;
uniform sampler2D Texture;
void main(void) {
gl_FragColor = texture2D(Texture, TexCoordOut);
}
I can create a gradient by changing the values for gl_FragColor, but I have tried several different textures and am at a loss.
This could be due to the fact that your texture dimensions are not a power of two (e.g. 512x512).
Some OpenGL drivers react in weird ways to this; others just rescale the textures to the nearest power-of-two size.
You can find quite a good explanation in the OpenGL ES 2.0 Gold Book:
In OpenGL ES 2.0, textures can have non-power-of-two (npot) dimensions. In other words, the width and height do not need to be a power of two. However, OpenGL ES 2.0 does have a restriction on the wrap modes that can be used if the texture dimensions are not power of two. That is, for npot textures, the wrap mode can only be GL_CLAMP_TO_EDGE and the minification filter can only be GL_NEAREST or GL_LINEAR (in other words, not mipmapped). The extension GL_OES_texture_npot relaxes these restrictions and allows wrap modes of GL_REPEAT and GL_MIRRORED_REPEAT and also allows npot textures to be mipmapped with the full set of minification filters.
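In practice that means configuring npot-safe parameters; a minimal sketch, assuming the texture is currently bound as in setupTexture: above:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); // no mipmaps for npot textures
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);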
I hope this helps.
Cheers