glReadPixels GL_RGBA format - objective-c

I have an app with an erasing function. For example, I open a colored image, the app adds a grayscale layer on top of it, and with the mouse I can erase parts of the top (grayscale) layer. Later the image can be saved to a file. If the pixels for saving are read as GL_RGB, it works OK:
And if the pixels for saving are read as GL_RGBA, I have some issues (the white space is transparent):
The original version of the image is drawn to framebuffer1, then the app draws brush strokes to framebuffer2, and then the grayscale version of the image is drawn to framebuffer3. All of these framebuffers are then drawn to main_framebuffer, and main_framebuffer is drawn to the screen. Erasing is done via glBlendFunc and glBlendFuncSeparate. When doing glReadPixels, the pixels are read from main_framebuffer. Where could my problem be?
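For reference, a minimal readback sketch (hypothetical; the actual save code isn't shown in the question). It assumes main_framebuffer is complete and _width/_height are its dimensions:
GLubyte *pixels = (GLubyte *)malloc(_width * _height * 4);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, main_framebuffer);
glPixelStorei(GL_PACK_ALIGNMENT, 1); // tightly packed rows
glReadPixels(0, 0, _width, _height, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
// ... build an NSBitmapImageRep / CGImage from pixels, then free(pixels)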
Now I draw to the main framebuffer like this:
glPushMatrix();
glLoadIdentity();
glViewport(0, 0, _width, _height);
glMatrixMode(GL_PROJECTION);
glFrustum(0, _width, 0, _height, 0.1, 100);
glTranslatef(0.0,0.0,-0.5);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, main_framebuffer);
glClearColor(0.93, 0.93, 0.93, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glViewport(0, 0, _width, _height);
glDisable(GL_DEPTH_TEST);
glDepthMask(GL_FALSE);
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
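// layer 1: the original image (framebuffer1)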
glBindTexture(GL_TEXTURE_2D, framebuffer1_texture);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex2f(0.0, _height);
glTexCoord2f(0.0f, 1.0f); glVertex2f(0.0, 0.0);
glTexCoord2f(1.0f, 1.0f); glVertex2f(_width, 0.0);
glTexCoord2f(1.0f, 0.0f); glVertex2f(_width, _height);
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
glBlendFuncSeparate(GL_ZERO, GL_ONE, GL_DST_COLOR, GL_ZERO);
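// layer 2: the brush strokes (framebuffer2) -- RGB is left unchanged, destination alpha is scaled by the brush alpha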
glBindTexture(GL_TEXTURE_2D, framebuffer2_texture);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex2f(0.0f, 0.0f);
glTexCoord2f(0.0f, 1.0f); glVertex2f(0.0f, _height);
glTexCoord2f(1.0f, 1.0f); glVertex2f(_width, _height);
glTexCoord2f(1.0f, 0.0f); glVertex2f(_width, 0.0f);
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
glBlendFunc(GL_DST_ALPHA, GL_ONE_MINUS_DST_ALPHA);
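// layer 3: the grayscale image (framebuffer3), blended using the destination alpha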
glBindTexture(GL_TEXTURE_2D, framebuffer3_texture);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex2f(0.0f, 0.0f);
glTexCoord2f(0.0f, 1.0f); glVertex2f(0.0f, _height);
glTexCoord2f(1.0f, 1.0f); glVertex2f(_width, _height);
glTexCoord2f(1.0f, 0.0f); glVertex2f(_width, 0.0f);
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_BLEND);
glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_TRUE);
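// write alpha = 1.0 over the whole canvas (oglPlotis/oglAukstis appear to be the canvas width/height)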
glBegin(GL_QUADS);
glColor4f(1.0, 1.0, 1.0, 1.0);
glVertex2f(0.0, oglAukstis);
glColor4f(1.0, 1.0, 1.0, 1.0);
glVertex2f(0.0, 0.0);
glColor4f(1.0, 1.0, 1.0, 1.0);
glVertex2f(oglPlotis, 0.0);
glColor4f(1.0, 1.0, 1.0, 1.0);
glVertex2f(oglPlotis, oglAukstis);
glEnd();
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
glPopMatrix();
And when I try to load the same image as earlier in this question, I get:
And when I save it, I get:

You should disable writing to the alpha channel of the main buffer.
// draw framebuffers 1-3 ...
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_FALSE);
// render to main buffer ...
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE); // revert to default state
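Applied to the compositing code in the question, that would look roughly like this (a sketch only; the comment stands in for the three textured quads already shown):
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, main_framebuffer);
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_FALSE); // keep destination alpha untouched
// ... draw the framebuffer1/2/3 textured quads as before ...
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE); // restore the default mask
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);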

Related

Draw IOSurfaces to another IOSurface

How can I draw a series of IOSurfaces to another, and then draw that to the screen? I've played around with some source from Apple in the MultiGPU sample project, but the best I've managed is to draw a white screen or to get tons of artifacts and crash the app.
I'm very new to OpenGL and I don't quite understand the binding of framebuffers and textures and how they interact with IOSurfaces.
This is what I have to create a texture from an IOSurface (directly from Apple's source)
// Create an IOSurface backed texture
// Create an FBO using the name of this texture and bind the texture to the color attachment of the FBO
- (GLuint)setupIOSurfaceTexture:(IOSurfaceRef)ioSurfaceBuffer {
GLuint name;
CGLContextObj cgl_ctx = (CGLContextObj)[[self openGLContext] CGLContextObj];
glGenTextures(1, &name);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, name);
CGLTexImageIOSurface2D(cgl_ctx, GL_TEXTURE_RECTANGLE_EXT, GL_RGBA, 512, 512, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV,
ioSurfaceBuffer, 0);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// Generate an FBO using the same name with the same texture bound to it as a render target.
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, name);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_RECTANGLE_EXT, name, 0);
if(!_depthBufferName) {
glGenRenderbuffersEXT(1, &_depthBufferName);
glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, _depthBufferName);
glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT, 512, 512);
}
glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, _depthBufferName);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
return name;
}
And I have this piece of code to draw the surface to the screen. (Also from Apple's source)
// Fill the view with the IOSurface backed texture
- (void)textureFromCurrentIOSurface {
NSRect bounds = [self bounds];
CGLContextObj cgl_ctx = (CGLContextObj)[[self openGLContext] CGLContextObj];
// Render quad from our iosurface texture
glViewport(0, 0, (GLint)bounds.size.width, (GLint)bounds.size.height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, (GLfloat)bounds.size.width, 0.0f, (GLfloat)bounds.size.height, -1.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, [[NSApp delegate] currentTextureName]); // Grab the texture from the delegate
glEnable(GL_TEXTURE_RECTANGLE_EXT);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glBegin(GL_QUADS);
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
glTexCoord2f(0.0f, 0.0f);
glVertex2f(0.0f, 0.0f);
glTexCoord2f(512.0f, 0.0f);
glVertex2f((GLfloat)bounds.size.width, 0.0f);
glTexCoord2f(512.0f, 512.0f);
glVertex2f((GLfloat)bounds.size.width, (GLfloat)bounds.size.height);
glTexCoord2f(0.0f, 512.0f);
glVertex2f(0.0f, (GLfloat)bounds.size.height);
glEnd();
glDisable(GL_TEXTURE_RECTANGLE_EXT);
}
With a single IOSurface being drawn to the screen this works fine. What am I missing to draw an IOSurface onto another?
Assuming I have textures A, B, C, and D I want to:
- Draw A onto C in a specific region,
- Draw B onto C in a different region (which may overlap A),
- Draw C to the screen.
I solved the issue using the following code; however, there are some minor issues with scaling the IOSurface before drawing.
- (void)renderIOSurface:(IOSurfaceRef)surface toBuffer:(GLuint)buffer atPoint:(CGPoint)point withSize:(CGSize)size {
CGLContextObj cgl_ctx = (CGLContextObj)[[self openGLContext] CGLContextObj];
// Bind framebuffer
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, buffer);
glViewport(0, 0, TEXWIDE, TEXHIGH);
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
glOrtho(0.0, TEXWIDE, 0.0, TEXHIGH, -1, 1);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
GLsizei sH = (GLsizei)IOSurfaceGetHeight(surface);
GLsizei sW = (GLsizei)IOSurfaceGetWidth(surface);
// Create texture
GLuint name;
glGenTextures(1, &name);
glEnable(GL_TEXTURE_RECTANGLE_EXT);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, name);
CGLTexImageIOSurface2D(cgl_ctx, GL_TEXTURE_RECTANGLE_EXT, GL_RGBA, sW, sH, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, surface, 0);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
// glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, 0);
//glDisable(GL_BLEND);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, name);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glBegin(GL_QUADS);
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
glTexCoord2f(0.0f, 0.0f);
glVertex2f(point.x, point.y);
glTexCoord2f(sW, 0.0f);
glVertex2f(point.x + size.width, point.y);
glTexCoord2f(sW, sH);
glVertex2f(point.x + size.width, point.y + size.height);
glTexCoord2f(0.0f, sH);
glVertex2f(point.x, point.y + size.height);
glEnd();
//glDisable(GL_TEXTURE_RECTANGLE_EXT);
// Bind back to normal
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
// Delete the temporary texture
glDeleteTextures(1, &name);
// [[self openGLContext] flushBuffer];
// This flush is necessary to ensure proper behavior if the MT engine is enabled.
// glFlush();
glMatrixMode(GL_MODELVIEW);
glPopMatrix();
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glFlush();
}
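For the A/B/C compositing described in the question, a hypothetical calling sequence might look like this (surfaceA, surfaceB, bufferC, and the points/sizes are placeholder names, not part of the code above):
// composite A and B into C's framebuffer
[self renderIOSurface:surfaceA toBuffer:bufferC atPoint:CGPointMake(0, 0) withSize:CGSizeMake(256, 256)];
[self renderIOSurface:surfaceB toBuffer:bufferC atPoint:CGPointMake(128, 128) withSize:CGSizeMake(256, 256)];
// then draw C's texture to the view, e.g. via -textureFromCurrentIOSurface
// (assuming currentTextureName refers to C's texture)
[self textureFromCurrentIOSurface];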

Drawing textures in OpenGL objective c os x

I tried to display a picture, but it is not displayed.
Here is my code for an NSOpenGLView subclass:
//
// MyOpenGLView.m
// OpenGLTests
//
// Created by Tom Schamberger on 10.12.12.
// Copyright (c) 2012 Tom Schamberger. All rights reserved.
//
#import "MyOpenGLView.h"
@implementation MyOpenGLView
- (id)initWithFrame:(NSRect)frame
{
NSOpenGLPixelFormat * pf = [MyOpenGLView basicPixelFormat];
self = [super initWithFrame: frame pixelFormat: pf];
return self;
}
+ (NSOpenGLPixelFormat*) basicPixelFormat
{
NSOpenGLPixelFormatAttribute attributes [] = {
NSOpenGLPFAWindow,
NSOpenGLPFADoubleBuffer, // double buffered
NSOpenGLPFADepthSize, (NSOpenGLPixelFormatAttribute)16, // 16 bit depth buffer
(NSOpenGLPixelFormatAttribute)nil
};
return [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
}
- (void) prepareOpenGL
{
glEnable(GL_DEPTH_TEST);
glShadeModel(GL_SMOOTH);
glEnable(GL_CULL_FACE);
glFrontFace(GL_CCW);
glPolygonOffset (1.0f, 1.0f);
glClearColor(0.0f, 0.0f, 0.0f, 0.5f);
if([self loadTextures])
NSLog(@"Load");
}
- (void)drawRect:(NSRect)dirtyRect
{
glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindTexture( GL_TEXTURE_2D, texture);
glPolygonMode(GL_FRONT_AND_BACK,GL_LINE);
glColor3f(1, 1, 1);
glBegin( GL_QUADS );
glTexCoord2f( 0.0f, 0.0f);
glVertex2f( -0.5f, -0.5f);
glTexCoord2f( 1.0f, 0.0f);
glVertex2f( 0.5f, -0.5f);
glTexCoord2f( 1.0f, 1.0f);
glVertex2f( 0.5f, 0.5f);
glTexCoord2f( 0.0f, 1.0f);
glVertex2f( -0.5f, 0.5f);
glEnd();
glFlush();
}
- (BOOL) loadTextures
{
NSImage *img = [NSImage imageNamed:@"NeHe.bmp"];
if(img == nil)
return FALSE;
else if(img.size.height == 0 || img.size.width == 0)
return FALSE;
NSBitmapImageRep *rep = [[NSBitmapImageRep alloc] initWithData: [img TIFFRepresentation]];
glGenTextures( 1, &texture);
glBindTexture( GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGB, rep.size.width,
rep.size.height, 0, GL_RGB,
GL_UNSIGNED_BYTE, rep.bitmapData);
return TRUE;
}
@end
It draws a rectangle, but the texture is not displayed.
You should probably set your clear color alpha to 0.0 rather than 0.5.
This code for loading a texture works.
backGroundPath = @"Background.bmp";
NSData* backGroundData = [NSData dataWithContentsOfFile:backGroundPath options:NSUncachedRead error:nil];
backGroundImage = [NSBitmapImageRep imageRepWithData:backGroundData];
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glGenTextures( 1, backGroundTexture);
glBindTexture(GL_TEXTURE_2D, backGroundTexture[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, backGroundImage.size.width,
backGroundImage.size.height, 0, GL_RGBA, GL_UNSIGNED_BYTE,
backGroundImage.bitmapData);
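For the snippet above to compile, the variables are presumably declared roughly like this (an assumption; only their usage is visible here):
NSString *backGroundPath;
NSBitmapImageRep *backGroundImage;
GLuint backGroundTexture[1]; // texture name filled in by glGenTextures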

Add text to UIImage with CoreGraphics

I just have to add green text to a UIImage I get from the database; how is it done? Here is what I have so far; something is missing for sure:
UIImage *img=[UIImage imageWithData:data];
UIGraphicsBeginImageContext(CGSizeMake(360, 270));
CGImageRef imageCon=[img CGImage];
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSelectFont(context, "Helvetica", 12, NSMacOSRomanStringEncoding);
CGContextSetTextDrawingMode(context, kCGTextFill);
CGContextSetFillColor(context, CGColorGetComponents([[UIColor whiteColor] CGColor]));
CGAffineTransform xform = CGAffineTransformMake( 1.0, 0.0, 0.0, -1.0, 0.0, 0.0);
CGContextSetTextMatrix(context, xform);
CGContextShowTextAtPoint(context, 100.0f, 180.0f, "test", strlen("test"));
UIImage *imgToAdd=[UIImage imageWithCGImage:CGBitmapContextCreateImage(context)];
[arr addObject:imgToAdd];
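A minimal sketch of one way to complete this (an assumption on my part, not tested against the original code): the source image has to be drawn into the context first, the fill color set to green, and the context closed with UIGraphicsEndImageContext.
UIImage *img = [UIImage imageWithData:data];
UIGraphicsBeginImageContext(CGSizeMake(360, 270));
CGContextRef context = UIGraphicsGetCurrentContext();
// draw the source image first, then the text on top of it
[img drawInRect:CGRectMake(0, 0, 360, 270)];
CGContextSelectFont(context, "Helvetica", 12, kCGEncodingMacRoman);
CGContextSetTextDrawingMode(context, kCGTextFill);
CGContextSetFillColorWithColor(context, [UIColor greenColor].CGColor);
// flip the text matrix so the text is not drawn upside down in the UIKit context
CGContextSetTextMatrix(context, CGAffineTransformMake(1.0, 0.0, 0.0, -1.0, 0.0, 0.0));
CGContextShowTextAtPoint(context, 100.0f, 180.0f, "test", strlen("test"));
UIImage *imgToAdd = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
[arr addObject:imgToAdd];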

Weird dispatch_async memory behavior

I have the following dispatch_async code:
dispatch_async(openGLESContextQueue, ^{
[(EAGLView *)self.view setFramebuffer];
// Replace the implementation of this method to do your own custom drawing.
static const GLfloat squareVertices[] = {
-0.5f, -0.33f,
0.5f, -0.33f,
-0.5f, 0.33f,
0.5f, 0.33f,
};
static const GLubyte squareColors[] = {
127, 127, 0, 127,
0, 255, 255, 255,
0, 0, 0, 0,
255, 0, 255, 255,
};
static float transY = 0.0f;
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0.0f, (GLfloat)(sinf(transY)/2.0f), 0.0f);
transY += 0.075f;
glVertexPointer(2, GL_FLOAT, 0, squareVertices);
glEnableClientState(GL_VERTEX_ARRAY);
glColorPointer(4, GL_UNSIGNED_BYTE, 0, squareColors);
glEnableClientState(GL_COLOR_ARRAY);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
[(EAGLView *)self.view presentFramebuffer];
});
And in Instruments, even though the animation runs fine, I see tons of 64-byte mallocs that never get freed. Does anyone know why?
I was finally able to solve the problem using semaphores:
if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) == 0)
{
dispatch_async(openGLESContextQueue, ^{
[(EAGLView *)self.view setFramebuffer];
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0.0f, (GLfloat)(sinf(transY)/2.0f), 0.0f);
transY += 0.075f;
glVertexPointer(2, GL_FLOAT, 0, squareVertices);
glEnableClientState(GL_VERTEX_ARRAY);
glColorPointer(4, GL_UNSIGNED_BYTE, 0, squareColors);
glEnableClientState(GL_COLOR_ARRAY);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
[(EAGLView *)self.view presentFramebuffer];
dispatch_semaphore_signal(frameRenderingSemaphore);
});
}
I guess the dispatch queue was getting flooded faster than it could handle each OpenGL redraw. This way, it only processes one redraw at a time, asynchronously. Curiously, it has no side effect on the frame rate! :D
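For completeness, the queue and semaphore are created once up front, roughly like this (the queue label is a placeholder; an initial semaphore count of 1 allows a single in-flight frame):
// presumably ivars in the real view controller
dispatch_queue_t openGLESContextQueue = dispatch_queue_create("com.example.openGLESContextQueue", NULL);
dispatch_semaphore_t frameRenderingSemaphore = dispatch_semaphore_create(1);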
Thanks :)

Objective C - remove drawing

I've been using the code below to draw on a UIView:
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetStrokeColorWithColor(context, [UIColor blueColor].CGColor);
CGContextSetRGBFillColor(context, 0.0, 0.0, 1.0, 1.0);
CGContextSetAlpha(context, 0.5);
CGContextSetLineWidth(context, 10.0);
CGContextMoveToPoint(context, point.x, point.y);
CGContextAddLineToPoint(context, point.x, point.y);
CGContextStrokePath(context);
So that part works.
Now how do I clear my drawing? Most of the examples just show the drawing, and I'm having a hard time finding the right keywords to google.
Thanks,
Tee
Have you tried CGContextClearRect?
[self setNeedsDisplay];
This sends the drawRect: message to the view.
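Putting the two suggestions together, one possible sketch (shouldClear is a hypothetical flag on the view, not something from the question):
- (void)clearDrawing {
    self.shouldClear = YES;
    [self setNeedsDisplay]; // triggers drawRect: again
}
- (void)drawRect:(CGRect)rect {
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (self.shouldClear) {
        CGContextClearRect(context, self.bounds); // wipe the previous strokes
        self.shouldClear = NO;
        return;
    }
    // ... the existing stroke-drawing code goes here ...
}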