I have a small program that should move the mouse to a certain point (in this case, 100,100) when it goes past 500 pixels from the left of the screen. The CGEventTap correctly receives the kCGEventMouseMoved events, but CGEventSetLocation seems to move only events such as mouseUp, not mouseMoved.
Is it possible to move the mouse with CGEventSetLocation? If not, is there some other way to do it?
I've included my code here:
CGEventRef
mouse_filter(CGEventTapProxy proxy, CGEventType type, CGEventRef event, void *refcon) {
    if (type != kCGEventMouseMoved)
        return event;

    CGPoint point = CGEventGetLocation(event);
    CGPoint target = CGPointMake(500, point.y);
    if (point.x >= 500) {
        CGEventSetLocation(event, target);
        printf("(%f,%f)\n", point.x, point.y);
    }
    return event;
}
int
main(int argc, char *argv[]) {
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    CFRunLoopSourceRef runLoopSource;
    CGEventMask event_mask;
    event_mask = (1 << kCGEventMouseMoved);

    CFMachPortRef eventTap = CGEventTapCreate(kCGSessionEventTap, kCGHeadInsertEventTap, 0, event_mask, mouse_filter, NULL);
    if (!eventTap) {
        NSLog(@"Couldn't create event tap!");
        exit(1);
    }

    runLoopSource = CFMachPortCreateRunLoopSource(kCFAllocatorDefault, eventTap, 0);
    CFRunLoopAddSource(CFRunLoopGetCurrent(), runLoopSource, kCFRunLoopCommonModes);
    CGEventTapEnable(eventTap, true);
    CFRunLoopRun();

    CFRelease(eventTap);
    CFRelease(runLoopSource);
    [pool release];
    exit(0);
}
Confusingly, the function you want is actually not in Quartz Event Services but in Quartz Display Services: CGWarpMouseCursorPosition (or CGDisplayMoveCursorToPoint) moves the actual cursor, whereas CGEventSetLocation only rewrites the location stored in the event you pass through.
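For example, here is a minimal sketch of the same tap callback rewritten to warp the cursor (the 500-pixel threshold is taken from the question; error handling omitted):

CGEventRef
mouse_filter(CGEventTapProxy proxy, CGEventType type, CGEventRef event, void *refcon) {
    if (type != kCGEventMouseMoved)
        return event;

    CGPoint point = CGEventGetLocation(event);
    if (point.x >= 500) {
        // Warps the actual pointer back to the boundary, instead of
        // only editing the coordinates carried by this event.
        CGWarpMouseCursorPosition(CGPointMake(500, point.y));
    }
    return event;
}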
I'm trying to draw something using OpenGL with VAO and VBO objects. To do that, I just subclassed NSOpenGLView. By default OpenGL 2.1 was used, so I set a pixel format, wrote the code, and fixed all the reported errors. The app now runs, but nothing is drawn in the window; even glClearColor has no effect. Please help me find and solve the problem. My OS is macOS 10.12 with OpenGL 4.1.
MyOpenGLView.m, part 1:
#import "MyOpenGLView.h"
#include <OpenGL/gl3.h>
#include "error.h"
static const char *vertexShaderSource =
    "#version 410\n"
    "in vec3 posAttr;\n"
    "in vec3 colAttr;\n"
    "out vec3 col;\n"
    "void main() {\n"
    "    col = colAttr;\n"
    "    gl_Position.xyz = posAttr;\n"
    "    gl_Position.w = 1.0;\n"
    "}\n";
static const char *fragmentShaderSource =
    "#version 410\n"
    "in vec3 col;\n"
    "out vec4 color;\n"
    "void main() {\n"
    "    color.rgb = col;\n"
    "    color.a = 1.0;\n"
    //"    color = vec4(1,1,1,1);\n"
    "}\n";
// Shader properties
GLuint m_posAttr;
GLuint m_colAttr;
GLuint program;
// Arrays of positions and colors
float fTriangle[9];
float fTriangleColor[9];
// Low-level VBOs and VAOs
GLuint uiVBO[2];
GLuint uiVAO[1];
GLuint makeShader(GLenum type, const char *source) {
    GLuint shader;
    shader = glCreateShader(type);
    GetError();
    glShaderSource(shader, 1, &source, NULL);
    GetError();
    glCompileShader(shader);
    GetError();

#if defined(DEBUG)
    GLint logLength;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
    GetError();
    if (logLength > 0) {
        GLchar *log = malloc((size_t)logLength);
        glGetShaderInfoLog(shader, logLength, &logLength, log);
        GetError();
        NSLog(@"Shader compilation failed with error:\n%s", log);
        free(log);
    }
#endif

    GLint status;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
    GetError();
    if (0 == status) {
        glDeleteShader(shader);
        GetError();
        NSLog(@"Shader compilation failed from code!");
        assert(0);
    }
    return shader;
}
MyOpenGLView.m, part 2:
//// General stuff
@implementation MyOpenGLView
- (void)awakeFromNib {
    // Positions and colors of figures
    fTriangle[0] = -0.4f; fTriangle[1] = 0.1f; fTriangle[2] = 0.0f;
    fTriangle[3] =  0.4f; fTriangle[4] = 0.1f; fTriangle[5] = 0.0f;
    fTriangle[6] =  0.0f; fTriangle[7] = 0.7f; fTriangle[8] = 0.0f;
    fTriangleColor[0] = 1.0f; fTriangleColor[1] = 0.0f; fTriangleColor[2] = 0.0f;
    fTriangleColor[3] = 0.0f; fTriangleColor[4] = 1.0f; fTriangleColor[5] = 0.0f;
    fTriangleColor[6] = 0.0f; fTriangleColor[7] = 0.0f; fTriangleColor[8] = 1.0f;

    NSOpenGLPixelFormatAttribute attrs[] = {
        NSOpenGLPFADoubleBuffer,
        NSOpenGLPFADepthSize, 24,
        NSOpenGLPFAOpenGLProfile,
        NSOpenGLProfileVersion4_1Core,
        0
    };
    NSOpenGLPixelFormat *pf = [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs];
    if (!pf) {
        NSLog(@"No OpenGL pixel format");
    }
    NSOpenGLContext *context = [[NSOpenGLContext alloc] initWithFormat:pf shareContext:nil];
    [self setPixelFormat:pf];
    [self setOpenGLContext:context];

    NSLog(@"%s", glGetString(GL_VERSION));
    GetError();

    GLuint vertexShader = makeShader(GL_VERTEX_SHADER, vertexShaderSource);
    GLuint fragmentShader = makeShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
    program = glCreateProgram();
    GetError();
    glAttachShader(program, vertexShader);
    GetError();
    glAttachShader(program, fragmentShader);
    GetError();
    glLinkProgram(program);
    GetError();
    GLint status;
    glGetProgramiv(program, GL_LINK_STATUS, &status);
    GetError();
    if (0 == status) {
        NSLog(@"Failed to link shader program");
        assert(0);
    }

    m_posAttr = glGetAttribLocation(program, "posAttr");
    GetError();
    m_colAttr = glGetAttribLocation(program, "colAttr");
    GetError();

    glGenVertexArrays(1, &uiVAO[0]);
    glGenBuffers(2, &uiVBO[0]);
    GetError();
}
- (void)drawRect:(NSRect)dirtyRect {
    [super drawRect:dirtyRect];

    glClearColor(0, 0, 0, 0);
    glClear(GL_COLOR_BUFFER_BIT);

    glUseProgram(program);
    GetError();
    glBindVertexArray(uiVAO[0]);
    GetError();

    glBindBuffer(GL_ARRAY_BUFFER, uiVBO[0]);
    glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), fTriangle, GL_STATIC_DRAW);
    glEnableVertexAttribArray(m_posAttr);
    GetError();
    glVertexAttribPointer(m_posAttr, 3, GL_FLOAT, GL_FALSE, 0, 0);
    GetError();

    glBindBuffer(GL_ARRAY_BUFFER, uiVBO[1]);
    glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), fTriangleColor, GL_STATIC_DRAW);
    glEnableVertexAttribArray(m_colAttr);
    GetError();
    glVertexAttribPointer(m_colAttr, 3, GL_FLOAT, GL_FALSE, 0, 0);
    GetError();

    glDrawArrays(GL_TRIANGLES, 0, 3);
    GetError();

    glDisableVertexAttribArray(m_posAttr);
    glDisableVertexAttribArray(m_colAttr);
    glFlush();
    GetError();
}
- (void)dealloc {
    glDeleteProgram(program);
    glDeleteBuffers(2, uiVBO);
    glDeleteVertexArrays(1, uiVAO);
    GetError();
}
@end
error.h:
//// Prints OpenGL errors
#ifndef __ERROR_H__
#define __ERROR_H__
#include <stdlib.h>
#include <assert.h>
#ifdef DEBUG
#define GetError( )\
{\
    for ( GLenum Error = glGetError( ); ( GL_NO_ERROR != Error ); Error = glGetError( ) )\
    {\
        switch ( Error )\
        {\
            case GL_INVALID_ENUM:      printf( "\n%s\n\n", "GL_INVALID_ENUM"      ); assert( 0 ); break;\
            case GL_INVALID_VALUE:     printf( "\n%s\n\n", "GL_INVALID_VALUE"     ); assert( 0 ); break;\
            case GL_INVALID_OPERATION: printf( "\n%s\n\n", "GL_INVALID_OPERATION" ); assert( 0 ); break;\
            case GL_OUT_OF_MEMORY:     printf( "\n%s\n\n", "GL_OUT_OF_MEMORY"     ); assert( 0 ); break;\
            default: break;\
        }\
    }\
}
#else
#define GetError( )
#endif /* DEBUG */
#endif /* __ERROR_H__ */
After a lot of effort and modifications to the code, I figured it out: the only real problem was the NSOpenGLPFADoubleBuffer option in the NSOpenGLPixelFormatAttribute array. After commenting out this option I got the desired output.
To my mind, the reason is that there were two output buffers: the drawing code was executed a single time, so the first buffer was painted and then swapped away, and the second buffer, which was empty, was the one shown.
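For reference, a sketch of the attribute array that produced output (identical to the question's array, with the double-buffer flag commented out):

NSOpenGLPixelFormatAttribute attrs[] = {
    // NSOpenGLPFADoubleBuffer,  // removed: only the empty back buffer was being shown
    NSOpenGLPFADepthSize, 24,
    NSOpenGLPFAOpenGLProfile,
    NSOpenGLProfileVersion4_1Core,
    0
};

The alternative would be to keep double buffering and call [[self openGLContext] flushBuffer] at the end of drawRect:, so the painted back buffer actually gets swapped to the screen instead of only calling glFlush().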
#import "collisionTestMyScene.h"
const static int nodeBitMask = 0x1 << 0;
const static int node1BitMask = 0x1 << 1;
@implementation collisionTestMyScene
-(id)initWithSize:(CGSize)size {
    if (self = [super initWithSize:size]) {
        /* Setup your scene here */
        self.physicsWorld.contactDelegate = self;
        w = 0;
    }
    return self;
}
-(void)didBeginContact:(SKPhysicsContact *)contact {
    NSLog(@"Contact Begin");
    if (contact.bodyA.categoryBitMask == nodeBitMask) {
        NSLog(@"Node is Body A");
    }
    if (contact.bodyA.categoryBitMask == node1BitMask) {
        NSLog(@"Node is Body B");
    }
}
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    /* Called when a touch begins */
    for (UITouch *touch in touches) {
        CGPoint location = [touch locationInNode:self];
        node = [SKSpriteNode spriteNodeWithImageNamed:@"block.jpg"];
        node.position = location;
        [node setScale:0.07];
        node.physicsBody.contactTestBitMask = node1BitMask;
        node.physicsBody.categoryBitMask = nodeBitMask;
        node.physicsBody.collisionBitMask = nodeBitMask;
        //node.physicsBody.collisionBitMask = 0;

        node1 = [SKSpriteNode spriteNodeWithImageNamed:@"block2.jpg"];
        node1.position = CGPointMake(200, 200);
        node1.physicsBody.categoryBitMask = node1BitMask;
        node1.physicsBody.contactTestBitMask = nodeBitMask;
        node1.physicsBody.collisionBitMask = node1BitMask;
        //node1.physicsBody.collisionBitMask = 0;
        [node1 setScale:0.07];

        [self addChild:node];
        [self addChild:node1];

        node.physicsBody = [SKPhysicsBody bodyWithRectangleOfSize:CGSizeMake(node.size.width, node.size.height)];
        node1.physicsBody = [SKPhysicsBody bodyWithRectangleOfSize:CGSizeMake(node1.size.width, node1.size.height)];

        SKAction *moveUp = [SKAction moveToX:100 duration:3];
        node1.physicsBody.affectedByGravity = NO;
        node.physicsBody.affectedByGravity = NO;
        [node1 runAction:moveUp];
        w = 1;
    }
}
It never NSLogs anything. I have tried changing the bit masks, and more. The CGRectIntersects function would work, but it is not accurate enough. Also, the two nodes are perfect box shapes. What could I be doing wrong? Thank you in advance!
The problem here is the bit masks. The two nodes are in different category, contact, and collision groups (bit masks), so they will neither contact nor collide: the bit masks are compared with a bitwise AND, and the contact/collision only occurs if the result is non-zero.
In short, make each node's contact bit mask include the other node's category, at the least, in order to receive didBeginContact messages.
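A minimal sketch of a mask setup that generates contact callbacks, using the question's constants. Note that the bodies are created before the masks are assigned; in the question's code the masks are set while physicsBody is still nil, and messages to nil are silently ignored:

node.physicsBody  = [SKPhysicsBody bodyWithRectangleOfSize:node.size];
node1.physicsBody = [SKPhysicsBody bodyWithRectangleOfSize:node1.size];

node.physicsBody.categoryBitMask = nodeBitMask;
node.physicsBody.contactTestBitMask = node1BitMask;   // fire didBeginContact when touching node1
node1.physicsBody.categoryBitMask = node1BitMask;
node1.physicsBody.contactTestBitMask = nodeBitMask;   // fire didBeginContact when touching node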
I have been trying to get a simple flood fill algorithm working for an iPhone app that I am developing, and I just can't get it working correctly.
I have the actual fill working great; however, the app crashes when the fill is too large. From what I can tell, that's because the recursive calls overflow the stack. From what I have read, I need to implement my own stack instead, but I can't work out how that works.
typedef struct {
    int red;
    int green;
    int blue;
} color;

@interface EMFloodTest : UIViewController {
    UIImageView *mainImage;
    unsigned char *imageData;
    color selColor;
    color newColor;
    int maxByte;
}
@end

@implementation EMFloodTest
- (void)setupImageData {
    CGImageRef imageRef = mainImage.image.CGImage;
    if (imageRef == NULL) { return; }

    NSUInteger width = CGImageGetWidth(imageRef);
    NSUInteger height = CGImageGetHeight(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    NSUInteger bytesPerPixel = 4;
    NSUInteger bytesPerRow = bytesPerPixel * width;
    NSUInteger bitsPerComponent = 8;

    maxByte = height * width * 4;
    imageData = malloc(height * width * 4);
    CGContextRef context = CGBitmapContextCreate(imageData, width, height, bitsPerComponent, bytesPerRow, colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
    CGContextRelease(context);
}
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        mainImage = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Color6.png"]];
        [self.view addSubview:mainImage];
        newColor.red = 255;
        newColor.green = 94;
        newColor.blue = 0;
        [self setupImageData];
    }
    return self;
}
- (void)updateImage {
    CGImageRef imageRef = mainImage.image.CGImage;
    if (imageRef == NULL) { return; }

    NSUInteger width = CGImageGetWidth(imageRef);
    NSUInteger height = CGImageGetHeight(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    NSUInteger bytesPerPixel = 4;
    NSUInteger bytesPerRow = bytesPerPixel * width;
    NSUInteger bitsPerComponent = 8;

    CGContextRef context = CGBitmapContextCreate(imageData, width, height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast);
    CGColorSpaceRelease(colorSpace);
    imageRef = CGBitmapContextCreateImage(context);
    mainImage.image = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);  // release the image created by CGBitmapContextCreateImage
    CGContextRelease(context);
}
- (void)setPixel:(NSUInteger)byte toColor:(color)color {
    imageData[byte]     = color.red;
    imageData[byte + 1] = color.green;
    imageData[byte + 2] = color.blue;
}

- (BOOL)testByte:(NSInteger)byte againstColor:(color)color {
    if (imageData[byte] == color.red && imageData[byte + 1] == color.green && imageData[byte + 2] == color.blue) {
        return YES;
    } else {
        return NO;
    }
}
// This is where the flood fill starts. It's a basic implementation but crashes when filling large sections.
- (void)floodFillFrom:(NSInteger)byte bytesPerRow:(NSInteger)bpr {
    int u = byte - bpr;
    int r = byte + 4;
    int d = byte + bpr;
    int l = byte - 4;
    if ([self testByte:u againstColor:selColor]) {
        [self setPixel:u toColor:newColor];
        [self floodFillFrom:u bytesPerRow:bpr];
    }
    if ([self testByte:r againstColor:selColor]) {
        [self setPixel:r toColor:newColor];
        [self floodFillFrom:r bytesPerRow:bpr];
    }
    if ([self testByte:d againstColor:selColor]) {
        [self setPixel:d toColor:newColor];
        [self floodFillFrom:d bytesPerRow:bpr];
    }
    if ([self testByte:l againstColor:selColor]) {
        [self setPixel:l toColor:newColor];
        [self floodFillFrom:l bytesPerRow:bpr];
    }
}
- (void)startFillFrom:(NSInteger)byte bytesPerRow:(NSInteger)bpr {
    if (imageData[byte] == 0 && imageData[byte + 1] == 0 && imageData[byte + 2] == 0) {
        NSLog(@"Black Selected");
        return;
    } else if ([self testByte:byte againstColor:newColor]) {
        NSLog(@"Same Fill Color");
    } else {
        NSLog(@"Color to be replaced");
        [self floodFillFrom:byte bytesPerRow:bpr];
        [self updateImage];
    }
}
- (void)selectedColor:(CGPoint)point {
    CGImageRef imageRef = mainImage.image.CGImage;
    if (imageRef == NULL) { return; }
    if (imageData == NULL) { return; }

    NSInteger width = CGImageGetWidth(imageRef);
    NSInteger byteNumber = 4 * ((width * round(point.y)) + round(point.x));
    NSInteger bytesPerPixel = 4;
    NSInteger bytesPerRow = bytesPerPixel * width;

    selColor.red = imageData[byteNumber];
    selColor.green = imageData[byteNumber + 1];
    selColor.blue = imageData[byteNumber + 2];
    NSLog(@"Selected Color, RGB: %i, %i, %i", selColor.red, selColor.green, selColor.blue);
    NSLog(@"Byte:%i", byteNumber);

    [self startFillFrom:byteNumber bytesPerRow:bytesPerRow];
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    UITouch *touch = [touches anyObject];
    CGPoint location = [touch locationInView:mainImage];
    [self selectedColor:location];
}
Any help on how I might be able to implement a stack or even use another algorithm would be greatly appreciated.
Best,
Darren
The problem is the recursive implementation.
Recursive calls that go too deep cause a stack overflow. You have to implement your algorithm iteratively instead, with an explicit stack of pixels still to be processed.
If you want to see an iterative example of flood fill, you can visit:
UIImageScanlineFloodfill
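As an illustration, here is a minimal sketch of the question's four-way fill rewritten iteratively with an explicit worklist. It reuses the testByte:/setPixel: helpers and the selColor, newColor, and maxByte ivars from the question, and adds simple bounds checks (which the recursive version lacked):

- (void)floodFillFrom:(NSInteger)byte bytesPerRow:(NSInteger)bpr {
    // Explicit stack of byte offsets replaces the call stack.
    // Each pixel is recolored when pushed, so it can never be pushed twice;
    // one slot per pixel plus one for the seed is therefore enough.
    NSInteger capacity = maxByte / 4 + 1;
    NSInteger *stack = malloc(sizeof(NSInteger) * capacity);
    NSInteger top = 0;
    stack[top++] = byte;

    while (top > 0) {
        NSInteger current = stack[--top];
        NSInteger neighbors[4] = { current - bpr, current + 4, current + bpr, current - 4 };
        for (int i = 0; i < 4; i++) {
            NSInteger n = neighbors[i];
            if (n >= 0 && n < maxByte && [self testByte:n againstColor:selColor]) {
                [self setPixel:n toColor:newColor];
                stack[top++] = n;  // process this pixel's neighbors later
            }
        }
    }
    free(stack);
}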
I have an AVPlayerLayer which I would like to create an OpenGL texture out of. I'm comfortable with OpenGL textures, and even comfortable with converting a CGImageRef into an OpenGL texture. It seems to me the code below should work, but I get just plain black. What am I doing wrong? Do I need to set any properties on the CALayer / AVPlayerLayer first?
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
int width = (int)[layer bounds].size.width;
int height = (int)[layer bounds].size.height;

CGContextRef context = CGBitmapContextCreate(NULL,
                                             width,
                                             height,
                                             8,
                                             width * 4,
                                             colorSpace,
                                             kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);

if (context == NULL) {
    ofLog(OF_LOG_ERROR, "getTextureFromLayer: failed to create context 1");
    return;
}

[[layer presentationLayer] renderInContext:context];
CGImageRef cgImage = CGBitmapContextCreateImage(context);

int bytesPerPixel = CGImageGetBitsPerPixel(cgImage) / 8;
if (bytesPerPixel == 3) bytesPerPixel = 4;
GLubyte *pixels = (GLubyte *)malloc(width * height * bytesPerPixel);

CGContextRelease(context);
context = CGBitmapContextCreate(pixels,
                                width,
                                height,
                                CGImageGetBitsPerComponent(cgImage),
                                width * bytesPerPixel,
                                CGImageGetColorSpace(cgImage),
                                kCGImageAlphaPremultipliedLast);

if (context == NULL) {
    ofLog(OF_LOG_ERROR, "getTextureFromLayer: failed to create context 2");
    free(pixels);
    return;
}
CGContextDrawImage(context, CGRectMake(0.0, 0.0, width, height), cgImage);

int glMode;
switch (bytesPerPixel) {
    case 1:
        glMode = GL_LUMINANCE;
        break;
    case 3:
        glMode = GL_RGB;
        break;
    case 4:
    default:
        glMode = GL_RGBA;
        break;
}

if (texture.bAllocated() == false || texture.getWidth() != width || texture.getHeight() != height) {
    NSLog(@"getTextureFromLayer: allocating texture %i, %i\n", width, height);
    texture.allocate(width, height, glMode, true);
}

// test texture
// for(int i=0; i<width*height*4; i++) pixels[i] = ofRandomuf() * 255;

texture.loadData(pixels, width, height, glMode);

CGContextRelease(context);
CFRelease(cgImage);
free(pixels);
P.S. The variable 'texture' is a C++ OpenGL (ES-compatible) texture object which I know works. If I uncomment the 'test texture' for-loop filling the texture with random noise, I can see that, so the problem is definitely earlier.
UPDATE
In response to Nick Weaver's reply I tried a different approach, and now I'm always getting NULL back from copyNextSampleBuffer with status == 3 (AVAssetReaderStatusFailed). Am I missing something?
variables
AVPlayer *videoPlayer;
AVPlayerLayer *videoLayer;
AVAssetReader *videoReader;
AVAssetReaderTrackOutput *videoOutput;
init
videoPlayer = [[AVPlayer alloc] initWithURL:[NSURL fileURLWithPath:[NSString stringWithUTF8String:videoPath.c_str()]]];
if (videoPlayer == nil) {
    NSLog(@"videoPlayer == nil ERROR LOADING %s\n", videoPath.c_str());
} else {
    NSLog(@"videoPlayer: %@", videoPlayer);
    videoLayer = [[AVPlayerLayer playerLayerWithPlayer:videoPlayer] retain];
    videoLayer.frame = [ThreeDView instance].bounds;
    // [[ThreeDView instance].layer addSublayer:videoLayer]; // test to see if it's loading and running

    AVAsset *asset = videoPlayer.currentItem.asset;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA], (NSString *)kCVPixelBufferPixelFormatTypeKey, nil];

    videoReader = [[AVAssetReader alloc] initWithAsset:asset error:nil];
    videoOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:[tracks objectAtIndex:0] outputSettings:settings];
    [videoReader addOutput:videoOutput];
    [videoReader startReading];
}
draw loop
if (videoPlayer == 0) {
    ofLog(OF_LOG_WARNING, "Shot::drawVideo: videoPlayer == 0");
    return;
}
if (videoOutput == 0) {
    ofLog(OF_LOG_WARNING, "Shot::drawVideo: videoOutput == 0");
    return;
}

CMSampleBufferRef sampleBuffer = [videoOutput copyNextSampleBuffer];
if (sampleBuffer == 0) {
    ofLog(OF_LOG_ERROR, "Shot::drawVideo: sampleBuffer == 0, status: %i", videoReader.status);
    return;
}

CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
unsigned char *pixels = (unsigned char *)CVPixelBufferGetBaseAddress(imageBuffer);
int width = CVPixelBufferGetWidth(imageBuffer);
int height = CVPixelBufferGetHeight(imageBuffer);

if (videoTexture.bAllocated() == false || videoTexture.getWidth() != width || videoTexture.getHeight() != height) {
    NSLog(@"Shot::drawVideo() allocating texture %i, %i\n", width, height);
    videoTexture.allocate(width, height, GL_RGBA, true);
}

videoTexture.loadData(pixels, width, height, GL_BGRA);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
// Release the sample buffer only after we are done with its image buffer.
CFRelease(sampleBuffer);
I think "iOS4: how do I use video file as an OpenGL texture?" will be helpful for your question.
I am trying to add some post-processing capabilities to a program. The rendering is done using OpenGL. I just want to allow the program to load some homemade fragment shaders and use them on the video stream.
I wrote a little shader using "OpenGL Shader Builder" that just turns a texture to grayscale. The shader works well in the shader builder, but I can't make it work in the main program: the screen stays all black.
Here is the setup:
@implementation PluginGLView
- (id)initWithCoder:(NSCoder *)coder
{
    const GLubyte *strExt;

    if ((self = [super initWithCoder:coder]) == nil)
        return nil;

    glLock = [[NSLock alloc] init];
    if (nil == glLock) {
        [self release];
        return nil;
    }

    // Init pixel format attribs
    NSOpenGLPixelFormatAttribute attrs[] =
    {
        NSOpenGLPFAAccelerated,
        NSOpenGLPFANoRecovery,
        NSOpenGLPFADoubleBuffer,
        0
    };

    // Get pixel format from OpenGL
    NSOpenGLPixelFormat *pixFmt = [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs];
    if (!pixFmt)
    {
        NSLog(@"No Accelerated OpenGL pixel format found\n");

        NSOpenGLPixelFormatAttribute attrs2[] =
        {
            NSOpenGLPFANoRecovery,
            0
        };

        // Get pixel format from OpenGL
        pixFmt = [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs2];
        if (!pixFmt) {
            NSLog(@"No OpenGL pixel format found!\n");
            [self release];
            return nil;
        }
    }
    [self setPixelFormat:[pixFmt autorelease]];

    /*
    long swapInterval = 1;
    [[self openGLContext]
        setValues:&swapInterval
        forParameter:NSOpenGLCPSwapInterval];
    */

    [glLock lock];
    [[self openGLContext] makeCurrentContext];

    // Init object members
    strExt = glGetString(GL_EXTENSIONS);
    texture_range  = gluCheckExtension((const unsigned char *)"GL_APPLE_texture_range", strExt) ? GL_TRUE : GL_FALSE;
    texture_hint   = GL_STORAGE_SHARED_APPLE;
    client_storage = gluCheckExtension((const unsigned char *)"GL_APPLE_client_storage", strExt) ? GL_TRUE : GL_FALSE;
    rect_texture   = gluCheckExtension((const unsigned char *)"GL_EXT_texture_rectangle", strExt) ? GL_TRUE : GL_FALSE;

    // Setup some basic OpenGL stuff
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
    glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    // Load the shaders
    shader = LoadShader(GL_FRAGMENT_SHADER, "/Users/alexandremathieu/fragment.fs");
    program = glCreateProgram();
    glAttachShader(program, shader);
    glLinkProgram(program);
    glUseProgram(program);

    [NSOpenGLContext clearCurrentContext];
    [glLock unlock];
    image_width  = 1024;
    image_height = 512;
    image_depth  = 16;
    image_type   = GL_UNSIGNED_SHORT_1_5_5_5_REV;

    image_base = (GLubyte *)calloc(((IMAGE_COUNT * image_width * image_height) / 3) * 4, image_depth >> 3);
    if (image_base == nil) {
        [self release];
        return nil;
    }

    // Create and load textures for the first time
    [self loadTextures:GL_TRUE];

    // Init fps timer
    //gettimeofday(&cycle_time, NULL);

    drawBG = YES;

    // Call for a redisplay
    noDisplay = YES;
    PSXDisplay.Disabled = 1;
    [self setNeedsDisplay:true];

    return self;
}
And here is the "render screen" function which basically...renders the screen.
- (void)renderScreen
{
    int bufferIndex = whichImage;
    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, bufferIndex + 1);
    glUseProgram(program);
    int loc = glGetUniformLocation(program, "texture");
    glUniform1i(loc, bufferIndex + 1);
    glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, 0, 0, image_width, image_height, GL_BGRA, image_type, image[bufferIndex]);

    glBegin(GL_QUADS);
        glTexCoord2f(0.0f, 0.0f);
        glVertex2f(-1.0f, 1.0f);
        glTexCoord2f(0.0f, image_height);
        glVertex2f(-1.0f, -1.0f);
        glTexCoord2f(image_width, image_height);
        glVertex2f(1.0f, -1.0f);
        glTexCoord2f(image_width, 0.0f);
        glVertex2f(1.0f, 1.0f);
    glEnd();

    [[self openGLContext] flushBuffer];
    [NSOpenGLContext clearCurrentContext];
    //[glLock unlock];
}
and finally here's the shader.
uniform sampler2DRect texture;

void main() {
    vec4 color, texel;
    color = gl_Color;
    texel = texture2DRect(texture, gl_TexCoord[0].xy);
    color *= texel;

    // Begin Shader
    float gray = 0.0;
    gray += (color.r + color.g + color.b) / 3.0;
    color = vec4(gray, gray, gray, color.a);
    // End Shader

    gl_FragColor = color;
}
Loading and using shaders works, since I am able to turn the screen all red with this shader:
void main() {
    gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
If the shader contains a syntax error, I get an error message from the LoadShader function, and if I remove the use of the shader, everything works normally.
I think the problem comes from passing the texture as a uniform parameter. But these are my very first steps with OpenGL, and I can't be sure of anything.
Texture samplers have to be set to the index of the active texture unit, not to the texture object's name. So, for example, with glActiveTexture(GL_TEXTURE3) the sampler must be set to 3 as well. In your case, since the texture is bound on the default unit, the number should be 0.
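A minimal sketch of the corrected calls in renderScreen (unit 0 is assumed here because the original code never calls glActiveTexture):

glActiveTexture(GL_TEXTURE0);                              // select texture unit 0
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, bufferIndex + 1);  // bind the texture object to that unit
glUseProgram(program);
int loc = glGetUniformLocation(program, "texture");
glUniform1i(loc, 0);  // the sampler takes the unit index (0), not the texture name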