Line Drawing in iOS - objective-c

How can I draw a straight line in iOS regardless of how the user actually drags?
I want the line's length to match how far the user drags a finger.
But it needs to be either vertical or horizontal, depending on whether the user slides the finger left to right across the screen or top to bottom.
The line must not be slanted or any other shape; it needs to be straight.
I have gone through articles which say OpenGL is the only way.
I have tried coding it myself, but when I change the direction from vertical to horizontal (or vice versa) I get an extra line, or a gap between where the horizontal line ends and where the vertical line starts:
In header file:
@interface ViewController : UIViewController
{
BOOL mouseSwiped;
CGPoint lastPoint;
CGPoint previousPoint;
UIBezierPath *currentPath;
CGFloat lastPointY;
CGFloat lastPointX;
BOOL xchanging;
BOOL ychanging;
NSMutableArray *arrayTouch;
}
@property (weak, nonatomic) IBOutlet UIImageView *drawImage;
@property UIBezierPath *currentPath;
@end
In implementation file:
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
mouseSwiped = NO;
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 2) {
drawImage.image = nil;
[arrayTouch removeAllObjects];
return;
}
lastPoint = [touch locationInView:self.view];
[arrayTouch addObject:touch];
lastPointY = lastPoint.y;
lastPointX = lastPoint.x;
lastPoint.y -= 1;
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
mouseSwiped = YES;
UITouch *touch = [touches anyObject];
CGPoint currentPoint = [touch locationInView:self.view];
currentPoint.y -= 1;
int currentPointXVal = currentPoint.x;
int currentPointYVal = currentPoint.y;
int lastPointXVal = lastPoint.x;
int lastPointYVal = lastPoint.y;
int diffX = abs(currentPointXVal - lastPointXVal);
int diffY = abs(currentPointYVal - lastPointYVal);
if(currentPoint.x > lastPoint.x && diffX > diffY)
{
if(ychanging == YES)
{
lastPointY = currentPoint.y;
lastPointX = currentPoint.x;
ychanging = NO;
}
xchanging = YES;
NSLog(#"x increasing");
NSLog(#"currentPoint: %#",NSStringFromCGPoint(currentPoint));
NSLog(#"lastPoint: %#",NSStringFromCGPoint(lastPoint));
UIGraphicsBeginImageContext(self.view.frame.size);
[drawImage.image drawInRect:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
CGContextBeginPath(UIGraphicsGetCurrentContext());
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPointY);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, lastPointY);
CGContextStrokePath(UIGraphicsGetCurrentContext());
drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
if(currentPoint.y > lastPoint.y && diffY > diffX)
{
if(xchanging == YES)
{
lastPointY = currentPoint.y;
lastPointX = currentPoint.x;
xchanging = NO;
}
ychanging = YES;
NSLog(#"y increasing");
NSLog(#"currentPoint: %#",NSStringFromCGPoint(currentPoint));
NSLog(#"lastPoint: %#",NSStringFromCGPoint(lastPoint));
UIGraphicsBeginImageContext(self.view.frame.size);
[drawImage.image drawInRect:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
CGContextBeginPath(UIGraphicsGetCurrentContext());
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPointX, lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPointX, currentPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());
drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
if(currentPoint.x < lastPoint.x && diffX > diffY)
{
if(ychanging == YES)
{
lastPointY = currentPoint.y;
lastPointX = currentPoint.x;
ychanging = NO;
}
xchanging = YES;
NSLog(#"x decreasing");
NSLog(#"currentPoint: %#",NSStringFromCGPoint(currentPoint));
NSLog(#"lastPoint: %#",NSStringFromCGPoint(lastPoint));
UIGraphicsBeginImageContext(self.view.frame.size);
[drawImage.image drawInRect:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
CGContextBeginPath(UIGraphicsGetCurrentContext());
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPointY);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, lastPointY);
CGContextStrokePath(UIGraphicsGetCurrentContext());
drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
if(currentPoint.y < lastPoint.y && diffY > diffX)
{
if(xchanging == YES)
{
lastPointY = currentPoint.y;
lastPointX = currentPoint.x;
xchanging = NO;
}
ychanging = YES;
NSLog(#"y decreasing");
NSLog(#"currentPoint: %#",NSStringFromCGPoint(currentPoint));
NSLog(#"lastPoint: %#",NSStringFromCGPoint(lastPoint));
UIGraphicsBeginImageContext(self.view.frame.size);
[drawImage.image drawInRect:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
CGContextBeginPath(UIGraphicsGetCurrentContext());
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPointX, lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPointX, currentPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());
drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
lastPoint = currentPoint;
}
Is there any way I can do this without OpenGL?

Sure, you can do this; just take the slope out of the line:
@implementation LineDrawingView
{
CGPoint touchStart;
CGPoint touchCurrent;
}
- (id)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
// Initialization code
touchStart = (CGPoint) { -1, -1 };
touchCurrent = (CGPoint) { -1, -1 };
}
return self;
}
- (void)drawRect:(CGRect)rect
{
if (touchStart.x != -1)
{
// calculate the line rotation
enum { horizontal, vertical } lineRot = horizontal;
if (touchStart.y == touchCurrent.y)
{
// straight line
lineRot = horizontal;
}
else
{
// calculate the slope
double slope = fabs((touchCurrent.y - touchStart.y) / (touchCurrent.x - touchStart.x));
if (slope > 1)
lineRot = vertical;
}
// draw the actual line
CGPoint lineEndPoint = { };
if (lineRot == horizontal)
lineEndPoint = (CGPoint) { touchCurrent.x, touchStart.y }; // horizontal line
else
lineEndPoint = (CGPoint) { touchStart.x, touchCurrent.y }; // vertical line
// actually draw the line
[[UIColor redColor] setStroke];
UIBezierPath *path = [UIBezierPath bezierPath];
[path moveToPoint:touchStart];
[path addLineToPoint:lineEndPoint];
[path stroke];
}
}
-(void) touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
touchStart = [[touches anyObject] locationInView:self];
touchCurrent = touchStart;
[self setNeedsDisplay];
}
-(void) touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
touchCurrent = [[touches anyObject] locationInView:self];
[self setNeedsDisplay];
}
-(void) touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
touchStart = touchCurrent = (CGPoint) { -1, -1 };
[self setNeedsDisplay];
}
@end
You can extend this to use an array of lines if you'd like; this is just a simple illustration.
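For reference, here is a minimal sketch of that "array of lines" extension (not part of the original answer); the completedLines ivar and the helper names are illustrative, and it assumes ARC:
// Sketch only: persist finished segments so earlier lines stay on screen.
// Add an ivar alongside touchStart/touchCurrent and create it in initWithFrame::
//     NSMutableArray *completedLines;            // pairs of NSValue-wrapped CGPoints
//     completedLines = [NSMutableArray array];   // assumes ARC; retain it under MRC
// Call this from touchesEnded: (before resetting the points) with the same
// snapped end point that drawRect: computes:
- (void)archiveLineFrom:(CGPoint)start to:(CGPoint)end
{
    [completedLines addObject:@[ [NSValue valueWithCGPoint:start],
                                 [NSValue valueWithCGPoint:end] ]];
}
// Call this at the top of drawRect: to replay everything drawn so far:
- (void)strokeArchivedLines
{
    [[UIColor redColor] setStroke];
    for (NSArray *pair in completedLines) {
        UIBezierPath *path = [UIBezierPath bezierPath];
        [path moveToPoint:[[pair objectAtIndex:0] CGPointValue]];
        [path addLineToPoint:[[pair objectAtIndex:1] CGPointValue]];
        [path stroke];
    }
}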

Related

Setting bounds for an image view in a colouring-book type app

I am making an app in which the user can fill and stroke colours within an image view with touches. I want the colour to stay within the image view's bounds, so that if the user accidentally touches outside the image view while filling, no colour spills out of it. This is what I have implemented so far. Any suggestions?
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
mouseSwiped = NO;
UITouch *touch = [touches anyObject];
lastPoint = [touch locationInView:self.view];
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
// mouseSwiped = YES;
// UITouch *touch = [touches anyObject];
// CGPoint currentPoint = [touch locationInView:self.view];
//
// UIGraphicsBeginImageContext(self.view.frame.size);
// [self.tempDrawImage.image drawInRect:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height)];
// CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
// CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
// CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
// CGContextSetLineWidth(UIGraphicsGetCurrentContext(), brush );
// CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), red, green, blue, 1.0);
// CGContextSetBlendMode(UIGraphicsGetCurrentContext(),kCGBlendModeNormal);
//
// CGContextStrokePath(UIGraphicsGetCurrentContext());
// self.tempDrawImage.image = UIGraphicsGetImageFromCurrentImageContext();
// [self.tempDrawImage setAlpha:opacity];
// UIGraphicsEndImageContext();
//
// lastPoint = currentPoint;
if(CGRectContainsPoint([testImage bounds], lastPoint))
{
mouseSwiped = YES;
UITouch *touch = [touches anyObject];
CGPoint currentPoint = [touch locationInView:self.view];
UIGraphicsBeginImageContext(self.view.frame.size);
[self.testImage.image drawInRect:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height)];
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), brush );
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), red, green, blue, 1.0);
CGContextSetBlendMode(UIGraphicsGetCurrentContext(),kCGBlendModeNormal);
CGContextStrokePath(UIGraphicsGetCurrentContext());
self.testImage.image = UIGraphicsGetImageFromCurrentImageContext();
[self.testImage setAlpha:opacity];
UIGraphicsEndImageContext();
lastPoint = currentPoint;
}
else
{
// CGRectContainsPoint(testImage.frame, lastPoint) = ;
}
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
if(!mouseSwiped) {
UIGraphicsBeginImageContext(self.view.frame.size);
[self.testImage.image drawInRect:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), brush);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), red, green, blue, opacity);
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());
CGContextFlush(UIGraphicsGetCurrentContext());
self.tempDrawImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
UIGraphicsBeginImageContext(self.mainImage.frame.size);
[self.mainImage.image drawInRect:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height) blendMode:kCGBlendModeNormal alpha:1.0];
[self.tempDrawImage.image drawInRect:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height) blendMode:kCGBlendModeNormal alpha:opacity];
self.mainImage.image = UIGraphicsGetImageFromCurrentImageContext();
self.tempDrawImage.image = nil;
UIGraphicsEndImageContext();
}
What happens if you create a path along the borders of your view and set it as the view's clipping path? I think nothing should draw outside of it anymore once it is set.
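A rough sketch of that idea (my own, not taken from the question's code), dropped into touchesMoved: right after the image context is created; it assumes testImage.frame is expressed in self.view's coordinate system:
UIGraphicsBeginImageContext(self.view.frame.size);
CGContextRef ctx = UIGraphicsGetCurrentContext();
// Clip all subsequent strokes to the image view's rectangle so that a stroke
// started inside it cannot spill over its edges.
CGContextSaveGState(ctx);
CGContextAddRect(ctx, self.testImage.frame);
CGContextClip(ctx);
// ... the existing drawInRect: / CGContextStrokePath drawing code goes here ...
CGContextRestoreGState(ctx);
self.testImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();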

Screen Recording on iPad

I have been facing a problem for the last two weeks. I am working on an iPad app in which I want to do annotation and also make a screen recording of that annotation. The annotation part works fine, but it causes problems when I start recording: the drawing loses its smoothness and lags during screen recording. For the screen recording I am using AVAssetWriter. The code looks fine for both the annotation and the screen recording, but I don't know where the problem is.
My screenshot size is (1050, 650).
Should I use Grand Central Dispatch to solve this problem?
Can anybody help me solve it?
My code:
// For Annotation
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
mouseSwiped = NO;
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 2)
{
drawImage.image = nil; //Double click to undo drawing.
return;
}
lastPoint = [touch locationInView:self.view];
lastPoint.y -= 20;
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
mouseSwiped = YES;
UITouch *touch = [touches anyObject];
CGPoint currentPoint = [touch locationInView:self.view];
currentPoint.y -= 20;
// UIGraphicsBeginImageContext(canvasView.frame.size);
UIGraphicsBeginImageContext(drawImage.frame.size);
[drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width,drawImage.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 10.0);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
CGContextBeginPath(UIGraphicsGetCurrentContext());
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());
drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
lastPoint = currentPoint;
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 2)
{
drawImage.image = nil;
return;
}
if(!mouseSwiped)
{
UIGraphicsBeginImageContext(drawImage.frame.size);
[drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width,drawImage.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());
CGContextFlush(UIGraphicsGetCurrentContext());
drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
}
//For Screen Recording
#define FRAME_WIDTH 1024
#define FRAME_HEIGHT 650
#define TIME_SCALE 600
- (UIImage*)screenshot
{
UIGraphicsBeginImageContext(drawImage.frame.size);
[self.view.layer renderInContext:UIGraphicsGetCurrentContext()];
UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return viewImage;
}
-(NSURL*) pathToDocumentsDirectory {
NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mov"];
outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSError* error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
NSLog(#"Could not delete old recording file at path: %#", outputPath);
}
}
[outputPath release];
return [outputURL autorelease];
}
-(void) writeSample: (NSTimer*) _timer
{
if (assetWriterInput.readyForMoreMediaData) {
// CMSampleBufferRef sample = nil;
CVReturn cvErr = kCVReturnSuccess;
// get screenshot image!
CGImageRef image = (CGImageRef) [[self screenshot] CGImage];
NSLog (#"made screenshot");
// prepare the pixel buffer
CVPixelBufferRef pixelBuffer = NULL;
CFDataRef imageData= CGDataProviderCopyData(CGImageGetDataProvider(image));
NSLog (#"copied image data");
cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
FRAME_WIDTH,
FRAME_HEIGHT,
kCVPixelFormatType_32BGRA,
(void*)CFDataGetBytePtr(imageData),
CGImageGetBytesPerRow(image),
NULL,
NULL,
NULL,
&pixelBuffer);
NSLog (#"CVPixelBufferCreateWithBytes returned %d", cvErr);
// calculate the time
CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
NSLog (#"elapsedTime: %f", elapsedTime);
CMTime presentationTime = CMTimeMake (elapsedTime * TIME_SCALE, TIME_SCALE);
// write the sample
BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
if (appended) {
NSLog (#"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
} else {
NSLog (#"failed to append");
[self stopRecording];
}
}
}
-(void) startRecording {
movieURL = [self pathToDocumentsDirectory];
NSLog(#"path=%#",movieURL);
movieError = nil;
[assetWriter release];
assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
fileType: AVFileTypeQuickTimeMovie
error: &movieError];
[self writer];
// start writing samples to it
NSDate* start = [NSDate date];
frameRate=40.0f;
float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
delayRemaining = (1.0 / self.frameRate) - processingSeconds;
[assetWriterTimer release];
assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:delayRemaining > 0.0 ? delayRemaining : 0.01
target:self
selector:@selector(writeSample:)
userInfo:nil
repeats:YES] ;
}
-(void)writer
{
NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:FRAME_WIDTH], AVVideoWidthKey,
[NSNumber numberWithInt:FRAME_HEIGHT], AVVideoHeightKey,
nil];
assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeVideo
outputSettings:assetWriterInputSettings];
assetWriterInput.expectsMediaDataInRealTime = YES;
[assetWriter addInput:assetWriterInput];
[assetWriterPixelBufferAdaptor release];
assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
initWithAssetWriterInput:assetWriterInput
sourcePixelBufferAttributes:nil];
[assetWriter startWriting];
firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
[assetWriter startSessionAtSourceTime: CMTimeMake(0, TIME_SCALE)];
}
-(void) stopRecording {
[assetWriterTimer invalidate];
assetWriterTimer = nil;
[assetWriter finishWriting];
NSLog (#"finished writing");
}
The simplest approach is to try a lower frame rate.
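For example, in the question's startRecording you could schedule the timer at a fixed, lower rate instead of the near-zero delayRemaining interval computed there; the 10 fps value below is just a suggestion, not a tested figure:
// Capture at a fixed, modest rate instead of chasing 40 fps;
// 10 fps is usually plenty for annotation playback.
self.frameRate = 10.0f;
assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:(1.0 / self.frameRate)
                                                    target:self
                                                  selector:@selector(writeSample:)
                                                  userInfo:nil
                                                   repeats:YES];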

iOS Quartz Changing stroke color

I am new to iOS and am doing this as a learning exercise. I got most of this code from a tutorial and it works, but I wanted to add more stroke colors. My code starts with a default green stroke, but I have two buttons labeled "Red" and "Blue" that should change the stroke color to red or blue when pressed. I want to press Red, then start drawing, and have that stroke come out red, and so on, like in MS Paint. I've confirmed my UIButtons are linked up correctly. Any help is appreciated, thanks!
DrawView.h:
#import <UIKit/UIKit.h>
@interface DrawView : UIImageView
- (void)changeOption:(NSString *)withOption;
@end
DrawView.m:
#import "DrawView.h"
@interface DrawView()
@property (nonatomic) BOOL fingerMoved;
@property (nonatomic) CGPoint lastPoint;
@end
@implementation DrawView
CGFloat red = 0.0, green = 0.0, blue = 0.0, width = 5.0;
@synthesize fingerMoved = _fingerMoved;
@synthesize lastPoint = _lastPoint;
- (id)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
// Initialization code
}
return self;
}
- (void)changeOption:(NSString *)withOption
{
if ([withOption isEqualToString:@"Black"]) {
red = 0.0; green = 0.0; blue = 0.0; width = 5.0;
} else if ([withOption isEqualToString:@"Red"]) {
red = 1.0; green = 0.0; blue = 0.0; width = 5.0;
} else if ([withOption isEqualToString:@"Blue"]) {
red = 0.0; green = 0.0; blue = 1.0; width = 5.0;
} else if ([withOption isEqualToString:@"Green"]) {
red = 0.0; green = 1.0; blue = 0.0; width = 5.0;
} else if ([withOption isEqualToString:@"Brown"]) {
red = 0.4; green = 0.0; blue = 0.0; width = 5.0;
} else if ([withOption isEqualToString:@"Orange"]) {
red = 1.0; green = 0.6; blue = 0.0; width = 5.0;
} else if ([withOption isEqualToString:@"Yellow"]) {
red = 1.0; green = 1.0; blue = 0.0; width = 5.0;
} else if ([withOption isEqualToString:@"Purple"]) {
red = 0.5; green = 0.0; blue = 1.0; width = 5.0;
} else if ([withOption isEqualToString:@"Eraser"]) {
red = 1.0; green = 1.0; blue = 1.0; width = 13.0;
} else if ([withOption isEqualToString:@"Clear"]) {
self.image = nil;
}
}
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
self.fingerMoved = NO;
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 2) {
self.image = nil;
return;
}
self.lastPoint = [touch locationInView:self];
self.lastPoint = CGPointMake(self.lastPoint.x, self.lastPoint.y - 20);
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
self.fingerMoved = YES;
UITouch *touch = [touches anyObject];
CGPoint currentPoint = [touch locationInView:self];
currentPoint.y -= 20;
UIGraphicsBeginImageContext(self.frame.size);
[self.image drawInRect:CGRectMake(0, 0, self.frame.size.width, self.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), width);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), red, green, blue, 1.0);
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), self.lastPoint.x, self.lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());
self.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
self.lastPoint = currentPoint;
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 2) {
self.image = nil;
return;
}
if (!self.fingerMoved) {
UIGraphicsBeginImageContext(self.frame.size);
[self.image drawInRect:CGRectMake(0, 0, self.frame.size.width, self.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), width);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), red, green, blue, 1.0);
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), self.lastPoint.x, self.lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), self.lastPoint.x, self.lastPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());
self.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
}
/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
// Drawing code
}
*/
@end
Silly mistake in some other code. Fixed now.
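For anyone hitting the same symptom, the usual wiring is for the buttons to simply forward a colour name to changeOption: on the draw view; the drawView outlet and the action names below are assumptions for illustration, not the asker's actual code:
// In the owning view controller (outlet name is illustrative):
// @property (weak, nonatomic) IBOutlet DrawView *drawView;
- (IBAction)redButtonTapped:(UIButton *)sender
{
    [self.drawView changeOption:@"Red"];
}
- (IBAction)blueButtonTapped:(UIButton *)sender
{
    [self.drawView changeOption:@"Blue"];
}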

Drawing with a finger on a UIImageView inside a scroll view

I need to make a UIImageView that sits on a scroll view respond to touches.
It is a small 100x50 UIImageView where people can write their signature.
I am using the same code elsewhere and it works, but with the scroll view it doesn't.
I looked around, but I could not find code for this.
.h
UIImageView *drawImage;
@property (nonatomic, retain) IBOutlet UIImageView *drawImage;
.m
@synthesize drawImage;
- (void)viewDidLoad {
[scrollView setScrollEnabled:YES];
[scrollView setContentSize:CGSizeMake(320,1540)];
if ([MFMailComposeViewController canSendMail])
button.enabled = YES;
drawImage.userInteractionEnabled = YES;
UITapGestureRecognizer *gestureRec = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(touchesBegan:)];
gestureRec.numberOfTouchesRequired = 1;
gestureRec.numberOfTapsRequired = 1;
[drawImage addGestureRecognizer:gestureRec];
[gestureRec release];
UIButton *btn = [UIButton buttonWithType:UIButtonTypeCustom];
[btn addTarget:self action:@selector(printItem) forControlEvents:UIControlEventTouchDown];
btn.frame = CGRectMake(75, 0, 44, 44);
[btn setImage:[UIImage imageNamed:@"print.png"] forState:UIControlStateNormal];
[self.view addSubview:btn];
}
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
mouseSwiped = NO;
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 3) {
drawImage.image = nil;
return;
}
lastPoint = [touch locationInView:drawImage];
// lastPoint.y -=20;// only if signature goes bottom of the screen
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
mouseSwiped = YES;
UITouch *touch = [touches anyObject];
CGPoint currentPoint = [touch locationInView:drawImage];
currentPoint.y -=5; // only if signature goes bottom of the screen
UIGraphicsBeginImageContext(drawImage.frame.size);
CGContextRef currentContext = UIGraphicsGetCurrentContext();
[drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
CGContextSetLineCap(currentContext, kCGLineCapRound);
CGContextSetLineWidth(currentContext, 2.0);
CGContextSetRGBStrokeColor(currentContext, 0.0, 0.0, 0.0, 1.0);
CGContextBeginPath(currentContext);
CGContextMoveToPoint(currentContext, lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(currentContext, currentPoint.x, currentPoint.y);
CGContextStrokePath(currentContext);
drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
lastPoint = currentPoint;
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 3) {
drawImage.image = nil;
return;
}
if(!mouseSwiped) {
UIGraphicsBeginImageContext(drawImage.frame.size);
CGContextRef currentContext = UIGraphicsGetCurrentContext();
[drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
CGContextSetLineCap(currentContext, kCGLineCapRound);
CGContextSetLineWidth(currentContext, 3.0);
CGContextSetRGBStrokeColor(currentContext, 1.0, 1.0, 2.0, 2.0);
CGContextMoveToPoint(currentContext, lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(currentContext, lastPoint.x, lastPoint.y);
CGContextStrokePath(currentContext);
drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
}
I tried NSZombieEnabled and I get a SIGABRT on the UITouch line in touchesBegan:
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
mouseSwiped = NO;
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 3) {
drawImage.image = nil;
return;
}
lastPoint = [touch locationInView:drawImage];
// lastPoint.y -=20;// only if signature goes bottom of the screen
}
and a SIGABRT here, on the int main(int argc, char *argv[]) line:
#import <UIKit/UIKit.h>
int main(int argc, char *argv[])
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
int retVal = UIApplicationMain(argc, argv, nil, nil);
[pool release];
return retVal;
}
I still get the same error as before:
2011-05-10 22:14:22.509 anamnesiprova[90427:207] -[UITapGestureRecognizer anyObject]: unrecognized selector sent to instance 0x6bd5ba0
2011-05-10 22:14:22.512 anamnesiprova[90427:207] *** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '-[UITapGestureRecognizer anyObject]: unrecognized selector sent to instance 0x6bd5ba0'
I think you have to set userInteractionEnabled of your UIImageView to YES.
Another idea would be to add a gesture recognizer to your image view; to start with, a simple one:
UITapGestureRecognizer *gestureRec = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(myMethodToBeCalled)];
gestureRec.numberOfTouchesRequired = 1;
gestureRec.numberOfTapsRequired = 1;
[myImageView addGestureRecognizer:gestureRec];
[gestureRec release];
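Note that a tap gesture recognizer's action receives the recognizer itself, not an NSSet of touches, so pointing it at touchesBegan:withEvent: (as the question's viewDidLoad does) produces exactly the -[UITapGestureRecognizer anyObject] crash shown above. A dedicated handler would look roughly like this; the method name is illustrative:
// Register with action:@selector(handleTap:) instead of touchesBegan:
- (void)handleTap:(UITapGestureRecognizer *)recognizer
{
    CGPoint tapPoint = [recognizer locationInView:recognizer.view];
    NSLog(@"tapped at %@", NSStringFromCGPoint(tapPoint));
}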

How do I erase what I have drawn onto the image in the iPhone SDK?

I have an image in my view and I am painting on it with colors, as in the code below:
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
mouseSwiped = NO;
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 2)
{
//imageDraw.image = nil;
return;
}
else
{
}
lastPoint = [touch locationInView:imageDraw];
lastPoint.y -= 5;
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
mouseSwiped = YES;
UITouch *touch = [touches anyObject];
CGPoint currentPoint = [touch locationInView:mView];
currentPoint.y -= 5;
UIGraphicsBeginImageContext(imageDraw.frame.size);
[imageDraw.image drawInRect:CGRectMake(0, 0, imageDraw.frame.size.width, imageDraw.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
//CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 0.5);
DrawAppDelegate *appDelegate=(DrawAppDelegate*)[ [UIApplication sharedApplication] delegate];
UIColor *clr = appDelegate.txtColor;
[clr setStroke];
CGContextBeginPath(UIGraphicsGetCurrentContext());
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());
imageDraw.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
lastPoint = currentPoint;
mouseMoved++;
if (mouseMoved == 10)
{
mouseMoved = 0;
}
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
DrawAppDelegate *appDelegate=(DrawAppDelegate*)[ [UIApplication sharedApplication] delegate];
UITouch *touch = [touches anyObject];
if ([touch tapCount] == 2)
{
//imageDraw.image = nil;
return;
}
else
{
}
if(!mouseSwiped)
{
UIGraphicsBeginImageContext(imageDraw.frame.size);
[imageDraw.image drawInRect:CGRectMake(0, 0, imageDraw.frame.size.width, imageDraw.frame.size.height)];
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
UIColor *clr = appDelegate.txtColor;
[clr setStroke];
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());
CGContextFlush(UIGraphicsGetCurrentContext());
imageDraw.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
}
-(void)drawText
{
/*TextViewController *viewController = [[TextViewController alloc]init];
[self.navigationController pushViewController:viewController animated:YES];
[viewController release];
*/
colorWheel = [[ColorPickerImageView alloc] initWithFrame:CGRectMake(20,20,300,340)];
[colorWheel setImage:[UIImage imageNamed:@"colorWheel1.png"]];
[colorWheel setUserInteractionEnabled:YES];
colorWheel.backgroundColor = [UIColor clearColor];
colorWheel.pickedColorDelegate = self;
[mView addSubview:colorWheel];
[self animateColorWheelToShow:YES duration:0.3];
}
- (void) pickedColor:(UIColor*)color
{
//mView.backgroundColor= color;
DrawAppDelegate *appDelegate=(DrawAppDelegate*)[ [UIApplication sharedApplication] delegate];
[self animateColorWheelToShow:NO duration:0.3];
appDelegate.txtColor = color;
//[self setNeedsDisplayInRect:CGRectMake(0, 0, 320, 480)];
[mView setNeedsDisplay];
}
Now I want to erase some of the painted color on the image. Can anyone suggest how I can do that, ideally with some sample code?
Any help will be deeply appreciated.
Thanks to all,
Monish.
The answer is in the order of the drawing calls: to clear and erase, use clearColor together with the clear blend mode, as below.
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
UITouch *touch = [touches anyObject];
self.previousPoint1 = [touch previousLocationInView:self.imageView];
self.previousPoint2 = [touch previousLocationInView:self.imageView];
self.currentPoint = [touch locationInView:self.imageView];
}
-(void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
UITouch *touch = [touches anyObject];
self.previousPoint2 = self.previousPoint1;
self.previousPoint1 = [touch previousLocationInView:self.imageView];
self.currentPoint = [touch locationInView:self.imageView];
// calculate mid point
CGPoint mid1 = midPoint(self.previousPoint1, self.previousPoint2);
CGPoint mid2 = midPoint(self.currentPoint, self.previousPoint1);
UIGraphicsBeginImageContext(self.imageView.frame.size);
CGContextRef context = UIGraphicsGetCurrentContext();
[self.imageView.image drawInRect:CGRectMake(0, 0, self.imageView.frame.size.width, self.imageView.frame.size.height)];
if (self.segmentedControl.selectedSegmentIndex == 11) {
CGContextSetBlendMode(context, kCGBlendModeClear);
} else {
CGContextSetBlendMode(context, kCGBlendModeNormal);
}
CGContextMoveToPoint(context, mid1.x, mid1.y);
// Use QuadCurve is the key
CGContextAddQuadCurveToPoint(context, self.previousPoint1.x, self.previousPoint1.y, mid2.x, mid2.y);
CGContextSetLineCap(context, kCGLineCapRound);
CGContextSetLineWidth(context, 20.0);
[self.color setStroke];
CGContextStrokePath(context);
self.imageView.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
- (NSArray *)colors {
return @[[UIColor colorWithRed:249/255.0 green:195/255.0 blue:207/255.0 alpha:1.0],
[UIColor colorWithRed:225/255.0 green:57 /255.0 blue:156/255.0 alpha:1.0],
[UIColor colorWithRed:97 /255.0 green:48 /255.0 blue:138/255.0 alpha:1.0],
[UIColor colorWithRed:255/255.0 green:199/255.0 blue:69 /255.0 alpha:1.0],
[UIColor colorWithRed:245/255.0 green:125/255.0 blue:55 /255.0 alpha:1.0],
[UIColor colorWithRed:194/255.0 green:25 /255.0 blue:48 /255.0 alpha:1.0],
[UIColor colorWithRed:135/255.0 green:214/255.0 blue:242/255.0 alpha:1.0],
[UIColor colorWithRed:23 /255.0 green:185/255.0 blue:181/255.0 alpha:1.0],
[UIColor colorWithRed:0 /255.0 green:77 /255.0 blue:119/255.0 alpha:1.0],
[UIColor colorWithRed:0 /255.0 green:104/255.0 blue:61 /255.0 alpha:1.0],
[UIColor colorWithRed:0 /255.0 green:163/255.0 blue:89 /255.0 alpha:1.0],
[UIColor clearColor],
[UIColor clearColor],
[UIColor clearColor],
[UIColor clearColor]
];
}
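The midPoint() helper used in touchesMoved: above is not shown in the answer; the usual definition (my assumption of what was intended) is:
// Midpoint of two points, used to smooth the stroke with quad curves.
static CGPoint midPoint(CGPoint p1, CGPoint p2)
{
    return CGPointMake((p1.x + p2.x) * 0.5f, (p1.y + p2.y) * 0.5f);
}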
Do it like this: to select some part of your image for erasing, use this code:
UIColor *selectedColor;
self.selectedColor = [UIColor whiteColor];
Select that particular part with the white color.
Thank you.