UIView subclass doesn't respond to touches - objective-c

I'm working on an iPad project in which a class named "Car" (a separate file from the view controller) is supposed to be draggable around the main view.
I set up the class the way I saw in an Apple example, and the image appears when I run the application, but the class doesn't respond to touch events and I can't work out why.
Here is my class code:
Car.h
#import <UIKit/UIKit.h>

@interface Car : UIView {
    UIImageView *firstPieceView;
    CGPoint startTouchPosition;
}

- (void)animateFirstTouchAtPoint:(CGPoint)touchPoint forView:(UIImageView *)theView;
- (void)animateView:(UIView *)theView toPosition:(CGPoint)thePosition;
- (void)dispatchFirstTouchAtPoint:(CGPoint)touchPoint forEvent:(UIEvent *)event;
- (void)dispatchTouchEvent:(UIView *)theView toPosition:(CGPoint)position;
- (void)dispatchTouchEndEvent:(UIView *)theView toPosition:(CGPoint)position;

@property (nonatomic, retain) IBOutlet UIImageView *firstPieceView;

@end
And this is my implementation file, Car.m:
#import "Car.h"
#implementation Car
#synthesize firstPieceView;
#define GROW_ANIMATION_DURATION_SECONDS 0.15 // Determines how fast a piece size grows when it is moved.
#define SHRINK_ANIMATION_DURATION_SECONDS 0.15 // Determines how fast a piece size shrinks when a piece stops moving.
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
// Enumerate through all the touch objects.
NSUInteger touchCount = 0;
for (UITouch *touch in touches) {
// Send to the dispatch method, which will make sure the appropriate subview is acted upon
[self dispatchFirstTouchAtPoint:[touch locationInView:self] forEvent:nil];
touchCount++;
}
}
// Checks to see which view, or views, the point is in and then calls a method to perform the opening animation,
// which makes the piece slightly larger, as if it is being picked up by the user.
- (void)dispatchFirstTouchAtPoint:(CGPoint)touchPoint forEvent:(UIEvent *)event
{
    if (CGRectContainsPoint([firstPieceView frame], touchPoint)) {
        [self animateFirstTouchAtPoint:touchPoint forView:firstPieceView];
    }
}
// Handles the continuation of a touch.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    NSUInteger touchCount = 0;
    // Enumerates through all touch objects.
    for (UITouch *touch in touches) {
        // Send to the dispatch method, which will make sure the appropriate subview is acted upon.
        [self dispatchTouchEvent:[touch view] toPosition:[touch locationInView:self]];
        touchCount++;
    }
}
// Checks to see which view, or views, the point is in and then sets the center of each moved view to the new position.
// If views are directly on top of each other, they move together.
- (void)dispatchTouchEvent:(UIView *)theView toPosition:(CGPoint)position
{
    // Check to see which view, or views, the point is in and then move to that position.
    if (CGRectContainsPoint([firstPieceView frame], position)) {
        firstPieceView.center = position;
    }
}
// Handles the end of a touch event.
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    // Enumerates through all touch objects.
    for (UITouch *touch in touches) {
        // Sends to the dispatch method, which will make sure the appropriate subview is acted upon.
        [self dispatchTouchEndEvent:[touch view] toPosition:[touch locationInView:self]];
    }
}
// Checks to see which view, or views, the point is in and then calls a method to perform the closing animation,
// which is to return the piece to its original size, as if it is being put down by the user.
- (void)dispatchTouchEndEvent:(UIView *)theView toPosition:(CGPoint)position
{
    // Check to see which view, or views, the point is in and then animate to that position.
    if (CGRectContainsPoint([firstPieceView frame], position)) {
        [self animateView:firstPieceView toPosition:position];
    }
}
- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event
{
    // Enumerates through all touch objects.
    for (UITouch *touch in touches) {
        // Sends to the dispatch method, which will make sure the appropriate subview is acted upon.
        [self dispatchTouchEndEvent:[touch view] toPosition:[touch locationInView:self]];
    }
}
#pragma mark -
#pragma mark === Animating subviews ===
#pragma mark
// Scales up a view slightly, which makes the piece slightly larger, as if it is being picked up by the user.
- (void)animateFirstTouchAtPoint:(CGPoint)touchPoint forView:(UIImageView *)theView
{
    // Pulse the view by scaling up, then move the view to under the finger.
    [UIView beginAnimations:nil context:nil];
    [UIView setAnimationDuration:GROW_ANIMATION_DURATION_SECONDS];
    theView.transform = CGAffineTransformMakeScale(1.2, 1.2);
    [UIView commitAnimations];
}
// Scales down the view and moves it to the new position.
- (void)animateView:(UIView *)theView toPosition:(CGPoint)thePosition
{
    [UIView beginAnimations:nil context:NULL];
    [UIView setAnimationDuration:SHRINK_ANIMATION_DURATION_SECONDS];
    // Set the center to the final position.
    theView.center = thePosition;
    // Set the transform back to the identity, thus undoing the previous scaling effect.
    theView.transform = CGAffineTransformIdentity;
    [UIView commitAnimations];
}
- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        UIImage *img = [UIImage imageNamed:@"CyanSquare.png"];
        firstPieceView = [[UIImageView alloc] initWithImage:img];
        //[img release];
        [super addSubview:firstPieceView];
        [firstPieceView release];
    }
    return self;
}
/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
    // Drawing code
}
*/

- (void)dealloc
{
    [firstPieceView release];
    [super dealloc];
}

@end
And here is my code for the view controller (ParkingViewController.h):
#import <UIKit/UIKit.h>
#import "Car.h"
@interface ParkingViewController : UIViewController {
}

@end
And last but not least, ParkingViewController.m:
#import "ParkingViewController.h"
@implementation ParkingViewController

- (void)dealloc
{
    [super dealloc];
}

- (void)didReceiveMemoryWarning
{
    // Releases the view if it doesn't have a superview.
    [super didReceiveMemoryWarning];
    // Release any cached data, images, etc. that aren't in use.
}
#pragma mark - View lifecycle
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad
{
    Car *car1 = [[Car alloc] init];
    [self.view addSubview:car1];
    [car1 release];
    [super viewDidLoad];
}
- (void)viewDidUnload
{
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    // Return YES for supported orientations.
    return YES;
}

@end
Forgive me for posting all the code, but I wanted every aspect of the project to be visible so the whole situation is clear.

You need to set a frame on the Car object you are creating in order for touches to be processed; created with plain init, the view has a zero-sized frame, so no touch ever lands inside it. You can still see the image because the view's clipsToBounds property defaults to NO, which lets the image view draw outside its parent's empty bounds.
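For example, a minimal fix in the view controller might look like this (the 100x100 frame is an assumption; size it to match your image):

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Give the Car a non-empty frame so hit-testing can find it.
    Car *car1 = [[Car alloc] initWithFrame:CGRectMake(20.0, 20.0, 100.0, 100.0)];
    [self.view addSubview:car1];
    [car1 release];
}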

Related

Ignore gestures on parent view

I recently came across an issue where I had a superview that needed to be swipeable and a subview that also needed to be swipeable. The interaction was that the subview should be the only one swiped if the swipe occurred within its bounds. If the swipe happened outside of the subview, the superview should handle the swipe.
I couldn't find any answers that solved this exact problem and eventually came up with a hacky solution that I thought I'd post if it can help others.
Edit:
A better solution is now marked as the right answer.
Changed title from "Ignore touch events..." to "Ignore gestures..."
If you are looking for a better solution, you can use the gestureRecognizer:shouldReceiveTouch: delegate method to ignore the touch for the parent view's recognizer.
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer
       shouldReceiveTouch:(UITouch *)touch
{
    UIView *swipeableSubview = ...; // set to the subview that can be swiped
    CGPoint locationInSubview = [touch locationInView:swipeableSubview];
    BOOL touchIsInSubview = [swipeableSubview pointInside:locationInSubview withEvent:nil];
    return !touchIsInSubview;
}
This will make sure the parent only receives the swipe if the swipe does not start on the swipeable subview.
The basic premise is to catch when a touch happens and remove gestures if the touch happened within a set of views. It then re-adds the gestures after the gesture recognizer handles the gestures.
@interface TouchIgnorer : UIView

@property (nonatomic) NSMutableSet *ignoreOnViews;
@property (nonatomic) NSMutableSet *gesturesToIgnore;

@end
@implementation TouchIgnorer

- (id)init
{
    self = [super init];
    if (self)
    {
        _ignoreOnViews = [[NSMutableSet alloc] init];
        _gesturesToIgnore = [[NSMutableSet alloc] init];
    }
    return self;
}

- (BOOL)pointInside:(CGPoint)point withEvent:(UIEvent *)event
{
    CGPoint relativePt;
    for (UIView *view in _ignoreOnViews)
    {
        // Convert the point into the candidate view's coordinate space
        // and test it against that view's bounds.
        relativePt = [self convertPoint:point toView:view];
        if (!view.isHidden && CGRectContainsPoint(view.bounds, relativePt))
        {
            for (UIGestureRecognizer *gesture in _gesturesToIgnore)
            {
                [self removeGestureRecognizer:gesture];
            }
            [self performSelector:@selector(rebindGestures) withObject:self afterDelay:0];
            break;
        }
    }
    return [super pointInside:point withEvent:event];
}

- (void)rebindGestures
{
    for (UIGestureRecognizer *gesture in _gesturesToIgnore)
    {
        [self addGestureRecognizer:gesture];
    }
}

@end
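A short usage sketch (handlePan: and someSwipeableSubview are hypothetical names) showing how the ignorer might be wired up:

TouchIgnorer *ignorer = [[TouchIgnorer alloc] init];

// The parent gesture that should give way when a touch lands on the subview.
UIPanGestureRecognizer *pan =
    [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)];
[ignorer addGestureRecognizer:pan];
[ignorer.gesturesToIgnore addObject:pan];

// Touches that begin inside this subview will not reach the parent's gesture.
[ignorer.ignoreOnViews addObject:someSwipeableSubview];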

Trouble detecting a touch on a nested custom sprite layer

I have a layer, HelloWorldLayer below, where touch works anywhere, but I'd like it to work only when touching a sprite in the layer -- turtle below.
If I try to add self.isTouchEnabled = YES; to the CCTurtle layer, it says
property isTouchEnabled not found on object type CCTurtle
My output reads as follows:
2013-01-08 20:30:14.767 FlashToCocosARC[6746:d503] cocos2d: deallocing
2013-01-08 20:30:15.245 FlashToCocosARC[6746:d503] playing walk animation2
Here's my HelloWorldLayer code:
#import "HelloWorldLayer.h"
#import "CCTurtle.h"
@implementation HelloWorldLayer

+ (CCScene *)scene
{
    CCScene *scene = [CCScene node];
    HelloWorldLayer *layer = [HelloWorldLayer node];
    [scene addChild:layer];
    return scene;
}

- (id)init
{
    if ((self = [super init])) {
        turtle = [[CCTurtle alloc] init];
        [turtle setPosition:ccp(300, 100)];
        [self addChild:turtle];
        ///addChild:child z:z tag:aTag;
        self.isTouchEnabled = YES;
        turtle.tag = 4;
    }
    return self;
}
//- (void)ccTouchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
//{
//    // Processing all touches for multi-touch support
//    UITouch *touch = [touches anyObject];
//    if ([[touch view] isKindOfClass:[turtle class]]) {
//        NSLog(@"[touch view].tag = %d", [touch view].tag);
//        [self toggleTurtle];
//    }
//}
- (BOOL)containsTouch:(UITouch *)touch
{
    CGRect r = [turtle textureRect];
    CGPoint p = [turtle convertTouchToNodeSpace:touch];
    return CGRectContainsPoint(r, p);
}

- (void)ccTouchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    ////// GENERAL TOUCH SCREEN
    for (UITouch *touch in touches) {
        CGPoint touchLocation = [touch locationInView:[touch view]];
        touchLocation = [[CCDirector sharedDirector] convertToGL:touchLocation];
        [self toggleTurtle];
    }
}

- (void)toggleTurtle
{
    NSLog(@"playing walk animation2");
    [turtle playAnimation:@"walk_in" loop:NO wait:YES];
}

@end
// HelloWorldLayer.h
#import "cocos2d.h"
#import "CCTurtle.h"

@interface HelloWorldLayer : CCLayer
{
    CCTurtle *turtle;
}

+ (CCScene *)scene;

@end
// CCTurtle.h
#import <Foundation/Foundation.h>
#import "FTCCharacter.h"

@interface CCTurtle : FTCCharacter <FTCCharacterDelegate, CCTargetedTouchDelegate>
{
}

@end
I'm using cocos2d v1.0.1 (ARC enabled), and am testing on the iPad 4.3 simulator.
With thanks, Natalie
I've tried to put the touches directly into CCTurtle.m so it can handle its own touches using CCTargetedTouchDelegate as above. In CCTurtle I changed the files to this, trying a different way to find the touched area:
- (CGRect)rect
{
    CGSize s = [self.texture contentSize];
    return CGRectMake(-s.width / 2, -s.height / 2, s.width, s.height);
}

- (BOOL)didTouch:(UITouch *)touch
{
    return CGRectContainsPoint(self.rect, [self convertTouchToNodeSpaceAR:touch]);
    //return CGRectContainsPoint([self rect], [self convertTouchToNodeSpaceAR:touch]);
}

- (BOOL)ccTouchBegan:(UITouch *)touch withEvent:(UIEvent *)event
{
    NSLog(@"attempting touch.");
    if ([self didTouch:touch]) {
        return [self tsTouchBegan:touch withEvent:event];
    }
    return NO;
}
but it still won't compile, as it still returns the error "Property 'isTouchEnabled' not found on object type 'CCTurtle*'".
I'm really not sure what I can do to get it to run now, and I really need to get this working. I suppose I could make invisible buttons, but it would be nicer to be able to hit-test the CCTurtle properly and to understand what I'm doing wrong. I hope someone can help.
First of all, I cannot see the containsTouch: method being called anywhere. Here are several pieces of advice:
Use boundingBox instead of textureRect to get the local rect of your node (your turtle, in this case), or simply move the containsTouch: method into your turtle class to encapsulate this. That is helpful if, for example, you want to make the touch area of your turtle bigger or smaller: you will only need to change one little method in your turtle class.
In your ccTouchesBegan:withEvent: method, check for every turtle whether it is hit by the touch. Then, for example, you can create a dictionary with the touch as the key and an array of corresponding turtles as the value (see the sketch below). You then just need to update all turtle positions for a moved touch in ccTouchesMoved:withEvent:, and remove the array of turtles from the dictionary in the ccTouchesEnded:withEvent: and ccTouchesCancelled:withEvent: methods.
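A minimal sketch of that bookkeeping, assuming a turtles array and a touchedTurtles mutable dictionary ivar on the layer, and keying by NSValue-wrapped touch pointers (UITouch does not conform to NSCopying):

- (void)ccTouchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    for (UITouch *touch in touches) {
        NSMutableArray *hit = [NSMutableArray array];
        for (CCTurtle *t in turtles) {            // turtles: assumed ivar
            if ([t containsTouch:touch]) [hit addObject:t];
        }
        if ([hit count] > 0) {
            [touchedTurtles setObject:hit forKey:[NSValue valueWithPointer:touch]];
        }
    }
}

- (void)ccTouchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    for (UITouch *touch in touches) {
        CGPoint p = [[CCDirector sharedDirector] convertToGL:[touch locationInView:[touch view]]];
        for (CCTurtle *t in [touchedTurtles objectForKey:[NSValue valueWithPointer:touch]]) {
            t.position = p;
        }
    }
}

- (void)ccTouchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    for (UITouch *touch in touches) {
        [touchedTurtles removeObjectForKey:[NSValue valueWithPointer:touch]];
    }
}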
If you want your CCTurtle object to accept targeted touches, you can do it by specifying that it conforms to the CCTargetedTouchDelegate protocol. In your CCTurtle @interface, you declare it like so:
@interface CCTurtle : CCNode <CCTargetedTouchDelegate>
Then in the implementation, you tell it to accept touches and implement the ccTouch methods:
@implementation CCTurtle

- (id)init {
    .
    .
    [[CCDirector sharedDirector].touchDispatcher addTargetedDelegate:self priority:1 swallowsTouches:YES];
    return self;
}

- (void)onExit {
    [[CCDirector sharedDirector].touchDispatcher removeDelegate:self];
}

- (BOOL)ccTouchBegan:(UITouch *)touch withEvent:(UIEvent *)event {
    CGPoint location = [[CCDirector sharedDirector] convertToGL:[touch locationInView:[touch view]]];
    if (CGRectContainsPoint([self boundingBox], location)) {
        // some code here
        return YES; // claim the touch
    }
    return NO;
}

- (void)ccTouchMoved:(UITouch *)touch withEvent:(UIEvent *)event { /* code here */ }
- (void)ccTouchEnded:(UITouch *)touch withEvent:(UIEvent *)event { /* code here */ }
- (void)ccTouchCancelled:(UITouch *)touch withEvent:(UIEvent *)event { /* code here */ }
Thanks to Prototypical, the issue was solved.
Although the CCTurtle was a layer with a sprite on it, it used nested sprites, which meant that cocos2d had problems creating the correct bounding box for my containsTouch: method.
So with a bit of magic he combined his "get full bounding box" method with the containsTouch: method to account for the children of the sprite, and the code now relies on collision detection to process the touch.
Currently I'm happily working away on some nice icons for him in return, but I wanted to say thank you to everyone who helped, and I hope this method comes in handy for anyone with the same problem!
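The "get full bounding box" method itself isn't posted above, but a sketch of the idea, assuming the cocos2d 1.x CCNode API, might look like this:

// Hypothetical reconstruction: union this node's bounding box with its
// children's boxes, all expressed in the parent's coordinate space.
- (CGRect)fullBoundingBox
{
    CGRect box = [self boundingBox];
    for (CCNode *child in self.children) {
        // A child's boundingBox is in this node's space; map it to the parent's.
        CGRect childBox = CGRectApplyAffineTransform([child boundingBox],
                                                     [self nodeToParentTransform]);
        box = CGRectUnion(box, childBox);
    }
    return box;
}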

How can I redraw an image in a custom NSView for an NSStatusItem?

I am having some trouble understanding how to wire up a custom NSView for an NSStatusItem to support both animation and drag and drop. I have the following subclass of NSView handling the bulk of the job. It draws my icon correctly when the application launches, but I have been unable to get the subview to change when I invoke the setIcon method from another caller. Is there some element of the design that I am missing?
TrayIconView.m
#import "TrayIconView.h"
#implementation TrayIconView
#synthesize statusItem;
static NSImageView *_imageView;
- (id)initWithFrame:(NSRect)frame
{
self = [super initWithFrame:frame];
if (self) {
statusItem = nil;
isMenuVisible = NO;
_imageView = [[NSImageView alloc] initWithFrame:[self bounds]];
[self addSubview:_imageView];
}
return self;
}
- (void)drawRect:(NSRect)dirtyRect
{
// Draw status bar background, highlighted if menu is showing
[statusItem drawStatusBarBackgroundInRect:[self bounds]
withHighlight:isMenuVisible];
}
- (void)mouseDown:(NSEvent *)event {
[[self menu] setDelegate:self];
[statusItem popUpStatusItemMenu:[self menu]];
[self setNeedsDisplay:YES];
}
- (void)rightMouseDown:(NSEvent *)event {
// Treat right-click just like left-click
[self mouseDown:event];
}
- (void)menuWillOpen:(NSMenu *)menu {
isMenuVisible = YES;
[self setNeedsDisplay:YES];
}
- (void)menuDidClose:(NSMenu *)menu {
isMenuVisible = NO;
[menu setDelegate:nil];
[self setNeedsDisplay:YES];
}
- (void)setIcon:(NSImage *)icon {
[_imageView setImage:icon];
}
TrayIconView.h
#import <Cocoa/Cocoa.h>

@interface TrayIconView : NSView
{
    BOOL isMenuVisible;
}

@property (retain, nonatomic) NSStatusItem *statusItem;

- (void)setIcon:(NSImage *)icon;

@end
The solution to this problem was actually outside of the view detailed here. The caller of the interface was accidentally being instantiated twice, thus nulling out the reference to the previously created NSView. After correcting that, the app draws and works just fine.
With regard to dragging, I just implemented a subclass of NSView that implements the Cocoa dragging protocol and added it as a subview of this parent class. That allows dragging onto the currently established NSRect that contains the menu bar icon.
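For reference, a rough sketch of such a drop-receiving subview; the class name and the file-URL drag type are placeholder choices:

#import <Cocoa/Cocoa.h>

// Hypothetical drop target layered over the status item's view.
@interface TrayDropView : NSView
@end

@implementation TrayDropView

- (id)initWithFrame:(NSRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        // Accept files dragged from the Finder (placeholder type list).
        [self registerForDraggedTypes:[NSArray arrayWithObject:NSFilenamesPboardType]];
    }
    return self;
}

- (NSDragOperation)draggingEntered:(id <NSDraggingInfo>)sender {
    return NSDragOperationCopy;
}

- (BOOL)performDragOperation:(id <NSDraggingInfo>)sender {
    NSArray *paths = [[sender draggingPasteboard] propertyListForType:NSFilenamesPboardType];
    NSLog(@"Dropped onto status item: %@", paths); // placeholder handling
    return YES;
}

@end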

Drag out new UIImageView objects from a stationary UIImageView or button

I'm a beginner at iOS development and I'm working on a side project for my school. What I would like to do is have a small image that the user can drag more images out of. Essentially I have an app where I'm generating squares, and I need a set square where the user can touch and drag out new squares onto a board. I have achieved this to a certain point, but it's not the desired behavior yet. Currently the user has to touch the button and then release before they can interact with the new square to drag it off of the button. What I would like is to have the touch event somehow passed over to the newly created UIImageView object so that dragging out a new square is seamless. Below is my current code.
//UIViewController
#import <UIKit/UIKit.h>
#import "Tile.h"
@interface MenuViewController : UIViewController
{
    Tile *tileObject;
}

@end
#import "MenuViewController.h"
@implementation MenuViewController

- (IBAction)createTile:(UIButton *)sender {
    CGFloat tileX = sender.center.x;
    CGFloat tileY = sender.center.y;
    tileObject = [[Tile alloc] initWithX:tileX andY:tileY];
    [self.view addSubview:tileObject];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Release any cached data, images, etc. that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)viewDidUnload
{
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    // Return YES for supported orientations.
    return YES;
}

@end
//Custom Tile Class
#import <UIKit/UIKit.h>

@interface Tile : UIImageView
{
    CGPoint startLocation;
}

- (id)init; // Default initialization
- (id)initWithX:(CGFloat)x andY:(CGFloat)y; // Initialization with coordinate points from a single screen tap

@end
#import "Tile.h"
#implementation Tile
//Default initialization
-(id) init
{
self = [self initWithX:0 andY:0];
return self;
}
//Initialization with coordinate points from single screen tap
- (id)initWithX:(CGFloat)centerX andY:(CGFloat)centerY
{
//Creates a UIImageView object from an image file
UIImage *image = [UIImage imageNamed:#"redblock.png"];
self = [super initWithImage:image];
//Used to center the image under the single screen tap
centerX -= (image.size.height/2);
centerY -= (image.size.height/2);
//Sets the position of the image
self.frame = CGRectMake(centerX, centerY, image.size.width, image.size.height);
self.userInteractionEnabled = YES; //required for interacting with UIImageViews
return self;
}
/* Methods from Stack Overflow
http://stackoverflow.com/questions/1991899/uiimage-detecting-touch-and-dragging
Referenced from http://www.iphoneexamples.com/
*/
- (void) touchesBegan:(NSSet*)touches withEvent:(UIEvent*)event {
// Retrieve the touch point
CGPoint point = [[touches anyObject] locationInView:self];
startLocation = point;
[[self superview] bringSubviewToFront:self];
}
- (void) touchesMoved:(NSSet*)touches withEvent:(UIEvent*)event {
// Move relative to the original touch point
CGPoint point = [[touches anyObject] locationInView:self];
CGRect frame = [self frame];
frame.origin.x += point.x - startLocation.x;
frame.origin.y += point.y - startLocation.y;
[self setFrame:frame];
}
/*
end of methods from Stack Overflow
*/
#end
If the initial touch down point can be an image view with the image you want to drag as its image, then you can do it like this:
In your MenuViewController class, put this in viewDidLoad (and delete the button and its method):
- (void)viewDidLoad
{
    [super viewDidLoad];
    tileObject = [[Tile alloc] initWithX:160 andY:200];
    [self.view addSubview:tileObject];
}
Then in your Tile class, in the touchesBegan:withEvent: method, you create a new tile and carry out the drag as you did before:
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    Tile *newTile = [[Tile alloc] initWithX:160 andY:200];
    [self.superview addSubview:newTile];
    [newTile release]; // the superview retains it
    // Retrieve the touch point
    CGPoint point = [[touches anyObject] locationInView:self];
    startLocation = point;
    [[self superview] bringSubviewToFront:self];
}
Instead of using a button with a touch-down event, you need to:
1) Detect touchesBegan and create your tile
2) Detect touchesMoved and move your tile
A sketch of this approach follows.
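A minimal sketch of that idea, using a hypothetical TileSource view in place of the button (remember that userInteractionEnabled must be YES if the source is an image view):

#import <UIKit/UIKit.h>
#import "Tile.h"

// Hypothetical stationary view the user drags new tiles out of.
@interface TileSource : UIImageView {
    Tile *activeTile; // the tile being dragged out by the current touch
}
@end

@implementation TileSource

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    // Spawn a tile under the finger; this view keeps receiving the touch,
    // so the drag continues without the user lifting the finger.
    CGPoint point = [[touches anyObject] locationInView:self.superview];
    activeTile = [[Tile alloc] initWithX:point.x andY:point.y];
    [self.superview addSubview:activeTile];
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
    CGPoint point = [[touches anyObject] locationInView:self.superview];
    activeTile.center = point;
}

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
    [activeTile release]; // the superview still retains it
    activeTile = nil;
}

- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event {
    [self touchesEnded:touches withEvent:event];
}

@end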

Subclassing UIPanGestureRecognizer to wait for a condition before recognition

I'm trying to link two gestures one after another: a UILongPressGestureRecognizer, then a UIPanGestureRecognizer.
I want to detect the long press, then allow the pan gesture to be recognized.
I've subclassed UIPanGestureRecognizer and added a panEnabled BOOL ivar. In initWithTarget:action: I set panEnabled to NO.
In touchesMoved: I check whether the pan is enabled, and call super's touchesMoved: if it is.
In my long press gesture handler, I loop through the view's gestures until I find my subclassed gesture, and then setPanEnabled to YES.
It seems like it is working, though it's as if the original pan gesture recognizer is not functioning properly and not setting the proper states. I know that if you subclass UIGestureRecognizer you need to maintain the state yourself, but I would have thought that if you subclass UIPanGestureRecognizer and call super in all the touches methods, the state would be set in there.
Here is my subclass .h file:
#import <UIKit/UIKit.h>
#import <UIKit/UIGestureRecognizerSubclass.h>

@interface IoUISEListPanGestureRecognizer : UIPanGestureRecognizer {
    int IoUISEdebug;
    BOOL panEnabled;
}

- (id)initWithTarget:(id)target action:(SEL)action;

@property (nonatomic, assign) int IoUISEdebug;
@property (nonatomic, assign) BOOL panEnabled;

@end
And here is the subclass .m file:
#import "IoUISEListPanGestureRecognizer.h"
#implementation IoUISEListPanGestureRecognizer
#synthesize IoUISEdebug;
#synthesize panEnabled;
- (id)initWithTarget:(id)target action:(SEL)action {
[super initWithTarget:target action:action];
panEnabled = NO;
return self;
}
- (void)ignoreTouch:(UITouch*)touch forEvent:(UIEvent*)event {
[super ignoreTouch:touch forEvent:event];
}
-(void)reset {
[super reset];
panEnabled = NO;
}
- (BOOL)canPreventGestureRecognizer:(UIGestureRecognizer *)preventedGestureRecognizer {
return [super canPreventGestureRecognizer:preventedGestureRecognizer];
}
- (BOOL)canBePreventedByGestureRecognizer:(UIGestureRecognizer *)preventingGestureRecognizer{
return [super canBePreventedByGestureRecognizer:preventingGestureRecognizer];
}
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event{
[super touchesBegan:touches withEvent:event];
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event{
if (panEnabled) {
[super touchesMoved:touches withEvent:event];
}
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
[super touchesEnded:touches withEvent:event];
}
- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event{
[super touchesCancelled:touches withEvent:event];
}
#end
If you add a BOOL called canPan and include the following delegate methods, you can have both a standard UILongPressGestureRecognizer and a UIPanGestureRecognizer attached to the same view. In the selector that is called when the long press gesture is recognized, change canPan to YES. You might want to disable the long press once it has been recognised and re-enable it when the pan finishes. Don't forget to assign the delegate properties on the gesture recognisers. A sketch of the long press handler follows the delegate methods below.
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer
{
    if (!canPan && [gestureRecognizer isKindOfClass:[UIPanGestureRecognizer class]]) {
        return NO;
    }
    return YES;
}

- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
{
    return YES;
}
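And a minimal sketch of the long press handler described above, assuming longPress and pan are ivars holding the two recognizers and canPan is the BOOL:

- (void)handleLongPress:(UILongPressGestureRecognizer *)recognizer
{
    if (recognizer.state == UIGestureRecognizerStateBegan) {
        canPan = YES;           // from now on the pan may begin
        longPress.enabled = NO; // optional: no further long presses while panning
    }
}

- (void)handlePan:(UIPanGestureRecognizer *)recognizer
{
    if (recognizer.state == UIGestureRecognizerStateEnded ||
        recognizer.state == UIGestureRecognizerStateCancelled) {
        canPan = NO;            // require another long press next time
        longPress.enabled = YES;
    }
    // ... pan handling here ...
}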