I am using the following code snippet to drag and drop a UIImageView:
UIPanGestureRecognizer *panRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(move:)];
[panRecognizer setMinimumNumberOfTouches:1];
[panRecognizer setMaximumNumberOfTouches:1];
[panRecognizer setDelegate:self];
[myImageView addGestureRecognizer:panRecognizer];
-(void)move:(id)sender {
CGPoint translatedPoint = [(UIPanGestureRecognizer*)sender translationInView:self.view];
if([(UIPanGestureRecognizer*)sender state] == UIGestureRecognizerStateBegan) {
firstX = [myImageView center].x;
firstY = [myImageView center].y;
}
translatedPoint = CGPointMake(firstX+translatedPoint.x, firstY+translatedPoint.y);
[myImageView setCenter:translatedPoint];
}
This code drags the whole myImageView, but my requirement is to drag just the UIImage and drop it into another UIImageView. myImageView should stay as it is after dragging; I just need to drag the myImageView layer, and the draggable image should be transparent. Any ideas would be appreciated.
I have put in a little effort to achieve your output. Try it.
Step 1: Define these 3 variables in your .h file
UIImageView *ivSource1, *ivDestination2, *tempIV;
Step 2: Initialize all three UIImageViews and add them to your view controller; write this in the viewDidLoad method
ivSource1 = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"1.jpg"]];
[ivSource1 setFrame:CGRectMake(100, 100, 100, 100)];
[ivSource1 setTag:100];
[ivSource1 setUserInteractionEnabled:YES];
[self.view addSubview:ivSource1];
ivDestination2 = [[UIImageView alloc] init];
[ivDestination2 setFrame:CGRectMake(200, 300, 100, 100)];
[ivDestination2 setTag:101];
[ivDestination2 setUserInteractionEnabled:YES];
[self.view addSubview:ivDestination2];
tempIV = [[UIImageView alloc] init];
[tempIV setFrame:CGRectMake(0, 300, 100, 100)];
[tempIV setTag:102];
[tempIV setUserInteractionEnabled:YES];
[self.view addSubview:tempIV];
Step 3: Define the following touch methods to handle movement of the image for drag & drop
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
UITouch *touch = [touches anyObject];
if([[touch view] tag] == 100)
{
[tempIV setImage:ivSource1.image];
[tempIV setCenter:[touch locationInView:self.view]];
}
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
UITouch *touch = [touches anyObject];
[tempIV setCenter:[touch locationInView:self.view]];
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
UITouch *touch = [touches anyObject];
[tempIV setCenter:[touch locationInView:self.view]];
if(CGRectContainsPoint(ivDestination2.frame, [touch locationInView:self.view]))
{
[ivDestination2 setImage:tempIV.image];
}
// Remove image from draggable view
[tempIV setImage:nil];
}
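One gap: the asker also wanted the draggable image to be transparent, which the steps above don't cover. A minimal tweak, assuming 50% opacity is acceptable, is to fade tempIV while it is being dragged:
// in touchesBegan:, right after [tempIV setImage:ivSource1.image];
[tempIV setAlpha:0.5];   // assumption: 50% opacity while dragging

// in touchesEnded:, after the drop has been handled
[tempIV setAlpha:1.0];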
With iOS 11 and later, you can use the drag and drop and copy/paste APIs to perform a drag and drop operation of a UIImage from one UIImageView to another UIImageView. Depending on your needs, you may choose one of the two following Swift 5.1 implementations.
#1. Using UIDragInteraction, UIDragInteractionDelegate and UIPasteConfiguration
import UIKit
class ViewController: UIViewController {
let imageView1 = UIImageView()
let imageView2 = UIImageView()
override func viewDidLoad() {
super.viewDidLoad()
imageView1.image = UIImage(named: "image")
imageView1.contentMode = .scaleAspectFit
imageView1.isUserInteractionEnabled = true
let dragInteraction = UIDragInteraction(delegate: self)
dragInteraction.isEnabled = true
imageView1.addInteraction(dragInteraction)
imageView2.contentMode = .scaleAspectFit
imageView2.isUserInteractionEnabled = true
let configuration = UIPasteConfiguration(forAccepting: UIImage.self)
imageView2.pasteConfiguration = configuration
let stackView = UIStackView(arrangedSubviews: [imageView1, imageView2])
view.addSubview(stackView)
stackView.distribution = .fillEqually
stackView.frame = view.bounds
stackView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
}
override func paste(itemProviders: [NSItemProvider]) {
_ = itemProviders.first?.loadObject(ofClass: UIImage.self, completionHandler: { (image: NSItemProviderReading?, error: Error?) in
DispatchQueue.main.async {
self.imageView2.image = image as? UIImage
}
})
}
}
extension ViewController: UIDragInteractionDelegate {
func dragInteraction(_ interaction: UIDragInteraction, itemsForBeginning session: UIDragSession) -> [UIDragItem] {
guard let image = imageView1.image else { return [] }
let item = UIDragItem(itemProvider: NSItemProvider(object: image))
return [item]
}
}
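Note (my reading, not stated in the original answer): dropping onto imageView2 works here because setting a pasteConfiguration on a view also lets it accept drops of the configured class (UIImage), and UIKit then delivers the dropped items through the responder's paste(itemProviders:) override shown above.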
#2. Using UIDragInteraction, UIDragInteractionDelegate, UIDropInteraction and UIDropInteractionDelegate
import UIKit
class ViewController: UIViewController {
let imageView1 = UIImageView()
let imageView2 = UIImageView()
override func viewDidLoad() {
super.viewDidLoad()
imageView1.image = UIImage(named: "image")
imageView1.contentMode = .scaleAspectFit
imageView1.isUserInteractionEnabled = true
imageView2.contentMode = .scaleAspectFit
imageView2.isUserInteractionEnabled = true
let dragInteraction = UIDragInteraction(delegate: self)
dragInteraction.isEnabled = true
imageView1.addInteraction(dragInteraction)
let dropInteraction = UIDropInteraction(delegate: self)
imageView2.addInteraction(dropInteraction)
let stackView = UIStackView(arrangedSubviews: [imageView1, imageView2])
view.addSubview(stackView)
stackView.distribution = .fillEqually
stackView.frame = view.bounds
stackView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
}
}
extension ViewController: UIDragInteractionDelegate {
func dragInteraction(_ interaction: UIDragInteraction, itemsForBeginning session: UIDragSession) -> [UIDragItem] {
guard let image = imageView1.image else { return [] }
let item = UIDragItem(itemProvider: NSItemProvider(object: image))
item.localObject = image
return [item]
}
}
extension ViewController: UIDropInteractionDelegate {
func dropInteraction(_ interaction: UIDropInteraction, canHandle session: UIDropSession) -> Bool {
return session.canLoadObjects(ofClass: UIImage.self) && session.items.count == 1
}
func dropInteraction(_ interaction: UIDropInteraction, sessionDidUpdate session: UIDropSession) -> UIDropProposal {
let dropLocation = session.location(in: view)
let operation: UIDropOperation
if imageView2.frame.contains(dropLocation) {
operation = session.localDragSession == nil ? .copy : .move
} else {
operation = .cancel
}
return UIDropProposal(operation: operation)
}
func dropInteraction(_ interaction: UIDropInteraction, performDrop session: UIDropSession) {
session.loadObjects(ofClass: UIImage.self) { imageItems in
guard let images = imageItems as? [UIImage] else { return }
self.imageView2.image = images.first
}
}
}
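A design note on #2 (again my reading, not stated in the original): sessionDidUpdate proposes .move when the drag started in the same app (session.localDragSession is non-nil) and .copy when it came from another app, and item.localObject is set in the drag delegate so an in-app consumer could access the UIImage directly instead of going through the item provider.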
Related
OK, I've read a few posts on this one (e.g. UIImageView Gestures (Zoom, Rotate) Question) but I can't seem to fix my problem.
I have the following setup: an SKScene, an SKNode _backgroundLayer and 9 SKSpriteNodes that are tiles that make up the background and are attached to the _backgroundLayer.
Since these 9 tiles make a 3x3 square and they are quite large, I need to be able to zoom in and look at other SKSpriteNodes that will be on top of these 9 background images.
There are two problems:
1) When I pinch to zoom in or zoom out it seems like it is zooming in/out from location (0,0) of the _backgroundLayer and not from the touch location.
2) I have added some bounds so that the user cannot scroll out of the 9 background images. In general it works. However, if I zoom in, then move towards the top of the 9 background images, and then zoom out, the bounding conditions go berserk and the user can see the black space outside the background images. I need a way to limit the amount of zooming out that the user can do depending on where he is.
Any ideas? Thanks!
I include my code below:
#import "LevelSelectScene.h"
#import "TurtleWorldSubScene.h"
@interface LevelSelectScene ()
@property (nonatomic, strong) SKNode *selectedNode;
@end
@implementation LevelSelectScene
{
SKNode *_backgroundLayer;
}
-(id)initWithSize:(CGSize)size {
if (self = [super initWithSize:size]) {
/* Setup your scene here */
_backgroundLayer = [SKNode node];
_backgroundLayer.name = @"backgroundLayer";
if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) {
[_backgroundLayer setScale:0.76];
} else if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPhone && IS_WIDESCREEN) {
} else {
[_backgroundLayer setScale:0.36];
}
[self addChild:_backgroundLayer];
SKTexture *backgroundTexture = [SKTexture textureWithImageNamed:@"levelSelect"];
int textureID = 0;
for (int i = 0; i<3; i++) {
for (int j = 0; j<3; j++) {
SKSpriteNode *background = [SKSpriteNode spriteNodeWithTexture:backgroundTexture];
background.anchorPoint = CGPointZero;
background.position = CGPointMake((background.size.width)*i, (background.size.height)*j);
background.zPosition = 0;
background.name = [NSString stringWithFormat:@"background%d", textureID];
textureID++;
[_backgroundLayer addChild:background];
}
}
[TurtleWorldSubScene displayTurtleWorld:self];
}
return self;
}
- (void)didMoveToView:(SKView *)view {
UIPanGestureRecognizer *panGestureRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePanFrom:)];
[[self view] addGestureRecognizer:panGestureRecognizer];
//UITapGestureRecognizer * tapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleTap:)];
// [self.view addGestureRecognizer:tapRecognizer];
UIPinchGestureRecognizer *pinchGestureRecognizer = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(handlePinch:)];
[[self view] addGestureRecognizer:pinchGestureRecognizer];
}
- (void)handlePanFrom:(UIPanGestureRecognizer *)recognizer {
if (recognizer.state == UIGestureRecognizerStateBegan) {
CGPoint touchLocation = [recognizer locationInView:recognizer.view];
touchLocation = [self convertPointFromView:touchLocation];
SKNode *node = [self nodeAtPoint:touchLocation];
_selectedNode = node;
} else if (recognizer.state == UIGestureRecognizerStateChanged) {
CGPoint translation = [recognizer translationInView:recognizer.view];
translation = CGPointMake(translation.x, -translation.y);
CGPoint initialPosition = CGPointAdd(_backgroundLayer.position, translation);
_backgroundLayer.position = [self boundLayerPos:initialPosition];
[recognizer setTranslation:CGPointZero inView:recognizer.view];
} else if (recognizer.state == UIGestureRecognizerStateEnded) {
float scrollDuration = 0.2;
CGPoint velocity = [recognizer velocityInView:recognizer.view];
CGPoint pos = [_backgroundLayer position];
CGPoint p = CGPointMultiplyScalar(velocity, scrollDuration);
CGPoint newPos = CGPointMake(pos.x + p.x, pos.y - p.y);
newPos = [self boundLayerPos:newPos];
[_backgroundLayer removeAllActions];
SKAction *moveTo = [SKAction moveTo:newPos duration:scrollDuration];
[moveTo setTimingMode:SKActionTimingEaseOut];
[_backgroundLayer runAction:moveTo];
}
}
- (void)handlePinch:(UIPinchGestureRecognizer *) recognizer
{
if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) {
if(_backgroundLayer.xScale*recognizer.scale < 0.76) {
//SKSpriteNode *backgroundTile = (SKSpriteNode *)[_backgroundLayer childNodeWithName:@"background0"];
[_backgroundLayer setScale:0.76];
} else if(_backgroundLayer.xScale*recognizer.scale > 2) {
[_backgroundLayer setScale:2.0];
} else {
[_backgroundLayer runAction:[SKAction scaleBy:recognizer.scale duration:0]];
}
} else if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPhone && IS_WIDESCREEN) {
} else {
if(_backgroundLayer.xScale*recognizer.scale < 0.36) {
[_backgroundLayer setScale:0.36];
} else if(_backgroundLayer.xScale*recognizer.scale > 2) {
[_backgroundLayer setScale:2.0];
} else {
[_backgroundLayer runAction:[SKAction scaleBy:recognizer.scale duration:0]];
}
}
recognizer.scale = 1;
}
- (CGPoint)boundLayerPos:(CGPoint)newPos {
SKSpriteNode *backgroundTile = (SKSpriteNode *)[_backgroundLayer childNodeWithName:@"background0"];
CGPoint retval = newPos;
retval.x = MIN(retval.x, 0);
retval.x = MAX(retval.x, -(backgroundTile.size.width*_backgroundLayer.xScale*3)+self.size.width);
retval.y = MIN(retval.y, 0);
retval.y = MAX(retval.y, -(backgroundTile.size.height*_backgroundLayer.xScale*3)+self.size.height);
return retval;
}
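No answer is included here, but for problem 1 a common approach (a sketch under my own assumptions, not from the original post) is to note which point of the background layer is under the pinch before scaling, and reposition the layer afterwards so that point stays under the fingers. This reuses the post's _backgroundLayer, boundLayerPos:, and the 0.36/2.0 clamp values; the method name handlePinchAnchored: is mine:
- (void)handlePinchAnchored:(UIPinchGestureRecognizer *)recognizer
{
    if (recognizer.state == UIGestureRecognizerStateChanged) {
        // find the pinch location in scene coordinates, and in the layer's own space
        CGPoint touchInScene = [self convertPointFromView:[recognizer locationInView:recognizer.view]];
        CGPoint anchorInLayer = [_backgroundLayer convertPoint:touchInScene fromNode:self];

        // scale, clamped to the same limits the post already uses
        CGFloat newScale = fmax(0.36, fmin(2.0, _backgroundLayer.xScale * recognizer.scale));
        [_backgroundLayer setScale:newScale];

        // shift the layer so the anchored point is back under the fingers, then re-apply the bounds
        CGPoint anchorInScene = [self convertPoint:anchorInLayer fromNode:_backgroundLayer];
        CGPoint newPos = CGPointMake(_backgroundLayer.position.x + touchInScene.x - anchorInScene.x,
                                     _backgroundLayer.position.y + touchInScene.y - anchorInScene.y);
        _backgroundLayer.position = [self boundLayerPos:newPos];
        recognizer.scale = 1;
    }
}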
I am wondering how I can use a touch method for a UIImageView inside a UIScrollView in Xcode.
When I add the UIImageView subview to the self.view, I can use the touch method. But when I add the UIImageView subview to the UIScrollView, I can't. How can I solve this?
This is my code:
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
touches = [event allTouches];
for (UITouch *touch in touches) {
NSLog(@"Image Touched");
}
}
- (void)viewDidLoad
{
[super viewDidLoad];
UIScrollView *scrollView = [[UIScrollView alloc] initWithFrame:CGRectMake(0, 44, [UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height * 0.9)];
scrollView.scrollEnabled = TRUE;
scrollView.bounces = TRUE;
scrollView.contentSize = CGSizeMake([UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height);
scrollView.userInteractionEnabled = YES;
[self.view addSubview:scrollView];
UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake([UIScreen mainScreen].bounds.size.width * 0.04, 10, [UIScreen mainScreen].bounds.size.width * 0.28, [UIScreen mainScreen].bounds.size.height * 0.22)];
imageView.image = [UIImage imageNamed:@"image.png"];
imageView.layer.borderColor = [UIColor whiteColor].CGColor;
imageView.layer.borderWidth = 1;
imageView.userInteractionEnabled = YES;
[scrollView addSubview:imageView];
}
Give UIGestureRecognizers a try. They are far easier to manage with multiple layers of touch management.
- (void)touchedImage:(UITapGestureRecognizer *)gesture {
// When the gesture has ended, perform your action.
if (gesture.state == UIGestureRecognizerStateEnded) {
NSLog(@"Touched Image");
}
}
- (void)viewDidLoad
{
[super viewDidLoad];
UIScrollView *scrollView = [[UIScrollView alloc] initWithFrame:CGRectMake(0, 44, [UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height * 0.9)];
scrollView.scrollEnabled = TRUE;
scrollView.bounces = TRUE;
scrollView.contentSize = CGSizeMake([UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height);
scrollView.userInteractionEnabled = YES;
[self.view addSubview:scrollView];
UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake([UIScreen mainScreen].bounds.size.width * 0.04, 10, [UIScreen mainScreen].bounds.size.width * 0.28, [UIScreen mainScreen].bounds.size.height * 0.22)];
imageView.image = [UIImage imageNamed:@"image.png"];
imageView.layer.borderColor = [UIColor whiteColor].CGColor;
imageView.layer.borderWidth = 1;
imageView.userInteractionEnabled = YES;
[scrollView addSubview:imageView];
// Create a tap gesture
UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(touchedImage:)];
[imageView addGestureRecognizer:tap];
}
If you're dealing with multiple image views using the same gesture recognizer, it won't work. Try using a new gesture recognizer per image view.
User interaction of a UIImageView is disabled by default; you can enable it by setting imageView.userInteractionEnabled = YES;
You'll have to derive a CustomScrollView from UIScrollView and override all the touch methods (touchesBegan, touchesMoved, touchesEnded and touchesCancelled) like below:
-(void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
for (UIView *view in self.subviews)
{
UITouch *touch = [touches anyObject];
CGPoint pt = [touch locationInView:self]; // subview frames are in the scroll view's coordinate space, so locate the touch there
if (CGRectContainsPoint(view.frame, pt))
{
[view touchesMoved:touches withEvent:event];
}
}
}
After you do this, when you add any view to an instance of CustomScrollView, you'll get the touches in the added view properly.
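For completeness (the answer above shows only touchesMoved), the other overrides follow the same pattern; a sketch of touchesBegan, with touchesEnded and touchesCancelled being analogous:
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    for (UIView *view in self.subviews)
    {
        UITouch *touch = [touches anyObject];
        // locate the touch in the scroll view's coordinate space, where the subview frames live
        CGPoint pt = [touch locationInView:self];
        if (CGRectContainsPoint(view.frame, pt))
        {
            [view touchesBegan:touches withEvent:event];
        }
    }
}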
I had the same problem with an image view as a subview in a scroll view when applying a transformation.
Upon scroll, my gesture recognizer no longer worked, so I checked the offset of the scroll view and reset my image so the gesture was recognized.
And of course we need to set userInterActionEnabled to true and add the gesture in viewDidLoad()
override func viewDidLoad() {
super.viewDidLoad()
imageView.userInteractionEnabled = true
addGesture()
}
func addGesture(){
let singleTap = UITapGestureRecognizer(target: self, action: #selector(MyViewController.enableGesture(_:)));
singleTap.numberOfTapsRequired = 1
self.imageView.addGestureRecognizer(singleTap)
self.imageView.tag = 1
}
override func scrollViewDidScroll(scrollView: UIScrollView) {
let offset = scrollView.contentOffset.y
if offset > 0 {
imageView.layer.transform = avatarTransform
}
else {
imageView.transform = CGAffineTransformIdentity
}
}
I have 2 imageviews that I want to be able to move. This is my code:
panRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(gestureRecognizerMethod:)];
imageview = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"image.png"]];
imageview.frame = CGRectMake(390, 100, 80, 16);
imageview.userInteractionEnabled = YES;
[imageview addGestureRecognizer:panRecognizer];
[self addSubview:imageview];
imageview2 = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"image.png"]];
imageview2.frame = CGRectMake(390, 120, 80, 17);
imageview2.userInteractionEnabled = YES;
[imageview2 addGestureRecognizer:panRecognizer];
[self addSubview:imageview2];
And in my gestureRecognizer method:
- (void)gestureRecognizerMethod:(UIPanGestureRecognizer *)recognizer
{
if (recognizer.view == imageview)
{
if (recognizer.state == UIGestureRecognizerStateBegan || recognizer.state == UIGestureRecognizerStateChanged)
{
CGPoint startPoint = [recognizer locationInView:self];
imageview.center = startPoint;
}
}
if (recognizer.view == imageview2)
{
if (recognizer.state == UIGestureRecognizerStateBegan || recognizer.state == UIGestureRecognizerStateChanged)
{
CGPoint startPoint = [recognizer locationInView:self];
imageview2.center = startPoint;
}
}
}
I'm only able to move one of the views. What is wrong?
Create a new gesture recognizer for the second image view. You can use the same target and action for both recognizers, but you can only attach any one recognizer to a single view.
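A sketch with the asker's variables (the names panRecognizer1 and panRecognizer2 are mine): same target and action, but one recognizer instance per view:
UIPanGestureRecognizer *panRecognizer1 = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(gestureRecognizerMethod:)];
[imageview addGestureRecognizer:panRecognizer1];

UIPanGestureRecognizer *panRecognizer2 = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(gestureRecognizerMethod:)];
[imageview2 addGestureRecognizer:panRecognizer2];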
Try this:
ResizeImage on github
I'm working on an app with 9 views on screen, and I want the users to connect the views in a way they want, and record their sequence as a password.
But I don't know which gesture recognizer I should use.
Should I use CMUnistrokeGestureRecognizer, a combination of several swipe gestures, or something else?
Thanks.
You could use a UIPanGestureRecognizer, something like:
CGFloat const kMargin = 10;
- (void)viewDidLoad
{
[super viewDidLoad];
// create a container view that holds all of the subviews for which we want to detect touches:
CGFloat containerWidth = fmin(self.view.bounds.size.width, self.view.bounds.size.height) - kMargin * 2.0;
UIView *container = [[UIView alloc] initWithFrame:CGRectMake(kMargin, kMargin, containerWidth, containerWidth)];
container.backgroundColor = [UIColor darkGrayColor];
[self.view addSubview:container];
// now create all of the subviews, specifying a tag for each
CGFloat cellWidth = (containerWidth - (4.0 * kMargin)) / 3.0;
for (NSInteger column = 0; column < 3; column++)
{
for (NSInteger row = 0; row < 3; row++)
{
UIView *cell = [[UIView alloc] initWithFrame:CGRectMake(kMargin + column * (cellWidth + kMargin),
kMargin + row * (cellWidth + kMargin),
cellWidth, cellWidth)];
cell.tag = row * 3 + column;
cell.backgroundColor = [UIColor lightGrayColor];
[container addSubview:cell];
}
}
// finally, create the gesture recognizer
UIPanGestureRecognizer *pan = [[UIPanGestureRecognizer alloc] initWithTarget:self
action:@selector(handlePan:)];
[container addGestureRecognizer:pan];
}
- (void)handlePan:(UIPanGestureRecognizer *)gesture
{
static NSMutableArray *gesturedSubviews;
// if we're starting a gesture, initialize our list of subviews that we've gone over
if (gesture.state == UIGestureRecognizerStateBegan)
{
gesturedSubviews = [NSMutableArray array];
}
// now figure out whether:
// (a) are we over a subview; and
// (b) is this a different subview than we last were over
CGPoint location = [gesture locationInView:gesture.view];
for (UIView *subview in gesture.view.subviews)
{
if (CGRectContainsPoint(subview.frame, location))
{
if (subview != [gesturedSubviews lastObject])
{
[gesturedSubviews addObject:subview];
// an example of the sort of graphical flourish to give the user
// some visual cue that their going over the subview in question
// was recognized
[UIView animateWithDuration:0.25
delay:0.0
options:UIViewAnimationOptionAutoreverse
animations:^{
subview.alpha = 0.5;
}
completion:^(BOOL finished){
subview.alpha = 1.0;
}];
}
}
}
// finally, when done, let's just log the subviews
// you would do whatever you would want here
if (gesture.state == UIGestureRecognizerStateEnded)
{
NSLog(@"We went over:");
for (UIView *subview in gesturedSubviews)
{
NSLog(@"  %ld", (long)subview.tag);
}
// you might as well clean up your static variable when you're done
gesturedSubviews = nil;
}
}
Obviously, you would create your subviews any way you want, and keep track of them any way you want, but the idea is to have subviews with unique tag numbers, and the gesture recognizer would just see which order you go over them in a single gesture.
Even if I didn't capture precisely what you want, it at least shows you how you can use a pan gesture recognizer to track the movement of your finger from one subview to another.
Update:
If you wanted to draw a path on the screen as the user is signing in, you could create a CAShapeLayer with a UIBezierPath. I'll demonstrate that below, but as a caveat, I feel compelled to point out that this might not be a great security feature: Usually with password entry, you'll show the user enough so that they can confirm that they're doing what they want, but not enough so that someone can glance look over their shoulder and see what the whole password was. When entering a text password, usually iOS momentarily shows you the last key you hit, but quickly turns that into an asterisk so that, at no point, can you see the whole password. Hence my initial suggestion.
But if you really have your heart set on showing the user the path as they draw it, you could use something like the following. First, this requires Quartz 2D. Thus add the QuartzCore.framework to your project (see Linking to a Library or Framework). Second, import the QuartzCore headers:
#import <QuartzCore/QuartzCore.h>
Third, replace the pan handler with something like:
- (void)handlePan:(UIPanGestureRecognizer *)gesture
{
static NSMutableArray *gesturedSubviews;
static UIBezierPath *path = nil;
static CAShapeLayer *shapeLayer = nil;
// if we're starting a gesture, initialize our list of subviews that we've gone over
if (gesture.state == UIGestureRecognizerStateBegan)
{
gesturedSubviews = [NSMutableArray array];
}
// now figure out whether:
// (a) are we over a subview; and
// (b) is this a different subview than we last were over
CGPoint location = [gesture locationInView:gesture.view];
for (UIView *subview in gesture.view.subviews)
{
if (!path)
{
// if the path hasn't been started, initialize it and the shape layer
path = [UIBezierPath bezierPath];
[path moveToPoint:location];
shapeLayer = [[CAShapeLayer alloc] init];
shapeLayer.strokeColor = [UIColor redColor].CGColor;
shapeLayer.fillColor = [UIColor clearColor].CGColor;
shapeLayer.lineWidth = 2.0;
[gesture.view.layer addSublayer:shapeLayer];
}
else
{
// otherwise add this point to the layer's path
[path addLineToPoint:location];
shapeLayer.path = path.CGPath;
}
if (CGRectContainsPoint(subview.frame, location))
{
if (subview != [gesturedSubviews lastObject])
{
[gesturedSubviews addObject:subview];
[UIView animateWithDuration:0.25
delay:0.0
options:UIViewAnimationOptionAutoreverse
animations:^{
subview.alpha = 0.5;
}
completion:^(BOOL finished){
subview.alpha = 1.0;
}];
}
}
}
// finally, when done, let's just log the subviews
// you would do whatever you would want here
if (gesture.state == UIGestureRecognizerStateEnded)
{
// assuming the tags are numbers between 0 and 9 (inclusive), we can build the password here
NSMutableString *password = [NSMutableString string];
for (UIView *subview in gesturedSubviews)
[password appendFormat:@"%c", (char)(subview.tag + 48)];
NSLog(@"Password = %@", password);
// clean up our array of gesturedSubviews
gesturedSubviews = nil;
// clean up the drawing of the path on the screen the user drew
[shapeLayer removeFromSuperlayer];
shapeLayer = nil;
path = nil;
}
}
That yields a path that the user draws as the gesture proceeds.
Rather than showing the path the user draws with each and every movement of the user's finger, maybe you just draw the lines between the centers of the subviews, such as:
- (void)handlePan:(UIPanGestureRecognizer *)gesture
{
static NSMutableArray *gesturedSubviews;
static UIBezierPath *path = nil;
static CAShapeLayer *shapeLayer = nil;
// if we're starting a gesture, initialize our list of subviews that we've gone over
if (gesture.state == UIGestureRecognizerStateBegan)
{
gesturedSubviews = [NSMutableArray array];
}
// now figure out whether:
// (a) are we over a subview; and
// (b) is this a different subview than we last were over
CGPoint location = [gesture locationInView:gesture.view];
for (UIView *subview in gesture.view.subviews)
{
if (CGRectContainsPoint(subview.frame, location))
{
if (subview != [gesturedSubviews lastObject])
{
[gesturedSubviews addObject:subview];
if (!path)
{
// if the path hasn't been started, initialize it and the shape layer
path = [UIBezierPath bezierPath];
[path moveToPoint:subview.center];
shapeLayer = [[CAShapeLayer alloc] init];
shapeLayer.strokeColor = [UIColor redColor].CGColor;
shapeLayer.fillColor = [UIColor clearColor].CGColor;
shapeLayer.lineWidth = 2.0;
[gesture.view.layer addSublayer:shapeLayer];
}
else
{
// otherwise add this point to the layer's path
[path addLineToPoint:subview.center];
shapeLayer.path = path.CGPath;
}
[UIView animateWithDuration:0.25
delay:0.0
options:UIViewAnimationOptionAutoreverse
animations:^{
subview.alpha = 0.5;
}
completion:^(BOOL finished){
subview.alpha = 1.0;
}];
}
}
}
// finally, when done, let's just log the subviews
// you would do whatever you would want here
if (gesture.state == UIGestureRecognizerStateEnded)
{
// assuming the tags are numbers between 0 and 9 (inclusive), we can build the password here
NSMutableString *password = [NSMutableString string];
for (UIView *subview in gesturedSubviews)
[password appendFormat:@"%c", (char)(subview.tag + 48)];
NSLog(@"Password = %@", password);
// clean up our array of gesturedSubviews
gesturedSubviews = nil;
// clean up the drawing of the path on the screen the user drew
[shapeLayer removeFromSuperlayer];
shapeLayer = nil;
path = nil;
}
}
That yields something like this.
You have all sorts of options, but hopefully you now have the building blocks so you can design your own solution.
Forgive me Rob, pure plagiarism here :) I needed the same code in Swift 3.0, so I translated this great little example you wrote into Swift 3.0.
ViewController.swift
import UIKit
class ViewController: UIViewController {
static let kMargin:CGFloat = 10.0;
override func viewDidLoad()
{
super.viewDidLoad()
// create a container view that holds all of the subviews for which we want to detect touches:
let containerWidth = fmin(self.view.bounds.size.width, self.view.bounds.size.height) - ViewController.kMargin * 2.0
let container = UIView(frame: CGRect(x: ViewController.kMargin, y: ViewController.kMargin, width: containerWidth, height: containerWidth))
container.backgroundColor = UIColor.darkGray
view.addSubview(container)
// now create all of the subviews, specifying a tag for each
let cellWidth = (containerWidth - (4.0 * ViewController.kMargin)) / 3.0
for column in 0 ..< 3 {
for row in 0 ..< 3 {
let cell = UIView(frame: CGRect(x: ViewController.kMargin + CGFloat(column) * (cellWidth + ViewController.kMargin), y: ViewController.kMargin + CGFloat(row) * (cellWidth + ViewController.kMargin), width: cellWidth, height: cellWidth))
cell.tag = row * 3 + column;
container.addSubview(cell)
}
}
// finally, create the gesture recognizer
let pan = UIPanGestureRecognizer(target: self, action: #selector(handlePan))
container.addGestureRecognizer(pan)
}
var gesturedSubviews: [UIView] = []
func handlePan(gesture: UIPanGestureRecognizer)
{
// if we're starting a gesture, reset our list of subviews that we've gone over
if (gesture.state == .began)
{
gesturedSubviews.removeAll()
}
let location = gesture.location(in: gesture.view)
for subview in (gesture.view?.subviews)! {
if (subview.frame.contains(location)) {
if (subview != gesturedSubviews.last) {
gesturedSubviews.append(subview)
subview.backgroundColor = UIColor.blue
}
}
}
// finally, when done, let's just log the subviews
// you would do whatever you would want here
if (gesture.state == .ended)
{
print("We went over:")
for subview in gesturedSubviews {
print("  \(subview.tag)")
}
}
}
}
Update: almost, that is. I tried to translate the update too, but my translation missed something and didn't work, so I searched around SO and crafted a similar, if slightly different, final solution.
import UIKit
import QuartzCore
class ViewController: UIViewController {
static let kMargin:CGFloat = 10.0;
override func viewDidLoad()
{
super.viewDidLoad()
// create a container view that holds all of the subviews for which we want to detect touches:
let containerWidth = fmin(self.view.bounds.size.width, self.view.bounds.size.height) - ViewController.kMargin * 2.0
let container = UIView(frame: CGRect(x: ViewController.kMargin, y: ViewController.kMargin, width: containerWidth, height: containerWidth))
container.backgroundColor = UIColor.darkGray
view.addSubview(container)
// now create all of the subviews, specifying a tag for each
let cellWidth = (containerWidth - (4.0 * ViewController.kMargin)) / 3.0
for column in 0 ..< 3 {
for row in 0 ..< 3 {
let cell = UIView(frame: CGRect(x: ViewController.kMargin + CGFloat(column) * (cellWidth + ViewController.kMargin), y: ViewController.kMargin + CGFloat(row) * (cellWidth + ViewController.kMargin), width: cellWidth, height: cellWidth))
cell.tag = row * 3 + column;
container.addSubview(cell)
}
}
// finally, create the gesture recognizer
let pan = UIPanGestureRecognizer(target: self, action: #selector(handlePan))
container.addGestureRecognizer(pan)
}
// Here's a Swift 3.0 version based on Rajesh Choudhary's answer:
func drawLine(onLayer layer: CALayer, fromPoint start: CGPoint, toPoint end:CGPoint) {
let line = CAShapeLayer()
let linePath = UIBezierPath()
linePath.move(to: start)
linePath.addLine(to: end)
line.path = linePath.cgPath
line.fillColor = nil
line.lineWidth = 8
line.opacity = 0.5
line.strokeColor = UIColor.red.cgColor
layer.addSublayer(line)
}
var gesturedSubviews : [UIView] = []
var startX: CGFloat!
var startY: CGFloat!
var endX: CGFloat!
var endY: CGFloat!
func handlePan(gesture: UIPanGestureRecognizer) {
if (gesture.state == .began)
{
gesturedSubviews = [];
let location = gesture.location(in: gesture.view)
print("location \(location)")
startX = location.x
startY = location.y
}
if (gesture.state == .changed) {
let location = gesture.location(in: gesture.view)
print("location \(location)")
endX = location.x
endY = location.y
drawLine(onLayer: view.layer, fromPoint: CGPoint(x: startX, y: startY), toPoint: CGPoint(x:endX, y:endY))
startX = endX
startY = endY
}
if (gesture.state == .ended) {
let location = gesture.location(in: gesture.view)
print("location \(location)")
drawLine(onLayer: view.layer, fromPoint: CGPoint(x: startX, y: startY), toPoint: CGPoint(x:location.x, y:location.y))
}
// now figure out whether:
// (a) are we over a subview; and
// (b) is this a different subview than we last were over
let location = gesture.location(in: gesture.view)
print("location \(location)")
for subview in (gesture.view?.subviews)! {
if subview.frame.contains(location) {
if (subview != gesturedSubviews.last) {
gesturedSubviews.append(subview)
subview.backgroundColor = UIColor.blue
subview.alpha = 1.0
}
}
}
}
}
I want to draw a straight line with my finger that automatically sizes based upon how far away I am from the point of origin.
So if I touch the screen in the middle and slide my finger out, a line appears to 'stretch' and pivot around the point of origin as my finger moves on the screen. When I lift my finger, the destination point should finalize and create a line. I can drag my finger across the screen and 'draw' on the screen, but that's not what I am wanting to do.
I thought UIBezierPath's moveToPoint would help, but it just messes things up.
What am I doing wrong?
- (id)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
//default line properties
myPath=[[UIBezierPath alloc]init];
myPath.lineCapStyle=kCGLineCapRound;
myPath.miterLimit=0;
myPath.lineWidth=lineWidth;
brushPattern=[UIColor blackColor];
}
return self;
}
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
CGPoint curPoint = [[touches anyObject] locationInView:self];
lastPoint = curPoint;
[myPath moveToPoint:lastPoint];
[pathArray addObject:myPath];
[self setNeedsDisplay];
}
-(void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event{
CGPoint curPoint = [[touches anyObject] locationInView:self];
myPath.lineWidth=lineWidth;
brushPattern=[UIColor redColor]; //red to show it hasn't been added yet.
[myPath moveToPoint:tempPoint];
[self setNeedsDisplay];
}
-(void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
CGPoint curPoint = [[touches anyObject] locationInView:self];
myPath.lineWidth=lineWidth;
brushPattern=[UIColor blackColor]; //finalize the line with black color
[myPath addLineToPoint:curPoint];
[self setNeedsDisplay];
}
Here's one concept. It draws a line from where you start dragging your finger to where you let go, animating it as you drag your finger around. It does this by making a CAShapeLayer and resetting its path as you move your finger around.
This should demonstrate the basic idea:
- (void)viewDidLoad {
[super viewDidLoad];
UIPanGestureRecognizer *gesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePanGesture:)];
[self.view addGestureRecognizer:gesture];
}
- (CAShapeLayer *)createShapeLayer:(UIView *)view {
CAShapeLayer *shapeLayer = [[CAShapeLayer alloc] init];
shapeLayer.fillColor = [UIColor clearColor].CGColor;
shapeLayer.strokeColor = [UIColor redColor].CGColor;
shapeLayer.lineWidth = 3.0;
[view.layer addSublayer:shapeLayer];
return shapeLayer;
}
- (void)handlePanGesture:(UIPanGestureRecognizer *)gesture {
static CAShapeLayer *shapeLayer;
static CGPoint origin;
if (gesture.state == UIGestureRecognizerStateBegan) {
shapeLayer = [self createShapeLayer:gesture.view];
origin = [gesture locationInView:gesture.view];
} else if (gesture.state == UIGestureRecognizerStateChanged) {
UIBezierPath *path = [UIBezierPath bezierPath];
[path moveToPoint:origin];
CGPoint location = [gesture locationInView:gesture.view];
[path addLineToPoint:location];
shapeLayer.path = path.CGPath;
} else if (gesture.state == UIGestureRecognizerStateEnded ||
gesture.state == UIGestureRecognizerStateFailed ||
gesture.state == UIGestureRecognizerStateCancelled) {
shapeLayer = nil;
}
}
Or, in Swift 3:
override func viewDidLoad() {
super.viewDidLoad()
let pan = UIPanGestureRecognizer(target: self, action: #selector(handlePan(_:)))
view.addGestureRecognizer(pan)
}
private func createShapeLayer(for view: UIView) -> CAShapeLayer {
let shapeLayer = CAShapeLayer()
shapeLayer.fillColor = UIColor.clear.cgColor
shapeLayer.strokeColor = UIColor.red.cgColor
shapeLayer.lineWidth = 3.0
view.layer.addSublayer(shapeLayer)
return shapeLayer
}
private var shapeLayer: CAShapeLayer!
private var origin: CGPoint!
func handlePan(_ gesture: UIPanGestureRecognizer) {
if gesture.state == .began {
shapeLayer = createShapeLayer(for: gesture.view!)
origin = gesture.location(in: gesture.view)
} else if gesture.state == .changed {
let path = UIBezierPath()
path.move(to: origin)
path.addLine(to: gesture.location(in: gesture.view))
shapeLayer.path = path.cgPath
} else if gesture.state == .ended || gesture.state == .failed || gesture.state == .cancelled {
shapeLayer = nil
}
}
If you don't use CAShapeLayer, but you want to keep track of previous paths, you'll have to maintain an array for those old paths, and build a path that consists of all of the old paths, perhaps something like:
@interface CustomView ()
@property (nonatomic) CGPoint originPoint;
@property (nonatomic) CGPoint currentPoint;
@property (nonatomic) NSMutableArray *previousPaths;
@end
@implementation CustomView
- (id)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
[self configure];
}
return self;
}
- (id)init {
return [self initWithFrame:CGRectZero];
}
- (id)initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
[self configure];
}
return self;
}
- (void)configure {
_previousPaths = [[NSMutableArray alloc] init];
}
- (void)drawRect:(CGRect)rect {
[[UIColor redColor] setStroke];
UIBezierPath *drawPath = [UIBezierPath bezierPath];
drawPath.lineCapStyle = kCGLineCapRound;
drawPath.miterLimit = 0;
drawPath.lineWidth = 3.0;
for (UIBezierPath *path in self.previousPaths)
[drawPath appendPath:path];
UIBezierPath *path = [self pathForCurrentLine];
if (path)
[drawPath appendPath:path];
[drawPath stroke];
}
- (UIBezierPath *)pathForCurrentLine {
if (CGPointEqualToPoint(self.originPoint, CGPointZero) && CGPointEqualToPoint(self.currentPoint, CGPointZero))
return nil;
UIBezierPath *path = [UIBezierPath bezierPath];
[path moveToPoint:self.originPoint];
[path addLineToPoint:self.currentPoint];
return path;
}
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
self.originPoint = [[touches anyObject] locationInView:self];
}
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
UITouch *touch = [touches anyObject];
if ([event respondsToSelector:@selector(predictedTouchesForTouch:)]) {
touch = [[event predictedTouchesForTouch:touch] lastObject] ?: touch;
}
self.currentPoint = [touch locationInView:self];
[self setNeedsDisplay];
}
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
self.currentPoint = [[touches anyObject] locationInView:self];
[self.previousPaths addObject:[self pathForCurrentLine]];
self.originPoint = self.currentPoint = CGPointZero;
[self setNeedsDisplay];
}
@end
UIBezierPath is building a path from your instructions. Imagine a pen. When you say, "moveToPoint:" it moves the pen to that point. When you say "lineToPoint:" it puts the pen down and moves it from the current location to the new point. And so on.
To get the effect you desire, you will need to create a new path whenever the touches move, drawing a line from the original point to the current touch position.
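A minimal sketch of that fix, reusing the asker's myPath, lastPoint, and brushPattern variables (and assuming drawRect: strokes myPath with brushPattern): rebuild the path on every move so it only ever contains one segment, from the origin to the current finger position.
-(void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
    CGPoint curPoint = [[touches anyObject] locationInView:self];
    myPath = [UIBezierPath bezierPath];   // throw away the previous preview segment
    myPath.lineWidth = lineWidth;
    [myPath moveToPoint:lastPoint];       // pivot around the original touch point
    [myPath addLineToPoint:curPoint];     // stretch to the current finger position
    brushPattern = [UIColor redColor];    // red until the line is finalized
    [self setNeedsDisplay];
}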