Unable to play back my video files using AVFoundation - Objective-C

I'm having difficulty playing back video that I've recently recorded in a hybrid image/video camera akin to Snapchat's (tap to take a photo, press and hold to record a video, playback on button release).
I'm currently saving the video file to the app's Documents directory via NSFileManager. When I log the path I can verify that something is being written, but I can't easily inspect the file because it has to be tested on the phone.
The file path when I log it out:
file:///var/mobile/Containers/Data/Application/7D86B14D-ACFF-4494-AD61-CBBD32DCA7A5/Documents/test.mov
When I go to load the asset from that path, I log an error saying the file can't be opened. I've only just started working with AVFoundation, so I'm not sure what some of the issues/considerations are when debugging. Any insight would be greatly appreciated, thank you!
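For what it's worth, a quick on-device sanity check of the write (a minimal sketch using the same -outputURL helper that appears in the code below) would be:
NSError *attributesError = nil;
NSDictionary *attributes = [[NSFileManager defaultManager] attributesOfItemAtPath:[self outputURL].path error:&attributesError];
if (attributes) {
NSLog(@"Recorded file exists, size: %@ bytes", attributes[NSFileSize]);
} else {
NSLog(@"No file at %@: %@", [self outputURL].path, attributesError.localizedDescription);
}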
Referenced tutorial
Referenced github repository
Referenced code:
PlayerView.h (reference)
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface PlayerView : UIView
@property (nonatomic) AVPlayer *player;
- (void)setPlayer:(AVPlayer *)player;
@end
PlayerView.m (reference)
#import "PlayerView.h"
@implementation PlayerView
+ (Class)layerClass {
return [AVPlayerLayer class];
}
- (AVPlayer *)player {
return [(AVPlayerLayer *)[self layer] player];
}
- (void)setPlayer:(AVPlayer *)player {
[(AVPlayerLayer *)[self layer] setPlayer:player];
}
@end
HybridCameraViewController.h (reference)
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "PlayerView.h"
@interface HybridCameraViewController : UIViewController
@property UIButton *button;
@property UIButton *saveButton;
@property UIImageView *previewView;
#define VIDEO_FILE @"test.mov"
@end
HybridCameraViewController.m (reference)
#import "HybridCameraViewController.h"
static const NSString *ItemStatusContext;
@class PlayerView;
@interface HybridCameraViewController () <AVCaptureFileOutputRecordingDelegate>
@end
@implementation HybridCameraViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *imageOutput;
AVCaptureMovieFileOutput *movieOutput;
AVCaptureConnection *videoConnection;
AVPlayer *player;
AVPlayerItem *playerItem;
PlayerView *playerView;
- (void)viewDidLoad {
[super viewDidLoad];
[self testDevices];
self.view.backgroundColor = [UIColor blackColor];
//Image preview
self.previewView = [[UIImageView alloc]initWithFrame:self.view.frame];
self.previewView.backgroundColor = [UIColor whiteColor];
self.previewView.contentMode = UIViewContentModeScaleAspectFill;
self.previewView.hidden = YES;
[self.view addSubview:self.previewView];
//Playerback setup
playerView = [[PlayerView alloc]initWithFrame:self.view.frame];
playerView.backgroundColor = [UIColor redColor];
[self syncUI];
//Buttons
self.button = [self createButtonWithTitle:@"REC" chooseColor:[UIColor redColor]];
UILongPressGestureRecognizer *longPressRecognizer = [[UILongPressGestureRecognizer alloc]initWithTarget:self action:@selector(handleLongPressGesture:)];
[self.button addGestureRecognizer:longPressRecognizer];
[self.button addTarget:self action:@selector(captureImage) forControlEvents:UIControlEventTouchUpInside];
self.saveButton = [self createSaveButton];
[self.saveButton addTarget:self action:@selector(saveActions) forControlEvents:UIControlEventTouchUpInside];
}
- (void)viewWillAppear:(BOOL)animated {
//Tests
[self initializeAVItems];
NSLog(#"%#", videoConnection);
NSLog(#"%#", imageOutput.connections);
NSLog(#"%#", imageOutput.description.debugDescription);
}
#pragma mark - AV initialization
- (void)initializeAVItems {
//Start session, input
session = [AVCaptureSession new];
if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
session.sessionPreset = AVCaptureSessionPresetHigh;
}
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
} else {
NSLog(#"%#", error);
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//Layer preview
CALayer *viewLayer = [[self view] layer];
[viewLayer setMasksToBounds:YES];
CGRect frame = self.view.frame;
[previewLayer setFrame:frame];
[viewLayer insertSublayer:previewLayer atIndex:0];
//Image Output
imageOutput = [AVCaptureStillImageOutput new];
NSDictionary *imageOutputSettings = [[NSDictionary alloc]initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
imageOutput.outputSettings = imageOutputSettings;
//Video Output
movieOutput = [AVCaptureMovieFileOutput new];
[session addOutput:movieOutput];
[session addOutput:imageOutput];
[session startRunning];
}
- (void)testDevices {
NSArray *devices = [AVCaptureDevice devices];
for (AVCaptureDevice *device in devices) {
NSLog(#"Device name: %#", [device localizedName]);
if ([device hasMediaType:AVMediaTypeVideo]) {
if ([device position] == AVCaptureDevicePositionBack) {
NSLog(#"Device position : back");
}
else {
NSLog(#"Device position : front");
}
}
}
}
#pragma mark - Image capture
- (void)captureImage {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in imageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(#"Requesting capture from: %#", imageOutput);
[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer != NULL) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
self.previewView.image = image;
self.previewView.hidden = NO;
}
}];
[self saveButtonFlyIn:self.saveButton];
}
#pragma mark - Video capture
- (void)captureVideo {
NSLog(#"%#", movieOutput.connections);
[[NSFileManager defaultManager] removeItemAtURL:[self outputURL] error:nil];
videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:movieOutput.connections];
[movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self];
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections {
for (AVCaptureConnection *connection in connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:mediaType]) {
return connection;
}
}
}
return nil;
}
#pragma mark - Show Last Recording
- (void)presentRecording {
NSLog(#"unplaying");
NSLog(#"%#",[self outputURL]);
}
- (IBAction)loadAssetFromFile {
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[self outputURL] options:nil];
NSString *tracksKey = @"tracks";
[asset loadValuesAsynchronouslyForKeys:@[tracksKey] completionHandler:^{
dispatch_async(dispatch_get_main_queue(),^{
NSError *error;
AVKeyValueStatus status = [asset statusOfValueForKey:tracksKey
error:&error];
if (status == AVKeyValueStatusLoaded) {
playerItem = [AVPlayerItem playerItemWithAsset:asset];
[playerItem addObserver:self forKeyPath:@"status"
options:NSKeyValueObservingOptionInitial
context:&ItemStatusContext];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:playerItem];
player = [AVPlayer playerWithPlayerItem:playerItem];
[playerView setPlayer:player];
}
else {
NSLog(#"The asset's tracks were not loaded:\n%#", [error localizedDescription]);
}
});
}];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if (context == &ItemStatusContext) {
dispatch_async(dispatch_get_main_queue(),^{
[self syncUI];
});
return;
}
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
- (void)playerItemDidReachEnd:(NSNotification *)notification {
[player seekToTime:kCMTimeZero];
}
- (void)syncUI {
if ((player.currentItem != nil) &&
([player.currentItem status] == AVPlayerItemStatusReadyToPlay)) {
self.button.enabled = YES;
}
else {
self.button.enabled = NO;
}
}
#pragma mark - AVCaptureFileOutputRecordingDelegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
if (!error) {
NSLog(#"Success!!!!");
} else {
NSLog(#"Error: %#", [error localizedDescription]);
}
}
#pragma mark - Recoding Destination URL
- (NSURL *)outputURL {
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *filePath = [documentsDirectory stringByAppendingPathComponent:VIDEO_FILE];
return [NSURL fileURLWithPath:filePath];
}
#pragma mark - Buttons
- (void)handleLongPressGesture:(UILongPressGestureRecognizer *)recognizer {
if (recognizer.state == UIGestureRecognizerStateBegan) {
NSLog(#"Press");
self.button.backgroundColor = [UIColor greenColor];
[self captureVideo];
}
if (recognizer.state == UIGestureRecognizerStateEnded) {
NSLog(#"Unpress");
self.button.backgroundColor = [UIColor redColor];
[movieOutput stopRecording];
[self performSelector:#selector(loadAssetFromFile)];
[player play];
}
}
- (UIButton *)createButtonWithTitle:(NSString *)title chooseColor:(UIColor *)color {
UIButton *button = [[UIButton alloc] initWithFrame:CGRectMake(self.view.center.x, self.view.frame.size.height - 100, 85, 85)];
button.layer.cornerRadius = button.bounds.size.width / 2;
button.backgroundColor = color;
button.tintColor = [UIColor whiteColor];
[self.view addSubview:button];
return button;
}
- (UIButton *)createSaveButton {
UIButton *button = [[UIButton alloc]initWithFrame:CGRectMake(self.view.frame.size.width, self.view.frame.size.height - 100, 85, 85)];
button.layer.cornerRadius = button.bounds.size.width / 2;
button.backgroundColor = [UIColor greenColor];
button.tintColor = [UIColor whiteColor];
button.userInteractionEnabled = YES;
[button setTitle:#"save" forState:UIControlStateNormal];
[self.view addSubview:button];
return button;
}
- (void)saveButtonFlyIn:(UIButton *)button {
CGRect movement = button.frame;
movement.origin.x = self.view.frame.size.width - 100;
[UIView animateWithDuration:0.2 animations:^{
button.frame = movement;
}];
}
- (void)saveButtonFlyOut:(UIButton *)button {
CGRect movement = button.frame;
movement.origin.x = self.view.frame.size.width;
[UIView animateWithDuration:0.2 animations:^{
button.frame = movement;
}];
}
#pragma mark - Save actions
- (void)saveActions {
[self saveButtonFlyOut:self.saveButton];
self.previewView.image = nil;
self.previewView.hidden = YES;
}
@end

The approach was overly complex. Where you went wrong was 1) not loading the URL properly and 2) not adding the player layer as a sublayer of the main view.
Here is an example of successful playback:
//TestURL
self.videoURL = [NSURL URLWithString:(NSString *)[self.selectedVideo objectForKey:@"source"]];
//Video
self.player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc]initWithAsset:[AVAsset assetWithURL:self.videoURL]]];
//Player layer
self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
self.playerLayer.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height / 3);
[self.view.layer addSublayer:self.playerLayer];
[self.player play];
All you would need to do is redirect the url to the one you just saved:
self.videoURL = [self outputURL];
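One more thing worth checking (an assumption based on the code above, not something verified in your project): -stopRecording is asynchronous, so the file at outputURL may not be fully written when you immediately load it and call play. A minimal sketch of deferring playback to the delegate callback you already implement:
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
if (error) {
NSLog(@"Recording failed: %@", error.localizedDescription);
return;
}
//The file is finalized here, so it is safe to build the player from it
self.videoURL = outputFileURL;
self.player = [AVPlayer playerWithURL:self.videoURL];
self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
self.playerLayer.frame = self.view.bounds;
[self.view.layer addSublayer:self.playerLayer];
[self.player play];
}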

Related

Objective-C app issue on iOS 11

I wrote an iOS app in Objective-C using Xcode 8. It works fine on all devices and iOS versions, but when I tested it on a device running iOS 11, the "Profile Page" stopped working: once we navigate to that screen, it becomes unresponsive to touch and nothing happens. This page works well on every other iOS version and device; only those running iOS 11 are affected.
Code for Profile page is:
#import "ProfileViewController.h"
#interface ProfileViewController ()
#end
#implementation ProfileViewController
- (void)viewDidLoad
{
[super viewDidLoad];
currentUser = [SingletonClass sharedSingletonClass].settingsDictionary[#"User"];
selectedValues=[NSMutableDictionary dictionary];
[MBProgressHUD showHUDAddedTo:self.view animated:YES];
NSMutableDictionary *paramDict=[NSMutableDictionary dictionary];
[paramDict setObject:#"ios" forKey:#"request"];
[paramDict setObject:[NSString stringWithFormat:#"%#",currentUser.user_id] forKey:#"user_id"];
[GeneralWebservices webserviceMainSplashCall:paramDict webserviceName:Webservice_Profile OnCompletion:^(id returnDict, NSError *error) {
if ([returnDict[#"success"] intValue] ==1)
{
[self setProfileData:returnDict[#"data"]];
provinceList=[NSMutableArray arrayWithArray:returnDict[#"provincedata"]];
questions1Array=[NSMutableArray arrayWithArray:returnDict[#"questiondata"]];
}
else
{
[self get_register_data];
[alert show];
}
[MBProgressHUD hideAllHUDsForView:self.view animated:YES];
}];
}
-(void)setProfileData:(NSMutableDictionary*)dataDict
{
profilePicImageView.imageURL=[NSURL URLWithString:dataDict[#"image_file_thumb"]];
[firstNameLabel setText:dataDict[#"first_name"]];
[lastNameLabel setText:dataDict[#"last_name"]];
[dateOfBirthLabel setText:dataDict[#"user_dob"]];
[postalAddressTextfield setText:dataDict[#"user_address"]];
[mobileTextfield setText:dataDict[#"user_mobile"]];
[question1Textfield setText:dataDict[#"user_answer_1"]];
[question2Textfield setText:dataDict[#"user_answer_2"]];
[LLGButton setTitle:dataDict[#"user_llg"] forState:UIControlStateNormal];
[provinceButton setTitle:dataDict[#"user_province"] forState:UIControlStateNormal];
[districtButton setTitle:dataDict[#"user_district"] forState:UIControlStateNormal];
[villageTextfield setText:dataDict[#"user_village"]];
[question1Button setTitle:dataDict[#"user_question_1"] forState:UIControlStateNormal];
[question2Button setTitle:dataDict[#"user_question_2"] forState:UIControlStateNormal];
[self callforDistrict:#"get_district.php" idForItem:dataDict[#"district_id"]];
[self callforLLG:#"get_llg.php" idForItem:dataDict[#"llg_id"]];
[selectedValues setObject:dataDict[#"user_question_1_id"] forKey:#"user_question_1"];
[selectedValues setObject:dataDict[#"user_question_2_id"] forKey:#"user_question_2"];
[selectedValues setObject:dataDict[#"province_id"] forKey:#"user_province"];
[selectedValues setObject:dataDict[#"district_id"] forKey:#"user_district"];
[selectedValues setObject:dataDict[#"llg_id"] forKey:#"user_llg"];
}
-(void)callforDistrict:(NSString*)serviceName idForItem:(NSString*)idForItem
{
dispatch_async(dispatch_get_main_queue(), ^{
[self getdistdata:idForItem];
});
// districList = [NSMutableArray arrayWithArray:returnDict[@"data"]]; // returnDict is not in scope here; districList is populated inside getdistdata:
}
-(void)callforLLG:(NSString*)serviceName idForItem:(NSString*)idForItem
{
dispatch_async(dispatch_get_main_queue(), ^{
[self getIIL:idForItem];
});
}
- (void)viewDidLayoutSubviews
{
[profileScrollView setContentSize:CGSizeMake(self.view.frame.size.width, 700)];
}
- (IBAction)uploadPictureButtonTap:(UIButton *)sender
{
UIActionSheet *popup = [[UIActionSheet alloc] initWithTitle:#"Select option" delegate:self cancelButtonTitle:#"Cancel" destructiveButtonTitle:nil otherButtonTitles:
#"Open Gallery",
#"Take Photo",
nil];
popup.tag = 1;
[popup showInView:[UIApplication sharedApplication].keyWindow];
}
- (IBAction)provinceButtonTap:(UIButton *)sender
{
[self.view endEditing:YES];
NSMutableArray *provinceNames=[NSMutableArray array];
for (NSMutableDictionary*pro in provinceList)
{
[provinceNames addObject:pro[#"province_name"]];
}
LGActionSheet *sheet=[[LGActionSheet alloc] initWithTitle:#"Select Province" buttonTitles:provinceNames cancelButtonTitle:#"Cancel" destructiveButtonTitle:nil];
sheet.tagOfSheet=2;
sheet.heightMax=300;
sheet.delegate=self;
[sheet showAnimated:YES completionHandler:nil];
}
- (IBAction)districtButtonTap:(UIButton *)sender
{
[self.view endEditing:YES];
NSMutableArray *names=[NSMutableArray array];
for (NSMutableDictionary*pro in districList)
{
[names addObject:pro[#"district_name"]];
}
LGActionSheet *sheet=[[LGActionSheet alloc] initWithTitle:#"Select District" buttonTitles:names cancelButtonTitle:#"Cancel" destructiveButtonTitle:nil];
sheet.heightMax=200;
sheet.tagOfSheet=3;
sheet.delegate=self;
[sheet showAnimated:YES completionHandler:nil];
}
- (IBAction)LLGButtonTap:(UIButton *)sender
{
[self.view endEditing:YES];
NSMutableArray *names=[NSMutableArray array];
for (NSMutableDictionary*pro in llgList)
{
[names addObject:pro[#"llg_name"]];
}
LGActionSheet *sheet=[[LGActionSheet alloc] initWithTitle:#"Select LLG" buttonTitles:names cancelButtonTitle:#"Cancel" destructiveButtonTitle:nil];
sheet.heightMax=200;
sheet.tagOfSheet=4;
sheet.delegate=self;
[sheet showAnimated:YES completionHandler:nil];
}
- (IBAction)villageButtonTap:(UIButton *)sender
{
[self.view endEditing:YES];
}
- (IBAction)question1ButtonTap:(UIButton *)sender
{
[self.view endEditing:YES];
NSMutableArray *ques1=[NSMutableArray array];
for (NSMutableDictionary*pro in questions1Array)
{
[ques1 addObject:pro[#"question_name"]];
}
LGActionSheet *sheet=[[LGActionSheet alloc] initWithTitle:#"Select Question 1" buttonTitles:ques1 cancelButtonTitle:#"Cancel" destructiveButtonTitle:nil];
sheet.tagOfSheet=5;
sheet.heightMax=300;
sheet.delegate=self;
[sheet showAnimated:YES completionHandler:nil];
}
- (IBAction)question2ButtonTap:(UIButton *)sender
{
[self.view endEditing:YES];
NSMutableArray *ques2=[NSMutableArray array];
for (NSMutableDictionary*pro in questions1Array)
{
[ques2 addObject:pro[#"question_name"]];
}
LGActionSheet *sheet=[[LGActionSheet alloc] initWithTitle:#"Select Question 2" buttonTitles:ques2 cancelButtonTitle:#"Cancel" destructiveButtonTitle:nil];
sheet.tagOfSheet=6;
sheet.heightMax=300;
sheet.delegate=self;
[sheet showAnimated:YES completionHandler:nil];
}
- (IBAction)updateButtonTap:(UIButton *)sender
{
[self.view endEditing:YES];
[MBProgressHUD showHUDAddedTo:self.view animated:YES];
[selectedValues setObject:#"ios" forKey:#"request"];
[selectedValues setObject:[NSString stringWithFormat:#"%#",currentUser.user_id] forKey:#"user_id"];
[selectedValues setObject:postalAddressTextfield.text forKey:#"address"];
[selectedValues setObject:mobileTextfield.text forKey:#"user_mobile"];
[selectedValues setObject:question1Textfield.text forKey:#"user_answer_1"];
[selectedValues setObject:villageTextfield.text forKey:#"user_village"];
[GeneralWebservices webserviceCallWithData:selectedValues webserviceName:Webservice_ProfileUpdate dataToPost:imageData imageName:imageName OnCompletion:^(id returnDict, NSError *error) {
if ([returnDict[#"success"] intValue] == 1)
{
UIAlertView* alert = [[UIAlertView alloc] init];
[alert setTitle:#"Updated Successfully"];
[alert addButtonWithTitle:#"OK"];
[alert show];
}
else
{
UIAlertView* alert = [[UIAlertView alloc] init];
[alert setTitle:#"Updated Successfully"];
[alert addButtonWithTitle:#"OK"];
[alert show];
}
[MBProgressHUD hideAllHUDsForView:self.view animated:NO];
}];
}
- (IBAction)saveButtonTap:(UIButton *)sender
{
[self.view endEditing:YES];
}
- (void)actionSheet:(LGActionSheet *)actionSheet buttonPressedWithTitle:(NSString *)title index:(NSUInteger)index
{
dispatch_async(dispatch_get_main_queue(), ^{
if (actionSheet.tagOfSheet==2)
{
[selectedValues setObject:provinceList[index][#"province_id"] forKey:#"user_province"];
[provinceButton setTitle:provinceList[index][#"province_name"] forState:UIControlStateNormal];
[districList removeAllObjects];
[llgList removeAllObjects];
[districtButton setTitle:#"District *" forState:UIControlStateNormal];
[LLGButton setTitle:#"LLG *" forState:UIControlStateNormal];
[selectedValues removeObjectForKey:#"district"];
[selectedValues removeObjectForKey:#"llg"];
[self callforDistrict:#"get_district.php" idForItem:provinceList[index][#"province_id"]];
}
else if(actionSheet.tagOfSheet==3)
{
[selectedValues setObject:districList[index][#"id"] forKey:#"user_district"];
[districtButton setTitle:districList[index][#"district_name"] forState:UIControlStateNormal];
[llgList removeAllObjects];
[LLGButton setTitle:#"LLG *" forState:UIControlStateNormal];
[selectedValues removeObjectForKey:#"llg"];
[self callforLLG:#"get_llg.php" idForItem:districList[index][#"id"]];
}
else if(actionSheet.tagOfSheet==4)
{
[selectedValues setObject:llgList[index][#"id"] forKey:#"user_llg"];
[LLGButton setTitle:llgList[index][#"llg_name"] forState:UIControlStateNormal];
}
else if(actionSheet.tagOfSheet==5)
{
[selectedValues setObject:questions1Array[index][#"question_id"] forKey:#"user_question_1"];
[question1Button setTitle:questions1Array[index][#"question_name"] forState:UIControlStateNormal];
}
else if(actionSheet.tagOfSheet==6)
{
[selectedValues setObject:questions1Array[index][#"question_id"] forKey:#"user_question_2"];
[question2Button setTitle:questions1Array[index][#"question_name"] forState:UIControlStateNormal];
}
});
}
-(IBAction) returnTextField:(id)sender
{
CGRect frame = self.view.frame;
frame.origin.y = 0;
[UIView animateWithDuration:0.3 animations:^{
self.view.frame = frame;
}];
[self.view endEditing:YES];
}
- (BOOL)textFieldShouldEndEditing:(UITextField*)textField
{
return YES;
}
-(BOOL) textFieldShouldReturn:(UITextField *)textField
{
[self animateTextField:textField up:NO];
[textField resignFirstResponder];
return YES;
}
- (void)textFieldDidBeginEditing:(UITextField *)textField
{
[self animateTextField:textField up:YES];
}
- (void)textFieldDidEndEditing:(UITextField *)textField
{
[self animateTextField:textField up:NO];
}
- (void)actionSheet:(UIActionSheet *)popup clickedButtonAtIndex:(NSInteger)buttonIndex
{
switch (popup.tag) {
case 1: {
switch (buttonIndex) {
case 0:
[self openPhotoLibraryButton:self];
break;
case 1:
[self openCameraButton:self];
break;
default:
break;
}
break;
}
default:
break;
}
}
- (IBAction)openCameraButton:(id)sender
{
[self.view endEditing:YES];
if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
UIImagePickerController *picker = [[UIImagePickerController alloc] init];
picker.delegate = self;
[picker setSourceType:UIImagePickerControllerSourceTypeCamera];
picker.allowsEditing = false;
[self presentViewController:picker animated:true completion:nil];
}
else{
UIAlertView *alert=[[UIAlertView alloc]initWithTitle:#"Alert!" message:#"Camera is not connected" delegate:self cancelButtonTitle:#"Ok" otherButtonTitles:nil, nil];
[alert show];
}
}
- (IBAction)openPhotoLibraryButton:(id)sender
{
[self.view endEditing:YES];
if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypePhotoLibrary]) {
UIImagePickerController *picker = [[UIImagePickerController alloc] init];
picker.delegate = self;
[picker setSourceType:UIImagePickerControllerSourceTypePhotoLibrary];
picker.allowsEditing = true;
[self presentViewController:picker animated:true completion:nil];
}
}
-(void) imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
[self dismissViewControllerAnimated:YES completion:nil];
imageData=[[NSData alloc]init];
if ([info[#"UIImagePickerControllerMediaType"] isEqualToString:#"public.image"])
{
UIImage *image = [info objectForKey:#"UIImagePickerControllerEditedImage"];
if (!image)
{
image = [info objectForKey:#"UIImagePickerControllerOriginalImage"];
}
NSURL *imagePath = [info objectForKey:#"UIImagePickerControllerReferenceURL"];
image=[SettingsClass rotateImageAppropriately:image];
NSString *imageNamewithformat = [imagePath lastPathComponent];
imageName=#"assets.jpg";
NSString *Imageformat = [imageNamewithformat substringFromIndex: [imageNamewithformat length] - 3];
if ([Imageformat isEqualToString:#"JPG"]||[Imageformat isEqualToString:#"jpg"]) {
imageData=UIImageJPEGRepresentation(image, 0.33f);
imageName=#"assets.jpg";
}
else if ([Imageformat isEqualToString:#"PNG"]||[Imageformat isEqualToString:#"png"])
{
imageData=UIImagePNGRepresentation(image);
imageName=#"assets.png";
}
else
{
imageData=UIImageJPEGRepresentation(image, 0.33f);
imageName=#"assets.jpg";
}
[profilePicImageView setImage:image];
}
}
- (void) animateTextField: (UITextField*) textField up: (BOOL) up
{
CGPoint temp = [textField.superview convertPoint:textField.frame.origin toView:nil];
UIInterfaceOrientation orientation =
[[UIApplication sharedApplication] statusBarOrientation];
if (orientation == UIInterfaceOrientationPortrait){
if(up) {
int moveUpValue = temp.y+textField.frame.size.height;
animatedDis = 264-(self.view.frame.size.height-moveUpValue-35);
}
}
else if(orientation == UIInterfaceOrientationPortraitUpsideDown) {
if(up) {
int moveUpValue = self.view.frame.size.height-temp.y+textField.frame.size.height;
animatedDis = 264-(self.view.frame.size.height-moveUpValue-35);
}
}
else if(orientation == UIInterfaceOrientationLandscapeLeft || orientation == UIInterfaceOrientationLandscapeRight) {
if(up) {
int moveUpValue = temp.y+textField.frame.size.height;
animatedDis = 352-(self.view.frame.size.height-moveUpValue-100);
}
}
else
{
if(up) {
int moveUpValue = temp.y+textField.frame.size.height;
animatedDis = 352-(768-moveUpValue-100);
}
}
if(animatedDis>0)
{
const int movementDistance = animatedDis;
const float movementDuration = 0.3f;
int movement = (up ? -movementDistance : movementDistance);
[UIView beginAnimations: nil context: nil];
[UIView setAnimationBeginsFromCurrentState: YES];
[UIView setAnimationDuration: movementDuration];
if (orientation == UIInterfaceOrientationPortrait){
self.view.frame = CGRectOffset( self.view.frame, 0, movement);
}
else if(orientation == UIInterfaceOrientationPortraitUpsideDown) {
self.view.frame = CGRectOffset( self.view.frame, 0, movement);
}
else if(orientation == UIInterfaceOrientationLandscapeLeft) {
self.view.frame = CGRectOffset( self.view.frame, 0, movement);
}
else {
self.view.frame = CGRectOffset( self.view.frame, 0, movement);
}
[UIView commitAnimations];
}
}
- (IBAction)backtohomeview :(id)sender {
[self dismissViewControllerAnimated:YES completion:nil];
}
-(void)getdistdata:(NSString*)idForItem
{
self.view.userInteractionEnabled = NO;
NSString *strURL;
strURL = [NSString stringWithFormat:#"http://bullionscope.com/Gold_Phase3/webservices/get_district.php?request=ios&province_id=%#",idForItem];
NSDictionary *headers = #{ #"cache-control": #"no-cache",
#"postman-token": #"900e1577-4876-cd9a-d24c-cb0631b4a1fb" };
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:strURL]
cachePolicy:NSURLRequestUseProtocolCachePolicy
timeoutInterval:10.0];
[request setHTTPMethod:#"POST"];
[request setAllHTTPHeaderFields:headers];
NSURLSession *session = [NSURLSession sharedSession];
NSURLSessionDataTask *dataTask = [session dataTaskWithRequest:request
completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
if (error) {
}
else
{
NSDictionary *jsonDic = [NSJSONSerialization JSONObjectWithData:data options:kNilOptions error:&error];
NSString *success = [[jsonDic objectForKey:#"success"]stringValue];
if ([success isEqualToString:@"1"])
{
districList=[NSMutableArray arrayWithArray:[jsonDic objectForKey:#"data"]];
dispatch_async(dispatch_get_main_queue(), ^{
self.view.userInteractionEnabled = YES;
});
}
else
{
dispatch_async(dispatch_get_main_queue(), ^{
self.view.userInteractionEnabled = YES;
});
}
}
}];
[dataTask resume];
}
-(void)getIIL:(NSString*)idForItem
{
self.view.userInteractionEnabled = NO;
NSString *strURL;
strURL = [NSString stringWithFormat:#"http://bullionscope.com/Gold_Phase3/webservices/get_llg.php?request=ios&district_id=%#",idForItem];
NSDictionary *headers = #{ #"cache-control": #"no-cache",
#"postman-token": #"900e1577-4876-cd9a-d24c-cb0631b4a1fb" };
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:strURL]
cachePolicy:NSURLRequestUseProtocolCachePolicy
timeoutInterval:10.0];
[request setHTTPMethod:#"POST"];
[request setAllHTTPHeaderFields:headers];
NSURLSession *session = [NSURLSession sharedSession];
NSURLSessionDataTask *dataTask = [session dataTaskWithRequest:request
completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
if (error) {
}
else
{
NSDictionary *jsonDic = [NSJSONSerialization JSONObjectWithData:data options:kNilOptions error:&error];
NSString *success = [[jsonDic objectForKey:#"success"]stringValue];
if ([success isEqualToString:@"1"])
{
llgList=[NSMutableArray arrayWithArray:[jsonDic objectForKey:#"data"]];
dispatch_async(dispatch_get_main_queue(), ^{
self.view.userInteractionEnabled = YES;
});
}
else
{
dispatch_async(dispatch_get_main_queue(), ^{
self.view.userInteractionEnabled = YES;
});
}
}
}];
[dataTask resume];
}
-(void)get_register_data
{
self.view.userInteractionEnabled = NO;
NSString *strURL;
strURL = [NSString stringWithFormat:#"http://bullionscope.com/Gold_Phase3/webservices/get_all_province.php?request=ios"];
NSDictionary *headers = #{ #"cache-control": #"no-cache",
#"postman-token": #"900e1577-4876-cd9a-d24c-cb0631b4a1fb" };
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:strURL]
cachePolicy:NSURLRequestUseProtocolCachePolicy
timeoutInterval:10.0];
[request setHTTPMethod:#"POST"];
[request setAllHTTPHeaderFields:headers];
NSURLSession *session = [NSURLSession sharedSession];
NSURLSessionDataTask *dataTask = [session dataTaskWithRequest:request
completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
if (error) {
}
else
{
NSDictionary *jsonDic = [NSJSONSerialization JSONObjectWithData:data options:kNilOptions error:&error];
NSString *success = [[jsonDic objectForKey:#"success"]stringValue];
if ([success isEqualToString:@"1"])
{
provinceList=[NSMutableArray arrayWithArray:[jsonDic objectForKey:#"data"]];
dispatch_async(dispatch_get_main_queue(), ^{
self.view.userInteractionEnabled = YES;
});
}
else
{
dispatch_async(dispatch_get_main_queue(), ^{
self.view.userInteractionEnabled = YES;
});
}
}
}];
[dataTask resume];
}
#end

Which third-party image view class should I use?

My requirement is:
I need to show an image in an image view from a server URL and also save it for offline use.
However, the image at that same URL may be updated at any time.
I tried EGOImageView, AsyncImageView, FXImageView and Haneke.
The first part of my requirement is achieved, but not the second: if the image at link "xxxxx" is shown once and the image at that same link is later updated, the app still shows only the old image.
You can use the class below, which will fulfil your requirement.
DImageView.h
#import <UIKit/UIKit.h>
@interface DImageView : UIImageView
@property (nonatomic, strong) UIActivityIndicatorView *activityView;
- (void)processImageDataWithURLString:(NSString *)urlString;
+ (UIImage *)getSavedImage:(NSString *)fileName;
+ (void)saveImageWithFolderName:(NSString *)folderName AndFileName:(NSString *)fileName AndImage:(NSData *)imageData;
@end
DImageView.m
#import "DImageView.h"
#define IMAGES_FOLDER_NAME @"DImages"
@implementation DImageView
#pragma mark - App Life Cycle
- (id)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self)
{
}
return self;
}
- (void)dealloc
{
self.activityView = nil;
[super dealloc];
}
- (id)initWithCoder:(NSCoder *)aDecoder
{
self = [super initWithCoder:aDecoder];
if (self)
{
[self initWithFrame:[self frame]];
}
return self;
}
#pragma mark - Download images
- (void)processImageDataWithURLString:(NSString *)urlString //andBlock:(void (^)(UIImage * img))processImage
{
#autoreleasepool
{
UIImage * saveImg = [DImageView getSavedImage:urlString];
if (saveImg)
{
dispatch_queue_t callerQueue = dispatch_get_main_queue();
dispatch_async(callerQueue, ^{
#autoreleasepool
{
[self setImage:saveImg];
}
});
}
else
{
[self showActivityIndicator];
NSURL *url = [NSURL URLWithString:[urlString stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]];
dispatch_queue_t callerQueue = dispatch_get_main_queue();
dispatch_queue_t downloadQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0);
__block NSError* error = nil;
dispatch_async(downloadQueue, ^{
NSData * imageData = [[[NSData dataWithContentsOfURL:url options:NSDataReadingUncached error:&error] retain] autorelease];
if (error)
{
NSLog(#"DImg Error %#", [error debugDescription]);
}
else
{
dispatch_async(callerQueue, ^{
#autoreleasepool
{
UIImage *image = [UIImage imageWithData:imageData];
[self setImage:image];
[self hideActivityIndicator];
/* Below code is to save image*/
[DImageView saveImageWithFolderName:IMAGES_FOLDER_NAME AndFileName:urlString AndImage:imageData];
}
});
}
});
dispatch_release(downloadQueue);
}
}
}
#pragma mark - File Save methods
+ (void)saveImageWithFolderName:(NSString *)folderName AndFileName:(NSString *)fileName AndImage:(NSData *) imageData
{
#autoreleasepool
{
NSFileManager *fileManger = [NSFileManager defaultManager] ;
NSString *directoryPath = [NSString stringWithFormat:#"%#/%#",[DImageView applicationDocumentsDirectory],folderName] ;
if (![fileManger fileExistsAtPath:directoryPath])
{
NSError *error = nil;
[fileManger createDirectoryAtPath:directoryPath withIntermediateDirectories:YES attributes:nil error:&error];
}
fileName = [DImageView fileNameValidate:fileName];
NSString *filePath = [NSString stringWithFormat:#"%#/%#",directoryPath,fileName] ;
BOOL isSaved = [imageData writeToFile:filePath atomically:YES];
if (!isSaved)
{
NSLog(#" ** Img Not Saved");
}
}
}
+ (NSString *)applicationDocumentsDirectory
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
return basePath;
}
+ (UIImage *)getSavedImage :(NSString *)fileName
{
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
fileName = [DImageView fileNameValidate:fileName];
NSFileManager * fileManger = [NSFileManager defaultManager] ;
NSString * directoryPath = [NSString stringWithFormat:#"%#/%#",[DImageView applicationDocumentsDirectory],IMAGES_FOLDER_NAME] ;
NSString * filePath = [NSString stringWithFormat:#"%#/%#",directoryPath,fileName] ;
if ([fileManger fileExistsAtPath:directoryPath])
{
UIImage *image = [UIImage imageWithContentsOfFile:filePath] ;
if (image)
{
return image;
}
else
{
NSLog(#"** Img Not Found **");
return nil;
}
}
[pool release];
return nil;
}
+ (NSString*) fileNameValidate : (NSString*) name
{
name = [name stringByReplacingOccurrencesOfString:#"://" withString:#"##"];
name = [name stringByReplacingOccurrencesOfString:#"/" withString:#"#"];
name = [name stringByReplacingOccurrencesOfString:#"%20" withString:#""];
return name;
}
#pragma mark - Activity Methods
- (void) showActivityIndicator
{
self.activityView = [[UIActivityIndicatorView alloc]initWithFrame:CGRectMake(0, 0, self.frame.size.width, self.frame.size.height)];
self.activityView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin;
self.activityView.hidesWhenStopped = TRUE;
self.activityView.backgroundColor = [UIColor clearColor];
self.activityView.activityIndicatorViewStyle = UIActivityIndicatorViewStyleGray;
[self addSubview:self.activityView];
[self.activityView startAnimating];
}
- (void) hideActivityIndicator
{
CAAnimation *animation = [NSClassFromString(#"CATransition") animation];
[animation setValue:#"kCATransitionFade" forKey:#"type"];
animation.duration = 0.4;;
[self.layer addAnimation:animation forKey:nil];
[self.activityView stopAnimating];
[self.activityView removeFromSuperview];
for (UIView * view in self.subviews)
{
if([view isKindOfClass:[UIActivityIndicatorView class]])
[view removeFromSuperview];
}
}
@end
What will this class do?
It will download the image from the server, save it to the application's Documents directory, and load it from there when a local copy is available.
How do you use it?
Set DImageView as the class of your UIImageView in your nib file.
Then you can simply use it as below in your .m file:
[imgViewName processImageDataWithURLString:imageURl];
You should take a look at SDWebImage. It's one of the most widely used UIImageView categories right now and offers a lot of features, including caching.
Have a look at this part of the documentation to learn how to refresh your cache with it: Handle image refresh
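For the refresh part specifically, here is a minimal sketch (assuming SDWebImage 3.x; imageView and imageURLString are placeholders for your own image view and URL string):
#import <SDWebImage/UIImageView+WebCache.h>
//SDWebImageRefreshCached makes SDWebImage revalidate against the server's cache headers,
//so an updated image at the same URL replaces the stale cached copy
[imageView sd_setImageWithURL:[NSURL URLWithString:imageURLString]
placeholderImage:nil
options:SDWebImageRefreshCached
completed:^(UIImage *image, NSError *error, SDImageCacheType cacheType, NSURL *imageURL) {
//image is nil on failure; cacheType reports whether it came from memory, disk, or the network
}];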

Page View Controller: UILabel and UIImageview empty on first page

I'm using a UIPageViewController to display items from a JSON file. It works fine except for the first page, which displays nothing on the initial load; if I navigate away and come back, it works.
Code is as follows:
#import "SJPagesViewController.h"
#import "SJChildViewController.h"
@interface SJPagesViewController ()
@end
@implementation SJPagesViewController
@synthesize urlToFollow, data, articlesArray;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
...
}
- (void)viewDidLoad
{
[super viewDidLoad];
self.pageController = [[UIPageViewController alloc] initWithTransitionStyle:UIPageViewControllerTransitionStyleScroll navigationOrientation:UIPageViewControllerNavigationOrientationHorizontal options:nil];
CGFloat window_height = ([self window_height]-30.0);
CGFloat window_width = [self window_width];
CGRect pageFrame = CGRectMake(0.0f, 0.0f,window_width , window_height);
self.pageController.dataSource = self;
[[self.pageController view] setFrame:pageFrame];
SJChildViewController *initialViewController = [self viewControllerAtIndex:0];
NSArray *viewControllers = [NSArray arrayWithObject:initialViewController];
[self.pageController setViewControllers:viewControllers direction:UIPageViewControllerNavigationDirectionForward animated:NO completion:nil];
[self addChildViewController:self.pageController];
[[self view] addSubview:[self.pageController view]];
[self.pageController didMoveToParentViewController:self];
//Download JSON
NSError *error=nil;
NSURL *url = [NSURL URLWithString:urlToFollow];
data = [NSData dataWithContentsOfURL: url options:NSDataReadingMappedIfSafe error:&error];
NSDictionary *dictionary = [NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
NSArray *response = [dictionary objectForKey:@"items"];
articlesArray = [[NSArray alloc] initWithArray:response];
//NSLog(@"articlesArray = %@", articlesArray);
}
- (UIViewController *)pageViewController:(UIPageViewController *)pageViewController viewControllerBeforeViewController:(UIViewController *)viewController {
NSUInteger index = [(SJChildViewController *)viewController index];
if (index == 0) {
return nil;
}
index--;
return [self viewControllerAtIndex:index];
}
- (UIViewController *)pageViewController:(UIPageViewController *)pageViewController viewControllerAfterViewController:(UIViewController *)viewController {
NSUInteger index = [(SJChildViewController *)viewController index];
index++;
if (index == articlesArray.count) {
return nil;
}
return [self viewControllerAtIndex:index];
}
- (CGFloat) window_height {
return [UIScreen mainScreen].applicationFrame.size.height;
}
- (CGFloat) window_width {
return [UIScreen mainScreen].applicationFrame.size.width;
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (SJChildViewController *)viewControllerAtIndex:(NSUInteger)index {
SJChildViewController *childViewController = [[SJChildViewController alloc] initWithNibName:@"SJChildViewController" bundle:nil];
childViewController.index = index;
childViewController.arrayCount = self.articlesArray.count;
childViewController.model = [[self.articlesArray objectAtIndex:index]objectForKey:@"modelo"];
childViewController.price = [[self.articlesArray objectAtIndex:index]objectForKey:@"precio"];
childViewController.make = [[self.articlesArray objectAtIndex:index]objectForKey:@"marca"];
childViewController.imageUrl = [[self.articlesArray objectAtIndex:index]objectForKey:@"photoUrl"];
return childViewController;
}
- (NSInteger)presentationCountForPageViewController:(UIPageViewController *)pageViewController {
// The number of items reflected in the page indicator.
if (articlesArray.count >5) {
return 5;
}else return [articlesArray count];
}
- (NSInteger)presentationIndexForPageViewController:(UIPageViewController *)pageViewController {
// The selected item reflected in the page indicator.
return 0;
}
@end
And ChildViewController to display items:
@implementation SJChildViewController
@synthesize activityIndicator, imageUrl, price, model, make;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
....
}
- (void)viewDidLoad
{
[super viewDidLoad];
//Activity indicator
activityIndicator = [[UIActivityIndicatorView alloc]initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleGray];
activityIndicator.center = CGPointMake(self.view.frame.size.width / 2.0, self.view.frame.size.height / 2.0);
[self.view addSubview: activityIndicator];
[activityIndicator startAnimating];
}
-(void)viewDidAppear:(BOOL)animated{
self.scrrenNumber.text = [NSString stringWithFormat:@"Artículo %ld de %ld", ((long)self.index+1), (long)self.arrayCount];
self.lblMake.lineBreakMode = NSLineBreakByWordWrapping;
self.lblMake.text = [NSString stringWithFormat:@"%@",make];
//Download image from url
NSURL *url_1= [NSURL URLWithString:imageUrl];
NSURLRequest *request_1 = [NSURLRequest requestWithURL:url_1];
[NSURLConnection sendAsynchronousRequest:request_1
queue:[NSOperationQueue currentQueue]
completionHandler:^(NSURLResponse *response, NSData *data, NSError *error){
UIImage *img = [UIImage imageWithData:data];
[self.articleImage performSelectorOnMainThread:@selector(setImage:) withObject:img waitUntilDone:YES];
}];
//Activity indicator
self.lblPrice.text = [NSString stringWithFormat:@"%@",price];
self.lblModel.lineBreakMode = NSLineBreakByWordWrapping;
self.lblModel.text = [NSString stringWithFormat:@"%@",model];
[activityIndicator stopAnimating];
}
@end
How can I make it work from the first moment?
Luis, I don't have access to Xcode right now, but it appears your issue is that you are instantiating childViewController while articlesArray is still empty.
In viewDidLoad you download your JSON file and populate articlesArray after you call your helper method viewControllerAtIndex:.
Try downloading your JSON file and populating the array first, as you have here:
//Download JSON
NSError *error=nil;
NSURL *url = [NSURL URLWithString:urlToFollow];
data = [NSData dataWithContentsOfURL: url options:NSDataReadingMappedIfSafe error:&error];
NSDictionary *dictionary = [NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
NSArray *response = [dictionary objectForKey:@"items"];
articlesArray = [[NSArray alloc] initWithArray:response];
Then create your pageViewController and populate it like you did here.
self.pageController = [[UIPageViewController alloc] initWithTransitionStyle:UIPageViewControllerTransitionStyleScroll navigationOrientation:UIPageViewControllerNavigationOrientationHorizontal options:nil];
CGFloat window_height = ([self window_height]-30.0);
CGFloat window_width = [self window_width];
CGRect pageFrame = CGRectMake(0.0f, 0.0f,window_width , window_height);
self.pageController.dataSource = self;
[[self.pageController view] setFrame:pageFrame];
SJChildViewController *initialViewController = [self viewControllerAtIndex:0];
NSArray *viewControllers = [NSArray arrayWithObject:initialViewController];
[self.pageController setViewControllers:viewControllers direction:UIPageViewControllerNavigationDirectionForward animated:NO completion:nil];
[self addChildViewController:self.pageController];
[[self view] addSubview:[self.pageController view]];
[self.pageController didMoveToParentViewController:self];

Take a picture in iOS without UIImagePicker and without preview it

Do you know of any way/method to take a photo in iOS and save it to the Camera Roll with a simple button press, without showing any preview?
I already know how to show the camera view, but it shows a preview of the image and the user needs to tap the take-photo button to take the photo.
In a few words: the user taps the button, the picture is taken, with no previews or double checks to take/save the photo.
I already found the takePicture method of the UIImagePickerController class: http://developer.apple.com/library/ios/documentation/uikit/reference/UIImagePickerController_Class/UIImagePickerController/UIImagePickerController.html#//apple_ref/occ/instm/UIImagePickerController/takePicture
Set the showsCameraControls property to NO.
poc = [[UIImagePickerController alloc] init];
[poc setTitle:#"Take a photo."];
[poc setDelegate:self];
[poc setSourceType:UIImagePickerControllerSourceTypeCamera];
poc.showsCameraControls = NO;
You also have to add your own controls as a custom view on top of poc.view, but that is very simple, and you can apply your own UI style that way.
You receive the image data as usual in imagePickerController:didFinishPickingMediaWithInfo:.
To take the photo, you call
[poc takePicture];
from your custom button.
Hope, that works for you.
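If it helps, here is a rough sketch of those two pieces together: a custom shutter button laid over the preview-less picker via cameraOverlayView, and saving straight to the Camera Roll in the delegate callback (the property and selector names here are just placeholders):
- (void)addShutterOverlayToPicker:(UIImagePickerController *)poc {
UIView *overlay = [[UIView alloc] initWithFrame:poc.view.bounds];
UIButton *shutterButton = [UIButton buttonWithType:UIButtonTypeCustom];
shutterButton.frame = CGRectMake(120, poc.view.bounds.size.height - 120, 80, 80);
[shutterButton setTitle:@"Snap" forState:UIControlStateNormal];
[shutterButton addTarget:self action:@selector(shutterTapped) forControlEvents:UIControlEventTouchUpInside];
[overlay addSubview:shutterButton];
poc.cameraOverlayView = overlay;
}
- (void)shutterTapped {
[self.poc takePicture]; //assumes the picker is kept in a property
}
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
UIImage *photo = info[UIImagePickerControllerOriginalImage];
UIImageWriteToSavedPhotosAlbum(photo, nil, NULL, NULL); //no confirmation step, straight to the Camera Roll
[picker dismissViewControllerAnimated:YES completion:nil];
}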
Assuming you want a point-and-shoot method, you can create an AVCaptureSession and just call UIImageWriteToSavedPhotosAlbum.
Here is a link that goes into that exact process: http://www.musicalgeometry.com/?p=1297
It's also worth noting that your users need to have given the app access to their camera roll or you may experience issues saving the images.
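A stripped-down sketch of that approach (assuming you already have a running AVCaptureSession with an AVCaptureStillImageOutput stored in a stillImageOutput property, as in the longer answer below):
- (void)captureAndSaveStill {
AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *error) {
if (!sampleBuffer) {
NSLog(@"Still capture failed: %@", error.localizedDescription);
return;
}
NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
UIImage *photo = [UIImage imageWithData:jpegData];
UIImageWriteToSavedPhotosAlbum(photo, nil, NULL, NULL); //saves to the Camera Roll with no preview UI
}];
}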
You need to design your own custom preview at whatever size you want; when the capture button is pressed, call the buttonPressed: method and do whatever you want there:
- (void)buttonPressed:(UIButton *)sender {
NSLog(@"Capture Clicked");
[self.imagePicker takePicture];
//[NSTimer scheduledTimerWithTimeInterval:3.0f target:self selector:@selector(timerFired:) userInfo:nil repeats:NO];
}
Following is code that will take a photo without showing a preview screen. When I tried the accepted answer, which uses UIImagePickerController, the preview screen showed and then auto-disappeared. With the code below, the user taps the 'takePhoto' button and the device takes the photo with zero change to the UI (in my app, I add a green check mark next to the take-photo button). The code below is from Apple (https://developer.apple.com/LIBRARY/IOS/samplecode/AVCam/Introduction/Intro.html) with the 'extra functions' (those not related to taking a still photo) commented out. Thank you incmiko for suggesting this code in your answer "iOS take photo from camera without modalViewController".
Updated code, 26 Mar 2015:
to trigger snap photo:
[self snapStillImage:sender];
in .h file:
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
// include code below in header file, after #import and before @interface
// avfoundation copy paste code
static void * CapturingStillImageContext = &CapturingStillImageContext;
static void * RecordingContext = &RecordingContext;
static void * SessionRunningAndDeviceAuthorizedContext = &SessionRunningAndDeviceAuthorizedContext;
// avfoundation, include code below after @interface
// avf - Session management.
@property (nonatomic) dispatch_queue_t sessionQueue; // Communicate with the session and other session objects on this queue.
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
// avf - Utilities.
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
@property (nonatomic, getter = isDeviceAuthorized) BOOL deviceAuthorized;
@property (nonatomic, readonly, getter = isSessionRunningAndDeviceAuthorized) BOOL sessionRunningAndDeviceAuthorized;
@property (nonatomic) BOOL lockInterfaceRotation;
@property (nonatomic) id runtimeErrorHandlingObserver;
in .m file:
#pragma mark - AV Foundation
- (BOOL)isSessionRunningAndDeviceAuthorized
{
return [[self session] isRunning] && [self isDeviceAuthorized];
}
+ (NSSet *)keyPathsForValuesAffectingSessionRunningAndDeviceAuthorized
{
return [NSSet setWithObjects:#"session.running", #"deviceAuthorized", nil];
}
// call following method from viewDidLoad
- (void)CreateAVCaptureSession
{
// Create the AVCaptureSession
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[self setSession:session];
// Check for device authorization
[self checkDeviceAuthorizationStatus];
// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
// Why not do all of this on the main queue?
// -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue so that the main queue isn't blocked (which keeps the UI responsive).
dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
dispatch_async(sessionQueue, ^{
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
NSError *error = nil;
AVCaptureDevice *videoDevice = [ViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionFront];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (error)
{
NSLog(#"%#", error);
}
if ([session canAddInput:videoDeviceInput])
{
[session addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
dispatch_async(dispatch_get_main_queue(), ^{
// Why are we dispatching this to the main queue?
// Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView can only be manipulated on main thread.
// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
});
}
/* AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (error)
{
NSLog(#"%#", error);
}
if ([session canAddInput:audioDeviceInput])
{
[session addInput:audioDeviceInput];
}
*/
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([session canAddOutput:movieFileOutput])
{
[session addOutput:movieFileOutput];
AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported])
[connection setEnablesVideoStabilizationWhenAvailable:YES];
[self setMovieFileOutput:movieFileOutput];
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([session canAddOutput:stillImageOutput])
{
[stillImageOutput setOutputSettings:#{AVVideoCodecKey : AVVideoCodecJPEG}];
[session addOutput:stillImageOutput];
[self setStillImageOutput:stillImageOutput];
}
});
}
// call method below from viewWilAppear
- (void)AVFoundationStartSession
{
dispatch_async([self sessionQueue], ^{
[self addObserver:self forKeyPath:#"sessionRunningAndDeviceAuthorized" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:SessionRunningAndDeviceAuthorizedContext];
[self addObserver:self forKeyPath:#"stillImageOutput.capturingStillImage" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:CapturingStillImageContext];
[self addObserver:self forKeyPath:#"movieFileOutput.recording" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:RecordingContext];
[[NSNotificationCenter defaultCenter] addObserver:self selector:#selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
__weak ViewController *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
ViewController *strongSelf = weakSelf;
dispatch_async([strongSelf sessionQueue], ^{
// Manually restarting the session since it must have been stopped due to an error.
[[strongSelf session] startRunning];
});
}]];
[[self session] startRunning];
});
}
// call method below from viewDidDisappear
- (void)AVFoundationStopSession
{
dispatch_async([self sessionQueue], ^{
[[self session] stopRunning];
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
[[NSNotificationCenter defaultCenter] removeObserver:[self runtimeErrorHandlingObserver]];
[self removeObserver:self forKeyPath:#"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];
[self removeObserver:self forKeyPath:#"stillImageOutput.capturingStillImage" context:CapturingStillImageContext];
[self removeObserver:self forKeyPath:#"movieFileOutput.recording" context:RecordingContext];
});
}
- (BOOL)prefersStatusBarHidden
{
return YES;
}
- (BOOL)shouldAutorotate
{
// Disable autorotation of the interface when recording is in progress.
return ![self lockInterfaceRotation];
}
- (NSUInteger)supportedInterfaceOrientations
{
return UIInterfaceOrientationMaskAll;
}
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
// [[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)toInterfaceOrientation];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if (context == CapturingStillImageContext)
{
BOOL isCapturingStillImage = [change[NSKeyValueChangeNewKey] boolValue];
if (isCapturingStillImage)
{
[self runStillImageCaptureAnimation];
}
}
else if (context == RecordingContext)
{
BOOL isRecording = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRecording)
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setTitle:NSLocalizedString(#"Stop", #"Recording button stop title") forState:UIControlStateNormal];
// [[self recordButton] setEnabled:YES];
}
else
{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setTitle:NSLocalizedString(#"Record", #"Recording button record title") forState:UIControlStateNormal];
// [[self recordButton] setEnabled:YES];
}
});
}
else if (context == SessionRunningAndDeviceAuthorizedContext)
{
BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRunning)
{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setEnabled:YES];
// [[self stillButton] setEnabled:YES];
}
else
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setEnabled:NO];
// [[self stillButton] setEnabled:NO];
}
});
}
else
{
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
#pragma mark Actions
- (IBAction)toggleMovieRecording:(id)sender
{
// [[self recordButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
if (![[self movieFileOutput] isRecording])
{
[self setLockInterfaceRotation:YES];
if ([[UIDevice currentDevice] isMultitaskingSupported])
{
// Setup background task. This is needed because the captureOutput:didFinishRecordingToOutputFileAtURL: callback is not received until AVCam returns to the foreground unless you request background execution time. This also ensures that there will be time to write the file to the assets library when AVCam is backgrounded. To conclude this background execution, -endBackgroundTask is called in -recorder:recordingDidFinishToOutputFileURL:error: after the recorded file has been saved.
[self setBackgroundRecordingID:[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil]];
}
// Update the orientation on the movie file output video connection before starting recording.
// [[[self movieFileOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];
// Turning OFF flash for video recording
[ViewController setFlashMode:AVCaptureFlashModeOff forDevice:[[self videoDeviceInput] device]];
// Start recording to a temporary file.
NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[#"movie" stringByAppendingPathExtension:#"mov"]];
[[self movieFileOutput] startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputFilePath] recordingDelegate:self];
}
else
{
[[self movieFileOutput] stopRecording];
}
});
}
- (IBAction)changeCamera:(id)sender
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setEnabled:NO];
// [[self stillButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *currentVideoDevice = [[self videoDeviceInput] device];
AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified;
AVCaptureDevicePosition currentPosition = [currentVideoDevice position];
switch (currentPosition)
{
case AVCaptureDevicePositionUnspecified:
preferredPosition = AVCaptureDevicePositionBack;
break;
case AVCaptureDevicePositionBack:
preferredPosition = AVCaptureDevicePositionFront;
break;
case AVCaptureDevicePositionFront:
preferredPosition = AVCaptureDevicePositionBack;
break;
}
AVCaptureDevice *videoDevice = [ViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
[[self session] beginConfiguration];
[[self session] removeInput:[self videoDeviceInput]];
if ([[self session] canAddInput:videoDeviceInput])
{
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice];
[ViewController setFlashMode:AVCaptureFlashModeAuto forDevice:videoDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
[[self session] addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
}
else
{
[[self session] addInput:[self videoDeviceInput]];
}
[[self session] commitConfiguration];
dispatch_async(dispatch_get_main_queue(), ^{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setEnabled:YES];
// [[self stillButton] setEnabled:YES];
});
});
}
- (IBAction)snapStillImage:(id)sender
{
dispatch_async([self sessionQueue], ^{
// Update the orientation on the still image output video connection before capturing.
// [[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];
// Flash set to Auto for Still Capture
[ViewController setFlashMode:AVCaptureFlashModeAuto forDevice:[[self videoDeviceInput] device]];
// Capture a still image.
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
// do something good with the saved image
[self saveImageToParse:image];
}
}];
});
}
- (IBAction)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer
{
// CGPoint devicePoint = [(AVCaptureVideoPreviewLayer *)[[self previewView] layer] captureDevicePointOfInterestForPoint:[gestureRecognizer locationInView:[gestureRecognizer view]]];
// [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
#pragma mark File Output Delegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
if (error)
NSLog(#"%#", error);
[self setLockInterfaceRotation:NO];
// Note the backgroundRecordingID for use in the ALAssetsLibrary completion handler to end the background task associated with this recording. This allows a new recording to be started, associated with a new UIBackgroundTaskIdentifier, once the movie file output's -isRecording is back to NO — which happens sometime after this method returns.
UIBackgroundTaskIdentifier backgroundRecordingID = [self backgroundRecordingID];
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
[[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
NSLog(#"%#", error);
[[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
if (backgroundRecordingID != UIBackgroundTaskInvalid)
[[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID];
}];
}
#pragma mark Device Configuration
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self videoDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(#"%#", error);
}
});
}
+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if ([device hasFlash] && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(#"%#", error);
}
}
}
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
#pragma mark UI
- (void)runStillImageCaptureAnimation
{
/*
dispatch_async(dispatch_get_main_queue(), ^{
[[[self previewView] layer] setOpacity:0.0];
[UIView animateWithDuration:.25 animations:^{
[[[self previewView] layer] setOpacity:1.0];
}];
});
*/
}
- (void)checkDeviceAuthorizationStatus
{
NSString *mediaType = AVMediaTypeVideo;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
if (granted)
{
//Granted access to mediaType
[self setDeviceAuthorized:YES];
}
else
{
//Not granted access to mediaType
dispatch_async(dispatch_get_main_queue(), ^{
[[[UIAlertView alloc] initWithTitle:#"AVCam!"
message:#"AVCam doesn't have permission to use Camera, please change privacy settings"
delegate:self
cancelButtonTitle:#"OK"
otherButtonTitles:nil] show];
[self setDeviceAuthorized:NO];
});
}
}];
}

Async image download: a UITableView's cellForRowAtIndexPath not always called

I'm using Apple's LazyTableImages sample to asynchronously download images for UITableViewCells. The main difference is that I save a copy of each image file to the Documents folder so that it doesn't have to be downloaded every time. The method below is called by the class responsible for downloading the image:
- (void)imageDidLoad:(NSIndexPath *)indexPath
{
AsyncImage *aImage = [imageDownloadsInProgress objectForKey:indexPath];
if (aImage != nil)
{
UITableViewCell *cell = [self.tableView cellForRowAtIndexPath:aImage.indexPathInTableView];
cell.imageView.image = aImage.image;
}
}
When the image is not on the device, the download works fine and the image is displayed. However, when the image is already on the device, the image is not displayed and the cell variable is nil.
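A likely factor, judging from the loadImage method shown below: when the file is already cached on disk, deliverImage is called synchronously, i.e. before tableView:cellForRowAtIndexPath: has returned the new cell, so looking the cell up by index path yields nil. A minimal sketch of one workaround, assuming the AsyncImage class exactly as posted, is to defer that one callback to the next main-queue turn; only the cached branch of loadImage changes:
if ([[NSFileManager defaultManager] fileExistsAtPath:self.fileUrl])
{
    self.image = [UIImage imageWithContentsOfFile:self.fileUrl];
    // Deliver on the next run-loop pass so the table view has already handed
    // back the cell before the delegate looks it up by index path.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self deliverImage];
    });
}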
The code for the AsyncImage class is:
//
// AsyncImgView.m
// BelfastChamberOfCommerce
//
// Created by markpirvine on 23/06/2011.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "AsyncImage.h"
#import "AppManager.h"
#define kAppIconHeight 48
@interface AsyncImage ()
@property (nonatomic, retain) NSString *fileUrl;
@end
@implementation AsyncImage
@synthesize imageUrl, fileUrl;
@synthesize image;
@synthesize indexPathInTableView;
@synthesize delegate;
- (void)dealloc
{
[imageUrl release];
[fileUrl release];
[image release];
[indexPathInTableView release];
[connection cancel];
[connection release];
[data release];
[super dealloc];
}
- (void)loadImage
{
if (connection != nil)
{
[connection release];
}
if (data != nil)
{
[data release];
}
if([self.imageUrl length] > 0)
{
self.fileUrl = [[[AppManager sharedInstance] getCacheLocation] stringByAppendingPathComponent:[[self.imageUrl componentsSeparatedByString:@"/"] lastObject]];
if([[NSFileManager defaultManager] fileExistsAtPath:self.fileUrl] == YES)
{
self.image = [UIImage imageWithContentsOfFile:self.fileUrl];
[self deliverImage];
}
else
{
NSURLRequest* request = [NSURLRequest requestWithURL:[NSURL URLWithString:self.imageUrl] cachePolicy:NSURLRequestUseProtocolCachePolicy timeoutInterval:60.0];
connection = [[NSURLConnection alloc] initWithRequest:request delegate:self];
}
}
}
- (void)connection:(NSURLConnection *)theConnection didReceiveData:(NSData *)incrementalData
{
if (data == nil)
{
data = [[NSMutableData alloc] initWithCapacity:2048];
}
[data appendData:incrementalData];
}
- (void)connectionDidFinishLoading:(NSURLConnection*)theConnection
{
[connection release];
connection = nil;
UIImage *iTmp = [[UIImage alloc] initWithData:data];
if (iTmp.size.width != kAppIconHeight && iTmp.size.height != kAppIconHeight)
{
CGSize itemSize = CGSizeMake(kAppIconHeight, kAppIconHeight);
UIGraphicsBeginImageContext(itemSize);
CGRect imageRect = CGRectMake(0.0, 0.0, itemSize.width, itemSize.height);
[iTmp drawInRect:imageRect];
self.image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
}
else
{
self.image = iTmp;
}
[UIImageJPEGRepresentation(self.image, 1.0) writeToFile:self.fileUrl atomically:YES];
[iTmp release];
[data release];
data = nil;
[self deliverImage];
}
- (void)deliverImage
{
[delegate imageDidLoad:self.indexPathInTableView];
}
- (void)cancelDownload
{
[connection cancel];
connection = nil;
data = nil;
}
@end
Any help would be greatly appreciated.
I suggest using one of the open-source classes for asynchronous image downloading that have built-in caching support. The one I'm using is cached image for iOS.
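For illustration only: with SDWebImage, a different open-source option than the one mentioned above, the per-cell code typically collapses to a single call through its UIImageView+WebCache category. The method name below is from recent SDWebImage versions, so verify it against the version you install, and imageUrlString and "placeholder.png" are hypothetical stand-ins for your own data:
#import <SDWebImage/UIImageView+WebCache.h>

// The library checks its in-memory and on-disk caches first and only goes to
// the network when the image has not been downloaded before.
[cell.imageView sd_setImageWithURL:[NSURL URLWithString:imageUrlString]
                  placeholderImage:[UIImage imageNamed:@"placeholder.png"]];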