Why is my Progress HUD not displaying as expected - Objective-C

I'm trying to develop a 'passwordless' registration form using a QR code. The background processing works (it can authenticate the user), but nothing happens visually until the registration is complete and the user is redirected to the app's main ViewController. A progress dialog with a spinner is supposed to appear during the registration process, but it doesn't. If I manually input the credentials and press 'authenticate', everything works as expected; it just doesn't happen after scanning the QR code.
Please see the code below:
@interface ViewControllerRegister () {
    JGProgressHUD *HUD;
    NSString *regid;
    NSString *regpin;
    NSMutableDictionary *dicto;
}
// OPEN QR CODE READER VC
- (IBAction)scanAction:(id)sender
{
    if ([QRCodeReader supportsMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]]) {
        static QRCodeReaderViewController *vc = nil;
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            QRCodeReader *reader = [QRCodeReader readerWithMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];
            vc = [QRCodeReaderViewController readerWithCancelButtonTitle:@"Cancel" codeReader:reader startScanningAtLoad:YES];
            vc.modalPresentationStyle = UIModalPresentationFormSheet;
        });
        vc.delegate = self;
        [vc setCompletionWithBlock:^(NSString *resultAsString) {
            // NSLog(@"Completion with result: %@", resultAsString);
        }];
        [self presentViewController:vc animated:YES completion:NULL];
    }
    else {
        [self showAlert:@"Reader not supported by the current device"];
    }
}
// PARSE THE RETURNED QR CODE DATA
#pragma mark - QRCodeReader Delegate Methods
- (void)reader:(QRCodeReaderViewController *)reader didScanResult:(NSString *)result
{
    [self dismissViewControllerAnimated:YES completion:^{
        NSURL *stringfromscan = [NSURL URLWithString:result];
        dicto = [NSMutableDictionary new];
        NSURLComponents *components = [NSURLComponents componentsWithURL:stringfromscan resolvingAgainstBaseURL:NO];
        NSArray *queryItems = [components queryItems];
        for (NSURLQueryItem *item in queryItems)
        {
            [dicto setObject:[item value] forKey:[item name]];
        }
        if ([result rangeOfString:@"eloq"].location == NSNotFound && [result rangeOfString:@"unique"].location == NSNotFound) {
            [self showAlert:@"Looks like you've scanned an incorrect QR code"];
        } else {
            self.TextViewUniqueID.text = [dicto objectForKey:@"uniqueid"];
            self.TextViewPIN.text = [dicto objectForKey:@"password"];
            double delayInSeconds = 1.0;
            dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC));
            dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
                [self authent];
            });
        }
    }];
}
// CREDENTIAL VALIDATION OF SORTS
- (void)authent {
    NSLog(@"UNIQUEID %@", self.TextViewUniqueID.text);
    NSLog(@"PIN %@", self.TextViewPIN.text);
    // [self resignFirstResponder];
    NSUInteger uniqueID = [self.TextViewUniqueID.text length];
    NSUInteger PIN = [self.TextViewPIN.text length];
    if (uniqueID < 7 || PIN < 6) {
        UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Error!"
                                                                       message:@"Your UniqueID or PIN is too short."
                                                                preferredStyle:UIAlertControllerStyleAlert];
        UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"Try Again"
                                                                style:UIAlertActionStyleDefault
                                                              handler:^(UIAlertAction *action) {}];
        [alert addAction:defaultAction];
        [self presentViewController:alert animated:YES completion:nil];
        [self clear];
    } else {
        /*
         HUD WILL NOT DISPLAY AFTER READING QR CODE
         */
        HUD = [JGProgressHUD progressHUDWithStyle:JGProgressHUDStyleDark];
        HUD.textLabel.text = @"Authenticating";
        [HUD showInView:self.view];
        [self PostJson:self.TextViewUniqueID.text :self.TextViewPIN.text];
    }
}
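A possible angle worth checking (an assumption, since PostJson isn't shown): if PostJson performs its request synchronously on the main thread, the HUD never gets a run-loop pass to draw before the redirect happens. A sketch of the else branch with the blocking call moved off the main queue:
HUD = [JGProgressHUD progressHUDWithStyle:JGProgressHUDStyleDark];
HUD.textLabel.text = @"Authenticating";
[HUD showInView:self.view];
// hypothetical rework: run the (assumed synchronous) network call in the background
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    [self PostJson:self.TextViewUniqueID.text :self.TextViewPIN.text];
    dispatch_async(dispatch_get_main_queue(), ^{
        [HUD dismiss]; // UI work, including the redirect, belongs back on main
    });
});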

Related

This item cannot be shared. Please select a different item when sharing on WhatsApp

On first install of the app, sharing an image does not work; it shows the alert:
This item cannot be shared. Please select a different item.
When I try a second time, the image shares successfully. What is the issue?
- (void)TwitterAndmanyMore
{
    UIImage *image = _imageView.image;
    NSArray *items = @[image];
    UIActivityViewController *controller = [[UIActivityViewController alloc] initWithActivityItems:items applicationActivities:nil];
    // and present it
    [self presentActivityController:controller];
}
- (void)presentActivityController:(UIActivityViewController *)controller {
    // for iPad: make the presentation a Popover
    controller.modalPresentationStyle = UIModalPresentationPopover;
    UIPopoverPresentationController *popController = [controller popoverPresentationController];
    popController.permittedArrowDirections = UIPopoverArrowDirectionAny;
    popController.barButtonItem = self.navigationItem.leftBarButtonItem;
    // set the completion handler before presenting
    controller.completionWithItemsHandler = ^(NSString *activityType,
                                              BOOL completed,
                                              NSArray *returnedItems,
                                              NSError *error)
    {
        // react to the completion
        NSLog(@"Returned items: %@", returnedItems);
        if (completed)
        {
            [self showContine];
            //[self viewWillAppear:YES];
            // user shared an item
            NSLog(@"We used activity type %@", activityType);
        } else {
            // user cancelled
            NSLog(@"We didn't want to share anything after all.");
        }
        if (error) {
            NSLog(@"An error occurred: %@, %@", error.localizedDescription, error.localizedFailureReason);
        }
    };
    [self presentViewController:controller animated:YES completion:nil];
}
Use the code below for this issue; it works. It only adds the image to the activity items when the image actually holds data, which avoids handing the share sheet a nil or empty image on first launch:
- (void)TwitterAndmanyMore
{
    // UIImage *image = _imageView.image;
    // NSLog(@"Image Data %@", image);
    // NSArray *items = @[@"", image];
    tempImage = [UIImage imageNamed:@"Sample.jpg"];
    tempImage = _imageView.image;
    NSString *theMessage = @"";
    NSArray *items;
    CGImageRef cgref = [tempImage CGImage];
    CIImage *cim = [tempImage CIImage];
    if (cim != nil || cgref != NULL)
    {
        // the image actually holds data, so it is safe to share
        items = @[theMessage, tempImage];
    }
    else
    {
        items = @[theMessage];
    }
    NSLog(@"Image Data %@", items);
    // build an activity view controller
    UIActivityViewController *controller = [[UIActivityViewController alloc] initWithActivityItems:items applicationActivities:nil];
    // and present it
    [self presentActivityController:controller];
}
- (void)presentActivityController:(UIActivityViewController *)controller {
    // for iPad: make the presentation a Popover
    controller.modalPresentationStyle = UIModalPresentationPopover;
    UIPopoverPresentationController *popController = [controller popoverPresentationController];
    popController.permittedArrowDirections = UIPopoverArrowDirectionAny;
    popController.barButtonItem = self.navigationItem.leftBarButtonItem;
    // set the completion handler before presenting
    controller.completionWithItemsHandler = ^(NSString *activityType,
                                              BOOL completed,
                                              NSArray *returnedItems,
                                              NSError *error)
    {
        // react to the completion
        NSLog(@"Returned items: %@", returnedItems);
        if (completed)
        {
            [self showContine];
            //[self viewWillAppear:YES];
            // user shared an item
            NSLog(@"We used activity type %@", activityType);
        } else {
            // user cancelled
            NSLog(@"We didn't want to share anything after all.");
        }
        if (error) {
            NSLog(@"An error occurred: %@, %@", error.localizedDescription, error.localizedFailureReason);
        }
    };
    [self presentViewController:controller animated:YES completion:nil];
}

CNContactPickerViewController gets error (CNPropertyNotFetchedException) - Objective-C

I am calling the Contact Picker View like so:
- (void)openContacts {
    CNContactPickerViewController *picker = [[CNContactPickerViewController alloc] init];
    picker.delegate = self;
    picker.displayedPropertyKeys = @[CNContactGivenNameKey];
    [self presentViewController:picker animated:YES completion:nil];
}
And I am handling the selections with this delegate method:
- (void)contactPicker:(CNContactPickerViewController *)picker didSelectContacts:(NSArray<CNContact *> *)contacts {
    for (CNContact *contact in contacts) {
        NSLog(@"%@", contact);
        NSString *phone;
        NSString *mobilePhone;
        for (CNLabeledValue *label in contact.phoneNumbers) {
            if ([label.label isEqualToString:CNLabelPhoneNumberMobile] || [label.label isEqualToString:CNLabelPhoneNumberiPhone]) {
                mobilePhone = [label.value stringValue];
            } else if ([label.label isEqualToString:CNLabelPhoneNumberMain]) {
                phone = [label.value stringValue];
            }
        }
    }
}
The NSLog outputs the following for contact: <CNContact: 0x78e56e50: identifier=1861C9BD-143B-4E93-8475-F9079F5D6192, givenName=Kate, familyName=Bell, organizationName=Creative Consulting, phoneNumbers=(not fetched), emailAddresses=(not fetched), postalAddresses=(not fetched)>
This only occurs on the simulator and on an iOS 9 iPad. I don't get the error with my iOS 10 iPhone.
You should first ask permission for this. Then you can show the picker:
CNContactStore *store = [[CNContactStore alloc] init];
[store requestAccessForEntityType:CNEntityTypeContacts completionHandler:^(BOOL granted, NSError * _Nullable error)
{
    // the completion handler may run on a background queue; present on main
    dispatch_async(dispatch_get_main_queue(), ^{
        CNContactPickerViewController *picker = [[CNContactPickerViewController alloc] init];
        picker.delegate = self;
        picker.displayedPropertyKeys = @[CNContactGivenNameKey];
        [self presentViewController:picker animated:YES completion:nil];
    });
}];
In this case you'll always show the picker, regardless of the user's choice. So you should check the authorization status first, like this:
if ([CNContactStore authorizationStatusForEntityType:CNEntityTypeContacts] == CNAuthorizationStatusAuthorized)
{
    // access has been granted, so it is safe to present the picker here
    [self openContacts];
}
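As for the CNPropertyNotFetchedException in the title: properties logged as "(not fetched)" will throw that exception if accessed directly. A minimal sketch (using the standard CNContactStore API; the method body is illustrative) that re-fetches a selected contact with the keys the delegate actually reads:
- (void)contactPicker:(CNContactPickerViewController *)picker didSelectContacts:(NSArray<CNContact *> *)contacts {
    CNContactStore *store = [[CNContactStore alloc] init];
    for (CNContact *contact in contacts) {
        if (![contact isKeyAvailable:CNContactPhoneNumbersKey]) {
            // re-fetch the contact with the keys we intend to read
            NSError *error = nil;
            CNContact *full = [store unifiedContactWithIdentifier:contact.identifier
                                                      keysToFetch:@[CNContactPhoneNumbersKey]
                                                            error:&error];
            if (full) {
                NSLog(@"Phone numbers: %@", full.phoneNumbers);
            }
        }
    }
}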

Check if user shares with Twitter

I am using a UIActivityViewController to show my share dialogue.
How can I check whether the user is sharing with Twitter/Facebook/etc.?
UIActivityViewController *activityViewController = [[UIActivityViewController alloc] initWithActivityItems:itemsToShare applicationActivities:nil];
UIPopoverPresentationController *presentationController = [activityViewController popoverPresentationController];
[self presentViewController:activityViewController animated:YES completion:nil];
I want to customize the text for sharing with different services, so this would be really helpful.
You can use setCompletionWithItemsHandler like this:
UPDATE: remove the if (completed) check
UIActivityViewController *activityViewController = [[UIActivityViewController alloc] initWithActivityItems:itemsToShare applicationActivities:nil];
[activityViewController setCompletionWithItemsHandler:^(NSString *activityType, BOOL completed, NSArray *returnedItems, NSError *activityError)
{
//if (completed)
//{
if ([activityType isEqualToString:UIActivityTypeMail]) {
//code for email
} else if ([activityType isEqualToString:UIActivityTypeMessage]) {
//code for iMessage
} else if ([activityType isEqualToString:UIActivityTypePostToFacebook]) {
//code for Facebook
} else if ([activityType isEqualToString:UIActivityTypePostToTwitter]) {
//code for twitter
}
//}
}];
UIPopoverPresentationController *presentationController = [activityViewController popoverPresentationController];
[self presentViewController:activityViewController animated:YES completion:nil];
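Note that the completion handler only fires after the share sheet is dismissed, so it cannot change the text beforehand. If the goal is different text per service, a UIActivityItemSource is the usual route. A minimal sketch (the class name is made up):
@interface ShareTextSource : NSObject <UIActivityItemSource>
@end

@implementation ShareTextSource
- (id)activityViewControllerPlaceholderItem:(UIActivityViewController *)activityViewController {
    return @""; // placeholder of the same type as the real item
}
- (id)activityViewController:(UIActivityViewController *)activityViewController itemForActivityType:(NSString *)activityType {
    if ([activityType isEqualToString:UIActivityTypePostToTwitter]) {
        return @"Short, tweet-sized text";
    } else if ([activityType isEqualToString:UIActivityTypeMail]) {
        return @"Longer text for email";
    }
    return @"Default share text";
}
@end
Then pass the source in place of the plain string item:
UIActivityViewController *avc = [[UIActivityViewController alloc] initWithActivityItems:@[[ShareTextSource new]] applicationActivities:nil];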

Take a picture in iOS without UIImagePicker and without preview it

Do you know any way/method to take a photo in iOS and save it to the camera roll with a single button press, without showing any preview?
I already know how to show the camera view, but it shows a preview of the image and the user needs to tap the take-photo button to take the photo.
In a few words: the user taps the button and the picture is taken, without previews or double checks to take/save the photo.
I already found the takePicture method of the UIImagePickerController class: http://developer.apple.com/library/ios/documentation/uikit/reference/UIImagePickerController_Class/UIImagePickerController/UIImagePickerController.html#//apple_ref/occ/instm/UIImagePickerController/takePicture
Set the showsCameraControls property to NO.
poc = [[UIImagePickerController alloc] init];
[poc setTitle:@"Take a photo."];
[poc setDelegate:self];
[poc setSourceType:UIImagePickerControllerSourceTypeCamera];
poc.showsCameraControls = NO;
You also have to add your own controls as a custom view on top of poc.view. But that is very simple, and you can add your own UI style that way.
You receive the image data as usual within imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:.
To take the photo, you call
[poc takePicture];
from your custom button.
Hope that works for you.
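If it helps, a minimal sketch of the matching delegate callback (the standard UIImagePickerControllerDelegate method) that saves the shot straight to the camera roll with no preview step:
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
    UIImage *image = info[UIImagePickerControllerOriginalImage];
    // writes to the Saved Photos album; no preview or confirmation step
    UIImageWriteToSavedPhotosAlbum(image, nil, nil, NULL);
    [picker dismissViewControllerAnimated:YES completion:nil];
}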
Assuming you want a point-and-shoot method, you can create an AVCaptureSession and just call the UIImageWriteToSavedPhotosAlbum function.
Here is a link that goes into that exact process: http://www.musicalgeometry.com/?p=1297
It's also worth noting that your users need to have given the app access to their camera roll or you may experience issues saving the images.
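A compressed sketch of that approach (assuming a configured, running AVCaptureSession; self.stillImageOutput is a placeholder name for an AVCaptureStillImageOutput already added to the session):
- (void)captureAndSave {
    AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection
        completionHandler:^(CMSampleBufferRef buffer, NSError *error) {
            if (!buffer) { return; }
            NSData *data = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:buffer];
            UIImage *image = [[UIImage alloc] initWithData:data];
            // straight to the camera roll, no preview shown
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, NULL);
        }];
}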
You need to design your own custom preview according to your size; when the capture button is pressed, call the buttonPressed method and do the stuff you want:
- (void)buttonPressed:(UIButton *)sender {
    NSLog(@"Capture clicked");
    [self.imagePicker takePicture];
    // [NSTimer scheduledTimerWithTimeInterval:3.0f target:self
    //     selector:@selector(timerFired:) userInfo:nil repeats:NO];
}
The following code takes a photo without showing a preview screen. When I tried the accepted answer, which uses UIImagePickerController, the preview screen showed and then auto-disappeared. With the code below, the user taps the 'takePhoto' button and the device takes the photo with zero change to the UI (in my app, I add a green check mark next to the take-photo button). The code below is from Apple's AVCam sample, https://developer.apple.com/LIBRARY/IOS/samplecode/AVCam/Introduction/Intro.html, with the 'extra functions' (those that do not relate to taking a still photo) commented out. Thank you incmiko for suggesting this code in your answer 'iOS take photo from camera without modalViewController'.
Updated code, 26 Mar 2015:
To trigger the photo capture:
[self snapStillImage:sender];
In the .h file:
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
// include code below in header file, after #import and before @interface
// AVFoundation copy-paste code
static void * CapturingStillImageContext = &CapturingStillImageContext;
static void * RecordingContext = &RecordingContext;
static void * SessionRunningAndDeviceAuthorizedContext = &SessionRunningAndDeviceAuthorizedContext;
// AVFoundation, include code below after @interface
// AVF - Session management.
@property (nonatomic) dispatch_queue_t sessionQueue; // Communicate with the session and other session objects on this queue.
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
// AVF - Utilities.
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
@property (nonatomic, getter = isDeviceAuthorized) BOOL deviceAuthorized;
@property (nonatomic, readonly, getter = isSessionRunningAndDeviceAuthorized) BOOL sessionRunningAndDeviceAuthorized;
@property (nonatomic) BOOL lockInterfaceRotation;
@property (nonatomic) id runtimeErrorHandlingObserver;
In the .m file:
#pragma mark - AV Foundation
- (BOOL)isSessionRunningAndDeviceAuthorized
{
return [[self session] isRunning] && [self isDeviceAuthorized];
}
+ (NSSet *)keyPathsForValuesAffectingSessionRunningAndDeviceAuthorized
{
return [NSSet setWithObjects:@"session.running", @"deviceAuthorized", nil];
}
// call the following method from viewDidLoad
- (void)CreateAVCaptureSession
{
// Create the AVCaptureSession
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[self setSession:session];
// Check for device authorization
[self checkDeviceAuthorizationStatus];
// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
// Why not do all of this on the main queue?
// -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue so that the main queue isn't blocked (which keeps the UI responsive).
dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
dispatch_async(sessionQueue, ^{
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
NSError *error = nil;
AVCaptureDevice *videoDevice = [ViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionFront];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
if ([session canAddInput:videoDeviceInput])
{
[session addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
dispatch_async(dispatch_get_main_queue(), ^{
// Why are we dispatching this to the main queue?
// Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView can only be manipulated on main thread.
// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
});
}
/* AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
if ([session canAddInput:audioDeviceInput])
{
[session addInput:audioDeviceInput];
}
*/
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([session canAddOutput:movieFileOutput])
{
[session addOutput:movieFileOutput];
AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported])
[connection setEnablesVideoStabilizationWhenAvailable:YES];
[self setMovieFileOutput:movieFileOutput];
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([session canAddOutput:stillImageOutput])
{
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[session addOutput:stillImageOutput];
[self setStillImageOutput:stillImageOutput];
}
});
}
// call the method below from viewWillAppear
- (void)AVFoundationStartSession
{
dispatch_async([self sessionQueue], ^{
[self addObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:SessionRunningAndDeviceAuthorizedContext];
[self addObserver:self forKeyPath:@"stillImageOutput.capturingStillImage" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:CapturingStillImageContext];
[self addObserver:self forKeyPath:@"movieFileOutput.recording" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:RecordingContext];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
__weak ViewController *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
ViewController *strongSelf = weakSelf;
dispatch_async([strongSelf sessionQueue], ^{
// Manually restarting the session since it must have been stopped due to an error.
[[strongSelf session] startRunning];
});
}]];
[[self session] startRunning];
});
}
// call the method below from viewDidDisappear
- (void)AVFoundationStopSession
{
dispatch_async([self sessionQueue], ^{
[[self session] stopRunning];
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
[[NSNotificationCenter defaultCenter] removeObserver:[self runtimeErrorHandlingObserver]];
[self removeObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];
[self removeObserver:self forKeyPath:@"stillImageOutput.capturingStillImage" context:CapturingStillImageContext];
[self removeObserver:self forKeyPath:@"movieFileOutput.recording" context:RecordingContext];
});
}
- (BOOL)prefersStatusBarHidden
{
return YES;
}
- (BOOL)shouldAutorotate
{
// Disable autorotation of the interface when recording is in progress.
return ![self lockInterfaceRotation];
}
- (NSUInteger)supportedInterfaceOrientations
{
return UIInterfaceOrientationMaskAll;
}
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
// [[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)toInterfaceOrientation];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if (context == CapturingStillImageContext)
{
BOOL isCapturingStillImage = [change[NSKeyValueChangeNewKey] boolValue];
if (isCapturingStillImage)
{
[self runStillImageCaptureAnimation];
}
}
else if (context == RecordingContext)
{
BOOL isRecording = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRecording)
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setTitle:NSLocalizedString(#"Stop", #"Recording button stop title") forState:UIControlStateNormal];
// [[self recordButton] setEnabled:YES];
}
else
{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setTitle:NSLocalizedString(#"Record", #"Recording button record title") forState:UIControlStateNormal];
// [[self recordButton] setEnabled:YES];
}
});
}
else if (context == SessionRunningAndDeviceAuthorizedContext)
{
BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRunning)
{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setEnabled:YES];
// [[self stillButton] setEnabled:YES];
}
else
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setEnabled:NO];
// [[self stillButton] setEnabled:NO];
}
});
}
else
{
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
#pragma mark Actions
- (IBAction)toggleMovieRecording:(id)sender
{
// [[self recordButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
if (![[self movieFileOutput] isRecording])
{
[self setLockInterfaceRotation:YES];
if ([[UIDevice currentDevice] isMultitaskingSupported])
{
// Setup background task. This is needed because the captureOutput:didFinishRecordingToOutputFileAtURL: callback is not received until AVCam returns to the foreground unless you request background execution time. This also ensures that there will be time to write the file to the assets library when AVCam is backgrounded. To conclude this background execution, -endBackgroundTask is called in -recorder:recordingDidFinishToOutputFileURL:error: after the recorded file has been saved.
[self setBackgroundRecordingID:[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil]];
}
// Update the orientation on the movie file output video connection before starting recording.
// [[[self movieFileOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];
// Turning OFF flash for video recording
[ViewController setFlashMode:AVCaptureFlashModeOff forDevice:[[self videoDeviceInput] device]];
// Start recording to a temporary file.
NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]];
[[self movieFileOutput] startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputFilePath] recordingDelegate:self];
}
else
{
[[self movieFileOutput] stopRecording];
}
});
}
- (IBAction)changeCamera:(id)sender
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setEnabled:NO];
// [[self stillButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *currentVideoDevice = [[self videoDeviceInput] device];
AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified;
AVCaptureDevicePosition currentPosition = [currentVideoDevice position];
switch (currentPosition)
{
case AVCaptureDevicePositionUnspecified:
preferredPosition = AVCaptureDevicePositionBack;
break;
case AVCaptureDevicePositionBack:
preferredPosition = AVCaptureDevicePositionFront;
break;
case AVCaptureDevicePositionFront:
preferredPosition = AVCaptureDevicePositionBack;
break;
}
AVCaptureDevice *videoDevice = [ViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
[[self session] beginConfiguration];
[[self session] removeInput:[self videoDeviceInput]];
if ([[self session] canAddInput:videoDeviceInput])
{
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice];
[ViewController setFlashMode:AVCaptureFlashModeAuto forDevice:videoDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
[[self session] addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
}
else
{
[[self session] addInput:[self videoDeviceInput]];
}
[[self session] commitConfiguration];
dispatch_async(dispatch_get_main_queue(), ^{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setEnabled:YES];
// [[self stillButton] setEnabled:YES];
});
});
}
- (IBAction)snapStillImage:(id)sender
{
dispatch_async([self sessionQueue], ^{
// Update the orientation on the still image output video connection before capturing.
// [[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];
// Flash set to Auto for Still Capture
[ViewController setFlashMode:AVCaptureFlashModeAuto forDevice:[[self videoDeviceInput] device]];
// Capture a still image.
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
// do something good with the saved image
[self saveImageToParse:image];
}
}];
});
}
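// Note (not part of the Apple sample): saveImageToParse: above is specific to
// this app. To match the question's goal of saving to the camera roll, the
// completion handler could instead call:
//     UIImageWriteToSavedPhotosAlbum(image, nil, nil, NULL);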
- (IBAction)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer
{
// CGPoint devicePoint = [(AVCaptureVideoPreviewLayer *)[[self previewView] layer] captureDevicePointOfInterestForPoint:[gestureRecognizer locationInView:[gestureRecognizer view]]];
// [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
#pragma mark File Output Delegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
if (error)
NSLog(@"%@", error);
[self setLockInterfaceRotation:NO];
// Note the backgroundRecordingID for use in the ALAssetsLibrary completion handler to end the background task associated with this recording. This allows a new recording to be started, associated with a new UIBackgroundTaskIdentifier, once the movie file output's -isRecording is back to NO — which happens sometime after this method returns.
UIBackgroundTaskIdentifier backgroundRecordingID = [self backgroundRecordingID];
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
[[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
NSLog(@"%@", error);
[[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
if (backgroundRecordingID != UIBackgroundTaskInvalid)
[[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID];
}];
}
#pragma mark Device Configuration
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self videoDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(#"%#", error);
}
});
}
+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if ([device hasFlash] && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(#"%#", error);
}
}
}
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
#pragma mark UI
- (void)runStillImageCaptureAnimation
{
/*
dispatch_async(dispatch_get_main_queue(), ^{
[[[self previewView] layer] setOpacity:0.0];
[UIView animateWithDuration:.25 animations:^{
[[[self previewView] layer] setOpacity:1.0];
}];
});
*/
}
- (void)checkDeviceAuthorizationStatus
{
NSString *mediaType = AVMediaTypeVideo;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
if (granted)
{
//Granted access to mediaType
[self setDeviceAuthorized:YES];
}
else
{
//Not granted access to mediaType
dispatch_async(dispatch_get_main_queue(), ^{
[[[UIAlertView alloc] initWithTitle:@"AVCam!"
                            message:@"AVCam doesn't have permission to use the Camera, please change privacy settings"
                           delegate:self
                  cancelButtonTitle:@"OK"
                  otherButtonTitles:nil] show];
[self setDeviceAuthorized:NO];
});
}
}];
}

Twitter number of followers iOS 6

Hi, how can I get the number of followers of the current Twitter user in iOS 6? TWRequest is deprecated, so how can I use the new Social framework to get the number of followers?
First, you need to authenticate your request (get permission).
Second, follow these steps:
1. Download the FHSTwitterEngine Twitter library.
2. Add the folder "FHSTwitterEngine" to your project and #import "FHSTwitterEngine.h".
3. Add SystemConfiguration.framework to your project.
Usage: 1. In viewDidLoad, add the following code:
UIButton *logIn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
logIn.frame = CGRectMake(100, 100, 100, 100);
[logIn setTitle:@"Login" forState:UIControlStateNormal];
[logIn addTarget:self action:@selector(showLoginWindow:) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:logIn];
[[FHSTwitterEngine sharedEngine] permanentlySetConsumerKey:@"<consumer_key>" andSecret:@"<consumer_secret>"];
[[FHSTwitterEngine sharedEngine] setDelegate:self];
And don't forget to adopt the FHSTwitterEngineAccessTokenDelegate protocol.
You need to get permission for your request with the following method, which will present the login window:
- (void)showLoginWindow:(id)sender {
    [[FHSTwitterEngine sharedEngine] showOAuthLoginControllerFromViewController:self withCompletion:^(BOOL success) {
        NSLog(success ? @"L0L success" : @"O noes!!! Loggen faylur!!!");
    }];
}
When the login window is presented, enter your Twitter username and password to authenticate your request.
Add the following methods to your code:
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    [[FHSTwitterEngine sharedEngine] loadAccessToken];
    NSString *username = [[FHSTwitterEngine sharedEngine] loggedInUsername]; // self.engine.loggedInUsername;
    if (username.length > 0) {
        lbl.text = [NSString stringWithFormat:@"Logged in as %@", username];
        [self listResults];
    } else {
        lbl.text = @"You are not logged in.";
    }
}
- (void)storeAccessToken:(NSString *)accessToken {
    [[NSUserDefaults standardUserDefaults] setObject:accessToken forKey:@"SavedAccessHTTPBody"];
}
- (NSString *)loadAccessToken {
    return [[NSUserDefaults standardUserDefaults] objectForKey:@"SavedAccessHTTPBody"];
}
4. Now you are ready to make your request. The following method will fetch your followers' IDs, add them to an NSMutableArray, and then get the count from the array:
- (void)listFriends:(id)sender {
    NSMutableArray *arr = [[NSMutableArray alloc] init];
    [_tweetField resignFirstResponder];
    dispatch_async(GCDBackgroundThread, ^{
        @autoreleasepool {
            [UIApplication sharedApplication].networkActivityIndicatorVisible = YES;
            // NSLog(@"Friends' IDs: %@", [[FHSTwitterEngine sharedEngine] getFriendsIDs]);
            dict = [[FHSTwitterEngine sharedEngine] getFollowersIDs];
            for (NSDictionary *item in [dict objectForKey:@"ids"]) {
                [arr addObject:item]; // add each ID, not the whole array
            }
            dispatch_sync(GCDMainThread, ^{
                @autoreleasepool {
                    UIAlertView *av = [[UIAlertView alloc] initWithTitle:@"Complete!" message:@"Your list of followers has been fetched" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
                    [av show];
                    [UIApplication sharedApplication].networkActivityIndicatorVisible = NO;
                    NSLog(@"====> %lu", (unsigned long)[arr count]);
                }
            });
        }
    });
}
I tested this code and it's working perfectly ^_^.
Use this code:
- (IBAction)listFriends:(id)sender {
    NSMutableArray *arr = [[NSMutableArray alloc] init];
    dict = [[NSMutableDictionary alloc] init];
    [_tweetField resignFirstResponder];
    dispatch_async(GCDBackgroundThread, ^{
        @autoreleasepool {
            [UIApplication sharedApplication].networkActivityIndicatorVisible = YES;
            // to get friends' IDs.......
            NSLog(@"Friends' IDs: %@", [[FHSTwitterEngine sharedEngine] getFriendsIDs]);
            /* dict = [[FHSTwitterEngine sharedEngine] getFollowersIDs];
            for (NSDictionary *item in [dict objectForKey:@"ids"]) {
                [arr addObject:item];
            } */
            // To get friends' names...
            // NSLog(@"Friends_Name: %@", [[FHSTwitterEngine sharedEngine] listFriendsForUser:_loggedInUserLabel.text isID:NO withCursor:@"-1"]);
            dict = [[FHSTwitterEngine sharedEngine] listFriendsForUser:_loggedInUserLabel.text isID:NO withCursor:@"-1"];
            NSLog(@"====> %@", [dict objectForKey:@"users"]);
            NSMutableArray *array = [dict objectForKey:@"users"];
            for (int i = 0; i < [array count]; i++)
            {
                NSLog(@"names: %@", [[array objectAtIndex:i] objectForKey:@"name"]);
            }
            // NSLog(@"Friends_Name: %@", [[FHSTwitterEngine sharedEngine] getMentionsTimelineWithCount:1000 sinceID:nil maxID:nil]);
            dispatch_sync(GCDMainThread, ^{
                @autoreleasepool {
                    UIAlertView *av = [[UIAlertView alloc] initWithTitle:@"Complete!" message:@"Your list of followers has been fetched" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
                    [av show];
                    [UIApplication sharedApplication].networkActivityIndicatorVisible = NO;
                    // NSLog(@"====> %d", [arr count]);
                }
            });
        }
    });
}
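Since the question asks specifically about the Social framework, here is a minimal sketch of that route as well (assumptions: the device has a Twitter account configured in Settings, the target links Social.framework and Accounts.framework, and followers_count comes from Twitter's users/show endpoint):
#import <Accounts/Accounts.h>
#import <Social/Social.h>

- (void)fetchFollowerCount {
    ACAccountStore *store = [[ACAccountStore alloc] init];
    ACAccountType *twitterType = [store accountTypeWithAccountTypeIdentifier:ACAccountTypeIdentifierTwitter];
    [store requestAccessToAccountsWithType:twitterType options:nil completion:^(BOOL granted, NSError *error) {
        if (!granted) { return; } // user refused access
        ACAccount *account = [[store accountsWithAccountType:twitterType] firstObject];
        if (!account) { return; } // no Twitter account configured
        NSURL *url = [NSURL URLWithString:@"https://api.twitter.com/1.1/users/show.json"];
        SLRequest *request = [SLRequest requestForServiceType:SLServiceTypeTwitter
                                                requestMethod:SLRequestMethodGET
                                                          URL:url
                                                   parameters:@{@"screen_name" : account.username}];
        request.account = account;
        [request performRequestWithHandler:^(NSData *responseData, NSHTTPURLResponse *urlResponse, NSError *requestError) {
            if (!responseData) { return; }
            NSDictionary *user = [NSJSONSerialization JSONObjectWithData:responseData options:0 error:nil];
            // followers_count is one of the fields in the users/show payload
            NSLog(@"Followers: %@", user[@"followers_count"]);
        }];
    }];
}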