How to get a thumbnail from a video URL in iOS - Objective-C

I have tried the code below. It returns an image only some of the time, not on every call to this method.
Is there any solution?
-(UIImage *)thumbnailFromVideoAtURL:(NSString *)urlstr
{
NSURL *url = [NSURL URLWithString:urlstr];
AVAsset *asset = [AVAsset assetWithURL:url];
// Get thumbnail at the very start of the video
CMTime thumbnailTime = [asset duration];
thumbnailTime.value = 0;
// Get image from the video at the given time
AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
CGImageRef imageRef = [imageGenerator copyCGImageAtTime:thumbnailTime actualTime:NULL error:NULL];
UIImage *thumbnail = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
return thumbnail;
}
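One likely cause of the intermittent result is that copyCGImageAtTime:actualTime:error: runs synchronously, and for a remote URL the asset data may not be available yet when it is called. As a sketch only (not the original code), the asynchronous generator API delivers the frame in a completion handler once the data is ready; the completion-block wrapper below is an assumed addition:
// Sketch: asynchronous thumbnail generation; the completion-based wrapper is illustrative.
- (void)thumbnailFromVideoAtURL:(NSString *)urlstr completion:(void (^)(UIImage *thumbnail))completion
{
NSURL *url = [NSURL URLWithString:urlstr];
AVAsset *asset = [AVAsset assetWithURL:url];
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.appliesPreferredTrackTransform = YES; // respect the track's orientation
NSValue *time = [NSValue valueWithCMTime:CMTimeMake(0, 600)];
[generator generateCGImagesAsynchronouslyForTimes:@[time]
completionHandler:^(CMTime requestedTime, CGImageRef imageRef, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error) {
UIImage *thumbnail = (result == AVAssetImageGeneratorSucceeded) ? [UIImage imageWithCGImage:imageRef] : nil;
dispatch_async(dispatch_get_main_queue(), ^{
if (completion) completion(thumbnail); // nil when generation failed
});
}];
}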

CustomMoviePlayerVC.h
@interface CustomMoviePlayerVC : UIViewController {
@public
MPMoviePlayerController *mp;
NSURL *movieURL;
}
@property (weak, nonatomic) UIView *playBtn;
- (id)initWithPath:(NSString *)moviePath tag:(int)tag;
- (void)startPlayer;
- (void)pausePlayer;
- (void)stopPlayer;
- (BOOL)isFullScreen;
- (UIImage *)getVideoThumbnail;
- (MPMoviePlaybackState)getVideoPlaybackState;
- (MPMovieLoadState)getVideoLoadState;
- (void)enterFullScreenAnimated:(BOOL)animated;
- (void)exitFullScreenAnimated:(BOOL)animated;
- (void)prepare;
@end
CustomMoviePlayerVC.m
#import "CustomMoviePlayerVC.h"
@interface CustomMoviePlayerVC ()
@end
@implementation CustomMoviePlayerVC
- (BOOL)isFullScreen
{
return mp.isFullscreen;
}
- (void)dealloc
{
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
- (id)initWithPath:(NSString *)moviePath tag:(int)tag
{
// Initialize and create movie URL
if (self = [super init]) {
movieURL = [NSURL URLWithString:moviePath];
mp = [[MPMoviePlayerController alloc] initWithContentURL:movieURL];
// Set movie player layout
if(tag == 1) {
[mp setControlStyle:MPMovieControlStyleNone];
}
[mp setShouldAutoplay:NO];
[mp setFullscreen:NO];
// May help to reduce latency
[mp prepareToPlay];
// Register that the load state changed (movie is ready)
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(moviePlayerLoadStateChanged:)
name:MPMoviePlayerLoadStateDidChangeNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(MoviePlayerThumbnailImageRequestDidFinish:)
name: MPMoviePlayerThumbnailImageRequestDidFinishNotification
object: nil];
// registering for playback state change notification
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(moviePlaybackStateDidChange:)
name:MPMoviePlayerPlaybackStateDidChangeNotification
object:nil];
// registering for playback finish notification
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(moviePlaybackDidFinish:)
name:MPMoviePlayerPlaybackDidFinishNotification
object:nil];
// registering for will-enter-fullscreen notification
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(moviePlaybackWillEnterFullscreen:)
name:MPMoviePlayerWillEnterFullscreenNotification
object:nil];
// registering for did-exit-fullscreen notification
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(moviePlaybackDidExitFullscreen:)
name:MPMoviePlayerDidExitFullscreenNotification
object:nil];
}
return self;
}
- (void)MoviePlayerThumbnailImageRequestDidFinish:(NSNotification*)not
{
}
- (void)enterFullScreenAnimated:(BOOL)animated
{
[mp setFullscreen:YES animated:animated];
}
- (void)exitFullScreenAnimated:(BOOL)animated
{
[mp setFullscreen:NO animated:animated];
}
- (void)moviePlayerLoadStateChanged:(NSNotification *)notification
{
[mp setControlStyle:MPMovieControlStyleEmbedded];
// Unless state is unknown, start playback
if ([mp loadState] != MPMovieLoadStateUnknown) {
// Set frame of movie player
[[mp view] setFrame:self.view.frame];
// Add movie player as subview
[[self view] addSubview:[mp view]];
}
}
- (void)moviePlaybackDidFinish:(NSNotification *)notification
{
[self exitFullScreenAnimated:YES];
if (mp.currentPlaybackTime <= 0.1) {
dispatch_async(dispatch_get_main_queue(), ^{
[mp stop];
[mp play];
[mp pause];
});
}
}
- (void)moviePlaybackStateDidChange:(NSNotification *)notification
{
switch ([self getVideoPlaybackState]) {
case MPMoviePlaybackStatePlaying:
[self.playBtn setHidden:YES];
[self.playBtn setUserInteractionEnabled:NO];
break;
case MPMoviePlaybackStatePaused:
[self.playBtn setHidden:NO];
[self.playBtn setUserInteractionEnabled:YES];
break;
case MPMoviePlaybackStateStopped:
[self.playBtn setHidden:NO];
[self.playBtn setUserInteractionEnabled:YES];
break;
default:
break;
}
}
- (void)moviePlaybackWillEnterFullscreen:(NSNotification *)notification
{
}
- (void)moviePlaybackDidExitFullscreen:(NSNotification *)notification
{
}
- (void)startPlayer
{
[self.playBtn setHidden:YES];
[self.playBtn setUserInteractionEnabled:NO];
if (mp.loadState != MPMovieLoadStateUnknown) {
[mp play];
}
}
- (void)pausePlayer
{
[self.playBtn setHidden:NO];
[self.playBtn setUserInteractionEnabled:YES];
if (mp.loadState != MPMovieLoadStateUnknown) {
[mp pause];
}
}
- (void)stopPlayer
{
[self.playBtn setHidden:NO];
[self.playBtn setUserInteractionEnabled:YES];
if (mp.loadState != MPMovieLoadStateUnknown) {
[mp stop];
}
}
- (UIImage *)getVideoThumbnail
{
if (mp && mp.loadState != MPMovieLoadStateUnknown) {
return [mp thumbnailImageAtTime:0.3 timeOption:MPMovieTimeOptionExact];
}
return nil;
}
- (MPMoviePlaybackState)getVideoPlaybackState
{
return mp.playbackState;
}
- (MPMovieLoadState)getVideoLoadState
{
return mp.loadState;
}
- (void)prepare
{
[mp prepareToPlay];
}
@end
Use getVideoThumbnail to get an image from the video.
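A hypothetical usage sketch (urlString and thumbnailImageView are placeholder names, and the thumbnail may be nil until the movie's load state changes):
// Hypothetical usage; urlString is a placeholder for your video URL.
CustomMoviePlayerVC *playerVC = [[CustomMoviePlayerVC alloc] initWithPath:urlString tag:1];
[playerVC prepare];
UIImage *thumb = [playerVC getVideoThumbnail]; // nil while the load state is still unknown
if (thumb) {
self.thumbnailImageView.image = thumb; // thumbnailImageView is an assumed outlet
}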

Related

Hide iAd Banner SKScene

Hi guys, what do you think I should use in the notification handlers (hideAd, showAd) to show and hide my iAd banner in my project? Here is what I use to show the iAd banner:
#import "GameViewController.h"
#import "GameScene.h"
@interface GameViewController () {
bool adOnTop;
bool iadsBannerIsVisible;
ADBannerView* theBanner;
}
@end
@implementation GameViewController
- (void)viewDidLoad
{
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleNotification:) name:@"hideAd" object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleNotification:) name:@"showAd" object:nil];
[super viewDidLoad];
// Configure the view.
SKView * skView = (SKView *)self.view;
skView.showsFPS = YES;
skView.showsNodeCount = YES;
skView.multipleTouchEnabled = YES;
/* Sprite Kit applies additional optimizations to improve rendering performance */
skView.ignoresSiblingOrder = YES;
// Create and configure the scene.
GameScene *scene = [GameScene sceneWithSize:skView.bounds.size];
scene.scaleMode = SKSceneScaleModeAspectFit;
// Present the scene.
[skView presentScene:scene];
[self showThinBanner];
}
-(void) showThinBanner {
iadsBannerIsVisible = YES;
theBanner = [[ADBannerView alloc] initWithFrame:CGRectZero];
[theBanner setAutoresizingMask:UIViewAutoresizingFlexibleWidth];
theBanner.delegate = self;
[self.view addSubview:theBanner];
}
- (BOOL)bannerViewActionShouldBegin:(ADBannerView *)banner willLeaveApplication:(BOOL)willLeave {
NSLog(#"Banner view is beginning an ad action");
BOOL shouldExecuteAction = YES; // your app implements this method
if (!willLeave && shouldExecuteAction){
// insert code here to suspend any services that might conflict with the advertisement, for example, you might pause the game with an NSNotification like this...
[[NSNotificationCenter defaultCenter] postNotificationName:#"PauseScene" object:nil]; //optional
}
return shouldExecuteAction;
}
-(void) bannerViewActionDidFinish:(ADBannerView *)banner {
NSLog(#"banner is done being fullscreen");
//Unpause the game if you paused it previously.
[[NSNotificationCenter defaultCenter] postNotificationName:#"UnPauseScene" object:nil]; //optional
}
- (void)bannerView:(ADBannerView *)banner didFailToReceiveAdWithError:(NSError *)error
{
if (iadsBannerIsVisible == YES) {
[UIView beginAnimations:#"animateAdBannerOff" context:NULL];
// Assumes the banner view is placed at the bottom of the screen.
banner.frame = CGRectOffset(banner.frame, 0, banner.frame.size.height);
[UIView commitAnimations];
iadsBannerIsVisible = NO;
NSLog(#"banner unavailable");
}
}
- (void)handleNotification:(NSNotification *)notification
{
if ([notification.name isEqualToString:#"hideAd"])
{
// hide your banner;
}else if ([notification.name isEqualToString:#"showAd"])
{
// show your banner
}
}
Solved
- (void)handleNotification:(NSNotification *)notification
{
if ([notification.name isEqualToString:#"hideAd"])
{
// hide your banner;
[theBanner removeFromSuperview];
}else if ([notification.name isEqualToString:#"showAd"])
{
// show your banner
[self.view addSubview:theBanner];
}
}
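If you would rather keep the banner in the view hierarchy instead of removing and re-adding it, here is a sketch of an alternative handler (same notification names, visibility toggled instead):
- (void)handleNotification:(NSNotification *)notification
{
// Sketch: hide/show by toggling visibility instead of removing from the superview.
if ([notification.name isEqualToString:@"hideAd"]) {
theBanner.hidden = YES;
} else if ([notification.name isEqualToString:@"showAd"]) {
theBanner.hidden = NO;
}
}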

Take a picture in iOS without UIImagePicker and without preview it

Do you know any way to take a photo in iOS and save it to the camera roll with a simple button press, without showing any preview?
I already know how to show the camera view, but it shows a preview of the image and the user has to tap the take-photo button to take the photo.
In a few words: the user taps the button, the picture is taken, with no preview and no double check to take / save the photo.
I already found the takePicture method of the UIImagePickerController class: http://developer.apple.com/library/ios/documentation/uikit/reference/UIImagePickerController_Class/UIImagePickerController/UIImagePickerController.html#//apple_ref/occ/instm/UIImagePickerController/takePicture
Set the showsCameraControls property to NO.
poc = [[UIImagePickerController alloc] init];
[poc setTitle:#"Take a photo."];
[poc setDelegate:self];
[poc setSourceType:UIImagePickerControllerSourceTypeCamera];
poc.showsCameraControls = NO;
You also have to add your own controls as a custom view on top of poc.view. That is very simple, and it lets you apply your own UI style.
You receive the image data as usual in imagePickerController:didFinishPickingMediaWithInfo:.
To take the photo, you call
[poc takePicture];
from your custom button.
Hope, that works for you.
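A minimal sketch of that delegate method, assuming you simply want the photo written straight to the camera roll:
// Sketch: receive the captured image and write it to the camera roll.
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
UIImage *photo = info[UIImagePickerControllerOriginalImage];
if (photo) {
UIImageWriteToSavedPhotosAlbum(photo, nil, NULL, NULL); // no completion callback in this sketch
}
}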
Assuming you want a point-and-shoot method, you can create an AVSession and just call the UIImageWriteToSavedPhotosAlbum method.
Here is a link that goes into that exact process: http://www.musicalgeometry.com/?p=1297
It's also worth noting that your users need to have given the app access to their camera roll or you may experience issues saving the images.
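A quick way to check that access, assuming the (era-appropriate) AssetsLibrary framework is linked:
// Sketch: warn when the app has no camera-roll access.
if ([ALAssetsLibrary authorizationStatus] != ALAuthorizationStatusAuthorized) {
NSLog(@"No access to the camera roll; saving may fail or prompt the user.");
}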
You need to design your own custom preview at whatever size you want; when the capture button is pressed, call a buttonPressed method and do whatever you need there:
- (void)buttonPressed:(UIButton *)sender {
NSLog(@"Capture clicked");
[self.imagePicker takePicture];
//[NSTimer scheduledTimerWithTimeInterval:3.0f target:self
// selector:@selector(timerFired:) userInfo:nil repeats:NO];
}
The following code takes a photo without showing a preview screen. When I tried the accepted answer, which uses UIImagePickerController, the preview screen showed and then auto-disappeared. With the code below, the user taps a 'take photo' button and the device takes the photo with zero change to the UI (in my app, I add a green check mark next to the take-photo button). The code below is from Apple's AVCam sample https://developer.apple.com/LIBRARY/IOS/samplecode/AVCam/Introduction/Intro.html with the 'extra functions' (those not related to taking a still photo) commented out. Thank you incmiko for suggesting this code in your answer iOS take photo from camera without modalViewController.
Updated code, 26 Mar 2015:
To trigger the snap photo:
[self snapStillImage:sender];
in .h file:
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
// include code below in header file, after the #import statements and before @interface
// avfoundation copy paste code
static void * CapturingStillImageContext = &CapturingStillImageContext;
static void * RecordingContext = &RecordingContext;
static void * SessionRunningAndDeviceAuthorizedContext = &SessionRunningAndDeviceAuthorizedContext;
// avfoundation, include code below after @interface
// avf - Session management.
@property (nonatomic) dispatch_queue_t sessionQueue; // Communicate with the session and other session objects on this queue.
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
// avf - Utilities.
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
@property (nonatomic, getter = isDeviceAuthorized) BOOL deviceAuthorized;
@property (nonatomic, readonly, getter = isSessionRunningAndDeviceAuthorized) BOOL sessionRunningAndDeviceAuthorized;
@property (nonatomic) BOOL lockInterfaceRotation;
@property (nonatomic) id runtimeErrorHandlingObserver;
in .m file:
#pragma mark - AV Foundation
- (BOOL)isSessionRunningAndDeviceAuthorized
{
return [[self session] isRunning] && [self isDeviceAuthorized];
}
+ (NSSet *)keyPathsForValuesAffectingSessionRunningAndDeviceAuthorized
{
return [NSSet setWithObjects:@"session.running", @"deviceAuthorized", nil];
}
// call following method from viewDidLoad
- (void)CreateAVCaptureSession
{
// Create the AVCaptureSession
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[self setSession:session];
// Check for device authorization
[self checkDeviceAuthorizationStatus];
// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
// Why not do all of this on the main queue?
// -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue so that the main queue isn't blocked (which keeps the UI responsive).
dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
dispatch_async(sessionQueue, ^{
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
NSError *error = nil;
AVCaptureDevice *videoDevice = [ViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionFront];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (error)
{
NSLog(#"%#", error);
}
if ([session canAddInput:videoDeviceInput])
{
[session addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
dispatch_async(dispatch_get_main_queue(), ^{
// Why are we dispatching this to the main queue?
// Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView can only be manipulated on main thread.
// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
});
}
/* AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (error)
{
NSLog(#"%#", error);
}
if ([session canAddInput:audioDeviceInput])
{
[session addInput:audioDeviceInput];
}
*/
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([session canAddOutput:movieFileOutput])
{
[session addOutput:movieFileOutput];
AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported])
[connection setEnablesVideoStabilizationWhenAvailable:YES];
[self setMovieFileOutput:movieFileOutput];
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([session canAddOutput:stillImageOutput])
{
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[session addOutput:stillImageOutput];
[self setStillImageOutput:stillImageOutput];
}
});
}
// call method below from viewWillAppear
- (void)AVFoundationStartSession
{
dispatch_async([self sessionQueue], ^{
[self addObserver:self forKeyPath:#"sessionRunningAndDeviceAuthorized" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:SessionRunningAndDeviceAuthorizedContext];
[self addObserver:self forKeyPath:#"stillImageOutput.capturingStillImage" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:CapturingStillImageContext];
[self addObserver:self forKeyPath:#"movieFileOutput.recording" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:RecordingContext];
[[NSNotificationCenter defaultCenter] addObserver:self selector:#selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
__weak ViewController *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
ViewController *strongSelf = weakSelf;
dispatch_async([strongSelf sessionQueue], ^{
// Manually restarting the session since it must have been stopped due to an error.
[[strongSelf session] startRunning];
});
}]];
[[self session] startRunning];
});
}
// call method below from viewDidDisappear
- (void)AVFoundationStopSession
{
dispatch_async([self sessionQueue], ^{
[[self session] stopRunning];
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
[[NSNotificationCenter defaultCenter] removeObserver:[self runtimeErrorHandlingObserver]];
[self removeObserver:self forKeyPath:#"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];
[self removeObserver:self forKeyPath:#"stillImageOutput.capturingStillImage" context:CapturingStillImageContext];
[self removeObserver:self forKeyPath:#"movieFileOutput.recording" context:RecordingContext];
});
}
- (BOOL)prefersStatusBarHidden
{
return YES;
}
- (BOOL)shouldAutorotate
{
// Disable autorotation of the interface when recording is in progress.
return ![self lockInterfaceRotation];
}
- (NSUInteger)supportedInterfaceOrientations
{
return UIInterfaceOrientationMaskAll;
}
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
// [[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)toInterfaceOrientation];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if (context == CapturingStillImageContext)
{
BOOL isCapturingStillImage = [change[NSKeyValueChangeNewKey] boolValue];
if (isCapturingStillImage)
{
[self runStillImageCaptureAnimation];
}
}
else if (context == RecordingContext)
{
BOOL isRecording = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRecording)
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setTitle:NSLocalizedString(#"Stop", #"Recording button stop title") forState:UIControlStateNormal];
// [[self recordButton] setEnabled:YES];
}
else
{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setTitle:NSLocalizedString(#"Record", #"Recording button record title") forState:UIControlStateNormal];
// [[self recordButton] setEnabled:YES];
}
});
}
else if (context == SessionRunningAndDeviceAuthorizedContext)
{
BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRunning)
{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setEnabled:YES];
// [[self stillButton] setEnabled:YES];
}
else
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setEnabled:NO];
// [[self stillButton] setEnabled:NO];
}
});
}
else
{
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
#pragma mark Actions
- (IBAction)toggleMovieRecording:(id)sender
{
// [[self recordButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
if (![[self movieFileOutput] isRecording])
{
[self setLockInterfaceRotation:YES];
if ([[UIDevice currentDevice] isMultitaskingSupported])
{
// Setup background task. This is needed because the captureOutput:didFinishRecordingToOutputFileAtURL: callback is not received until AVCam returns to the foreground unless you request background execution time. This also ensures that there will be time to write the file to the assets library when AVCam is backgrounded. To conclude this background execution, -endBackgroundTask is called in -recorder:recordingDidFinishToOutputFileURL:error: after the recorded file has been saved.
[self setBackgroundRecordingID:[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil]];
}
// Update the orientation on the movie file output video connection before starting recording.
// [[[self movieFileOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];
// Turning OFF flash for video recording
[ViewController setFlashMode:AVCaptureFlashModeOff forDevice:[[self videoDeviceInput] device]];
// Start recording to a temporary file.
NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]];
[[self movieFileOutput] startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputFilePath] recordingDelegate:self];
}
else
{
[[self movieFileOutput] stopRecording];
}
});
}
- (IBAction)changeCamera:(id)sender
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setEnabled:NO];
// [[self stillButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *currentVideoDevice = [[self videoDeviceInput] device];
AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified;
AVCaptureDevicePosition currentPosition = [currentVideoDevice position];
switch (currentPosition)
{
case AVCaptureDevicePositionUnspecified:
preferredPosition = AVCaptureDevicePositionBack;
break;
case AVCaptureDevicePositionBack:
preferredPosition = AVCaptureDevicePositionFront;
break;
case AVCaptureDevicePositionFront:
preferredPosition = AVCaptureDevicePositionBack;
break;
}
AVCaptureDevice *videoDevice = [ViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
[[self session] beginConfiguration];
[[self session] removeInput:[self videoDeviceInput]];
if ([[self session] canAddInput:videoDeviceInput])
{
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice];
[ViewController setFlashMode:AVCaptureFlashModeAuto forDevice:videoDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
[[self session] addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
}
else
{
[[self session] addInput:[self videoDeviceInput]];
}
[[self session] commitConfiguration];
dispatch_async(dispatch_get_main_queue(), ^{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setEnabled:YES];
// [[self stillButton] setEnabled:YES];
});
});
}
- (IBAction)snapStillImage:(id)sender
{
dispatch_async([self sessionQueue], ^{
// Update the orientation on the still image output video connection before capturing.
// [[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];
// Flash set to Auto for Still Capture
[ViewController setFlashMode:AVCaptureFlashModeAuto forDevice:[[self videoDeviceInput] device]];
// Capture a still image.
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
// do something good with the saved image
[self saveImageToParse:image];
}
}];
});
}
- (IBAction)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer
{
// CGPoint devicePoint = [(AVCaptureVideoPreviewLayer *)[[self previewView] layer] captureDevicePointOfInterestForPoint:[gestureRecognizer locationInView:[gestureRecognizer view]]];
// [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
#pragma mark File Output Delegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
if (error)
NSLog(#"%#", error);
[self setLockInterfaceRotation:NO];
// Note the backgroundRecordingID for use in the ALAssetsLibrary completion handler to end the background task associated with this recording. This allows a new recording to be started, associated with a new UIBackgroundTaskIdentifier, once the movie file output's -isRecording is back to NO — which happens sometime after this method returns.
UIBackgroundTaskIdentifier backgroundRecordingID = [self backgroundRecordingID];
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
[[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
NSLog(#"%#", error);
[[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
if (backgroundRecordingID != UIBackgroundTaskInvalid)
[[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID];
}];
}
#pragma mark Device Configuration
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self videoDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(#"%#", error);
}
});
}
+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if ([device hasFlash] && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(#"%#", error);
}
}
}
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
#pragma mark UI
- (void)runStillImageCaptureAnimation
{
/*
dispatch_async(dispatch_get_main_queue(), ^{
[[[self previewView] layer] setOpacity:0.0];
[UIView animateWithDuration:.25 animations:^{
[[[self previewView] layer] setOpacity:1.0];
}];
});
*/
}
- (void)checkDeviceAuthorizationStatus
{
NSString *mediaType = AVMediaTypeVideo;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
if (granted)
{
//Granted access to mediaType
[self setDeviceAuthorized:YES];
}
else
{
//Not granted access to mediaType
dispatch_async(dispatch_get_main_queue(), ^{
[[[UIAlertView alloc] initWithTitle:#"AVCam!"
message:#"AVCam doesn't have permission to use Camera, please change privacy settings"
delegate:self
cancelButtonTitle:#"OK"
otherButtonTitles:nil] show];
[self setDeviceAuthorized:NO];
});
}
}];
}
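The saveImageToParse: call in snapStillImage: is specific to that app; here is a sketch of a stand-in that writes the still image to the camera roll instead (AssetsLibrary is already imported above):
// Sketch: stand-in for saveImageToParse: that saves the still image to the camera roll.
- (void)saveImageToCameraRoll:(UIImage *)image
{
[[[ALAssetsLibrary alloc] init] writeImageToSavedPhotosAlbum:[image CGImage]
orientation:(ALAssetOrientation)[image imageOrientation]
completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) NSLog(@"%@", error);
}];
}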

MPMoviePlayerController doesn't work with remoteControlReceivedWithEvent

I want to allow the controls from my keyboard to work in my app. The controls use Apple's Remote Control events (beginReceivingRemoteControlEvents, endReceivingRemoteControlEvents, and remoteControlReceivedWithEvent); however I cannot seem to get this to work with MPMoviePlayerController.
I do not see any events at the start of the program, even though beginReceivingRemoteControlEvents is called at the start.
I do not see any events during the playback of a video.
I do see events after I close the video.
From the above, it seems that the audio stream of MPMoviePlayerController disables the controls. However I do not know how to change this. I tried using [moviePlayer setUseApplicationAudioSession:NO]; to change the audio to use the system session, yet it does nothing.
Here is my setup. My app delegate is a UIViewController. I set the main window's root view controller to the app delegate, add views to that view controller, and handle the video-related parts inside it.
- (BOOL)canBecomeFirstResponder {
return YES;
}
- (void)tableView:(UITableView *)theTableView didSelectRowAtIndexPath:(NSIndexPath *)theIndexPath {
NSString *file = [[MGMFilesPath stringByExpandingTildeInPath] stringByAppendingPathComponent:[files objectAtIndex:[theIndexPath indexAtPosition:1]]];
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
[audioSession setActive:YES error:nil];
NSLog(#"%d", [self isFirstResponder]);
moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:file]];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(exitedFullscreen:) name:MPMoviePlayerDidExitFullscreenNotification object:moviePlayer];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playbackFinished:) name:MPMoviePlayerPlaybackDidFinishNotification object:moviePlayer];
if ([moviePlayer respondsToSelector:@selector(setFullscreen:animated:)]) {
[[self view] addSubview:[moviePlayer view]];
[moviePlayer setFullscreen:YES animated:YES];
[moviePlayer play];
} else {
[moviePlayer play];
}
[fileView deselectRowAtIndexPath:theIndexPath animated:NO];
}
- (void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
[[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
[self becomeFirstResponder];
}
- (void)viewWillDisappear:(BOOL)animated {
[super viewWillDisappear:animated];
[[UIApplication sharedApplication] endReceivingRemoteControlEvents];
[self resignFirstResponder];
}
- (void)remoteControlReceivedWithEvent:(UIEvent *)event {
NSLog(#"remoteControlReceivedWithEvent: %#", event);
if (event.type==UIEventTypeRemoteControl) {
if (event.subtype==UIEventSubtypeRemoteControlPlay) {
NSLog(#"Play");
} else if (event.subtype==UIEventSubtypeRemoteControlPause) {
NSLog(#"Pause");
} else if (event.subtype==UIEventSubtypeRemoteControlTogglePlayPause) {
NSLog(#"Play Pause");
}
}
}
- (void)exitedFullscreen:(NSNotification*)notification {
[[moviePlayer view] removeFromSuperview];
[moviePlayer stop];
[moviePlayer release];
moviePlayer = nil;
[[AVAudioSession sharedInstance] setActive:NO error:nil];
}
- (void)playbackFinished:(NSNotification*)theNotification {
[[NSNotificationCenter defaultCenter] removeObserver:self name:MPMoviePlayerDidExitFullscreenNotification object:moviePlayer];
[[NSNotificationCenter defaultCenter] removeObserver:self name:MPMoviePlayerPlaybackDidFinishNotification object:moviePlayer];
NSNumber *reason = [[theNotification userInfo] objectForKey:MPMoviePlayerPlaybackDidFinishReasonUserInfoKey];
if ([reason intValue]!=MPMovieFinishReasonUserExited) {
[moviePlayer setFullscreen:NO animated:YES];
[[moviePlayer view] removeFromSuperview];
[moviePlayer stop];
[moviePlayer release];
moviePlayer = nil;
[[AVAudioSession sharedInstance] setActive:NO error:nil];
}
NSLog(#"%d", [self isFirstResponder]);
}
As you can see in the code above, I verified that the view controller was the first responder, and it was, so I know it's not a first-responder issue.
Can someone help me get this working?
Thanks
Apparently, MPMoviePlayerController isn't the way to go for this. What I ended up doing was using MPMoviePlayerViewController and overriding remoteControlReceivedWithEvent: to customize the controls. Below is the code I am currently using.
@interface MGMMoviePlayerViewController : MPMoviePlayerViewController
- (void)remoteControlReceivedWithEvent:(UIEvent *)event;
@end
@implementation MGMMoviePlayerViewController
- (void)remoteControlReceivedWithEvent:(UIEvent *)theEvent {
if (theEvent.type==UIEventTypeRemoteControl) {
if (theEvent.subtype==UIEventSubtypeRemoteControlPlay) {
[[self moviePlayer] play];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlPause) {
[[self moviePlayer] pause];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlTogglePlayPause) {
if ([[self moviePlayer] playbackState]==MPMoviePlaybackStatePlaying) {
[[self moviePlayer] pause];
} else {
[[self moviePlayer] play];
}
} else if (theEvent.subtype==UIEventSubtypeRemoteControlStop) {
[[self moviePlayer] stop];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlNextTrack) {
NSTimeInterval currentTime = [[self moviePlayer] currentPlaybackTime];
currentTime += 10;
if (currentTime>[[self moviePlayer] duration])
currentTime = [[self moviePlayer] duration];
[[self moviePlayer] setCurrentPlaybackTime:currentTime];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlPreviousTrack) {
NSTimeInterval currentTime = [[self moviePlayer] currentPlaybackTime];
currentTime -= 10;
if (currentTime<0)
currentTime = 0;
[[self moviePlayer] setCurrentPlaybackTime:currentTime];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlBeginSeekingBackward) {
[[self moviePlayer] beginSeekingBackward];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlBeginSeekingForward) {
[[self moviePlayer] beginSeekingForward];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlEndSeekingBackward || theEvent.subtype==UIEventSubtypeRemoteControlEndSeekingForward) {
[[self moviePlayer] endSeeking];
}
}
}
@end
- (void)tableView:(UITableView *)theTableView didSelectRowAtIndexPath:(NSIndexPath *)theIndexPath {
NSString *file = [[MGMFilesPath stringByExpandingTildeInPath] stringByAppendingPathComponent:[files objectAtIndex:[theIndexPath indexAtPosition:1]]];
moviePlayerView = [[MGMMoviePlayerViewController alloc] initWithContentURL:[NSURL fileURLWithPath:file]];
[self presentMoviePlayerViewControllerAnimated:moviePlayerView];
[[moviePlayerView moviePlayer] play];
[fileView deselectRowAtIndexPath:theIndexPath animated:NO];
}
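Note that the subclass only receives these events while it can become first responder and remote-control events are enabled; the overrides below are an assumed addition mirroring the first-responder setup from the question:
// Sketch: additional overrides for MGMMoviePlayerViewController so remote-control events reach it.
- (BOOL)canBecomeFirstResponder
{
return YES;
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
[[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
[self becomeFirstResponder];
}
- (void)viewWillDisappear:(BOOL)animated
{
[[UIApplication sharedApplication] endReceivingRemoteControlEvents];
[self resignFirstResponder];
[super viewWillDisappear:animated];
}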

UIButton disappears when my application returns from running in the background

I have a small project that contains a few UITextFields with a number keypad. When the keyboard is displayed I'm adding a button as a subview for the user to dismiss the keyboard.
However, if the keyboard is active and I close the application, the button I've added disappears when the app is relaunched. (The app stays in the background through multitasking, so it is not quit completely.)
This is the code I'm using to add the button (my "done" button is configured in the XIB).
- (void)viewDidLoad
{
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(keyboardWillShow) name:UIKeyboardWillShowNotification object:nil];
[super viewDidLoad];
}
- (void)keyboardWillShow{
[[NSNotificationCenter defaultCenter] removeObserver:self];
// We must perform on delay (schedules on next run loop pass) for the keyboard subviews to be present.
[self performSelector:@selector(addHideKeyboardButtonToKeyboard) withObject:nil afterDelay:0];
}
- (void)addHideKeyboardButtonToKeyboard{
// Locate non-UIWindow.
doneButton.hidden=NO;
UIWindow *keyboardWindow = nil;
for (UIWindow *testWindow in [[UIApplication sharedApplication] windows]) {
if (![[testWindow class] isEqual:[UIWindow class]]) {
keyboardWindow = testWindow;
break;
}
}
if (!keyboardWindow) return;
// Locate UIKeyboard.
UIView *foundKeyboard = nil;
for (UIView __strong *possibleKeyboard in [keyboardWindow subviews]) {
// iOS 4 sticks the UIKeyboard inside a UIPeripheralHostView.
if ([[possibleKeyboard description] hasPrefix:#"<UIPeripheralHostView"]) {
possibleKeyboard = [[possibleKeyboard subviews] objectAtIndex:0];
}
if ([[possibleKeyboard description] hasPrefix:#"<UIKeyboard"]) {
foundKeyboard = possibleKeyboard;
break;
}
}
if (foundKeyboard) {
[doneButton setImage:[UIImage imageNamed:#"doneupHeb.png"] forState:UIControlStateNormal];
[doneButton setImage:[UIImage imageNamed:#"donedownHeb.png"] forState:UIControlStateHighlighted];
doneButton.frame = CGRectMake(-1, 163, 106, 53);
[foundKeyboard addSubview:doneButton];
// Add the button to foundKeyboard.
}
}
-(void)textFieldDidEndEditing:(UITextField *)textField{
[loan resignFirstResponder];
[YearCycle resignFirstResponder];
[prime resignFirstResponder];
[MothlyReturn resignFirstResponder];
[doneButton removeFromSuperview];
doneButton = nil;
}
- (void)textFieldDidBeginEditing:(UITextField *)textField{
textField.delegate=self;
//editingField = textField;
if ([prime isFirstResponder]||[MothlyReturn isFirstResponder]){
scroll.contentOffset = CGPointMake(0, 166 );
}
// if ([YearCycle isFirstResponder]){
// scroll.contentOffset = CGPointMake(0, 200);
}
- (IBAction)closeNumpad:(id)sender{
[loan resignFirstResponder];
[YearCycle resignFirstResponder];
[prime resignFirstResponder];
[MothlyReturn resignFirstResponder];
scroll.contentOffset = CGPointMake(0, 0);
doneButton.hidden=YES;
}
I fixed the problem with a little help from other questions on the site. For everyone who has or will have this problem, here is the code.
Please note: the button itself is designed in the XIB file, not in code.
The .h file:
BOOL firstTime;
BOOL add;
BOOL keyboardOpened;
IBOutlet UIButton *doneButton;
The .m file:
- (void)viewDidLoad
{
[super viewDidLoad];
firstTime = TRUE;
add = TRUE;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(keyboardDidShow:) name:UIKeyboardDidShowNotification object:nil]; // Do any additional setup after loading the view, typically from a nib.
}
- (void)viewDidUnload
{
// [[NSNotificationCenter defaultCenter] removeObserver:self];
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown);
}
- (void)addButtonToKeyboard {
// create custom button
/* UIButton *doneButton = [UIButton buttonWithType:UIButtonTypeCustom];
doneButton.frame = CGRectMake(0, 163, 106, 53);
doneButton.adjustsImageWhenHighlighted = NO;
[doneButton setImage:[UIImage imageNamed:#"DoneUp.png"] forState:UIControlStateNormal];
[doneButton setImage:[UIImage imageNamed:#"DoneDown.png"] forState:UIControlStateHighlighted];
doneButton.tag = 3;
[doneButton addTarget:self action:#selector(doneButton:) forControlEvents:UIControlEventTouchUpInside];*/
// locate keyboard view
UIWindow* tempWindow = [[[UIApplication sharedApplication] windows] objectAtIndex:1];
UIView* keyboard;
for(int i=0; i<[tempWindow.subviews count]; i++) {
keyboard = [tempWindow.subviews objectAtIndex:i];
// keyboard found, add the button
if ([[keyboard description] hasPrefix:#"<UIPeripheralHostView"] == YES && add){
doneButton.frame = CGRectMake(-1, 163, 106, 53);
[keyboard addSubview:doneButton];
}
}
}
- (void)removeButtonFromKeyboard
{
// locate keyboard view
UIWindow* tempWindow = [[[UIApplication sharedApplication] windows] objectAtIndex:1];
UIView* keyboard;
for(int i=0; i<[tempWindow.subviews count]; i++) {
keyboard = [tempWindow.subviews objectAtIndex:i];
// keyboard found, remove the button
if([[keyboard description] hasPrefix:#"<UIPeripheralHost"] == YES) [[keyboard viewWithTag:3] removeFromSuperview];
}
}
- (IBAction)doneButton:(id)sender {
[loan resignFirstResponder];
[YearCycle resignFirstResponder];
[ageOfCeo resignFirstResponder];
[YearofBusiness resignFirstResponder];
scroll.contentOffset = CGPointMake(0, 0);
if (![[[UIDevice currentDevice] model] isEqualToString:#"iPad"] && ![[[UIDevice currentDevice] model] isEqualToString:#"iPad Simulator"])
{
[self removeButtonFromKeyboard];
firstTime = TRUE;
}
}
- (BOOL)textFieldShouldReturn:(UITextField *)theTextField {
[theTextField resignFirstResponder];
return YES;
}
- (void)textFieldDidBeginEditing:(UITextField *)textField
{
if ([ageOfCeo isFirstResponder]||[YearofBusiness isFirstResponder]){
scroll.contentOffset = CGPointMake(0, 166 );
}
// firstResponder = textField;
}
- (void)keyboardDidShow:(id)sender
{
if (![[[UIDevice currentDevice] model] isEqualToString:#"iPad"] && ![[[UIDevice currentDevice] model] isEqualToString:#"iPad Simulator"])
{
NSLog(#"%#",[[UIDevice currentDevice] model]);
[self addButtonToKeyboard];
keyboardOpened = TRUE;
}
}
- (void)textFieldDidEndEditing:(UITextField *)textField
{
if (![[[UIDevice currentDevice] model] isEqualToString:#"iPad"] && ![[[UIDevice currentDevice] model] isEqualToString:#"iPad Simulator"])
{
[self removeButtonFromKeyboard];
keyboardOpened = FALSE;
}
}
I think your problem is that you are removing the observer in - (void)keyboardWillShow. Try putting this line, [[NSNotificationCenter defaultCenter] removeObserver:self];, in - (void)viewDidUnload instead.
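A sketch of that suggestion applied to the original code (the keyboard observer stays registered while the view is loaded, so keyboardWillShow keeps firing after the app returns from the background):
// Sketch: keep the observer alive; only remove it when the view is unloaded.
- (void)keyboardWillShow {
// no removeObserver: call here any more
[self performSelector:@selector(addHideKeyboardButtonToKeyboard) withObject:nil afterDelay:0];
}
- (void)viewDidUnload {
[[NSNotificationCenter defaultCenter] removeObserver:self];
[super viewDidUnload];
}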

How to play video in Objective-C - Causes Jerk when loads

I am developing an application that plays a video while the application launches. I observe a huge jerk while the video plays and when it is removed from the screen.
My code is as follow:
VideoPlayer.m
-(id)initWithVideo: (CGRect)frame file:(NSString *)videoFile
{
if (self = [super initWithFrame:frame])
{
NSArray *file = [videoFile componentsSeparatedByCharactersInSet:[NSCharacterSet characterSetWithCharactersInString:@"."]];
NSString *moviePath = [[NSBundle mainBundle] pathForResource:[file objectAtIndex:0] ofType:[file objectAtIndex:1]];
if (nil != moviePath)
{
if (nil != theMovie)
{
[theMovie.view removeFromSuperview];
}
theMovie = [[MPMoviePlayerViewController alloc] initWithContentURL:[NSURL fileURLWithPath:moviePath]];
theMovie.view.frame = self.bounds;
theMovie.moviePlayer.scalingMode = MPMovieScalingModeAspectFit;
theMovie.moviePlayer.controlStyle = MPMovieControlStyleNone;
theMovie.moviePlayer.movieSourceType = MPMovieSourceTypeFile;
theMovie.moviePlayer.fullscreen = NO;
[self addSubview:theMovie.view];
}
}
return self;
}
Please guide me on where I am going wrong.
-(void)prepareMoviePlayer:(NSURL *)moviePath {
self.moviePlayerViewController = [[MPMoviePlayerViewController alloc] initWithContentURL:moviePath];
if ([[self.moviePlayerViewController moviePlayer] respondsToSelector:@selector(loadState)]) {
[[self.moviePlayerViewController moviePlayer] setControlStyle:MPMovieControlStyleEmbedded];
[[self.moviePlayerViewController moviePlayer] setFullscreen:NO];
[[self.moviePlayerViewController moviePlayer] prepareToPlay];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(moviePlayerLoadStateDidChange:)
name:MPMoviePlayerLoadStateDidChangeNotification
object:nil];
}
}
- (void)moviePlayerLoadStateDidChange:(NSNotification *)notification {
if ([[self.moviePlayerViewController moviePlayer] loadState] == MPMovieLoadStateStalled) {
NSLog(#"Movie Playback Stalled");
} else if([[self.moviePlayerViewController moviePlayer] loadState] != MPMovieLoadStateUnknown) {
[[NSNotificationCenter defaultCenter] removeObserver:self
name:MPMoviePlayerLoadStateDidChangeNotification
object:nil];
[[[self.moviePlayerViewController moviePlayer] view] setFrame:self.view.bounds];
[[self view] addSubview:[[self.moviePlayerViewController moviePlayer] view]];
[[self.moviePlayerViewController moviePlayer] play];
}
}
The MPMoviePlayerViewController prepareToPlay method seems to be the key here. I also suggest adding other notifications to handle playback completion and playback stopping. Also, some of this code is designed to work from a subview of the window.
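A sketch of the extra notification handling mentioned above, assuming the same moviePlayerViewController property:
// Sketch: also observe playback completion so the player view is torn down cleanly.
// Register alongside the load-state observer, e.g. in prepareMoviePlayer:
// [[NSNotificationCenter defaultCenter] addObserver:self
// selector:@selector(moviePlaybackDidFinish:)
// name:MPMoviePlayerPlaybackDidFinishNotification
// object:[self.moviePlayerViewController moviePlayer]];
- (void)moviePlaybackDidFinish:(NSNotification *)notification {
[[NSNotificationCenter defaultCenter] removeObserver:self
name:MPMoviePlayerPlaybackDidFinishNotification
object:[self.moviePlayerViewController moviePlayer]];
[[[self.moviePlayerViewController moviePlayer] view] removeFromSuperview];
self.moviePlayerViewController = nil;
}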