Composing multiple videos causes hang - objective-c

I am working on an app that composes multiple video clips taken by the user. The clips are recorded with the camera, overlaid with another video, and then composed together into one long clip. The length of each clip is determined by the overlaying video file.
I am using an AVAssetExportSession and exportAsynchronouslyWithCompletionHandler. The odd thing is this works with some clips and not others. The real problem is that the exporter doesn't report any errors or failures, just zero progress and never calls the completion handler.
I don't even know where to begin looking to find out what the issue is. Here's the function I use to compose the clips together:
- (void) setupAndStitchVideos:(NSMutableArray*)videoData
{
// Filepath to where the final generated video is stored
NSURL * exportUrl = nil;
// Contains information about a single asset/track
NSDictionary * assetOptions = nil;
AVURLAsset * currVideoAsset = nil;
AVURLAsset * currAudioAsset = nil;
AVAssetTrack * currVideoTrack = nil;
AVAssetTrack * currAudioTrack = nil;
// Contains all tracks and time ranges used to build the final composition
NSMutableArray * allVideoTracks = nil;
NSMutableArray * allVideoRanges = nil;
NSMutableArray * allAudioTracks = nil;
NSMutableArray * allAudioRanges = nil;
AVMutableCompositionTrack * videoTracks = nil;
AVMutableCompositionTrack * audioTracks = nil;
// Misc time values used when calculating a clips start time and total length
float animationLength = 0.0f;
float clipLength = 0.0f;
float startTime = 0.0f;
CMTime clipStart = kCMTimeZero;
CMTime clipDuration = kCMTimeZero;
CMTimeRange currRange = kCMTimeRangeZero;
// The final composition to be generated and exported
AVMutableComposition * finalComposition = nil;
// Cancel any already active exports
if (m_activeExport)
{
[m_activeExport cancelExport];
m_activeExport = nil;
}
// Initialize and setup all composition related member variables
allVideoTracks = [[NSMutableArray alloc] init];
allAudioTracks = [[NSMutableArray alloc] init];
allVideoRanges = [[NSMutableArray alloc] init];
allAudioRanges = [[NSMutableArray alloc] init];
exportUrl = [NSURL fileURLWithPath:[MobveoAnimation getMergeDestination]];
finalComposition = [AVMutableComposition composition];
videoTracks = [finalComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
audioTracks = [finalComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
assetOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
animationLength = m_animation.videoDuration;
// Define all of the audio and video tracks that will be used in the composition
for (NSDictionary * currData in videoData)
{
currVideoAsset = [AVURLAsset URLAssetWithURL:[currData objectForKey:KEY_STITCH_VIDEO_URL] options:assetOptions];
currAudioAsset = [AVURLAsset URLAssetWithURL:[currData objectForKey:KEY_STITCH_AUDIO_URL] options:assetOptions];
currVideoTrack = [[currVideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
NSArray *audioTracks = [currAudioAsset tracksWithMediaType:AVMediaTypeAudio];
if ( audioTracks != nil && audioTracks.count > 0 )
{
currAudioTrack = audioTracks[0];
}
else
{
currAudioTrack = nil;
}
clipLength = animationLength * [(NSNumber*)[currData objectForKey:KEY_STITCH_LENGTH_PERCENTAGE] floatValue];
clipStart = CMTimeMakeWithSeconds(startTime, currVideoAsset.duration.timescale);
clipDuration = CMTimeMakeWithSeconds(clipLength, currVideoAsset.duration.timescale);
NSLog(@"Clip length: %.2f", clipLength);
NSLog(@"Clip Start: %lld", clipStart.value);
NSLog(@"Clip duration: %lld", clipDuration.value);
currRange = CMTimeRangeMake(clipStart, clipDuration);
[allVideoTracks addObject:currVideoTrack];
if ( currAudioTrack != nil )
{
[allAudioTracks addObject:currAudioTrack];
[allAudioRanges addObject:[NSValue valueWithCMTimeRange:currRange]];
}
[allVideoRanges addObject:[NSValue valueWithCMTimeRange:currRange]];
startTime += clipLength;
}
[videoTracks insertTimeRanges:allVideoRanges ofTracks:allVideoTracks atTime:kCMTimeZero error:nil];
if ( allAudioTracks.count > 0 )
{
[audioTracks insertTimeRanges:allAudioRanges ofTracks:allAudioTracks atTime:kCMTimeZero error:nil];
}
for ( int i = 0; i < allVideoTracks.count - allAudioTracks.count; ++i )
{
CMTimeRange curRange = [allVideoRanges[i] CMTimeRangeValue];
[audioTracks insertEmptyTimeRange:curRange];
}
// Delete any previous exported video files that may already exist
[[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
// Begin the composition generation and export process!
m_activeExport = [[AVAssetExportSession alloc] initWithAsset:finalComposition presetName:AVAssetExportPreset1280x720];
[m_activeExport setOutputFileType:AVFileTypeQuickTimeMovie];
[m_activeExport setOutputURL:exportUrl];
NSLog(@"Exporting async");
[m_activeExport exportAsynchronouslyWithCompletionHandler:^(void)
{
NSLog(@"Export complete");
// Cancel the update timer
[m_updateTimer invalidate];
m_updateTimer = nil;
// Dismiss the displayed dialog
[m_displayedDialog hide:TRUE];
m_displayedDialog = nil;
// Re-enable touch events
[[UIApplication sharedApplication] endIgnoringInteractionEvents];
// Report the success/failure result
switch (m_activeExport.status)
{
case AVAssetExportSessionStatusFailed:
[self performSelectorOnMainThread:@selector(videoStitchingFailed:) withObject:m_activeExport.error waitUntilDone:FALSE];
break;
case AVAssetExportSessionStatusCompleted:
[self performSelectorOnMainThread:@selector(videoStitchingComplete:) withObject:m_activeExport.outputURL waitUntilDone:FALSE];
break;
}
// Clear our reference to the completed export
m_activeExport = nil;
}];
}
EDIT:
Thanks to Josh in the comments I noticed there were error parameters I wasn't making use of. In the failing case I now get the ever-so-useful "Operation could not be completed" error when inserting the time ranges of the video tracks:
NSError *videoError = nil;
[videoTracks insertTimeRanges:allVideoRanges ofTracks:allVideoTracks atTime:kCMTimeZero error:&videoError];
if ( videoError != nil )
{
NSLog(@"Error adding video track: %@", videoError);
}
Output:
Error adding video track: Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo=0x17426dd00 {NSUnderlyingError=0x174040cc0 "The operation couldn’t be completed. (OSStatus error -12780.)", NSLocalizedFailureReason=An unknown error occurred (-12780), NSLocalizedDescription=The operation could not be completed}
It is worth noting, however, that nowhere in this entire codebase is URLWithString used instead of fileURLWithPath, so that isn't the problem.

Judging from your for-in enumeration of the videoData array after you've initialized the composition member variables, it looks as if you're blocking the calling thread. Although accessing each AVAssetTrack instance is permitted, the values for its keys are not always immediately available, and loading them synchronously blocks.
Instead, try loading those values asynchronously via the AVAsynchronousKeyValueLoading protocol. Apple's documentation should help you straighten out the issue and get you on your way!
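To sketch the idea (this is illustrative only, not code from the question; videoURL stands in for the URL you already pull out of videoData, and assetOptions is the same options dictionary used above), you would load the keys you need before touching tracks or duration:
// Illustrative sketch: load asset keys asynchronously before building the composition
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:assetOptions];
[asset loadValuesAsynchronouslyForKeys:@[@"tracks", @"duration"] completionHandler:^{
    NSError *error = nil;
    if ([asset statusOfValueForKey:@"tracks" error:&error] == AVKeyValueStatusLoaded)
    {
        // Safe to call tracksWithMediaType: and read asset.duration here,
        // then continue assembling the AVMutableComposition.
    }
    else
    {
        NSLog(@"Failed to load asset keys: %@", error);
    }
}];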
Hopefully this will do the trick! Good luck and let me know if you have any further questions/problems.

Related

TableView doesn't show UIImage

I have an app that shows a Twitter account feed, so I have an image view, a text label, and a detail label for the content of the feed. The problem is that when all the data is loaded, the image doesn't appear. When I tap a cell or scroll up and down, the images are set. Here is some of my code.
-(void)getImageFromUrl:(NSString*)imageUrl asynchronouslyForImageView:(UIImageView*)imageView andKey:(NSString*)key{
dispatch_async(dispatch_get_global_queue(
DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
NSURL *url = [NSURL URLWithString:imageUrl];
__block NSData *imageData;
dispatch_sync(dispatch_get_global_queue(
DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
imageData =[NSData dataWithContentsOfURL:url];
if(imageData){
[self.imagesDictionary setObject:[UIImage imageWithData:imageData] forKey:key];
dispatch_sync(dispatch_get_main_queue(), ^{
imageView.image = self.imagesDictionary[key];
});
}
});
});
}
- (void)refreshTwitterHomeFeedWithCompletion {
// Request access to the Twitter accounts
ACAccountStore *accountStore = [[ACAccountStore alloc] init];
ACAccountType *accountType = [accountStore accountTypeWithAccountTypeIdentifier:ACAccountTypeIdentifierTwitter];
[accountStore requestAccessToAccountsWithType:accountType options:nil completion:^(BOOL granted, NSError *error){
if (granted) {
NSArray *accounts = [accountStore accountsWithAccountType:accountType];
// Check if the users has setup at least one Twitter account
if (accounts.count > 0)
{
ACAccount *twitterAccount = [accounts objectAtIndex:0];
NSLog(@"request.account ...%@",twitterAccount.username);
NSURL* url = [NSURL URLWithString:@"https://api.twitter.com/1.1/statuses/home_timeline.json"];
NSDictionary* params = @{@"count" : @"50", @"screen_name" : twitterAccount.username};
SLRequest *request = [SLRequest requestForServiceType:SLServiceTypeTwitter
requestMethod:SLRequestMethodGET
URL:url parameters:params];
request.account = twitterAccount;
[request performRequestWithHandler:^(NSData *responseData,
NSHTTPURLResponse *urlResponse, NSError *error) {
if (error)
{
NSString* errorMessage = [NSString stringWithFormat:@"There was an error reading your Twitter feed. %@",
[error localizedDescription]];
NSLog(@"%@",errorMessage);
}
else
{
NSError *jsonError;
NSArray *responseJSON = [NSJSONSerialization
JSONObjectWithData:responseData
options:NSJSONReadingAllowFragments
error:&jsonError];
if (jsonError)
{
NSString* errorMessage = [NSString stringWithFormat:@"There was an error reading your Twitter feed. %@",
[jsonError localizedDescription]];
NSLog(@"%@",errorMessage);
}
else
{
NSLog(@"Home responseJSON..%@",(NSDictionary*)responseJSON.description);
dispatch_async(dispatch_get_main_queue(), ^{
[self reloadData:responseJSON];
});
}
}
}];
}
}
}];
}
-(void)reloadData:(NSArray*)jsonResponse
{
self.tweets = jsonResponse;
[self.tableView reloadData];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#pragma mark - Table view data source
- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
// Return the number of sections.
return 1;
}
- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
// Return the number of rows in the section.
return self.tweets.count;
}
- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
static NSString *CellIdentifier = @"Cell";
SNTwitterCell *cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier];
if(!cell)
{
cell = [[SNTwitterCell alloc]initWithStyle:UITableViewCellStyleSubtitle reuseIdentifier:CellIdentifier];
}
NSDictionary *tweetDictionary = self.tweets[indexPath.row];
NSDictionary *user = tweetDictionary[@"user"];
NSString *userName = user[@"name"];
NSString *tweetContaint = tweetDictionary[@"text"];
NSString* imageUrl = [user objectForKey:@"profile_image_url"];
[self getImageFromUrl:imageUrl asynchronouslyForImageView:cell.imageView andKey:userName];
cell.profileImage.image = [UIImage imageNamed:@"images.png"];
NSArray *days = [NSArray arrayWithObjects:@"Mon ", @"Tue ", @"Wed ", @"Thu ", @"Fri ", @"Sat ", @"Sun ", nil];
NSArray *calendarMonths = [NSArray arrayWithObjects:@"Jan", @"Feb", @"Mar",@"Apr", @"May", @"Jun", @"Jul", @"Aug", @"Sep", @"Oct", @"Nov", @"Dec", nil];
NSString *dateStr = [tweetDictionary objectForKey:@"created_at"];
for (NSString *day in days) {
if ([dateStr rangeOfString:day].location == 0) {
dateStr = [dateStr stringByReplacingOccurrencesOfString:day withString:@""];
break;
}
}
NSArray *dateArray = [dateStr componentsSeparatedByString:#" "];
NSArray *hourArray = [[dateArray objectAtIndex:2] componentsSeparatedByString:#":"];
NSDateComponents *components = [[NSDateComponents alloc] init];
NSString *aux = [dateArray objectAtIndex:0];
int month = 0;
for (NSString *m in calendarMonths) {
month++;
if ([m isEqualToString:aux]) {
break;
}
}
components.month = month;
components.day = [[dateArray objectAtIndex:1] intValue];
components.hour = [[hourArray objectAtIndex:0] intValue];
components.minute = [[hourArray objectAtIndex:1] intValue];
components.second = [[hourArray objectAtIndex:2] intValue];
components.year = [[dateArray objectAtIndex:4] intValue];
NSTimeZone *gmt = [NSTimeZone timeZoneForSecondsFromGMT:2];
[components setTimeZone:gmt];
NSCalendar *calendar = [[NSCalendar alloc] initWithCalendarIdentifier:NSCalendarIdentifierGregorian];
[calendar setTimeZone:[NSTimeZone systemTimeZone]];
NSDate *date = [calendar dateFromComponents:components];
NSString *tweetDate = [self getTimeAsString:date];
NSString *tweetValues = [NSString stringWithFormat:@"%@ :%@",userName,tweetDate];
cell.textLabel.text = [NSString stringWithFormat:@"%@",tweetValues];
cell.detailTextLabel.text = [NSString stringWithFormat:@"%@",tweetContaint];
[cell.detailTextLabel setFont:[UIFont fontWithName:@"Helvetica" size:20]];
return cell;
}
- (NSString*)getTimeAsString:(NSDate *)lastDate {
NSTimeInterval dateDiff = [[NSDate date] timeIntervalSinceDate:lastDate];
int nrSeconds = dateDiff;//components.second;
int nrMinutes = nrSeconds / 60;
int nrHours = nrSeconds / 3600;
int nrDays = dateDiff / 86400; //components.day;
NSString *time;
if (nrDays > 5){
NSDateFormatter *dateFormat = [[NSDateFormatter alloc] init];
[dateFormat setDateStyle:NSDateFormatterShortStyle];
[dateFormat setTimeStyle:NSDateFormatterNoStyle];
time = [NSString stringWithFormat:@"%@", [dateFormat stringFromDate:lastDate]];
} else {
// days=1-5
if (nrDays > 0) {
if (nrDays == 1) {
time = @"1 day ago";
} else {
time = [NSString stringWithFormat:@"%d days ago", nrDays];
}
} else {
if (nrHours == 0) {
if (nrMinutes < 2) {
time = @"just now";
} else {
time = [NSString stringWithFormat:@"%d minutes ago", nrMinutes];
}
} else { // days=0 hours!=0
if (nrHours == 1) {
time = @"1 hour ago";
} else {
time = [NSString stringWithFormat:@"%d hours ago", nrHours];
}
}
}
}
return [NSString stringWithFormat:NSLocalizedString(@"%@", @"label"), time];
}
-(CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath
{
return 100;
}
The fundamental problem is that the standard imageView property of the standard table view cell will automatically resize itself based upon the image that is present when cellForRowAtIndexPath finishes. But since there is no image yet when you first present the table, the cell is laid out as if there's no image. And when you asynchronously update the image view's image, it won't resize the image view.
There are a couple of ways of solving this:
Don't use the default imageView provided by UITableViewCell, but rather define your own custom cell subclass with an IBOutlet to its own UIImageView property. Make sure that this UIImageView has a fixed layout (i.e., it doesn't use the intrinsic size derived from the underlying image).
If you do that, you can asynchronously update the image property for your custom UIImageView outlet, and because the layout was not contingent upon the presence of the image, any asynchronous updates of that image should appear correctly.
When you receive the image, don't just set the image view's image property, but rather reload the whole row associated with that NSIndexPath using reloadRowsAtIndexPaths.
If you do this, the cell will be laid out correctly assuming that you retrieve the image from the cache correctly, and do so before cellForRowAtIndexPath finishes.
Note, if you do this, you will need to fix your getImageFromUrl to actually try to retrieve the image from the cache first (and do this from the main queue, before to dispatch to the background queue), or else you'll end up in an endless loop.
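For illustration only (this is a hedged sketch, not the poster's code; imagesDictionary and tableView follow the names used in the question, and the indexPath parameter is something I'm introducing), a cache-first variant that reloads the row might look roughly like this:
// Sketch: check the cache first; otherwise fetch in the background and reload the row
-(void)getImageFromUrl:(NSString*)imageUrl forIndexPath:(NSIndexPath*)indexPath andKey:(NSString*)key {
    if ([self.imagesDictionary objectForKey:key] != nil) {
        return; // already cached; cellForRowAtIndexPath will use it directly
    }
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSData *imageData = [NSData dataWithContentsOfURL:[NSURL URLWithString:imageUrl]];
        UIImage *image = imageData ? [UIImage imageWithData:imageData] : nil;
        if (image) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [self.imagesDictionary setObject:image forKey:key];
                [self.tableView reloadRowsAtIndexPaths:@[indexPath]
                                      withRowAnimation:UITableViewRowAnimationNone];
            });
        }
    });
}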
Having said that, there are deeper problems here.
As I mentioned above, you're caching your images, but never using the cache when retrieving the images.
Because you are asynchronously updating the image view, you should also reset the image property of the UIImageView before you initiate the new asynchronous fetch; otherwise, when a cell is reused, you'll see the old image there until the new image is retrieved.
What if the cell was reused in the intervening period between calling getImageFromUrl and when the asynchronous request finishes? You'll be updating the image view for the wrong cell. (This problem will be more apparent when doing this over a slow connection. Run your code using the network link conditioner to simulate slow connections and you'll see the problem I'm describing.)
What if the user rapidly scrolls down to the 100th row in the table? The network requests for the visible cells will be backlogged behind the other 99 image requests. You could even get timeout errors on slow connections.
There are a bunch of tactical little issues in getImageFromUrl.
Why dispatch synchronously from one global queue to another? That's unnecessary. And why dispatch the UI update synchronously to the main queue? Dispatching it asynchronously is more efficient.
Why define imageData as __block outside of the block? Just define it within the block and you don't need the __block qualifier.
What if you didn't receive a valid image from the network request (e.g. you got a 404 error page)? The existing code would crash. There are all sorts of responses the server might provide that are not a valid image, and you really must detect that situation (i.e. make sure that not only is the NSData you received non-nil, but also that the UIImage you created from it is non-nil).
I'd probably use NSCache rather than NSMutableDictionary for the cache. Also, regardless of whether you use NSCache or NSMutableDictionary, you want to make sure that you respond to memory pressure events and empty that cache if needed.
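A minimal sketch of that idea, assuming an NSCache property named imageCache (a name I'm introducing here, not from the question):
// Sketch: an NSCache-backed store, cleared explicitly on memory warnings
self.imageCache = [[NSCache alloc] init];
[[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationDidReceiveMemoryWarningNotification
                                                  object:nil
                                                   queue:[NSOperationQueue mainQueue]
                                              usingBlock:^(NSNotification *note) {
    [self.imageCache removeAllObjects];
}];
// Store with [self.imageCache setObject:image forKey:key];
// and read back with [self.imageCache objectForKey:key].
In a real implementation you would also keep the observer token returned by addObserverForName: so you can remove the observer later.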
We can go through all of these individual problems, but it's a non-trivial amount of work to fix all of this. I might therefore suggest you consider the UIImageView categories of SDWebImage or AFNetworking. They take care of most of these issues, plus others. It will make your life much, much easier.

AVFoundation - why can't I get the video orientation right

I am using AVCaptureSession to capture video from a device's camera and then using AVAssetWriterInput and AVAssetTrack to compress/resize the video before uploading it to a server. The final videos will be viewed on the web via an HTML5 video element.
I'm running into multiple issues trying to get the orientation of the video correct. My app only supports landscape orientation and all captured videos should be in landscape orientation. However, I would like to allow the user to hold their device in either landscape direction (i.e. home button on either the left or the right hand side).
I am able to make the video preview show in the correct orientation with the following line of code
_previewLayer.connection.videoOrientation = UIDevice.currentDevice.orientation;
The problems start when processing the video via AVAssetWriterInput and friends. The result does not seem to account for the left vs. right landscape mode the video was captured in. IOW, sometimes the video comes out upside down. After some googling I found many people suggesting that the following line of code would solve this issue
writerInput.transform = videoTrack.preferredTransform;
...but this doesn't seem to work. After a bit of debugging I found that videoTrack.preferredTransform is always the same value, regardless of the orientation the video was captured in.
I tried manually tracking what orientation the video was captured in and setting the writerInput.transform to CGAffineTransformMakeRotation(M_PI) as needed. Which solved the problem!!!
...sorta
When I viewed the results on the device this solution worked as expected; videos were right-side-up regardless of left vs. right orientation while recording. Unfortunately, when I viewed the exact same videos in another browser (Chrome on a MacBook) they were all upside-down!?!?!?
What am I doing wrong?
EDIT
Here's some code, in case it's helpful...
-(void)compressFile:(NSURL*)inUrl;
{
NSString* fileName = [@"compressed." stringByAppendingString:inUrl.lastPathComponent];
NSError* error;
NSURL* outUrl = [PlatformHelper getFilePath:fileName error:&error];
NSDictionary* compressionSettings = @{ AVVideoProfileLevelKey: AVVideoProfileLevelH264Main31,
AVVideoAverageBitRateKey: [NSNumber numberWithInt:2500000],
AVVideoMaxKeyFrameIntervalKey: [NSNumber numberWithInt: 30] };
NSDictionary* videoSettings = @{ AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:1280],
AVVideoHeightKey: [NSNumber numberWithInt:720],
AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
AVVideoCompressionPropertiesKey: compressionSettings };
NSDictionary* videoOptions = @{ (id)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] };
AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
writerInput.expectsMediaDataInRealTime = YES;
AVAssetWriter* assetWriter = [AVAssetWriter assetWriterWithURL:outUrl fileType:AVFileTypeMPEG4 error:&error];
assetWriter.shouldOptimizeForNetworkUse = YES;
[assetWriter addInput:writerInput];
AVURLAsset* asset = [AVURLAsset URLAssetWithURL:inUrl options:nil];
AVAssetTrack* videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
// !!! this line does not work as expected and causes all sorts of issues (videos display sideways in some cases) !!!
//writerInput.transform = videoTrack.preferredTransform;
AVAssetReaderTrackOutput* readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:videoOptions];
AVAssetReader* assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
[assetReader addOutput:readerOutput];
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
[assetReader startReading];
[writerInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:
^{
/* snip */
}];
}
The problem is that modifying the writerInput.transform property only adds a tag in the video file metadata which instructs the video player to rotate the file during playback. That's why the videos play in the correct orientation on your device (I'm guessing they also play correctly in a Quicktime player as well).
The pixel buffers captured by the camera are still laid out in the orientation in which they were captured. Many video players will not check for the preferred orientation metadata tag and will just play the file in the native pixel orientation.
If you want the user to be able to record video holding the phone in either landscape mode, you need to rectify this at the AVCaptureSession level before compression by performing a transform on the CVPixelBuffer of each video frame. This Apple Q&A covers it (look at the AVCaptureVideoOutput documentation as well):
https://developer.apple.com/library/ios/qa/qa1744/_index.html
Investigating the link above is the correct way to solve your problem. An alternate fast n' dirty way to solve the same problem would be to lock the recording UI of your app into only one landscape orientation and then to rotate all of your videos server-side using ffmpeg.
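One detail worth noting when going the capture-connection route (this is my own aside, not something from the answers above): UIDeviceOrientation and AVCaptureVideoOrientation are different enums whose landscape values are mirrored, so a direct cast like the preview-layer line in the question can flip landscape left and right. A small mapping helper along these lines is usually used:
// Sketch: map a device orientation to the corresponding capture orientation.
// Note that the landscape cases are intentionally swapped between the two enums.
static AVCaptureVideoOrientation captureOrientationForDeviceOrientation(UIDeviceOrientation deviceOrientation)
{
    switch (deviceOrientation) {
        case UIDeviceOrientationLandscapeLeft:       return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:      return AVCaptureVideoOrientationLandscapeLeft;
        case UIDeviceOrientationPortraitUpsideDown:  return AVCaptureVideoOrientationPortraitUpsideDown;
        default:                                     return AVCaptureVideoOrientationPortrait;
    }
}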
In case it's helpful for anyone, here's the code I ended up with. I had to do the work on the video as it was being captured rather than as a post-processing step. This is a helper class that manages the capture.
Interface
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@interface VideoCaptureManager : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
{
AVCaptureSession* _captureSession;
AVCaptureVideoPreviewLayer* _previewLayer;
AVCaptureVideoDataOutput* _videoOut;
AVCaptureDevice* _videoDevice;
AVCaptureDeviceInput* _videoIn;
dispatch_queue_t _videoProcessingQueue;
AVAssetWriter* _assetWriter;
AVAssetWriterInput* _writerInput;
BOOL _isCapturing;
NSString* _gameId;
NSString* _authToken;
}
-(void)setSettings:(NSString*)gameId authToken:(NSString*)authToken;
-(void)setOrientation:(AVCaptureVideoOrientation)orientation;
-(AVCaptureVideoPreviewLayer*)getPreviewLayer;
-(void)startPreview;
-(void)stopPreview;
-(void)startCapture;
-(void)stopCapture;
@end
Implementation (w/ a bit of editing and a few little TODO's)
@implementation VideoCaptureManager
-(id)init;
{
self = [super init];
if (self) {
NSError* error;
_videoProcessingQueue = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL);
_captureSession = [AVCaptureSession new];
_videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
_previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
[_previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
_videoOut = [AVCaptureVideoDataOutput new];
_videoOut.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] };
_videoOut.alwaysDiscardsLateVideoFrames = YES;
_videoIn = [AVCaptureDeviceInput deviceInputWithDevice:_videoDevice error:&error];
// handle errors here
[_captureSession addInput:_videoIn];
[_captureSession addOutput:_videoOut];
}
return self;
}
-(void)setOrientation:(AVCaptureVideoOrientation)orientation;
{
_previewLayer.connection.videoOrientation = orientation;
for (AVCaptureConnection* item in _videoOut.connections) {
item.videoOrientation = orientation;
}
}
-(AVCaptureVideoPreviewLayer*)getPreviewLayer;
{
return _previewLayer;
}
-(void)startPreview;
{
[_captureSession startRunning];
}
-(void)stopPreview;
{
[_captureSession stopRunning];
}
-(void)startCapture;
{
if (_isCapturing) return;
NSURL* url = put code here to create your output url
NSDictionary* compressionSettings = @{ AVVideoProfileLevelKey: AVVideoProfileLevelH264Main31,
AVVideoAverageBitRateKey: [NSNumber numberWithInt:2500000],
AVVideoMaxKeyFrameIntervalKey: [NSNumber numberWithInt: 1],
};
NSDictionary* videoSettings = @{ AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:1280],
AVVideoHeightKey: [NSNumber numberWithInt:720],
AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
AVVideoCompressionPropertiesKey: compressionSettings
};
_writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
_writerInput.expectsMediaDataInRealTime = YES;
NSError* error;
_assetWriter = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeMPEG4 error:&error];
// handle errors
_assetWriter.shouldOptimizeForNetworkUse = YES;
[_assetWriter addInput:_writerInput];
[_videoOut setSampleBufferDelegate:self queue:_videoProcessingQueue];
_isCapturing = YES;
}
-(void)stopCapture;
{
if (!_isCapturing) return;
[_videoOut setSampleBufferDelegate:nil queue:nil]; // TODO: seems like there could be a race condition between this line and the next (could end up trying to write a buffer after calling writingFinished)
dispatch_async(_videoProcessingQueue, ^{
[_assetWriter finishWritingWithCompletionHandler:^{
[self writingFinished];
}];
});
}
-(void)writingFinished;
{
// TODO: need to check _assetWriter.status to make sure everything completed successfully
// do whatever post processing you need here
}
-(void)captureOutput:(AVCaptureOutput*)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection;
{
NSLog(@"Video frame was dropped.");
}
-(void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if(_assetWriter.status != AVAssetWriterStatusWriting) {
CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
[_assetWriter startWriting]; // TODO: need to check the return value (a bool)
[_assetWriter startSessionAtSourceTime:lastSampleTime];
}
if (!_writerInput.readyForMoreMediaData || ![_writerInput appendSampleBuffer:sampleBuffer]) {
NSLog(@"Failed to write video buffer to output.");
}
}
@end
For compressing/resizing the video, we can use AVAssetExportSession.
We can upload a video of up to 3:30 minutes in duration.
If the video duration is more than 3:30 minutes, it will show a memory warning.
Since we are not applying any transform to the video, it remains as it was recorded.
Below is sample code for compressing the video.
We can check the video size before and after compression.
-(void)trimVideoWithURL:(NSURL *)inputURL{
NSString *path1 = [inputURL path];
NSData *data = [[NSFileManager defaultManager] contentsAtPath:path1];
NSLog(@"size before compress video is %lu",(unsigned long)data.length);
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset640x480];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:@"output.mp4"];
fullPath = [NSURL fileURLWithPath:outputURL]; // use a file URL for a local path
// Remove Existing File
[manager removeItemAtPath:outputURL error:nil];
exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
CMTime start = CMTimeMakeWithSeconds(1.0, 600);
CMTime duration = CMTimeMakeWithSeconds(1.0, 600);
CMTimeRange range = CMTimeRangeMake(start, duration);
exportSession.timeRange = range;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void)
{
switch (exportSession.status) {
case AVAssetExportSessionStatusCompleted:{
NSString *path = [fullPath path];
NSData *data = [[NSFileManager defaultManager] contentsAtPath:path];
NSLog(@"size after compress video is %lu",(unsigned long)data.length);
NSLog(@"Export Complete %d %@", exportSession.status, exportSession.error);
/*
Do your neccessay stuff here after compression
*/
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Failed:%@",exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Canceled:%@",exportSession.error);
break;
default:
break;
}
}];
}

merge audio and video

I have an app in which I need to merge audio file into video file.
Sometimes my audio file is longer than the video's duration. I used AVFoundation's mix composition and both get merged, but the problem is that when the video is shorter, the audio keeps playing to its full duration after the video finishes. The audio should stop when the video finishes.
Could anyone provide a solution?
Use the following code to stop your audio; it also fades the audio out over the last five seconds.
- (void)getFadeAudioFile {
if (![appDelegate.musicFilePath isEqualToString:@"Not set"]) {
NSURL *url = [[[NSURL alloc]initWithString:appDelegate.musicFilePath]autorelease];
AVURLAsset* audioAsset = [[[AVURLAsset alloc]initWithURL:url options:nil]autorelease];
NSString *filePath = [self applicationDocumentsDirectory];
NSString *outputFilePath = nil;
outputFilePath = [filePath stringByAppendingPathComponent:@"/mySong.m4a"];
NSURL *outputFileUrl = [[[NSURL alloc]initFileURLWithPath:outputFilePath]autorelease];
NSError *theError = nil;
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:&theError];
[self exportAsset:audioAsset toFilePath:outputFileUrl];
}
}
- (BOOL)exportAsset:(AVAsset *)avAsset toFilePath:(NSURL *)filePath {
// get the first audio track
NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
if ([tracks count] == 0) return NO;
AVAssetTrack *track = [tracks objectAtIndex:0];
// create the export session
// no need to retain here since the session will be retained by the
// completion handler since it is referenced there
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:avAsset presetName:AVAssetExportPresetAppleM4A];
if (nil == exportSession) return NO;
NSLog(@"arrOfImagesForVideo.count:%d",arrImageDataDict.count);
int imgCount = arrImageDataDict.count+1;
int delay = appDelegate.delaySecond;
int duration = imgCount*delay;
CMTime stopTime = CMTimeMake(duration, 1);
// create trim time range - 20 seconds starting from 30 seconds into the asset
// NSInteger totalTime = CMTimeGetSeconds(avAsset.duration);
CMTime startTime = CMTimeMake(0, 1);
//CMTime stopTime = CMTimeMake(totalTime, 1);//0,30
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
// create fade in time range - 10 seconds starting at the beginning of trimmed asset
NSInteger fadeTime = duration-5;
NSLog(@"fade time:%d",fadeTime);
NSLog(@"fade duration:%d",duration);
CMTime startFadeInTime = CMTimeMake(fadeTime, 1);
CMTime endFadeInTime = CMTimeMake(duration, 1);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);
// setup audio mix
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:fadeInTimeRange];
exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];
// configure export session output with all our parameters
exportSession.outputURL = filePath; // output path
exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
exportSession.timeRange = exportTimeRange; // trim time range
exportSession.audioMix = exportAudioMix; // fade in audio mix
[exportSession exportAsynchronouslyWithCompletionHandler:
^(void ) {
//[self saveVideoToAlbum:outputFilePath];
}
];
return YES;
}
It will be saved in your Documents directory; use it like this:
NSString *filePath = [self applicationDocumentsDirectory];
NSString *outputFilePath1 = [filePath stringByAppendingPathComponent:@"/mySong.m4a"];
NSURL *audio_inputFileUrl = [[NSURL alloc]initFileURLWithPath:outputFilePath1];
int imgCount = imageArray.count;
int delay = appDelegate.delaySecond;
NSLog(@"audio merged");
int duration = imgCount*delay;
CMTime seekingCM = CMTimeMake(duration, 1);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, seekingCM);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//[audioAsset autorelease];
newAudioTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio][0];
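To directly address the original question of stopping the audio when the video ends, the key point is to cap the inserted audio range at the video's duration. A minimal sketch, reusing the names above and assuming a videoAsset variable for the video being merged (not shown in the snippet):
// Sketch: never insert more audio than the video's duration
CMTime videoDuration = videoAsset.duration;
CMTime insertDuration = CMTimeMinimum(audioAsset.duration, videoDuration);
CMTimeRange audioRange = CMTimeRangeMake(kCMTimeZero, insertDuration);
[b_compositionAudioTrack insertTimeRange:audioRange
                                 ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                  atTime:kCMTimeZero
                                   error:nil];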

Memory leak issue while downloading large number of images

I am trying to download more than 600 images in a loop, with a progress meter at the top of the screen. I block the screen with a faded layer to show activity and progress.
I get memory warning messages partway through and the app crashes.
My steps to reach the loop are:
In the app delegate, I first check the Core Data table for all rows that have a "0" value in the isImageAvailable bool field.
It shows me some count (say 600), and I show an alert with YES and NO options.
On YES: [self performSelector:@selector(myDownload:) withObject:nil afterDelay:0.2];
In myDownload:
NSOperationQueue *queue = [NSOperationQueue new];
// Create our NSInvocationOperation to call loadDataWithOperation, passing in nil
NSInvocationOperation *operation = [[NSInvocationOperation alloc] initWithTarget:self
selector:@selector(startUpdatingRecords:) object:nil];
// Add the operation to the queue
[queue addOperation:operation];
[operation release];
[queue release];
In startUpdatingRecords:
-(void)startUpdatingRecords:(id)sender
{
[self performSelectorInBackground:@selector(updateProgressMeter:) withObject: [NSString stringWithFormat:@"%d",self.loopStartIndex]];
// Variable declarations
CGSize newSizeLarge ;
NSPredicate *predicate;
NSMutableArray *MatchingID;
Image_DB *data;
// Cache Directory path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSData *responseData; // = [[NSData alloc]init] ;
NSURL *url = [[[NSURL alloc]init] autorelease];
NSMutableURLRequest *request = [[[NSMutableURLRequest alloc]init] autorelease];
UIImage *imgSelected_Large = [[[UIImage alloc]init] autorelease];
// Loop through all IDs
for (int i = 0; i < [self.arrayOfID count]; i++) //for (int i = loopStart; i < loopEnd; i++)
{
if (self.abortDownload)
{
break;
}
NSString *documentsDirectory = [[[NSString alloc] initWithFormat:@"%@",[paths objectAtIndex:0]] autorelease];
documentsDirectory = [paths objectAtIndex:0];
documentsDirectory = [documentsDirectory stringByAppendingFormat:@"/ImageFolder"]; // Image folder path
myClass *classObj = [self.arrayOfID objectAtIndex:i];
NSString *strURl = [[[NSString alloc] initWithFormat:@"%@%@", self.MyURL,recipeObj.recipeImageStr] autorelease];
//NSLog(@"URL = %@",strURl);
url = [NSURL URLWithString:strURl];
request = [NSMutableURLRequest requestWithURL:url];
responseData = [NSURLConnection sendSynchronousRequest:request returningResponse:NULL error:NULL]; // Get Image Data into NSData
//imgSelected_Large = [UIImage imageWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:strURl]]];
NSLog(@"Download Count = %d",i+1);
if (responseData != nil)
{
imgSelected_Large = [UIImage imageWithData:responseData];
// Resizining image
newSizeLarge.width = 320;
newSizeLarge.height = 180;
imgSelected_Large = [self imageWithImage:imgSelected_Large scaledToSize:newSizeLarge]; // New sized image
NSData *dataPhoto; // no need to release it because UIImageJPEGRepresentation gives autoreleased NSData obj.
dataPhoto = UIImageJPEGRepresentation(imgSelected_Large, 0.6); // Set new image representation and its Compression Quality
documentsDirectory = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"Image_%d", classObj.nodeID]];
[dataPhoto writeToFile:documentsDirectory atomically:YES]; //Write file to local folder at default path
predicate = [NSPredicate predicateWithFormat: @"(image_ID = %d )",recipeObj.nodeID];
MatchingID = [CoreDataAPIMethods searchObjectsInContext:@"Image_DB" :predicate :@"image_ID" :YES :self.managedObjectContext];
// Setting flag variable for available image
for (int j = 0; j< [MatchingID count]; j++)
{
//Assign the Authors Records in Class Object and save to Database
data = (Image_DB*) [MatchingID objectAtIndex:j];
// data.image_large = dataPhoto; // Code for storing BLOB object to DB
data.extra_1 = @"1";
//NSLog(@"Flag updated");
}
}
// Exit out code
if ( i == [self.arrayOfID count] - 1 || i == [self.arrayOfID count]) // Its the last record to be stored
{
NSError *error;
if (![self.managedObjectContext save:&error])
{
// Handle the error...
NSLog(@"Error in updating %@",error);
}
self.isUpdateImageCalled = NO;
[self performSelectorOnMainThread:@selector(removeProgressMeter) withObject:nil waitUntilDone:NO];
}
// Update UI screen while in downloading process
[self performSelectorInBackground:@selector(updateProgressMeter:) withObject:[NSString stringWithFormat:@"%d",self.loopStartIndex+i+1]];
}
}
If I don't release responseData, my app shows memory warnings and crashes. If I do release it, a "[NSConcreteMutableData release]: message sent to deallocated instance 0x1e931de0" error occurs.
How can I refine my code? Can anyone suggest a rework?
Please help me out.
The responseData returned by sendSynchronousRequest is autoreleased, so you shouldn't release it yourself. At first sight I don't see a memory leak in your code. It is possible that your application simply uses too much memory without leaking it. Try placing an autorelease pool inside your for loop:
for (...) {
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
// your original code with a lot of autoreleased objects
[pool release];
}
If you wrap your code in an autorelease pool, all objects that are sent the autorelease message inside that wrapped code will actually be released when the pool itself is released: this way you purge the memory on every iteration of the loop.
See also Using Autorelease Pools in the doc, it specifically mentions that you should use them in the case "if you write a loop that creates many temporary objects".
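As an aside (not part of the original answer), if the project is built with a reasonably recent compiler, the same pattern can be written with the @autoreleasepool block, which also works under manual reference counting:
for (int i = 0; i < [self.arrayOfID count]; i++) {
    @autoreleasepool {
        // download, resize, and save one image here;
        // temporaries are drained at the end of each iteration
    }
}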

Trim audio with iOS

I want to implement a feature that lets the user trim an audio file (.caf) which he previously recorded. The recording part already works, but how can I add a trimming feature similar to the one in the Voice Memos app? Is there an API for the audio trimmer Apple uses?
Any help would be great...
How about using AVFoundation? Import the audio file into an AVAsset (a composition, etc.), then you can export it to a file, setting the preferred time range.
I wrote a stock function a while ago that exports an asset to a file; you can also specify an audio mix. As written below it exports the whole file, but you could assign a CMTimeRange to exporter.timeRange and there you go. I haven't tested that, but it should work(?). Another alternative could be to adjust the time ranges when creating the AVAsset and tracks. Of course the exporter only handles m4a (AAC). Sorry if this wasn't what you wanted.
-(void)exportAsset:(AVAsset*)asset toFile:(NSString*)filename overwrite:(BOOL)overwrite withMix:(AVAudioMix*)mix {
//NSArray* availablePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:asset];
AVAssetExportSession* exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
if (exporter == nil) {
DLog(@"Failed creating exporter!");
return;
}
DLog(@"Created exporter! %@", exporter);
// Set output file type
DLog(@"Supported file types: %@", exporter.supportedFileTypes);
for (NSString* filetype in exporter.supportedFileTypes) {
if ([filetype isEqualToString:AVFileTypeAppleM4A]) {
exporter.outputFileType = AVFileTypeAppleM4A;
break;
}
}
if (exporter.outputFileType == nil) {
DLog(@"Needed output file type not found? (%@)", AVFileTypeAppleM4A);
return;
}
// Set outputURL
NSArray* paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString* parentDir = [NSString stringWithFormat:@"%@/", [paths objectAtIndex:0]];
NSString* outPath = [NSString stringWithFormat:@"%@%@", parentDir, filename];
NSFileManager* manager = [NSFileManager defaultManager];
if ([manager fileExistsAtPath:outPath]) {
DLog(@"%@ already exists!", outPath);
if (!overwrite) {
DLog(@"Not overwriting, uh oh!");
return;
}
else {
// Overwrite
DLog(@"Overwrite! (delete first)");
NSError* error = nil;
if (![manager removeItemAtPath:outPath error:&error]) {
DLog(@"Failed removing %@, error: %@", outPath, error.description);
return;
}
else {
DLog(@"Removed %@", outPath);
}
}
}
NSURL* const outUrl = [NSURL fileURLWithPath:outPath];
exporter.outputURL = outUrl;
// Specify a time range in case only part of file should be exported
//exporter.timeRange =
if (mix != nil)
exporter.audioMix = mix; // important
DLog(@"Starting export! (%@)", exporter.outputURL);
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
// Export ended for some reason. Check in status
NSString* message;
switch (exporter.status) {
case AVAssetExportSessionStatusFailed:
message = [NSString stringWithFormat:@"Export failed. Error: %@", exporter.error.description];
DLog(@"%@", message);
[self showAlert:message];
break;
case AVAssetExportSessionStatusCompleted: {
/*if (playfileWhenExportFinished) {
DLog(@"playfileWhenExportFinished!");
[self playfileAfterExport:exporter.outputURL];
playfileWhenExportFinished = NO;
}*/
message = [NSString stringWithFormat:@"Export completed: %@", filename];
DLog(@"%@", message);
[self showAlert:message];
break;
}
case AVAssetExportSessionStatusCancelled:
message = [NSString stringWithFormat:@"Export cancelled!"];
DLog(@"%@", message);
[self showAlert:message];
break;
default:
DLog(@"Export unhandled status: %d", exporter.status);
break;
}
}];
}
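For the trimming itself, the commented-out timeRange line inside the function above could be filled in along these lines (a minimal sketch; the 5-second start and 10-second length are arbitrary values chosen for illustration):
// Sketch: export only a sub-range of the asset (set before calling exportAsynchronously...)
CMTime start = CMTimeMakeWithSeconds(5.0, 600);
CMTime length = CMTimeMakeWithSeconds(10.0, 600);
exporter.timeRange = CMTimeRangeMake(start, length);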
The above answer from @Jonny is correct. Here I'm adding the use of an audio mix to apply a fade-in effect while trimming the audio.
Output: the audio asset trimmed to 20 seconds with a 10-second fade-in. The trim set up in the code snippet starts at the 30-second mark of the asset, so the track duration should be at least 50 seconds.
- (BOOL)exportAssettoFilePath:(NSString *)filePath {
NSString *inputFilePath = <inputFilePath>;
NSURL *videoToTrimURL = [NSURL fileURLWithPath:inputFilePath];
AVAsset *avAsset = [AVAsset assetWithURL:videoToTrimURL];
// we need the audio asset to be at least 50 seconds long for this snippet
CMTime assetTime = [avAsset duration];
Float64 duration = CMTimeGetSeconds(assetTime);
if (duration < 50.0) return NO;
// get the first audio track
NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
if ([tracks count] == 0) return NO;
AVAssetTrack *track = [tracks objectAtIndex:0];
// create the export session
// no need for a retain here, the session will be retained by the
// completion handler since it is referenced there
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:avAsset
presetName:AVAssetExportPresetAppleM4A];
if (nil == exportSession) return NO;
// create trim time range - 20 seconds starting from 30 seconds into the asset
CMTime startTime = CMTimeMake(30, 1);
CMTime stopTime = CMTimeMake(50, 1);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
// create fade in time range - 10 seconds starting at the beginning of trimmed asset
CMTime startFadeInTime = startTime;
CMTime endFadeInTime = CMTimeMake(40, 1);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime,
endFadeInTime);
// setup audio mix
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters =
[AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0
timeRange:fadeInTimeRange];
exportAudioMix.inputParameters = [NSArray
arrayWithObject:exportAudioMixInputParameters];
// configure export session output with all our parameters
exportSession.outputURL = [NSURL fileURLWithPath:filePath]; // output path
exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
exportSession.timeRange = exportTimeRange; // trim time range
//exportSession.audioMix = exportAudioMix; // fade in audio mix
// perform the export
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (AVAssetExportSessionStatusCompleted == exportSession.status) {
NSLog(@"AVAssetExportSessionStatusCompleted");
} else if (AVAssetExportSessionStatusFailed == exportSession.status) {
// a failure may happen because of an event out of your control
// for example, an interruption like a phone call comming in
// make sure and handle this case appropriately
NSLog(@"AVAssetExportSessionStatusFailed");
} else {
NSLog(@"Export Session Status: %ld", (long)exportSession.status);
}
}];
return YES;
}
Thanks
For more details:
https://developer.apple.com/library/ios/qa/qa1730/_index.html