How to release AVAssetExportSession in iOS (Objective-C)

I wrote the code below and I am getting a memory leak at this line:
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
My Code is:
NSError *error;
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSURL* audio_inputFileUrl = [_songPath valueForProperty:MPMediaItemPropertyAssetURL];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
NSString *video_inputFilePath = [documentsDirectory stringByAppendingPathComponent:@"videoOutput1234videoOutput1234.mp4"];
NSLog(@"output url %@", video_inputFilePath);
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSArray *docPaths=NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docPath=[docPaths objectAtIndex:0];
NSString* outputFilePath = [docPath stringByAppendingPathComponent:@"OutPut.mov"];//[[NSBundle mainBundle] pathForResource:@"OutPut" ofType:@"mov"];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
{
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
}
else {
NSString *defaultPath = [[[NSBundle mainBundle] resourcePath] stringByAppendingPathComponent:@"OutPut.mov"];
[[NSFileManager defaultManager] copyItemAtPath:defaultPath toPath:outputFilePath error:&error];
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
}
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
[_assetExport exportAsynchronouslyWithCompletionHandler:^(void ) {[self saveVideoToAlbum:outputFilePath]; } ];
[outputFileUrl release];
[videoAsset release];
[audioAsset release];
if (_assetExport.status == 2) {
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Exporting to library is completed" message:nil delegate:nil cancelButtonTitle:nil otherButtonTitles:@"OK",nil];
[alertView show];
[alertView release];
[mixComposition release];
}
When I try to release the AVAssetExportSession my app crashes, and I have no idea how to release it correctly.
Can you please tell me the answer?

You don't need to release these:
[outputFileUrl release];
[mixComposition release];
Remove those lines from your code. You do not own these objects (you never alloc'd, copied, or retained them), so you must not release them.
After that it should work.
For the export session itself, either autorelease it:
AVAssetExportSession* _assetExport = [[[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough] autorelease];
or release it once you are done with it:
[_assetExport release];
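A minimal sketch of the second option under manual reference counting, reusing the mixComposition, outputFileUrl and outputFilePath from the question: release the session inside the completion handler, which is also where the status check belongs, because exportAsynchronouslyWithCompletionHandler: returns immediately and the status will not yet be AVAssetExportSessionStatusCompleted right after the call.

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.outputURL = outputFileUrl;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    // The export runs asynchronously; only here is the status meaningful.
    if (exporter.status == AVAssetExportSessionStatusCompleted) {
        [self saveVideoToAlbum:outputFilePath];
    } else if (exporter.status == AVAssetExportSessionStatusFailed) {
        NSLog(@"Export failed: %@", exporter.error);
    }
    // The block retains the session, so balancing the alloc here is safe under MRC.
    [exporter release];
}];

If you show a UIAlertView from the handler, dispatch it to the main queue first, since the completion handler is not guaranteed to run on the main thread.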

Related

How do I generate a thumbnail of a map using a given Lat/Long in iOS?

I am working on a chat application where I am going to send a map to another user.
Below is my code to generate a thumbnail of a map. Is there any way to generate a map thumbnail in Objective-C?
Code to generate thumbnail of static map:
NSString *staticMapUrl = [NSString stringWithFormat:@"http://maps.google.com/maps/api/staticmap?markers=color:blue|%@,%@&%@&sensor=true",_lat, _longi,@"zoom=13&size=%dx%d"];
NSURL *mapUrl = [NSURL URLWithString:[staticMapUrl stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]];
UIImage *Mapimage = [UIImage imageWithData: [NSData dataWithContentsOfURL:mapUrl]];
Save thumbnail images in document directories:
NSString *datapathMap=[Locations stringByAppendingPathComponent:[NSString stringWithFormat:@"Sm_%@_lc_%@.jpg",userid,dateString1]];
NSData *urlData = [NSData dataWithContentsOfURL:mapUrl];
datapathMap = [datapathMap stringByStandardizingPath];
[urlData writeToFile:datapathMap atomically:YES];
Send thumbnail data to bubble to display thumbnail:
NSBubbleData *MapSendBubble = [NSBubbleData dataWithImage:mapImage date:[NSDate date] type:BubbleTypeMine];
MapSendBubble.avatar = [UIImage imageNamed:@"avatar1.png"];
[bubbleData addObject:MapSendBubble];
[_bubbleTable reloadData];
Send map:
NSDate *today=[NSDate date];
NSDateFormatter *dateFormat1 = [[NSDateFormatter alloc] init];
[dateFormat1 setDateFormat:@"YYYYMMddhhmmss"];
NSString *dateString1=[dateFormat1 stringFromDate:today];
NSString *userid= @"mc1"; NSString *latlong=[NSString stringWithFormat:@"%@%@",[_lat substringToIndex:2],[_longi substringToIndex:2]];
[map setTag:[[NSString stringWithFormat:@"%@",latlong] integerValue]];
NSString *longlat=[NSString stringWithFormat:@"%@,%@",_longi,_lat];
NSDateFormatter *dateFormat = [[NSDateFormatter alloc] init];
[dateFormat setDateFormat:@"dd-MMM-yy HH:mm:ss"];
NSString *date=[dateFormat stringFromDate:[NSDate date]];
//Find the Documents directory. You could consider using the Caches directory instead (depends on the data you are fetching)
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *path = [paths objectAtIndex:0];
NSError *error;
NSString *MainFolder = [path stringByAppendingPathComponent:[NSString stringWithFormat:@"/SmoothChat"]];
NSString *Locations=[MainFolder stringByAppendingPathComponent:[NSString stringWithFormat:@"/Locations"]];
if (![[NSFileManager defaultManager] fileExistsAtPath:MainFolder])
{
[[NSFileManager defaultManager] createDirectoryAtPath:MainFolder withIntermediateDirectories:NO attributes:nil error:&error];
}
if (![[NSFileManager defaultManager] fileExistsAtPath:Locations])
{
[[NSFileManager defaultManager] createDirectoryAtPath:Locations withIntermediateDirectories:NO attributes:nil error:&error];
}
NSString *staticMapUrl = [NSString stringWithFormat:@"http://maps.google.com/maps/api/staticmap?markers=color:red|%@,%@&%@&sensor=true",_lat,_longi,@"zoom=15&size=300x180"];
NSURL *mapUrl = [NSURL URLWithString:[staticMapUrl stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]];
UIImage *Mapimage = [UIImage imageWithData: [NSData dataWithContentsOfURL:mapUrl]];
NSString *datapathMap=[Locations stringByAppendingPathComponent:[NSString stringWithFormat:@"Sm_%@_lc_%@.jpg",userid,dateString1]];
NSData *urlData = [NSData dataWithContentsOfURL:mapUrl];
datapathMap = [datapathMap stringByStandardizingPath];
[urlData writeToFile:datapathMap atomically:YES];
// NSString *uuid1 = [[NSUUID UUID] UUIDString];
// [self sendXMPPMessage:1 :[NSString stringWithFormat:@"lc%@",value] :[NSString stringWithFormat:@"oi_%@_lc_%@.jpg",userid,dateString1] :@"" :jsonstring :[NSString stringWithFormat:@"%@",longlat] :_user :@"" :@"" :@"" :[[NSUserDefaults standardUserDefaults] stringForKey:@"current_chat"] :@"" :@"" :_senderType :[self currentDateTme] :0];
NSBubbleData *photoBubble11 = [NSBubbleData dataWithImage:staticMapUrl date:[NSDate date] type:BubbleTypeMine];
photoBubble11.avatar = [UIImage imageNamed:@"avatar1.png"];
[bubbleData addObject:photoBubble11];
[_bubbleTable reloadData];
[self goToBottom];
[map setHidden:YES];
[blurView setHidden:YES];
[_NewView setHidden:YES];
[map removeAnnotation:point];
currentLocation=nil;
[locationManager stopUpdatingLocation];
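If you are targeting iOS 7 or later, one alternative worth trying is MKMapSnapshotter, which renders the map thumbnail locally instead of downloading it from the Google Static Maps API. A minimal sketch (ARC assumed; the region span and output size are placeholder values):

#import <MapKit/MapKit.h>

MKMapSnapshotOptions *options = [[MKMapSnapshotOptions alloc] init];
CLLocationCoordinate2D center = CLLocationCoordinate2DMake([_lat doubleValue], [_longi doubleValue]);
options.region = MKCoordinateRegionMakeWithDistance(center, 1000, 1000); // roughly a zoom-15-sized area
options.size = CGSizeMake(300, 180);

MKMapSnapshotter *snapshotter = [[MKMapSnapshotter alloc] initWithOptions:options];
[snapshotter startWithCompletionHandler:^(MKMapSnapshot *snapshot, NSError *error) {
    if (snapshot) {
        UIImage *mapImage = snapshot.image;
        // Use mapImage for the bubble, or write UIImageJPEGRepresentation(mapImage, 0.8) to datapathMap.
    }
}];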

Cut AVAsset faster

I have this code to cut my AVAsset video:
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_url options:nil];
[[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@",_url] error:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:@"output.mp4"];
[manager removeItemAtPath:outputURL error:nil];
exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
CMTime start = CMTimeMakeWithSeconds(slider.min*(videoDuration-1), 600);
CMTime duration = CMTimeMakeWithSeconds(slider.max*(videoDuration-1), 600);
CMTimeRange range = CMTimeRangeMake(start, duration);
exportSession.timeRange = range;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
switch (exportSession.status) {
case AVAssetExportSessionStatusCompleted:
_url = [NSURL URLWithString:[NSString stringWithFormat:@"file://%@",outputURL]];
break;
default:
break;
}
}];
The problem: It takes a while to save the AVAsset to the URL and reload it. Is it possible to make it faster?
You could use AVAssetExportPresetPassthrough as your preset name, instead of AVAssetExportPresetHighestQuality. You may be able to avoid an expensive transcode that way. Setting a time range may preclude this for some formats, but it's worth trying.
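If you want to verify up front whether passthrough will actually work for your asset and output type, a small check along these lines should help (determineCompatibilityOfExportPreset:withAsset:outputFileType:completionHandler: is available from iOS 6):

[AVAssetExportSession determineCompatibilityOfExportPreset:AVAssetExportPresetPassthrough
                                                 withAsset:asset
                                            outputFileType:AVFileTypeQuickTimeMovie
                                         completionHandler:^(BOOL compatible) {
    // Passthrough copies samples instead of re-encoding, so when it is compatible
    // a simple trim finishes much faster than AVAssetExportPresetHighestQuality.
    NSString *preset = compatible ? AVAssetExportPresetPassthrough
                                  : AVAssetExportPresetHighestQuality;
    // Create the AVAssetExportSession with `preset` and export as in the code above.
}];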

How to merge audio into a video file

I have an application in which I need to merge an audio file into the video recorded by an AVCaptureSession, so that both the recorded movie's audio and the merged audio can be heard.
I am able to merge the audio into the video with AVMutableComposition and it exports fine, but the problem is that the original recorded audio cannot be heard. Here is my code:
NSString *resourceAudioName = [NSString stringWithFormat:@"%@_audio",getTitle];
NSURL *audio_inputFileUrl = [[NSBundle mainBundle] URLForResource:resourceAudioName withExtension:@"mp3"];
NSString * video_inputFilePath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
video_inputFilePath = [video_inputFilePath stringByAppendingPathComponent:@"movie1.mp4"];
self.outputFilePath = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"/Documents/OutPutMovie-%@.mp4",[NSDate date]]];
NSURL * outputFileUrl = [NSURL fileURLWithPath:self.outputFilePath];
if (audio_inputFileUrl) {
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSURL * video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset * videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
NSArray * videoAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack * videoAssetTrack2 = ([videoAssetTracks2 count] > 0 ? [videoAssetTracks2 objectAtIndex:0] : nil);
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack * a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:videoAssetTrack2 atTime:nextClipStartTime error:nil];
AVURLAsset * audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
NSArray * videoAssetTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack * videoAssetTrack = ([videoAssetTracks count] > 0 ? [videoAssetTracks objectAtIndex:0] : nil);
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack * b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:videoAssetTrack atTime:nextClipStartTime error:nil];
AVAssetExportSession * _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
[videoAsset release];
[audioAsset release];
[_assetExport release];
[self performSelectorOnMainThread:#selector(moveNextView) withObject:nil waitUntilDone:YES];
}
}
];
-(void)mergeAndSave
{
//Create AVMutableComposition Object which will hold our multiple AVMutableCompositionTrack or we can say it will hold our video and audio files.
AVMutableComposition* mixComposition = [AVMutableComposition composition];
//Now first load your audio file using AVURLAsset. Make sure you give the correct path of your videos.
NSURL *audio_url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"kick" ofType:@"mp3"]];
self.audioAsset = [[AVURLAsset alloc]initWithURL:audio_url options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, self.audioAsset.duration);
//Now we are creating the first AVMutableCompositionTrack containing our audio and add it to our AVMutableComposition object.
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[self.audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//Now we will load video file.
NSURL *video_url = mediaUrl;
self.videoAsset = [[AVURLAsset alloc]initWithURL:video_url options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,self.audioAsset.duration);
//Now we are creating the second AVMutableCompositionTrack containing our video and add it to our AVMutableComposition object.
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
//decide the path where you want to store the final video created with audio and video merge.
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"video.mov"]];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
//Now create an AVAssetExportSession object that will save your final video at specified path.
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:_assetExport];
});
}
];
}
- (void)exportDidFinish:(AVAssetExportSession*)session
{
if(session.status == AVAssetExportSessionStatusCompleted){
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[library writeVideoAtPathToSavedPhotosAlbum:outputURL
completionBlock:^(NSURL *assetURL, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles: nil, nil];
[alert show];
}else{
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles: nil];
[alert show];
//[self loadMoviePlayer:outputURL];
}
});
}];
}
}
self.audioAsset = nil;
self.videoAsset = nil;
//[activityView stopAnimating];
//[activityView setHidden:YES];
}
Try this:
I believe you have to use AVMutableAudioMix in order to mix more than one audio source. With your approach, only the audioAsset gets added to the composition; the video's own audio track is never inserted. There is a session about this in WWDC 2010 which explains how to do it. I have tried to implement it without success so far; hopefully someone will help us fix it.
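As a sketch of that direction (reusing mixComposition, videoAsset, audioAsset, b_compositionAudioTrack and _assetExport from the question code, and assuming the recorded video actually contains an audio track): insert the video's own audio as a second composition audio track, and use AVMutableAudioMix only if you need to balance the two volumes.

// Add the video's own audio track alongside the external music track.
NSArray *videoAudioTracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
if ([videoAudioTracks count] > 0) {
    AVMutableCompositionTrack *c_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [c_compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                     ofTrack:[videoAudioTracks objectAtIndex:0]
                                      atTime:kCMTimeZero error:nil];
}

// Optional: lower the background music so the recorded speech stays audible.
AVMutableAudioMixInputParameters *musicParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:b_compositionAudioTrack];
[musicParams setVolume:0.5 atTime:kCMTimeZero];

AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = [NSArray arrayWithObject:musicParams];
// Attach the mix to the export session before calling exportAsynchronouslyWithCompletionHandler:.
_assetExport.audioMix = audioMix;

Note that a passthrough preset cannot apply an audio mix, so keep a transcoding preset such as AVAssetExportPresetHighestQuality when using this.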

Integrate a selected music file into video

I am developing a video recording application and I would like to be able to integrate a music file selected by the user from the iPod library. Please share your inputs as to how I can achieve this requirement. Sample code is helpful.
I finally succeeded in integrating a selected music file into the video.
Using AVAssetExportSession we can merge the video and audio together with an AVMutableComposition.
Thanks for the updates, everyone!
//This method merges the audio and video.
- (void)mergeAudioAtUrl:(NSURL *)audioUrl withVideoAtUrl:(NSURL *)videoUrl toUrl:(NSURL *)outputUrl
{
//_imageCaptureCount = [_imagesArray count]*100;
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audioUrl options:nil];
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoUrl options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
if([[audioAsset tracksWithMediaType:AVMediaTypeAudio] count])
{
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
}
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
[audioAsset release];
[videoAsset release];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetPassthrough];
NSURL *exportUrl = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/Documents/%@Video.mp4", NSHomeDirectory(),pcNameString]];
if ([[NSFileManager defaultManager] fileExistsAtPath:[NSString stringWithFormat:@"%@/Documents/%@Video.mp4", NSHomeDirectory(),pcNameString]])
{
[[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@/Documents/%@Video.mp4", NSHomeDirectory(),pcNameString] error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie"; //com.apple.m4v-video
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
// your completion code here
// NSLog(@"completed");
removeProgresBarFlag = YES;
/* NSString* savedVideoFilePath = [NSString stringWithFormat:@"%@/Documents/PC%d.mp4", NSHomeDirectory(),[videosListArray count]];
if(UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(savedVideoFilePath))
{
[[UIApplication sharedApplication]beginIgnoringInteractionEvents];
UISaveVideoAtPathToSavedPhotosAlbum(savedVideoFilePath, self, nil, nil);
[[UIApplication sharedApplication]endIgnoringInteractionEvents];
} */
NSMutableDictionary* videoDetailDict = [[NSMutableDictionary alloc] initWithCapacity:0];
NSStringEncoding encoding;
NSError* error;
NSString * persistentID;
NSFileManager* fileManager = [NSFileManager defaultManager];
if([fileManager fileExistsAtPath:PRESENTSONGS_FILE_PATH])
persistentID = [NSString stringWithContentsOfFile:PRESENTSONGS_FILE_PATH usedEncoding:&encoding error:&error];
else
persistentID = @"";
[videoDetailDict setObject:persistentID forKey:KSong];
if([fileManager fileExistsAtPath:PRESENTIMAGES_FILE])
{
NSMutableArray* currentImagesArray = [[NSMutableArray alloc] initWithContentsOfFile:IMAGESDATA_FILE_PATH];
NSMutableArray* durationArray = [[NSMutableArray alloc] initWithContentsOfFile:[NSString stringWithFormat:@"%@/videoduration.plist", [[NSBundle mainBundle] resourcePath]]];
[videoDetailDict setObject:[durationArray objectAtIndex:[currentImagesArray count]-1] forKey:KfileSize];
[durationArray release];
[videoDetailDict setObject:currentImagesArray forKey:KImagesList];
if([fileManager fileExistsAtPath:TEMPVIDEO_FILE_PATH])
[fileManager removeItemAtPath:TEMPVIDEO_FILE_PATH error:nil];
NSString* mainPath;
mainPath = [NSString stringWithFormat:@"%@/Documents/%@File",NSHomeDirectory(),pcNameString];
if([fileManager fileExistsAtPath:mainPath])
[fileManager removeItemAtPath:mainPath error:nil];
[fileManager createDirectoryAtPath:mainPath withIntermediateDirectories:NO attributes:nil error:nil];
for(int i=0;i<[currentImagesArray count];i++)
{
[fileManager copyItemAtPath:[NSString stringWithFormat:@"%@%@",PRESENTIMAGES_FILE,[currentImagesArray objectAtIndex:i]] toPath:[NSString stringWithFormat:@"%@/%@",mainPath,[currentImagesArray objectAtIndex:i]] error:nil];
}
[currentImagesArray release];
}
if([fileManager fileExistsAtPath:KMESSAGE_FILEPATH])
{
NSMutableDictionary* currentMessageDictioanry = [[NSMutableDictionary alloc] initWithContentsOfFile:KMESSAGE_FILEPATH];
[videoDetailDict setObject:currentMessageDictioanry forKey:Kmessage];
[currentMessageDictioanry release];
}
[videoDetailDict setObject:pcNameString forKey:KPostCardName]; //[NSString stringWithFormat:@"PostCard Video%d",[videosListArray count]]
//[videosListArray insertObject:videoDetailDict atIndex:0];
[videosListArray addObject:videoDetailDict];
[videoDetailDict release];
[videosListArray writeToFile:VIDEOS_FILE_PATH atomically:YES];
}
];
}
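For the part about letting the user pick the song from the iPod library (not shown above), a minimal sketch using MPMediaPickerController to obtain the audio URL you would feed into mergeAudioAtUrl:withVideoAtUrl:toUrl: (ARC assumed; videoUrl and outputUrl stand for whatever URLs you already use):

#import <MediaPlayer/MediaPlayer.h>

// In a view controller that adopts MPMediaPickerControllerDelegate:
- (void)pickBackgroundSong
{
    MPMediaPickerController *picker = [[MPMediaPickerController alloc] initWithMediaTypes:MPMediaTypeMusic];
    picker.delegate = self;
    picker.allowsPickingMultipleItems = NO;
    [self presentViewController:picker animated:YES completion:nil];
}

- (void)mediaPicker:(MPMediaPickerController *)mediaPicker didPickMediaItems:(MPMediaItemCollection *)mediaItemCollection
{
    MPMediaItem *item = [[mediaItemCollection items] objectAtIndex:0];
    NSURL *audioUrl = [item valueForProperty:MPMediaItemPropertyAssetURL]; // nil for DRM-protected tracks
    [self dismissViewControllerAnimated:YES completion:nil];
    if (audioUrl) {
        // e.g. [self mergeAudioAtUrl:audioUrl withVideoAtUrl:videoUrl toUrl:outputUrl];
    }
}

- (void)mediaPickerDidCancel:(MPMediaPickerController *)mediaPicker
{
    [self dismissViewControllerAnimated:YES completion:nil];
}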

Merge audio and video/image to create a movie file

I want to merge an audio CAF file and a video/image (UIImage) to create a movie file (in .mov format).
Say that my audio is 30 seconds long and I have a UIImage; I want to create a .mov file such that the UIImage is displayed the entire time the audio is playing.
I found this reference:
How to add audio to video file on iphone SDK
Can anyone tell me whether it is helpful in my case, since the lengths of my audio and image/video are different?
Thanks in advance.
Yes, you should use AVMutableComposition. To create the video track from your UIImage use AVAssetWriter.
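A rough sketch of that AVAssetWriter step (ARC assumed; image, audioDuration and videoURL are placeholders, and pixelBufferFromImage: stands for your own UIImage-to-CVPixelBuffer helper, not shown here). For a still image, two appended frames plus an explicit end time are enough to cover the whole audio duration; afterwards you merge the resulting video with the CAF audio using AVMutableComposition, as in the code further down.

NSError *error = nil;
AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:videoURL fileType:AVFileTypeQuickTimeMovie error:&error];
NSDictionary *settings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                            AVVideoWidthKey  : @((int)image.size.width),
                            AVVideoHeightKey : @((int)image.size.height) };
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:input sourcePixelBufferAttributes:nil];
[writer addInput:input];
[writer startWriting];
[writer startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = [self pixelBufferFromImage:image]; // hypothetical helper that renders the UIImage into a CVPixelBuffer
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
while (!input.readyForMoreMediaData) { [NSThread sleepForTimeInterval:0.05]; }
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMakeWithSeconds(audioDuration, 600)];
CVPixelBufferRelease(buffer);

[input markAsFinished];
[writer endSessionAtSourceTime:CMTimeMakeWithSeconds(audioDuration, 600)];
[writer finishWritingWithCompletionHandler:^{
    // The still-image movie now lasts as long as the audio; merge it with the CAF file next.
}];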
QuickTime Pro can do this; I have done it for my own app.
You create the movie from the images: QuickTime offers to read a sequence of images and creates a movie from them, and it asks for the FPS during import as well.
The audio track can then simply be merged, pasted somewhere, or scaled to a selected range of the movie.
Use this; I found it somewhere on the net, I don't remember where:
NSString *fileNamePath = @"audio.caf";
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *oldappSettingsPath = [documentsDirectory stringByAppendingPathComponent:fileNamePath];
NSURL *audioUrl = [NSURL fileURLWithPath:oldappSettingsPath];
NSString *fileNamePath1 = @"output.mp4";
NSArray *paths1 = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *documentsDirectory1 = [paths1 objectAtIndex:0];
NSString *oldappSettingsPath1 = [documentsDirectory1 stringByAppendingPathComponent:fileNamePath1];
NSLog(@"oldpath=%@",oldappSettingsPath);
NSURL *videoUrl = [NSURL fileURLWithPath:oldappSettingsPath1];
if (avPlayer.duration >0.00000)
{
NSLog(@"SOMEDATA IS THERE ");
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audioUrl options:nil];
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoUrl options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSLog(@"audio =%@",audioAsset);
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
NSString* videoName = @"export.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
NSLog(@"file type %@",_assetExport.outputFileType);
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void )
{
NSString *fileNamePath = @"sound_record.mov";
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *oldappSettingsPath = [documentsDirectory stringByAppendingPathComponent:fileNamePath];
// if ([[NSFileManager defaultManager] fileExistsAtPath:oldappSettingsPath]) {
//
// NSFileManager *fileManager = [NSFileManager defaultManager];
// [fileManager removeItemAtPath: oldappSettingsPath error:NULL];
//
// }
NSURL *documentDirectoryURL = [NSURL fileURLWithPath:oldappSettingsPath];
[[NSFileManager defaultManager] copyItemAtURL:exportUrl toURL:documentDirectoryURL error:nil];
[audioAsset release];
[videoAsset release];
[_assetExport release];
}
];