Audio mixing on iPhone: need advice (Objective-C)

I'm using Audio Queue Services to play audio in my app.
I need to play several audio files together. What I do now is simply create as many audio queues as I have files to play; i.e., I create two audio queues for two audio files and start them at the same time to get a mixing effect.
So basically I would like to know whether this is an "elegant" way of doing it.
Please note that I'm aware of Audio Units and the MixerHost example; please do not suggest that option. I need to do the sound mixing exclusively with Audio Queue Services.
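For what it's worth, the multi-queue approach does work, but back-to-back calls to AudioQueueStart(queue, NULL) start each queue "as soon as possible", so the two files can begin a few milliseconds apart. A minimal sketch of a tighter variant, assuming queueA and queueB are AudioQueueRefs that have already been created with AudioQueueNewOutput and primed with filled buffers: schedule both starts against the same future host time.

#include <mach/mach_time.h>

// Convert a 100 ms lead time into host ticks so both queues can be
// scheduled against the same AudioTimeStamp.
mach_timebase_info_data_t timebase;
mach_timebase_info(&timebase);
uint64_t leadTicks = (100 * NSEC_PER_MSEC) * timebase.denom / timebase.numer;

AudioTimeStamp startStamp = {0};
startStamp.mFlags    = kAudioTimeStampHostTimeValid;
startStamp.mHostTime = mach_absolute_time() + leadTicks;

// Both queues share the same start stamp, so playback begins in sync.
AudioQueueStart(queueA, &startStamp);
AudioQueueStart(queueB, &startStamp);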

- (void)setUpAndAddAudioAtPath:(NSURL *)assetURL toComposition:(AVMutableComposition *)composition {
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
    AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    NSError *error = nil;
    BOOL ok = NO;
    CMTime startTime = CMTimeMakeWithSeconds(0, 1);
    CMTime trackDuration = songAsset.duration;
    CMTime longestTime = CMTimeMake(848896, 44100); // 19.24 seconds
    CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration);

    // Set volume (audioMixParams is an NSMutableArray ivar, gathered into the mix later)
    AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
    [trackMix setVolume:0.8f atTime:startTime];
    [audioMixParams addObject:trackMix];

    // Insert audio into the track
    ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:CMTimeMake(0, 44100) error:&error];
}
- (BOOL)exportAudio {
    if (defaultSoundPath == nil || recordingSoundPath == nil) {
        [actvityIdicatiorView stopAnimating];
        [actvityIdicatiorView setHidden:YES];
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Select Sound" message:@"Both sounds must be selected" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles:nil];
        [alertView show];
        return NO;
    }
    AVMutableComposition *composition = [AVMutableComposition composition];
    if (audioMixParams) {
        [audioMixParams release];
        audioMixParams = nil;
    }
    audioMixParams = [[NSMutableArray alloc] init];

    // Add audio tracks to the composition
    NSString *sourceA = [[NSBundle mainBundle] pathForResource:@"Beach Soundscape" ofType:@"mp3"];
    NSURL *assetURL1 = [NSURL fileURLWithPath:sourceA];
    [self setUpAndAddAudioAtPath:assetURL1 toComposition:composition];

    NSString *sourceB = [[NSBundle mainBundle] pathForResource:@"DrumsMonoSTP" ofType:@"aif"];
    NSURL *assetURL2 = [NSURL fileURLWithPath:sourceB];
    [self setUpAndAddAudioAtPath:assetURL2 toComposition:composition];

    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams];

    // If you need to query what formats you can export to, here's a way to find out
    NSLog(@"compatible presets for composition: %@",
          [AVAssetExportSession exportPresetsCompatibleWithAsset:composition]);

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
                                      initWithAsset:composition
                                      presetName:AVAssetExportPresetAppleM4A];
    exporter.audioMix = audioMix;
    exporter.outputFileType = @"com.apple.m4a-audio";

    // NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    // NSString *fileName = @"someFilename";
    // NSString *exportFile = [[paths objectAtIndex:0] stringByAppendingFormat:@"/%@.m4a", fileName];
    mixingSoundPath = [[self mixingSoundFolder] stringByAppendingFormat:@"/Mixing%@.m4a", [self dateString]];
    [mixingSoundPath retain];

    // Set up the export
    // myDeleteFile(exportFile);
    NSURL *exportURL = [NSURL fileURLWithPath:mixingSoundPath];
    exporter.outputURL = exportURL;

    static BOOL isComplete;
    // Do the export. Note that this call returns immediately and the handler
    // runs later, so isComplete has not been updated by the time this method
    // returns below.
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        int exportStatus = exporter.status;
        NSLog(@"exporter.......%i", exportStatus);
        switch (exportStatus) {
            case AVAssetExportSessionStatusFailed:
                isComplete = NO;
                NSLog(@"AVAssetExportSessionStatusFailed");
                NSLog(@"Error == %@", exporter.error);
                break;
            case AVAssetExportSessionStatusCompleted:
                [self mixingDidFinshing];
                isComplete = YES;
                break;
            case AVAssetExportSessionStatusUnknown:
                NSLog(@"AVAssetExportSessionStatusUnknown");
                isComplete = NO;
                break;
            case AVAssetExportSessionStatusExporting:
                isComplete = NO;
                NSLog(@"AVAssetExportSessionStatusExporting");
                break;
            case AVAssetExportSessionStatusCancelled:
                isComplete = NO;
                NSLog(@"AVAssetExportSessionStatusCancelled");
                break;
            case AVAssetExportSessionStatusWaiting:
                isComplete = NO;
                NSLog(@"AVAssetExportSessionStatusWaiting");
                break;
            default:
                NSLog(@"didn't get export status");
                isComplete = NO;
                break;
        }
    }];
    return isComplete;
}
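Because exportAsynchronouslyWithCompletionHandler: returns immediately, exportAudio as written always returns whatever value isComplete held before the export ran. A minimal sketch of a completion-block variant that avoids this (the method name and block signature are hypothetical):

- (void)exportAudioWithCompletion:(void (^)(BOOL success, NSError *error))completion {
    // `exporter` is configured exactly as in exportAudio above.
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        BOOL ok = (exporter.status == AVAssetExportSessionStatusCompleted);
        dispatch_async(dispatch_get_main_queue(), ^{
            completion(ok, exporter.error); // report the result only once the export has finished
        });
    }];
}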

Related

Cannot export AVMutableComposition with AVAssetExportSession

Here is part of the composition creation:

AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
for (NSDictionary *track in tracks) {
    NSURL *url = [[NSURL alloc] initWithString:track[@"url"]];
    AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, urlAsset.duration)
                        ofTrack:[[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                         atTime:kCMTimeZero
                          error:nil];
}
[self persist:mixComposition for:songId];
Then I wish to persist the collection in a directory so I do not have to download it each time.
The output of the composition looks like this:
"AVMutableCompositionTrack: 0x1c4a276a0 trackID = 1, mediaType = soun, editCount = 1",
"AVMutableCompositionTrack: 0x1c4a28560 trackID = 2, mediaType = soun, editCount = 1"...,
- (void)persist:(AVMutableComposition *)composition
            for:(NSString *)songId {
    NSLog(@"%@", composition);
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
                                           initWithAsset:composition
                                           presetName:AVAssetExportPresetAppleM4A];
    NSString *path = [NSHomeDirectory() stringByAppendingPathComponent:songId];
    NSURL *url = [NSURL fileURLWithPath:path];
    exportSession.outputURL = url;
    exportSession.shouldOptimizeForNetworkUse = YES;
    exportSession.outputFileType = AVFileTypeAppleM4A;
    // Perform the export
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            NSLog(@"AVAssetExportSessionStatusCompleted");
            NSLog(@"Path : %@", url);
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // A failure may happen because of an event out of your control,
            // for example an interruption like a phone call coming in.
            // Make sure to handle this case appropriately.
            NSLog(@"%@", exportSession.error);
        } else {
            NSLog(@"Export Session Status: %ld", (long)exportSession.status);
        }
    }];
}
The error I get:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could
not be completed" UserInfo={NSLocalizedFailureReason=An unknown error
occurred (-12780), NSLocalizedDescription=The operation could not be
completed, NSUnderlyingError=0x1c0a409f0 {Error
Domain=NSOSStatusErrorDomain Code=-12780 "(null)"}}
The path you use must include a file extension that matches the content type you are exporting; it looks like:
let directory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first
let audioTypeOutput = ".m4a" // the extension for your output type: .m4a, .mp3, etc. for audio
let exportPath = directory?.appendingPathComponent(name + audioTypeOutput)
Hope this helps!
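The same idea in Objective-C, as a minimal sketch (the "mix.m4a" name is illustrative; the point is that the extension on outputURL agrees with outputFileType):

NSURL *docs = [[[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory
                                                      inDomains:NSUserDomainMask] firstObject];
NSURL *exportURL = [docs URLByAppendingPathComponent:@"mix.m4a"]; // .m4a matches AVFileTypeAppleM4A
exportSession.outputURL = exportURL;
exportSession.outputFileType = AVFileTypeAppleM4A;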

AVFoundation - AVAssetExportSession - Operation Stopped on Second Export attempt

I am creating a picture-in-picture video. This function has worked flawlessly (as far as I know) for 1.5 years. Now, on iOS 11, it appears to work only the first time it is called; when it is called to produce a second video (without force-closing the app first) I get the error message below.
I found this article on Stack Overflow, but I am already using the asset track correctly as per this article: AVAssetExportSession export fails non-deterministically with error: "Operation Stopped, NSLocalizedFailureReason=The video could not be composed."
I have included the exact method I am using. Any help would be greatly appreciated!
Error Message:
Error: Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped"
UserInfo={NSLocalizedFailureReason=The video could not be composed.,
NSLocalizedDescription=Operation Stopped,
NSUnderlyingError=0x1c04521e0
{Error Domain=NSOSStatusErrorDomain Code=-17390 "(null)"}}
Method Below:
- (void)composeVideo:(NSString *)videoPIP onVideo:(NSString *)videoBG
{
    @try {
        NSError *e = nil;
        AVURLAsset *backAsset, *pipAsset;

        // Load our 2 movies using AVURLAsset
        pipAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPIP] options:nil];
        backAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoBG] options:nil];

        if ([[NSFileManager defaultManager] fileExistsAtPath:videoPIP]) {
            NSLog(@"PIP File Exists!");
        } else {
            NSLog(@"PIP File DOESN'T Exist!");
        }
        if ([[NSFileManager defaultManager] fileExistsAtPath:videoBG]) {
            NSLog(@"BG File Exists!");
        } else {
            NSLog(@"BG File DOESN'T Exist!");
        }

        float scaleH = VIDEO_SIZE.height / [[[backAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].width;
        float scaleW = VIDEO_SIZE.width / [[[backAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].height;
        float scalePIP = (VIDEO_SIZE.width * 0.25) / [[[pipAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].width;

        // Create an AVMutableComposition - this object will hold our multiple AVMutableCompositionTracks.
        AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

        // Create the first AVMutableCompositionTrack by adding a new track to our AVMutableComposition.
        AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

        // Set the length of the firstTrack equal to the length of pipAsset and add pipAsset to the newly created track at kCMTimeZero so video plays from the start of the track.
        [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, pipAsset.duration) ofTrack:[[pipAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&e];
        if (e) {
            NSLog(@"Error0: %@", e);
            e = nil;
        }

        // Repeat the same process for the 2nd track, also starting at kCMTimeZero so both tracks play simultaneously.
        AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, backAsset.duration) ofTrack:[[backAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&e];
        if (e) {
            NSLog(@"Error1: %@", e);
            e = nil;
        }

        // We also need the audio track!
        AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, backAsset.duration) ofTrack:[[backAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:&e];
        if (e) {
            NSLog(@"Error2: %@", e);
            e = nil;
        }

        // Create an AVMutableVideoCompositionInstruction object - contains the array of AVMutableVideoCompositionLayerInstruction objects.
        AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

        // Set the time range to the longer asset.
        MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, (pipAsset.duration.value > backAsset.duration.value) ? pipAsset.duration : backAsset.duration);

        // Create an AVMutableVideoCompositionLayerInstruction object to use CGAffineTransform to move and scale our first track so it is displayed at a smaller size.
        AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
        // CGAffineTransform Scale1 = CGAffineTransformMakeScale(0.3f, 0.3f);
        CGAffineTransform Scale1 = CGAffineTransformMakeScale(scalePIP, scalePIP);
        // Top left
        CGAffineTransform Move1 = CGAffineTransformMakeTranslation(3.0, 3.0);
        [FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale1, Move1) atTime:kCMTimeZero];

        // Repeat for the second track.
        AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
        CGAffineTransform Scale2 = CGAffineTransformMakeScale(scaleW, scaleH);
        CGAffineTransform rotateBy90Degrees = CGAffineTransformMakeRotation(M_PI_2);
        CGAffineTransform Move2 = CGAffineTransformMakeTranslation(0.0, ([[[backAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].height) * -1);
        [SecondlayerInstruction setTransform:CGAffineTransformConcat(Move2, CGAffineTransformConcat(rotateBy90Degrees, Scale2)) atTime:kCMTimeZero];

        // Add the 2 created AVMutableVideoCompositionLayerInstruction objects to our AVMutableVideoCompositionInstruction.
        MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, SecondlayerInstruction, nil];

        // Create an AVMutableVideoComposition object.
        AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
        MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
        MainCompositionInst.frameDuration = CMTimeMake(1, 30);

        // Set the render size to the screen size.
        // MainCompositionInst.renderSize = [[UIScreen mainScreen] bounds].size;
        MainCompositionInst.renderSize = VIDEO_SIZE;

        NSString *fileName = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"fullreaction.MP4"];

        // Make sure the video doesn't exist.
        if ([[NSFileManager defaultManager] fileExistsAtPath:fileName]) {
            [[NSFileManager defaultManager] removeItemAtPath:fileName error:nil];
        }

        // Now we need to save the video.
        NSURL *url = [NSURL fileURLWithPath:fileName];
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                          presetName:QUALITY];
        exporter.videoComposition = MainCompositionInst;
        exporter.outputURL = url;
        exporter.outputFileType = AVFileTypeMPEG4;
        [exporter exportAsynchronouslyWithCompletionHandler:^(void)
        {
            NSLog(@"File Saved as %@!", fileName);
            NSLog(@"Error: %@", exporter.error);
            [self performSelectorOnMainThread:@selector(runProcessingComplete) withObject:nil waitUntilDone:NO];
        }];
    }
    @catch (NSException *ex) {
        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error 3" message:[NSString stringWithFormat:@"%@", ex]
                                                       delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
        [alert show];
    }
}
The cause:
It turns out the "MainInstruction" timeRange is incorrect.
CMTime values cannot be compared through their value field: a CMTime's value is only meaningful relative to its timescale, so two durations with different timescales compare incorrectly. Use CMTIME_COMPARE_INLINE instead.
To fix, replace this line:
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, (pipAsset.duration.value > backAsset.duration.value) ? pipAsset.duration : backAsset.duration);
With this line:
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTIME_COMPARE_INLINE(pipAsset.duration, >, backAsset.duration) ? pipAsset.duration : backAsset.duration);
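If you prefer, CoreMedia also ships a helper that picks the later of two times directly, which reads a little cleaner; an equivalent one-liner:

// CMTimeMaximum handles mismatched timescales internally
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMaximum(pipAsset.duration, backAsset.duration));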

How to trim a Music Library file and save it to the documents directory

- (BOOL)trimAudioFileAtPath:(NSString *)inputFilename
                      start:(float)start
                        end:(float)stop {
    NSString *outputFilename = @"File Path";
    NSError *error = nil;
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputFilename]) {
        if (![fileManager removeItemAtPath:outputFilename error:&error]) {
            DebugLog(@"error file remove:%@", error);
        } else {
            DebugLog(@"success remove file");
        }
    }
    NSURL *audioFileInput = [NSURL fileURLWithPath:inputFilename];
    NSURL *audioFileOutput = [NSURL fileURLWithPath:outputFilename];
    if (!audioFileInput || !audioFileOutput) {
        return NO;
    }
    [[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];

    // Create the composition and its audio track.
    AVMutableComposition *mutableComposition = [AVMutableComposition composition];
    AVMutableCompositionTrack *mutableCompositionAudioTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    NSLog(@"audioFileInput %@", audioFileInput);
    AVURLAsset *assetUrl = [AVURLAsset assetWithURL:audioFileInput];
    if ([[assetUrl tracksWithMediaType:AVMediaTypeAudio] count] == 0) {
        return NO;
    }

    // Get the first audio track from the asset and insert it into the composition.
    AVAssetTrack *audioAssetTrack = [[assetUrl tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [mutableCompositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAssetTrack.timeRange.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];

    // We need the audio asset to be at least 50 seconds long for this snippet.
    // A timescale of 600 preserves fractional seconds; CMTimeMake(start, 1) would truncate them.
    CMTime startTime = CMTimeMakeWithSeconds(start, 600);
    CMTime stopTime = CMTimeMakeWithSeconds(stop, 600);
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
    Float64 duration = CMTimeGetSeconds(exportTimeRange.duration);

    // Create the export session with the composition and set the preset to the highest quality.
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetAppleM4A];
    if (duration > 6.0) {
        AVMutableAudioMix *mutableAudioMix = [AVMutableAudioMix audioMix];
        // Create the audio mix input parameters object.
        AVMutableAudioMixInputParameters *mixParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:mutableCompositionAudioTrack];
        // float totalDuration = mutableComposition.duration.value;
        float totalDuration = duration;
        float length = totalDuration / 3;
        CMTime startCM = CMTimeMake(totalDuration - length - 1, mutableComposition.duration.timescale);
        CMTime endCM = CMTimeMake(length, mutableComposition.duration.timescale);
        // Set volume ramps to fade the audio in at the start and out at the end.
        [mixParameters setVolumeRampFromStartVolume:0.f toEndVolume:1.f timeRange:CMTimeRangeMake(startTime, endCM)];
        [mixParameters setVolumeRampFromStartVolume:1.f toEndVolume:0.f timeRange:CMTimeRangeMake(startCM, endCM)];
        // Attach the input parameters to the audio mix.
        mutableAudioMix.inputParameters = @[mixParameters];
        exportSession.audioMix = mutableAudioMix;
    }
    if (exportSession == nil) {
        return NO;
    }
    exportSession.outputURL = audioFileOutput;
    exportSession.outputFileType = AVFileTypeAppleM4A;
    exportSession.timeRange = exportTimeRange;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            // It worked!
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // It failed...
        }
    }];
    return YES;
}
I am using this function to trim a music file from the music library. The above code works fine when I trim a file that is located in the bundle, but when I use the same function with an input file from the iTunes music library it finds no tracks, i.e. if ([[assetUrl tracksWithMediaType:AVMediaTypeAudio] count] == 0) { return NO; } returns NO. Can anyone help with trimming music from the iTunes library?
- (void)mediaPicker:(MPMediaPickerController *)mediaPicker didPickMediaItems:(MPMediaItemCollection *)mediaItemCollection
{
    if (mediaItemCollection) {
        NSLog(@"%@", [mediaItemCollection items]);
        // [musicPlayer setQueueWithItemCollection:mediaItemCollection];
        // [musicPlayer play];
    }
    [KVNProgress showWithStatus:@"Processing"];
    MPMediaItem *item = mediaItemCollection.representativeItem;
    NSURL *assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];

    // Set up an AVAssetReader to read from the iPod library
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
    NSError *assetError = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset error:&assetError];
    if (assetError) {
        NSLog(@"error: %@", assetError);
        return;
    }
    AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                                                                                     audioSettings:nil];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"can't add reader output... die!");
        return;
    }
    [assetReader addOutput:assetReaderOutput];

    // NSArray *dirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    // NSString *documentsDirectoryPath = [dirs objectAtIndex:0];
    // NSString *exportPath = [documentsDirectoryPath stringByAppendingPathComponent:@"out.m4a"];
    NSString *exportPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"out.m4a"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    }
    NSURL *exportURL = [NSURL fileURLWithPath:exportPath];
    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                          fileType:AVFileTypeCoreAudioFormat
                                                             error:&assetError];
    if (assetError) {
        NSLog(@"error: %@", assetError);
        return;
    }
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                    @(kAudioFormatLinearPCM), AVFormatIDKey,
                                    @44100.0, AVSampleRateKey,
                                    @2, AVNumberOfChannelsKey,
                                    [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                    @16, AVLinearPCMBitDepthKey,
                                    @NO, AVLinearPCMIsNonInterleaved,
                                    @NO, AVLinearPCMIsFloatKey,
                                    @NO, AVLinearPCMIsBigEndianKey,
                                    nil];
    AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                              outputSettings:outputSettings];
    if ([assetWriter canAddInput:assetWriterInput]) {
        [assetWriter addInput:assetWriterInput];
    } else {
        NSLog(@"can't add asset writer input... die!");
        return;
    }
    assetWriterInput.expectsMediaDataInRealTime = NO;
    [assetWriter startWriting];
    [assetReader startReading];
    AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
    CMTime startTime = CMTimeMake(0, soundTrack.naturalTimeScale);
    [assetWriter startSessionAtSourceTime:startTime];
    __block UInt64 convertedByteCount = 0;
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
    [assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
                                            usingBlock:^
    {
        while (assetWriterInput.readyForMoreMediaData) {
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            if (nextBuffer) {
                // Append the buffer
                [assetWriterInput appendSampleBuffer:nextBuffer];
                // Update the byte count (for UI progress)
                convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer);
            } else {
                // Done!
                [assetWriterInput markAsFinished];
                [assetWriter finishWritingWithCompletionHandler:^{
                    [assetReader cancelReading];
                    [self performSelectorOnMainThread:@selector(updateCompletedAtMusicPath:)
                                           withObject:exportPath
                                        waitUntilDone:NO];
                    // NSLog(@"done. file size is %llu", [outputFileAttributes fileSize]);
                }];
                break;
            }
        }
    }];
    [self dismissViewControllerAnimated:NO completion:^{
    }];
}
This is the code used for getting the URL from the iTunes library and storing the file in the documents directory.
First I just want to point out that you can read just a segment of the audio from the library by setting the timeRange property of your assetReader; that way, instead of copying the whole file over first, you copy only the segment you need. That being said, if you are going to stick with your original implementation, I think you just need to change AVURLAsset *assetUrl = [AVURLAsset assetWithURL:audioFileInput]; to AVURLAsset *assetUrl = [AVURLAsset URLAssetWithURL:audioFileInput options:nil];
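A minimal sketch of the timeRange approach, assuming assetReader and assetReaderOutput are set up as in the code above (the 30-second offset and 20-second window are illustrative):

// timeRange must be set before startReading is called.
assetReader.timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(30.0, 600),
                                        CMTimeMakeWithSeconds(20.0, 600));
[assetReader startReading];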
I succeeded in saving iTunes music to the documents directory using the following method:
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
self.fullPathToFileForAudio = [documentsDirectory stringByAppendingPathComponent:@"auto-old.m4a"];
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSError *error = nil;
// Get rid of the existing file if it exists...
if ([fileMgr removeItemAtPath:self.fullPathToFileForAudio error:&error] != YES)
    NSLog(@"Unable to delete file: %@", [error localizedDescription]);
[self convertVideoToLowQuailtyWithInputURL:self.musicUrl outputURL:[NSURL fileURLWithPath:self.fullPathToFileForAudio] handler:^(AVAssetExportSession *exportSession)
{
    if (exportSession.status == AVAssetExportSessionStatusCompleted)
    {
        NSLog(@"completed %@", exportSession.error);
        printf("completed\n");
        dispatch_sync(dispatch_get_main_queue(), ^{
            NSLog(@"%@ PATH", self.fullPathToFileForAudio);
            [self exporterCompleted:[NSURL fileURLWithPath:self.fullPathToFileForAudio]];
        });
    }
    else
    {
        // NSLog(@"%@", exportSession.error);
        printf("error\n");
        dispatch_sync(dispatch_get_main_queue(), ^{
            [SVProgressHUD dismiss];
        });
    }
}];

- (void)convertVideoToLowQuailtyWithInputURL:(NSURL *)inputURL
                                   outputURL:(NSURL *)outputURL
                                     handler:(void (^)(AVAssetExportSession *))handler
{
    [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
    // minValue and maxValue are the trim bounds (in seconds) held elsewhere in the class.
    CMTime startTime = CMTimeMake(minValue, 1);
    CMTime stopTime = CMTimeMake(maxValue, 1);
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = @"com.apple.m4a-audio";
    exportSession.timeRange = exportTimeRange;
    [exportSession exportAsynchronouslyWithCompletionHandler:^(void)
    {
        handler(exportSession);
    }];
}

merge audio and video

I have an app in which I need to merge an audio file into a video file.
Sometimes my audio file is longer than the video. I used AVFoundation's mix composition and both get merged, but the problem is that if the video is shorter, the audio keeps playing for its full duration after the video has finished. The audio should stop when the video finishes.
Could anyone provide a solution?
Use the following code to stop your audio; it also fades the audio out over the last five seconds.
- (void)getFadeAudioFile {
    if (![appDelegate.musicFilePath isEqualToString:@"Not set"]) {
        NSURL *url = [[[NSURL alloc] initWithString:appDelegate.musicFilePath] autorelease];
        AVURLAsset *audioAsset = [[[AVURLAsset alloc] initWithURL:url options:nil] autorelease];
        NSString *filePath = [self applicationDocumentsDirectory];
        NSString *outputFilePath = nil;
        outputFilePath = [filePath stringByAppendingPathComponent:@"/mySong.m4a"];
        NSURL *outputFileUrl = [[[NSURL alloc] initFileURLWithPath:outputFilePath] autorelease];
        NSError *theError = nil;
        if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
            [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:&theError];
        [self exportAsset:audioAsset toFilePath:outputFileUrl];
    }
}
- (BOOL)exportAsset:(AVAsset *)avAsset toFilePath:(NSURL *)filePath {
    // Get the first audio track
    NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
    if ([tracks count] == 0) return NO;
    AVAssetTrack *track = [tracks objectAtIndex:0];

    // Create the export session.
    // No need to retain here since the session will be retained by the
    // completion handler, since it is referenced there.
    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:avAsset presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession) return NO;

    NSLog(@"arrImageDataDict.count:%lu", (unsigned long)arrImageDataDict.count);
    int imgCount = arrImageDataDict.count + 1;
    int delay = appDelegate.delaySecond;
    int duration = imgCount * delay;
    CMTime stopTime = CMTimeMake(duration, 1);

    // Create the trim time range: from 0 to the computed duration.
    // NSInteger totalTime = CMTimeGetSeconds(avAsset.duration);
    CMTime startTime = CMTimeMake(0, 1);
    // CMTime stopTime = CMTimeMake(totalTime, 1); // 0,30
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);

    // Create the fade-out time range: the last 5 seconds of the trimmed asset.
    NSInteger fadeTime = duration - 5;
    NSLog(@"fade time:%ld", (long)fadeTime);
    NSLog(@"fade duration:%d", duration);
    CMTime startFadeInTime = CMTimeMake(fadeTime, 1);
    CMTime endFadeInTime = CMTimeMake(duration, 1);
    CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);

    // Set up the audio mix
    AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
    AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
    [exportAudioMixInputParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:fadeInTimeRange];
    exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];

    // Configure the export session output with all our parameters
    exportSession.outputURL = filePath;                 // output path
    exportSession.outputFileType = AVFileTypeAppleM4A;  // output file type
    exportSession.timeRange = exportTimeRange;          // trim time range
    exportSession.audioMix = exportAudioMix;            // fade-out audio mix
    [exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
        // [self saveVideoToAlbum:outputFilePath];
    }];
    return YES;
}
It will be saved in the documents directory at your file path; use it like this:
NSString *filePath = [self applicationDocumentsDirectory];
NSString *outputFilePath1 = [filePath stringByAppendingPathComponent:@"/mySong.m4a"];
NSURL *audio_inputFileUrl = [[NSURL alloc] initFileURLWithPath:outputFilePath1];
int imgCount = imageArray.count;
int delay = appDelegate.delaySecond;
NSLog(@"audio merged");
int duration = imgCount * delay;
CMTime seekingCM = CMTimeMake(duration, 1);
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, seekingCM);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
// [audioAsset autorelease];
newAudioTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio][0];
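More generally, if all you need is for the mixed file to end when the video ends, you can clamp the audio insert range to the video's duration before inserting it. A minimal sketch, assuming videoAsset and audioAsset are loaded AVURLAssets and audioCompTrack is an audio track already added to your composition (these names are illustrative):

// Insert no more audio than the video is long, so the export ends with the video.
CMTime mixedDuration = CMTimeMinimum(audioAsset.duration, videoAsset.duration);
[audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixedDuration)
                        ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                         atTime:kCMTimeZero
                          error:nil];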

What iOS framework is needed for this particular audio manipulation

Before I go sit down and read an entire book on Core Audio, I wanted to know if it is the best framework for me to study, or if AVFoundation can do what I need. I want to be able to download a small portion of an MP3 located on a remote server, let's say 20 seconds of the file, preferably without downloading the entire file first and then trimming it.
Then I want to layer 2 tracks of audio and bounce them into one file.
Will I need to delve into Core Audio, or can AVFoundation do the trick? Advice is much appreciated.
The downloading part of the file is up to you, but if you want to mix 2 or more audio files into one, AVFoundation is probably the easiest route to take, using AVAssetExportSession to do the exporting and AVMutableAudioMix to do the mix. There is some example code for a simple editor floating around in the Apple docs, but I can't seem to find it; if I do, I will post the link.
Here is a method that actually does the mix. Keep in mind that I'm adding video here as well; _audioTracks and _videoTracks are mutable arrays with AVAssets in them.
-(void)createMix
{
    CGSize videoSize = [[_videoTracks objectAtIndex:0] naturalSize];
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableVideoComposition *videoComposition = nil;
    AVMutableAudioMix *audioMix = nil;
    composition.naturalSize = videoSize;

    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAsset *videoAsset = [_videoTracks objectAtIndex:0];
    CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [videoAsset duration]);
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];

    NSMutableArray *trackMixArray = [NSMutableArray array];
    if (_audioTracks && _audioTracks.count > 0)
    {
        for (AVAsset *audio in _audioTracks)
        {
            // CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [audio duration]);
            // AVAssetTrack *clipAudioTrack = [[audio tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
            // [compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
            NSInteger i;
            NSArray *tracksToDuck = [audio tracksWithMediaType:AVMediaTypeAudio]; // before we add the commentary

            // Clip commentary duration to composition duration.
            CMTimeRange commentaryTimeRange = CMTimeRangeMake(kCMTimeZero, audio.duration);
            if (CMTIME_COMPARE_INLINE(CMTimeRangeGetEnd(commentaryTimeRange), >, [composition duration]))
                commentaryTimeRange.duration = CMTimeSubtract([composition duration], commentaryTimeRange.start);

            // Add the commentary track.
            AVMutableCompositionTrack *compositionCommentaryTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, commentaryTimeRange.duration) ofTrack:[[audio tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:commentaryTimeRange.start error:nil];

            CMTime rampDuration = CMTimeMake(1, 2); // half-second ramps
            for (i = 0; i < [tracksToDuck count]; i++) {
                AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:[tracksToDuck objectAtIndex:i]];
                [trackMix setVolumeRampFromStartVolume:1.0 toEndVolume:0.2 timeRange:CMTimeRangeMake(CMTimeSubtract(commentaryTimeRange.start, rampDuration), rampDuration)];
                [trackMix setVolumeRampFromStartVolume:0.2 toEndVolume:1.0 timeRange:CMTimeRangeMake(CMTimeRangeGetEnd(commentaryTimeRange), rampDuration)];
                [trackMixArray addObject:trackMix];
            }
        }
    }

    // Build the audio mix from the collected parameters (without this, the ramps are never applied).
    if (trackMixArray.count > 0) {
        audioMix = [AVMutableAudioMix audioMix];
        audioMix.inputParameters = trackMixArray;
    }

    if (videoComposition) {
        // Every videoComposition needs these properties to be set:
        videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
        videoComposition.renderSize = videoSize;
    }

    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPreset1280x720];
    session.videoComposition = videoComposition;
    session.audioMix = audioMix;

    // Pick an output filename that doesn't exist yet.
    NSUInteger count = 0;
    NSString *filePath;
    do {
        filePath = NSTemporaryDirectory();
        NSString *numberString = count > 0 ? [NSString stringWithFormat:@"-%lu", (unsigned long)count] : @"";
        filePath = [filePath stringByAppendingPathComponent:[NSString stringWithFormat:@"Output%@.mp4", numberString]];
        count++;
    } while ([[NSFileManager defaultManager] fileExistsAtPath:filePath]);

    session.outputURL = [NSURL fileURLWithPath:filePath];
    session.outputFileType = AVFileTypeMPEG4; // matches the .mp4 extension
    [session exportAsynchronouslyWithCompletionHandler:^
    {
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"Exported");
            if (session.error)
            {
                NSLog(@"had an error %@", session.error);
            }
            if (delegate && [delegate respondsToSelector:@selector(didFinishExportingMovie:)])
            {
                [delegate didFinishExportingMovie:filePath];
            }
        });
    }];
}
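A hypothetical usage, assuming videoURL and musicURL point at local media files:

_videoTracks = [NSMutableArray arrayWithObject:[AVURLAsset URLAssetWithURL:videoURL options:nil]];
_audioTracks = [NSMutableArray arrayWithObject:[AVURLAsset URLAssetWithURL:musicURL options:nil]];
[self createMix]; // the delegate's didFinishExportingMovie: fires with the output path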
Hope it helps.
Daniel