When selecting a song from the user's library on either an iPhone or iPad running iOS 7, my app crashes.
The line it stops on is:
FourCharCode formatID = audioDesc->mFormatID;
with an EXC_BAD_ACCESS error.
Checking the console for more info, this is the last line it logs:
appMake[8392:60b] Select music AssetUrl = (null)
Code below for the export session part:
- (void)exportAssetAsSourceFormat:(MPMediaItem *)item {
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc]init];
NSURL *assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
initWithAsset:songAsset
presetName:AVAssetExportPresetPassthrough];
NSArray *tracks = [songAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *track = [tracks objectAtIndex:0];
id desc = [track.formatDescriptions objectAtIndex:0];
const AudioStreamBasicDescription *audioDesc = CMAudioFormatDescriptionGetStreamBasicDescription((CMAudioFormatDescriptionRef)desc);
FourCharCode formatID = audioDesc->mFormatID;
NSString *fileType = nil;
NSString *ex = nil;
switch (formatID) {
case kAudioFormatLinearPCM:
{
UInt32 flags = audioDesc->mFormatFlags;
if (flags & kAudioFormatFlagIsBigEndian) {
fileType = #"public.aiff-audio";
ex = #"aif";
} else {
fileType = #"com.microsoft.waveform-audio";
ex = #"wav";
}
}
break;
case kAudioFormatMPEGLayer3:
fileType = #"com.apple.quicktime-movie";
ex = #"mp3";
break;
case kAudioFormatMPEG4AAC:
fileType = #"com.apple.m4a-audio";
ex = #"m4a";
break;
case kAudioFormatAppleLossless:
fileType = #"com.apple.m4a-audio";
ex = #"m4a";
break;
default:
break;
}
exportSession.outputFileType = fileType;
NSString *exportPath = [[NSTemporaryDirectory() stringByAppendingPathComponent:[EXPORT_NAME stringByAppendingPathExtension:ex]] retain];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
exportSession.outputURL = [NSURL fileURLWithPath:exportPath];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (exportSession.status == AVAssetExportSessionStatusCompleted) {
NSLog(#"export session completed");
//return YES;
[self performSelectorOnMainThread:#selector(gotoMainView:)
withObject:[EXPORT_NAME stringByAppendingPathExtension:ex]
waitUntilDone:NO];
} else {
NSLog(#"export session error");
//return NO;
}
[exportSession release];
}];
[pool release];
}
Media picker code:
- (void) mediaPicker: (MPMediaPickerController *) mediaPicker didPickMediaItems: (MPMediaItemCollection *) mediaItemCollection
{
// Dismiss the media item picker.
[self dismissModalViewControllerAnimated: YES];
if ([mediaItemCollection count] < 1) {
return;
}
[selectedItem release];
selectedItem = [[[mediaItemCollection items] objectAtIndex:0] retain];
NSURL* filePath = [selectedItem valueForProperty: MPMediaItemPropertyAssetURL];
NSLog(#"Select music AssetUrl = %#", filePath);
[viewLoading setHidden:NO];
[self UnloadSound];
[NSThread detachNewThreadSelector:@selector(exportAssetAsSourceFormat:) toTarget:self withObject:selectedItem];
}
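The (null) AssetUrl in that log is most likely the root of the crash: if MPMediaItemPropertyAssetURL returns nil (DRM-protected or iCloud-only items are a common cause, though that is an assumption here), the AVURLAsset and everything derived from it just end up nil, so audioDesc is NULL and dereferencing it gives exactly this EXC_BAD_ACCESS. A minimal guard sketch at the top of exportAssetAsSourceFormat: (the cloud-item check is illustrative, not confirmed by the logs):

NSURL *assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
NSNumber *isCloudItem = [item valueForProperty:MPMediaItemPropertyIsCloudItem];
if (assetURL == nil) {
    // Nothing exportable for this item; bail out instead of crashing further down.
    NSLog(@"No asset URL for item (cloud item: %@), skipping export", isCloudItem);
    return;
}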
Related
-(BOOL)trimAudioFileAtPath:(NSString *)inputFilename
start:(float)start
end:(float) stop{
NSString *outputFilename = #"File Path";
NSError *error = nil; NSFileManager *fileManager = [NSFileManager defaultManager]; if ([fileManager fileExistsAtPath:outputFilename]) {
if (![fileManager removeItemAtPath:outputFilename error:&error]) {
DebugLog(#"error file remove:%#",error); } else {
DebugLog(#"sucess remove file");
} }
NSURL *audioFileInput = [NSURL fileURLWithPath:inputFilename];
NSURL *audioFileOutput = [NSURL fileURLWithPath:outputFilename];
if (!audioFileInput || !audioFileOutput){ return NO; }
[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
// Create the audio composition track.
AVMutableCompositionTrack *mutableCompositionAudioTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSLog(@"audioFileInput %@", audioFileInput);
AVURLAsset *assetUrl = [AVURLAsset assetWithURL:audioFileInput];
if ([[assetUrl tracksWithMediaType:AVMediaTypeAudio] count] == 0) { return NO; }
// Get the first music track from each asset.
AVAssetTrack *audioAssetTrack = [[assetUrl tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[mutableCompositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAssetTrack.timeRange.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
// we need the audio asset to be at least 50 seconds long for this snippet
CMTime startTime = CMTimeMake(start, 1);
CMTime stopTime = CMTimeMake(stop, 1);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime,stopTime);
Float64 duration = CMTimeGetSeconds(exportTimeRange.duration);
// Create the export session with the composition and set the preset to the highest quality.
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetAppleM4A];
if (duration > 6.0){
AVMutableAudioMix *mutableAudioMix = [AVMutableAudioMix audioMix];
// Create the audio mix input parameters object.
AVMutableAudioMixInputParameters *mixParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:mutableCompositionAudioTrack];
// float totalDutaion=mutableComposition.duration.value;
float totalDutaion=duration;
float lenth=totalDutaion/3;
CMTime startCM = CMTimeMake(totalDutaion-lenth-1,mutableComposition.duration.timescale);
CMTime endCM = CMTimeMake(lenth, mutableComposition.duration.timescale);
// Set the volume ramp to slowly fade the audio out over the duration of the composition.
[mixParameters setVolumeRampFromStartVolume:0.f toEndVolume:1.f timeRange:CMTimeRangeMake(startTime, endCM)];
[mixParameters setVolumeRampFromStartVolume:1.f toEndVolume:0.f timeRange:CMTimeRangeMake(startCM,endCM)];
// Attach the input parameters to the audio mix.
mutableAudioMix.inputParameters = @[mixParameters];
exportSession.audioMix = mutableAudioMix;
}
if (exportSession == nil) {
return NO;
}
exportSession.outputURL = audioFileOutput;
exportSession.outputFileType = AVFileTypeAppleM4A;
exportSession.timeRange = exportTimeRange;
[exportSession exportAsynchronouslyWithCompletionHandler:^ {
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
// It worked!
}
else if (AVAssetExportSessionStatusFailed == exportSession.status)
{
// It failed...
}
}];
return YES;
}
I am using this function to trim a music file from the music library. The above code works fine when I trim a file located in the app bundle, but when I use the same function with an input file from the iTunes music library it finds no audio tracks, i.e. the check if ([[assetUrl tracksWithMediaType:AVMediaTypeAudio] count]==0) fires and the method returns NO. Can anyone help me trim music from the iTunes library?
- (void) mediaPicker: (MPMediaPickerController *) mediaPicker didPickMediaItems: (MPMediaItemCollection *) mediaItemCollection
{
if (mediaItemCollection) {
NSLog(#"%#",[mediaItemCollection items]);
// [musicPlayer setQueueWithItemCollection: mediaItemCollection];
// [musicPlayer play];
}
[KVNProgress showWithStatus:#"Processing"];
MPMediaItem *item =mediaItemCollection.representativeItem;
NSURL* assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
// set up an AVAssetReader to read from the iPod Library
AVURLAsset *songAsset =
[AVURLAsset URLAssetWithURL:assetURL options:nil];
NSError *assetError = nil;
AVAssetReader *assetReader =[AVAssetReader assetReaderWithAsset:songAsset error:&assetError];
if (assetError) {
NSLog (#"error: %#", assetError);
return;
}
AVAssetReaderOutput *assetReaderOutput =[AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
audioSettings: nil];
if (! [assetReader canAddOutput: assetReaderOutput]) {
NSLog (#"can't add reader output... die!");
return;
}
[assetReader addOutput: assetReaderOutput];
// NSArray *dirs = NSSearchPathForDirectoriesInDomains
// (NSDocumentDirectory, NSUserDomainMask, YES);
// NSString *documentsDirectoryPath = [dirs objectAtIndex:0];
// NSString *exportPath = [documentsDirectoryPath stringByAppendingPathComponent:@"out.m4a"];
NSString * exportPath =[NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"out.m4a"];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
[[NSFileManager defaultManager] removeItemAtPath:exportPath
error:nil];
}
NSURL *exportURL = [NSURL fileURLWithPath:exportPath];
AVAssetWriter *assetWriter =[AVAssetWriter assetWriterWithURL:exportURL
fileType:AVFileTypeCoreAudioFormat
error:&assetError];
if (assetError) {
NSLog (#"error: %#", assetError);
return;
}
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *outputSettings =[NSDictionary dictionaryWithObjectsAndKeys:
@(kAudioFormatLinearPCM), AVFormatIDKey,
@44100.0, AVSampleRateKey,
@2, AVNumberOfChannelsKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
@16, AVLinearPCMBitDepthKey,
@NO, AVLinearPCMIsNonInterleaved,
@NO, AVLinearPCMIsFloatKey,
@NO, AVLinearPCMIsBigEndianKey,
nil];
AVAssetWriterInput *assetWriterInput =[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:outputSettings];
if ([assetWriter canAddInput:assetWriterInput]) {
[assetWriter addInput:assetWriterInput];
} else {
NSLog (#"can't add asset writer input... die!");
return;
}
assetWriterInput.expectsMediaDataInRealTime = NO;
[assetWriter startWriting];
[assetReader startReading];
AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
[assetWriter startSessionAtSourceTime: startTime];
__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue =
dispatch_queue_create("mediaInputQueue", NULL);
[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
usingBlock: ^
{
while (assetWriterInput.readyForMoreMediaData) {
CMSampleBufferRef nextBuffer =
[assetReaderOutput copyNextSampleBuffer];
if (nextBuffer) {
// append buffer
[assetWriterInput appendSampleBuffer: nextBuffer];
// update ui
convertedByteCount +=
CMSampleBufferGetTotalSampleSize (nextBuffer);
} else {
// done!
[assetWriterInput markAsFinished];
[assetWriter finishWritingWithCompletionHandler:^{
[assetReader cancelReading];
[self performSelectorOnMainThread:@selector(updateCompletedAtMusicPath:)
withObject:exportPath
waitUntilDone:NO];
// NSLog (#"done. file size is %llu",[outputFileAttributes fileSize]);
}];
break;
}
}
}];
[self dismissViewControllerAnimated:NO completion:^{
}];
}
This is the code used for getting the URL from the iTunes library and storing the audio in the documents directory.
First, I just want to point out that you can read just segments of the audio from the library by setting the timeRange property of your assetReader. That way, instead of copying over the whole file first, you copy only the segments you need. That being said, if you are going to stick with your original implementation, I think you just need to change AVURLAsset *assetUrl = [AVURLAsset assetWithURL:audioFileInput]; to AVURLAsset *assetUrl = [AVURLAsset URLAssetWithURL:audioFileInput options:nil];
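For illustration, a rough sketch of both suggestions (reusing the same start/stop float parameters as trimAudioFileAtPath:, and assuming you read from the asset with an AVAssetReader as in the picker code above):

AVURLAsset *assetUrl = [AVURLAsset URLAssetWithURL:audioFileInput options:nil];

NSError *readerError = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:assetUrl error:&readerError];
// Restrict the reader to just the segment you want; this must be set before -startReading.
assetReader.timeRange = CMTimeRangeFromTimeToTime(CMTimeMakeWithSeconds(start, 600),
                                                  CMTimeMakeWithSeconds(stop, 600));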
I succeeded in saving iTunes music into the documents directory by using the following method:
NSArray* paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString* documentsDirectory = [paths objectAtIndex:0];
self.fullPathToFileForAudio = [documentsDirectory stringByAppendingPathComponent:@"auto-old.m4a"];
NSFileManager *fileMgr = [NSFileManager defaultManager];
// get rid of existing mp4 if exists...
if ([fileMgr removeItemAtPath:self.fullPathToFileForAudio error:&error] != YES)
NSLog(@"Unable to delete file: %@", [error localizedDescription]);
[self convertVideoToLowQuailtyWithInputURL:self.musicUrl outputURL:[NSURL fileURLWithPath:self.fullPathToFileForAudio] handler:^(AVAssetExportSession *exportSession)
{
if (exportSession.status == AVAssetExportSessionStatusCompleted)
{
NSLog(#"completed %#",exportSession.error);
printf("completed\n");
dispatch_sync(dispatch_get_main_queue(), ^{
NSLog(#"%# PATH",self.fullPathToFileForAudio);
[self exporterCompleted:[NSURL fileURLWithPath:self.fullPathToFileForAudio]];
});
}
else
{
// NSLog(#"%#",exportSession.error);
printf("error\n");
dispatch_sync(dispatch_get_main_queue(), ^{
[SVProgressHUD dismiss];
});
}
}];
- (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL
outputURL:(NSURL*)outputURL
handler:(void (^)(AVAssetExportSession*))handler
{
[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
CMTime startTime = CMTimeMake(minValue, 1);
CMTime stopTime = CMTimeMake(maxValue, 1);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
exportSession.outputURL = outputURL;
exportSession.outputFileType = #"com.apple.m4a-audio";
exportSession.timeRange = exportTimeRange;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void)
{
handler(exportSession);
}];
}
I am trying to perform screen recording using AVAssetWriter, which also accepts audio input. However, I have been stuck on this error, where the AVAssetWriter sometimes becomes AVAssetWriterStatusFailed after a few calls to appendSampleBuffer: (inside encodeAudioFrame:):
Failed: Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo=0x32b570 {NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x70d710 "The operation couldn’t be completed. (OSStatus error -12737.)", NSLocalizedFailureReason=An unknown error occurred (-12737)}
Several observations:
Once it enters this state, subsequent recording attempts will also return AVAssetWriterStatusFailed, even if I use a different recorder object.
The error does not appear when I comment out the audio recording blocks.
But the error still appears when I comment out the video recording blocks, and without modifying any incoming CMSampleBufferRef.
Any assistance will be appreciated.
Below is the code I am using, with several parts omitted for brevity. I am currently using OSX 10.9 SDK, with ARC turned off.
- (BOOL) startRecording
{
if (!isRecording)
{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self startCapture];
[self setUpWriter];
startedAt = [NSDate date];
isRecording = YES;
while (isRecording)
{
NSAutoreleasePool* pool = [NSAutoreleasePool new];
NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];
CMTime tiem = CMTimeMakeWithSeconds(offset - pauseDelta, 1000);
[self encodeFrameAtTime:tiem];
[pool drain];
sleep(0.05f);
}
[self endCapture];
[self completeRecordingSession];
});
}
return YES;
}
- (void) stopRecording {
isRecording = NO;
}
-(void) startCapture
{
AVCaptureDevice* microphone = x //Device selection code omitted
videoCaptureSession = [[AVCaptureSession alloc] init];
videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh;
//------------------------------------------
NSError* err = nil;
audioInput = [AVCaptureDeviceInput deviceInputWithDevice:microphone error:&err];
[videoCaptureSession addInput:audioInput];
//------------------------------------------
audioOutput = [[AVCaptureAudioDataOutput alloc] init];
queue = dispatch_queue_create("videoQueue", NULL);
[audioOutput setSampleBufferDelegate:self queue:queue];
[videoCaptureSession addOutput:audioOutput];
audioDelta = -1;
[videoCaptureSession startRunning];
}
-(void) endCapture
{
[videoCaptureSession stopRunning];
[videoCaptureSession removeInput:audioInput];
[videoCaptureSession removeOutput:audioOutput];
[audioOutput release];
audioOutput = nil;
audioInput = nil;
[videoCaptureSession release];
videoCaptureSession = nil;
dispatch_release(queue);
}
-(BOOL) setUpWriter
{
//delete the file.
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:self.moviePath]) {
NSError* error;
if ([fileManager removeItemAtPath:self.moviePath error:&error] == NO) {
NSLog(#"Could not delete old recording file at path: %#", self.moviePath);
}
}
}
mCaptureRect = NSRectToCGRect([screen frame]);
int FWidth = mCaptureRect.size.width;
int FHeight = mCaptureRect.size.height;
int bitRate = FWidth * FHeight * 8;
videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:self.moviePath] fileType:AVFileTypeMPEG4 error:nil];
NSParameterAssert(videoWriter);
//Configure video
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey,
nil];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:FWidth], AVVideoWidthKey,
[NSNumber numberWithInt:FHeight], AVVideoHeightKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithInt:FWidth], kCVPixelBufferWidthKey,
[NSNumber numberWithInt:FHeight], kCVPixelBufferHeightKey,
nil];
avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];
//*
//Configure Audio
AudioChannelLayout acl;
bzero(&acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
[NSNumber numberWithInt:64000], AVEncoderBitRateKey,
nil ];
audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
audioWriterInput.expectsMediaDataInRealTime = YES;
//add input
[videoWriter addInput:videoWriterInput];
[videoWriter addInput:audioWriterInput];
return YES;
}
- (void) cleanupWriter {
[videoWriter release];
videoWriter = nil;
avAdaptor = nil;
videoWriterInput = nil;
startedAt = nil;
audioWriterInput = nil;
}
- (void) encodeFrameAtTime:(CMTime)timestamp
{
if(!isRecording) return;
if(videoWriter == nil) return;
if(videoWriter.status == AVAssetWriterStatusFailed)
{
return;
}
if(videoWriter.status != AVAssetWriterStatusWriting)
{
if(videoWriter.status != AVAssetWriterStatusUnknown)
return;
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:timestamp];
startTime = CMTimeGetSeconds(timestamp);
}
timestamp = CMTimeMakeWithSeconds(startTime + CMTimeGetSeconds(timestamp), 1000);
[self writeVideoFrameAtTime:timestamp];
}
-(void) writeVideoFrameAtTime:(CMTime)time {
if (![videoWriterInput isReadyForMoreMediaData])
{
}
else
{
/*
CVPixelBufferRef manipulation omitted...
*/
{
BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if(videoWriter.status == AVAssetWriterStatusFailed) NSLog(@"Failed: %@", videoWriter.error);
if (!success) NSLog(@"Warning: Unable to write buffer to video");
}
CVPixelBufferRelease(pixelBuffer);
CGImageRelease(cgImage);
}
}
-(void) encodeAudioFrame:(CMSampleBufferRef)buffer
{
if(!isRecording) return;
CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(buffer);
if(videoWriter.status != AVAssetWriterStatusWriting)
{
//Wait for video thread to start the writer
return;
}
if(![audioWriterInput isReadyForMoreMediaData])
return;
//*
NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];
if(audioDelta == -1)
{
audioDelta = offset - CMTimeGetSeconds(timestamp);
}
//Adjusts CMSampleBufferRef's timestamp to match the video stream's zero-based timestamp
CMItemCount count;
CMTime newTimestamp = CMTimeMakeWithSeconds(CMTimeGetSeconds(timestamp) + audioDelta - pauseDelta, 1000);
CMSampleBufferGetSampleTimingInfoArray(buffer, 0, nil, &count);
CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(buffer, count, pInfo, &count);
for(CMItemCount i = 0; i < count; i++)
{
pInfo[i].decodeTimeStamp = newTimestamp;
pInfo[i].presentationTimeStamp = newTimestamp;
}
CMSampleBufferRef newBuffer;
CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, buffer, count, pInfo, &newBuffer);
free(pInfo);
timestamp = CMSampleBufferGetPresentationTimeStamp(newBuffer);
BOOL res = [audioWriterInput appendSampleBuffer:newBuffer];
}
- (void) completeRecordingSession {
@autoreleasepool {
if(videoWriter.status != AVAssetWriterStatusWriting)
{
while (videoWriter.status == AVAssetWriterStatusUnknown)
{
[NSThread sleepForTimeInterval:0.5f];
}
int status = videoWriter.status;
while (status == AVAssetWriterStatusUnknown)
{
NSLog(#"Waiting...");
[NSThread sleepForTimeInterval:0.5f];
status = videoWriter.status;
}
}
@synchronized(self)
{
[videoWriter finishWriting];
[self cleanupWriter];
}
}
}
-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if(!CMSampleBufferDataIsReady(sampleBuffer))
return;
@autoreleasepool {
if(captureOutput == audioOutput)
{
if(isRecording && !isPaused)
{
[self encodeAudioFrame:sampleBuffer];
}
}
}
}
I had exactly the same problem with my Swift code. I found out that my machine had simply run out of memory, so double-check that you have enough free RAM.
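If it helps, here is a rough Objective-C sketch (not from the original answer) for logging free memory via Mach host statistics while the writer runs; the exact memory conditions that make AVAssetWriter fail are not documented, so treat this purely as a diagnostic aid:

#import <mach/mach.h>
#import <mach/mach_host.h>

static void LogFreeMemory(void)
{
    vm_size_t pageSize = 0;
    host_page_size(mach_host_self(), &pageSize);

    vm_statistics64_data_t vmStats;
    mach_msg_type_number_t count = HOST_VM_INFO64_COUNT;
    if (host_statistics64(mach_host_self(), HOST_VM_INFO64,
                          (host_info64_t)&vmStats, &count) == KERN_SUCCESS) {
        // free_count is in pages; multiply by the page size to get bytes.
        uint64_t freeBytes = (uint64_t)vmStats.free_count * (uint64_t)pageSize;
        NSLog(@"Free memory: %.1f MB", freeBytes / (1024.0 * 1024.0));
    }
}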
I previously had this code to capture a single image from a Mac's iSight camera using QTKit:
- (NSError*)takePicture
{
BOOL success;
NSError* error;
captureSession = [QTCaptureSession new];
QTCaptureDevice* device = [QTCaptureDevice defaultInputDeviceWithMediaType: QTMediaTypeVideo];
success = [device open: &error];
if (!success) { return error; }
QTCaptureDeviceInput* captureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice: device];
success = [captureSession addInput: captureDeviceInput error: &error];
if (!success) { return error; }
QTCaptureDecompressedVideoOutput* captureVideoOutput = [QTCaptureDecompressedVideoOutput new];
[captureVideoOutput setDelegate: self];
success = [captureSession addOutput: captureVideoOutput error: &error];
if (!success) { return error; }
[captureSession startRunning];
return nil;
}
- (void)captureOutput: (QTCaptureOutput*)captureOutput
didOutputVideoFrame: (CVImageBufferRef)imageBuffer
withSampleBuffer: (QTSampleBuffer*)sampleBuffer
fromConnection: (QTCaptureConnection*)connection
{
CVBufferRetain(imageBuffer);
if (imageBuffer) {
[captureSession removeOutput: captureOutput];
[captureSession stopRunning];
NSCIImageRep* imageRep = [NSCIImageRep imageRepWithCIImage: [CIImage imageWithCVImageBuffer: imageBuffer]];
_result = [[NSImage alloc] initWithSize: [imageRep size]];
[_result addRepresentation: imageRep];
CVBufferRelease(imageBuffer);
_done = YES;
}
}
However, I found today that QTKit has been deprecated and so we must now use AVFoundation.
Can anyone help me convert this code to its AVFoundation equivalent? It seems as though many methods have the same name, but at the same time, a lot is different and I'm at a complete loss here... Any help?
Alright, I found the solution!! Here it is:
- (void)takePicture
{
NSError* error;
AVCaptureDevice* device = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeVideo];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice: device error: &error];
if (!input) {
_error = error;
_done = YES;
return;
}
AVCaptureStillImageOutput* output = [AVCaptureStillImageOutput new];
[output setOutputSettings: @{(id)kCVPixelBufferPixelFormatTypeKey: @(k32BGRAPixelFormat)}];
captureSession = [AVCaptureSession new];
captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
[captureSession addInput: input];
[captureSession addOutput: output];
[captureSession startRunning];
AVCaptureConnection* connection = [output connectionWithMediaType: AVMediaTypeVideo];
[output captureStillImageAsynchronouslyFromConnection: connection completionHandler: ^(CMSampleBufferRef sampleBuffer, NSError* error) {
if (error) {
_error = error;
_result = nil;
}
else {
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer) {
CVBufferRetain(imageBuffer);
NSCIImageRep* imageRep = [NSCIImageRep imageRepWithCIImage: [CIImage imageWithCVImageBuffer: imageBuffer]];
_result = [[NSImage alloc] initWithSize: [imageRep size]];
[_result addRepresentation: imageRep];
CVBufferRelease(imageBuffer);
}
}
_done = YES;
}];
}
I hope this helps whoever has any problems in doing this same thing.
I'm using AudioQueue Service to play audio in my app.
I need to play several audio files together. What I do now is create as many audio queues as there are files to play, i.e. I create two audio queues for two audio files and start them at the same time to get an audio mixing effect.
So basically I would like to know whether this is an "elegant" way of doing it.
Please note that I'm aware of the Audio Unit service and its MixerHost example; please do not suggest that option, as I need to do sound mixing exclusively with the Audio Queue service.
- (void) setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition *)composition {
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
NSError *error = nil;
BOOL ok = NO;
CMTime startTime = CMTimeMakeWithSeconds(0, 1);
CMTime trackDuration = songAsset.duration;
CMTime longestTime = CMTimeMake(848896, 44100); //(19.24 seconds)
CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration);
//Set Volume
AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[trackMix setVolume:0.8f atTime:startTime];
[audioMixParams addObject:trackMix];
//Insert audio into track
ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:CMTimeMake(0, 44100) error:&error];
}
- (BOOL) exportAudio {
if (defaultSoundPath == nil || recordingSoundPath == nil) {
[actvityIdicatiorView stopAnimating];
[actvityIdicatiorView setHidden:YES];
UIAlertView *alertView=[[UIAlertView alloc]initWithTitle:@"Select Sound" message:@"Both Sound is selected" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles:nil];
[alertView show];
return NO;
}
AVMutableComposition *composition = [AVMutableComposition composition];
if (audioMixParams) {
[audioMixParams release];
audioMixParams=nil;
}
audioMixParams = [[NSMutableArray alloc] initWithObjects:nil];
//Add Audio Tracks to Composition
NSString *sourceA= [[NSBundle mainBundle] pathForResource:@"Beach Soundscape" ofType:@"mp3"];
//NSString *URLPath1 = pathToYourAudioFile1;
NSURL *assetURL1 = [NSURL fileURLWithPath:sourceA];
[self setUpAndAddAudioAtPath:assetURL1 toComposition:composition];
NSString *sourceB = [[NSBundle mainBundle] pathForResource:@"DrumsMonoSTP" ofType:@"aif"];
// NSString *URLPath2 = pathToYourAudioFile2;
NSURL *assetURL2 = [NSURL fileURLWithPath:sourceB];
[self setUpAndAddAudioAtPath:assetURL2 toComposition:composition];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams];
//If you need to query what formats you can export to, here's a way to find out
NSLog (#"compatible presets for songAsset: %#",
[AVAssetExportSession exportPresetsCompatibleWithAsset:composition]);
AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
initWithAsset: composition
presetName: AVAssetExportPresetAppleM4A];
exporter.audioMix = audioMix;
exporter.outputFileType = #"com.apple.m4a-audio";
// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
//
// NSString *fileName = #"someFilename";
//NSString *exportFile = [[paths objectAtIndex:0] stringByAppendingFormat: #"/%#.m4a", fileName];
mixingSoundPath= [[self mixingSoundFolder] stringByAppendingFormat: #"/Mixing%#.m4a", [self dateString]];
[mixingSoundPath retain];
// set up export
//myDeleteFile(exportFile);
NSURL *exportURL = [NSURL fileURLWithPath:mixingSoundPath];
exporter.outputURL = exportURL;
static BOOL isComplete;
// do the export
[exporter exportAsynchronouslyWithCompletionHandler:^{
int exportStatus = exporter.status;
NSLog(#"exporter.......%i",exportStatus);
switch (exportStatus) {
case AVAssetExportSessionStatusFailed:
// NSError *exportError =exporter.error;
isComplete=NO;
NSLog (#"AVAssetExportSessionStatusFailed");
NSLog (#"Error == %#", exporter.error);
break;
case AVAssetExportSessionStatusCompleted:
[self mixingDidFinshing];
isComplete=YES;
break;
case AVAssetExportSessionStatusUnknown:
NSLog (#"AVAssetExportSessionStatusUnknown");
isComplete=NO;
break;
case AVAssetExportSessionStatusExporting:
isComplete=NO;
NSLog (#"AVAssetExportSessionStatusExporting");
break;
case AVAssetExportSessionStatusCancelled:
isComplete=NO;
NSLog (#"AVAssetExportSessionStatusCancelled");
break;
case AVAssetExportSessionStatusWaiting:
isComplete=NO;
NSLog (#"AVAssetExportSessionStatusWaiting");
break;
default:
NSLog (#"didn't get export status");
isComplete=NO;
break;
}
}];
return isComplete;
}
In my app I have a Place entity that contains reviews and featured objects. At the minute my app constantly refreshes these every time I parse my XML data, which is fine; however, I need to delete the review and featured objects before they are re-parsed. My code is as follows, and I believe I need to delete the objects in the parseXML method at the top:
-(void)parseXML:(NSString*)xml{
TBXML * tbxml = [TBXML tbxmlWithXMLString:xml];
if(tbxml.rootXMLElement){
[self traverseElement:tbxml.rootXMLElement];
if(self.tempItem){
NSLog(#"Ending Application");
[self configurePlaceChildrenAndSave];
[self startGeoCodingQueue];
}
}
}
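For reference, a rough sketch (not part of the original code) of deleting the existing child objects before re-parsing. It assumes a hypothetical managedObjectContext accessor on CoreDataBasicService and that the child entities are named Review and Featured; adjust to your actual model:

- (void)deleteAllObjectsForEntity:(NSString *)entityName {
    NSManagedObjectContext *context = [CoreDataBasicService managedObjectContext];
    NSFetchRequest *request = [[NSFetchRequest alloc] init];
    [request setEntity:[NSEntityDescription entityForName:entityName inManagedObjectContext:context]];
    [request setIncludesPropertyValues:NO]; // we only need the objects in order to delete them

    NSError *error = nil;
    NSArray *objects = [context executeFetchRequest:request error:&error];
    [request release];
    for (NSManagedObject *object in objects) {
        [context deleteObject:object];
    }
    [CoreDataBasicService saveChanges];
}

Calling this for @"Review" and @"Featured" at the top of parseXML:, before traverseElement: runs, would clear the old children so the re-parsed ones are not duplicated.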
-(void)fireGeocoder:(BSForwardGeocoder*)g{
NSLog(#"Begining FGC for %# (%#)",g.searchQuery,g.metaData);
[g findLocation];
}
-(void)startGeoCodingQueue{
[[NSUserDefaults standardUserDefaults] setValue:[NSNumber numberWithBool:YES] forKey:kGeoCoderInProgress];
int i = 0;
BSForwardGeocoder * fgc;
NSArray * a = [CoreDataBasicService fetchResultsForEnity:@"Place" andSortDiscriptor:@"name" Ascending:YES];
for (Place * p in a) {
if(p.latitude && p.longitude)continue;//skip geocoding if location data exists
fgc = [[BSForwardGeocoder alloc] initWithDelegate:self];
fgc.metaData = p.name;
fgc.searchQuery = p.postcode;
NSLog(#"gc:%i-%i",i,[a count]);
if([a count] == i+1){
fgc.last = YES;
}
float delay = (float)i*kGeoCoderDelay;
[self performSelector:@selector(fireGeocoder:) withObject:[fgc autorelease] afterDelay:delay];
fgc = nil;
i++;
}
[[NSUserDefaults standardUserDefaults] setValue:[NSNumber numberWithInt:i] forKey:kGeoCoderCompletedKey];
}
-(void)finishedGeocoding{
[[NSUserDefaults standardUserDefaults] setValue:[NSDate date] forKey:kGeoCoderlastRanKey];
[[NSUserDefaults standardUserDefaults] setValue:[NSNumber numberWithBool:NO] forKey:kGeoCoderInProgress];
[[NSNotificationCenter defaultCenter] postNotificationName:kGeoCoderNotificationName object:nil];
}
-(void)forwardGeocoderError:(BSForwardGeocoder *)geocoder errorMessage:(NSString *)errorMessage{
NSLog(#"EX ERROR: GEOCODER FAILED - %#",errorMessage);
if(geocoder.last)[self finishedGeocoding];
}
- (void)forwardGeocoderFoundLocation:(BSForwardGeocoder*)geocoder
{
if(geocoder.status == G_GEO_SUCCESS)
{
BSKmlResult *result = [geocoder.results lastObject];
if(!result)return;
//[self.fowardGeocodingQueue setObject:[NSString stringWithString:value] forKey:self.tempItem.name];
Place * p = [CoreDataBasicService fetchPlaceNamed:geocoder.metaData];
p.latitude = [NSNumber numberWithFloat:result.latitude];
p.longitude = [NSNumber numberWithFloat:result.longitude];
[CoreDataBasicService saveChanges];
//NSLog(#"%# - %f,%f",p2.name,p2.latitude,p2.longitude);
NSLog(#"completed Foward geocoding for '%#' (%#) [%f,%f]",geocoder.metaData,geocoder.searchQuery,[p.latitude floatValue],[p.longitude floatValue]);
if(geocoder.last)[self finishedGeocoding];
}
else {
NSString *message = @"";
switch (geocoder.status) {
case G_GEO_BAD_KEY:
message = @"The API key is invalid.";
break;
case G_GEO_UNKNOWN_ADDRESS:
message = [NSString stringWithFormat:@"Could not find %@", geocoder.searchQuery];
break;
case G_GEO_TOO_MANY_QUERIES:
message = @"Too many queries have been made for this API key.";
break;
case G_GEO_SERVER_ERROR:
message = @"Server error, please try again.";
break;
default:
break;
}
NSLog(@"ERROR: GEOCODER FAILED - %@",message);
// UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Information"
// message:message
// delegate:nil
// cancelButtonTitle:@"OK"
// otherButtonTitles: nil];
// [alert show];
// [alert release];
}
}
-(void)configurePlaceChildrenAndSave{
if(self.tempReviewArray.count==0 && self.tempReview)[self.tempReviewArray addObject:self.tempReview];
if(self.tempFeatureArray.count==0 && self.tempFeature)[self.tempFeatureArray addObject:self.tempFeature];
[self.tempItem setReviews:self.tempReviewArray];
[self.tempItem setFeatured:self.tempFeatureArray];
self.tempReviewArray = nil;
self.tempReview = nil;
self.tempFeatureArray = nil;
self.tempFeature = nil;
[CoreDataBasicService saveChanges];
self.tempItem = nil;
}
-(NSString*)formatKeyForCoreData:(NSString*)elementKey{
return [elementKey stringByReplacingOccurrencesOfString:@"-" withString:@""];
}
-(NSDate*)convertStringDateToDate:(NSString*)stringDate{
NSDateFormatter * df = [[NSDateFormatter alloc] init];
NSLog(#"DATE:%#",stringDate);
[df setDateFormat:#"y-M-d'T'H:m:s'Z'"];
NSDate * d = [df dateFromString:stringDate];
[df release];
return d;
}
-(Place*)getPlaceForName:(NSString*)name{
NSPredicate * pred = [NSPredicate predicateWithFormat:@"name = %@",name];
NSArray * results = [CoreDataBasicService fetchResultsForEnity:@"Place" WithPredicate:pred andSortDiscriptor:@"identifier" Ascending:YES];
return [results lastObject];
}
-(void)traverseElement:(TBXMLElement *)element {
do {
// Display the name of the element
NSString * value = [TBXML textForElement:element];
NSLog(#"%#-'%#'",[TBXML elementName:element],value);
// Obtain first attribute from element
NSString * ele = [TBXML elementName:element];
if([ele isEqualToString:#"place"]){
if(self.tempItem){
//GEOCODER HERE
[self configurePlaceChildrenAndSave];
}
//CREATE NEW CD ITEM HER
if(dataBaseExits){
TBXMLElement * idElement = [TBXML childElementNamed:@"name" parentElement:element];
Place * p = [self getPlaceForName:[NSString stringWithFormat:@"%@",[TBXML textForElement:idElement]]];
if(p){
[self setTempItem:p];
TBXMLElement * reviewsElement = [TBXML childElementNamed:@"reviews" parentElement:element];
if(reviewsElement){
self.tempItem.reviews = nil;
[CoreDataBasicService saveChanges];
[self traverseElement:reviewsElement];
}
TBXMLElement * promosElement = [TBXML childElementNamed:@"featured-places" parentElement:element];
if(promosElement){
self.tempItem.featured = nil;
[CoreDataBasicService saveChanges];
[self traverseElement:promosElement];
}
[CoreDataBasicService saveChanges];
continue;
}