I am trying to perform screen recording using AVAssetWriter, which also accepts audio input. However, I am stuck on an error where the AVAssetWriter sometimes enters AVAssetWriterStatusFailed after a few calls to appendSampleBuffer: (inside encodeAudioFrame:):
Failed: Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo=0x32b570 {NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x70d710 "The operation couldn’t be completed. (OSStatus error -12737.)", NSLocalizedFailureReason=An unknown error occurred (-12737)}
Several observations:
Once it enters this state, subsequent recording attempts will also return AVAssetWriterStatusFailed, even if I use a different recorder object.
The error does not appear when I comment out the audio recording blocks.
But the error still appears when I comment out the video recording blocks, even without modifying any incoming CMSampleBufferRef.
Any assistance would be appreciated.
Below is the code I am using, with several parts omitted for brevity. I am using the OS X 10.9 SDK, with ARC turned off.
- (BOOL) startRecording
{
if (!isRecording)
{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self startCapture];
[self setUpWriter];
startedAt = [NSDate date];
isRecording = YES;
while (isRecording)
{
NSAutoreleasePool* pool = [NSAutoreleasePool new];
NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];
CMTime time = CMTimeMakeWithSeconds(offset - pauseDelta, 1000);
[self encodeFrameAtTime:time];
[pool drain];
usleep(50000); //sleep() takes whole seconds and would truncate 0.05 to 0; usleep() takes microseconds
}
[self endCapture];
[self completeRecordingSession];
});
}
return YES;
}
- (void) stopRecording {
isRecording = NO;
}
-(void) startCapture
{
AVCaptureDevice* microphone = x //Device selection code omitted
videoCaptureSession = [[AVCaptureSession alloc] init];
videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh;
//------------------------------------------
NSError* err = nil;
audioInput = [AVCaptureDeviceInput deviceInputWithDevice:microphone error:&err];
[videoCaptureSession addInput:audioInput];
//------------------------------------------
audioOutput = [[AVCaptureAudioDataOutput alloc] init];
queue = dispatch_queue_create("videoQueue", NULL);
[audioOutput setSampleBufferDelegate:self queue:queue];
[videoCaptureSession addOutput:audioOutput];
audioDelta = -1;
[videoCaptureSession startRunning];
}
-(void) endCapture
{
[videoCaptureSession stopRunning];
[videoCaptureSession removeInput:audioInput];
[videoCaptureSession removeOutput:audioOutput];
[audioOutput release];
audioOutput = nil;
audioInput = nil;
[videoCaptureSession release];
videoCaptureSession = nil;
dispatch_release(queue);
}
-(BOOL) setUpWriter
{
//delete the file.
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:self.moviePath]) {
NSError* error;
if ([fileManager removeItemAtPath:self.moviePath error:&error] == NO) {
NSLog(#"Could not delete old recording file at path: %#", self.moviePath);
}
}
}
mCaptureRect = NSRectToCGRect([screen frame]);
int FWidth = mCaptureRect.size.width;
int FHeight = mCaptureRect.size.height;
int bitRate = FWidth * FHeight * 8;
videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:self.moviePath] fileType:AVFileTypeMPEG4 error:nil];
NSParameterAssert(videoWriter);
//Configure video
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey,
nil];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:FWidth], AVVideoWidthKey,
[NSNumber numberWithInt:FHeight], AVVideoHeightKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithInt:FWidth], kCVPixelBufferWidthKey,
[NSNumber numberWithInt:FHeight], kCVPixelBufferHeightKey,
nil];
avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];
//*
//Configure Audio
AudioChannelLayout acl;
bzero(&acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
[NSNumber numberWithInt:64000], AVEncoderBitRateKey,
nil ];
audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
audioWriterInput.expectsMediaDataInRealTime = YES;
//add input
[videoWriter addInput:videoWriterInput];
[videoWriter addInput:audioWriterInput];
return YES;
}
- (void) cleanupWriter {
[videoWriter release];
videoWriter = nil;
avAdaptor = nil;
videoWriterInput = nil;
startedAt = nil;
audioWriterInput = nil;
}
- (void) encodeFrameAtTime:(CMTime)timestamp
{
if(!isRecording) return;
if(videoWriter == nil) return;
if(videoWriter.status == AVAssetWriterStatusFailed)
{
return;
}
if(videoWriter.status != AVAssetWriterStatusWriting)
{
if(videoWriter.status != AVAssetWriterStatusUnknown)
return;
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:timestamp];
startTime = CMTimeGetSeconds(timestamp);
}
timestamp = CMTimeMakeWithSeconds(startTime + CMTimeGetSeconds(timestamp), 1000);
[self writeVideoFrameAtTime:timestamp];
}
-(void) writeVideoFrameAtTime:(CMTime)time {
if (![videoWriterInput isReadyForMoreMediaData])
{
}
else
{
/*
CVPixelBufferRef manipulation omitted...
*/
{
BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if(videoWriter.status == AVAssetWriterStatusFailed) NSLog(@"Failed: %@", videoWriter.error);
if (!success) NSLog(@"Warning: Unable to write buffer to video");
}
CVPixelBufferRelease(pixelBuffer);
CGImageRelease(cgImage);
}
}
-(void) encodeAudioFrame:(CMSampleBufferRef)buffer
{
if(!isRecording) return;
CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(buffer);
if(videoWriter.status != AVAssetWriterStatusWriting)
{
//Wait for video thread to start the writer
return;
}
if(![audioWriterInput isReadyForMoreMediaData])
return;
//*
NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];
if(audioDelta == -1)
{
audioDelta = offset - CMTimeGetSeconds(timestamp);
}
//Adjusts CMSampleBufferRef's timestamp to match the video stream's zero-based timestamp
CMItemCount count;
CMTime newTimestamp = CMTimeMakeWithSeconds(CMTimeGetSeconds(timestamp) + audioDelta - pauseDelta, 1000);
CMSampleBufferGetSampleTimingInfoArray(buffer, 0, nil, &count);
CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(buffer, count, pInfo, &count);
for(CMItemCount i = 0; i < count; i++)
{
pInfo[i].decodeTimeStamp = newTimestamp;
pInfo[i].presentationTimeStamp = newTimestamp;
}
CMSampleBufferRef newBuffer;
CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, buffer, count, pInfo, &newBuffer);
free(pInfo);
timestamp = CMSampleBufferGetPresentationTimeStamp(newBuffer);
BOOL res = [audioWriterInput appendSampleBuffer:newBuffer];
if (!res) NSLog(@"Warning: Unable to write audio buffer");
CFRelease(newBuffer); //CMSampleBufferCreateCopyWithNewTiming returns a +1 object; release it to avoid leaking every audio frame under MRC
}
- (void) completeRecordingSession {
@autoreleasepool {
if(videoWriter.status != AVAssetWriterStatusWriting)
{
while (videoWriter.status == AVAssetWriterStatusUnknown)
{
[NSThread sleepForTimeInterval:0.5f];
}
int status = videoWriter.status;
while (status == AVAssetWriterStatusUnknown)
{
NSLog(#"Waiting...");
[NSThread sleepForTimeInterval:0.5f];
status = videoWriter.status;
}
}
@synchronized(self)
{
[videoWriter finishWriting];
[self cleanupWriter];
}
}
}
-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if(!CMSampleBufferDataIsReady(sampleBuffer))
return;
@autoreleasepool {
if(captureOutput == audioOutput)
{
if(isRecording && !isPaused)
{
[self encodeAudioFrame:sampleBuffer];
}
}
}
}
I had exactly the same problem with my Swift code. It turned out my machine had simply run out of memory, so double-check that you have enough free RAM.
Related
I'm currently retrieving the Top 100 Scores for one of my leaderboards the following way:
- (void) retrieveTop100Scores {
__block int totalScore = 0;
GKLeaderboard *myLB = [[GKLeaderboard alloc] init];
myLB.identifier = [Team currentTeam];
myLB.timeScope = GKLeaderboardTimeScopeAllTime;
myLB.playerScope = GKLeaderboardPlayerScopeGlobal;
myLB.range = NSMakeRange(1, 100);
[myLB loadScoresWithCompletionHandler:^(NSArray *scores, NSError *error) {
if (error != nil) {
NSLog(#"%#", [error localizedDescription]);
}
if (scores != nil) {
for (GKScore *score in scores) {
NSLog(#"%lld", score.value);
totalScore += score.value;
}
NSLog(#"Total Score: %d", totalScore);
[self loadingDidEnd];
}
}];
}
The problem is I want to do this for 32 leaderboards. I have an array with all 32 leaderboard identifiers:
NSArray *leaderboardIDs;
So my question is: how can I combine those two segments of code to pull in the 100 values for each leaderboard, resulting in a dictionary with the leaderboard names as keys and the total of each leaderboard's 100 scores as values?
UPDATED:
So I have updated my answer using CrimsonChris' help. The only question I have is: how can I know when it has finished totaling the scores for all 32 teams? I ask because I would then like to order them from highest to lowest and display them in that order in a tableView.
This is what I've updated my answer to:
In my viewDidLoad:
- (void)loadLeaderboardData {
// Array of leaderboard ID's to get high scores for
NSArray *leaderboardIDs = #[#"algeria", #"argentina", #"australia", #"belgium", #"bosniaandherzegovina", #"brazil", #"cameroon", #"chile", #"colombia", #"costarica", #"croatia", #"ecuador", #"england", #"france", #"germany", #"ghana", #"greece", #"honduras", #"iran", #"italy", #"ivorycoast", #"japan", #"mexico", #"netherlands", #"nigeria", #"portugal", #"russia", #"southkorea", #"spain", #"switzerland", #"unitedstates", #"uruguay"];
scoresByLeaderboardID = [NSMutableDictionary dictionary];
__block int requestCount = (int)leaderboardIDs.count;
for (NSString *leaderboardID in leaderboardIDs) {
[self loadScoresForLeaderboardID:leaderboardID range:NSMakeRange(1, 100) callback:^(NSArray *scores) {
scoresByLeaderboardID[leaderboardID] = scores;
if (--requestCount <= 0) {
if (callback)callback(scoresByLeaderboardID);
}
}];
}
}
- (void)loadScoresForLeaderboardID:(NSString*)leaderboardID range:(NSRange)range callback:(CallbackBlock)callback {
__block int totalScore = 0;
GKLeaderboard *myLB = [[GKLeaderboard alloc] init];
myLB.identifier = leaderboardID;
myLB.timeScope = GKLeaderboardTimeScopeAllTime;
myLB.playerScope = GKLeaderboardPlayerScopeGlobal;
myLB.range = range;
[myLB loadScoresWithCompletionHandler:^(NSArray *scores, NSError *error) {
if (error != nil) {
//NSLog(#"%#", [error localizedDescription]);
}
if (scores != nil) {
for (GKScore *score in scores) {
//NSLog(#"Individual Scores: %lld (For %#)", score.value, leaderboardID);
}
}
}];
}
Your method can be cleaned up by using callback blocks.
typedef void(^CallbackBlock)(id object);
//callback accepts an NSArray* of GKScore*
- (void)loadScoresForLeaderboardID:(NSString *)leaderboardID range:(NSRange)range callback:(CallbackBlock)callback {
GKLeaderboard *myLB = [[GKLeaderboard alloc] init];
myLB.identifier = leaderboardID;
myLB.timeScope = GKLeaderboardTimeScopeAllTime;
myLB.playerScope = GKLeaderboardPlayerScopeGlobal;
myLB.range = range;
[myLB loadScoresWithCompletionHandler:^(NSArray *scores, NSError *error) {
if (error != nil) NSLog(@"%@", [error localizedDescription]);
if (callback) callback(scores);
}];
}
You can then loop over your leaderboards.
//callback accepts an NSDictionary* of NSArray* (of GKScore*) keyed by NSString*
- (void)loadScoresForLeaderboardIDs:(NSArray *)leaderboardIDs withCallback:(CallbackBlock)callback {
NSMutableDictionary *scoresByLeaderboardID = [NSMutableDictionary dictionary];
__block int requestCount = (int)leaderboardIDs.count;
for (NSString *leaderboardID in leaderboardIDs) {
[LeaderboardLoader loadScoresForLeaderboardID:leaderboardID range:NSMakeRange(1, 100) callback:^(NSArray *scores) {
scoresByLeaderboardID[leaderboardID] = scores;
if (--requestCount <= 0) { //not thread safe
if (callback) callback(scoresByLeaderboardID);
}
}];
}
}
Once this method fires its callback you should have all your scores.
[LeaderboardLoader loadScoresForLeaderboardIDs:@[@"FirstID", @"SecondID", @"ThirdID"] withCallback:^(NSDictionary *scoresByLeaderboardID) {
NSLog(@"%@", scoresByLeaderboardID);
//do whatever you need to with all your scores here.
}];
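To address the update about ordering the teams: inside that final callback you can total each leaderboard's scores and sort the IDs from highest to lowest before reloading the table view. A minimal sketch, assuming scoresByLeaderboardID maps NSString* leaderboard IDs to NSArray* of GKScore* as above (the names here are illustrative):
[LeaderboardLoader loadScoresForLeaderboardIDs:leaderboardIDs withCallback:^(NSDictionary *scoresByLeaderboardID) {
//Total each leaderboard's scores
NSMutableDictionary *totals = [NSMutableDictionary dictionary];
[scoresByLeaderboardID enumerateKeysAndObjectsUsingBlock:^(NSString *lbID, NSArray *scores, BOOL *stop) {
long long total = 0;
for (GKScore *score in scores) total += score.value;
totals[lbID] = @(total);
}];
//Sort leaderboard IDs by descending total for the table view
NSArray *sortedIDs = [totals keysSortedByValueUsingComparator:^NSComparisonResult(NSNumber *a, NSNumber *b) {
return [b compare:a];
}];
NSLog(@"%@", sortedIDs);
}];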
I previously had this code to capture a single image from a Mac's iSight camera using QTKit:
- (NSError*)takePicture
{
BOOL success;
NSError* error;
captureSession = [QTCaptureSession new];
QTCaptureDevice* device = [QTCaptureDevice defaultInputDeviceWithMediaType: QTMediaTypeVideo];
success = [device open: &error];
if (!success) { return error; }
QTCaptureDeviceInput* captureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice: device];
success = [captureSession addInput: captureDeviceInput error: &error];
if (!success) { return error; }
QTCaptureDecompressedVideoOutput* captureVideoOutput = [QTCaptureDecompressedVideoOutput new];
[captureVideoOutput setDelegate: self];
success = [captureSession addOutput: captureVideoOutput error: &error];
if (!success) { return error; }
[captureSession startRunning];
return nil;
}
- (void)captureOutput: (QTCaptureOutput*)captureOutput
didOutputVideoFrame: (CVImageBufferRef)imageBuffer
withSampleBuffer: (QTSampleBuffer*)sampleBuffer
fromConnection: (QTCaptureConnection*)connection
{
CVBufferRetain(imageBuffer);
if (imageBuffer) {
[captureSession removeOutput: captureOutput];
[captureSession stopRunning];
NSCIImageRep* imageRep = [NSCIImageRep imageRepWithCIImage: [CIImage imageWithCVImageBuffer: imageBuffer]];
_result = [[NSImage alloc] initWithSize: [imageRep size]];
[_result addRepresentation: imageRep];
CVBufferRelease(imageBuffer);
_done = YES;
}
}
However, I found today that QTKit has been deprecated and so we must now use AVFoundation.
Can anyone help me convert this code to its AVFoundation equivalent? It seems as though many methods have the same name, but at the same time, a lot is different and I'm at a complete loss here... Any help?
Alright, I found the solution!! Here it is:
- (void)takePicture
{
NSError* error;
AVCaptureDevice* device = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeVideo];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice: device error: &error];
if (!input) {
_error = error;
_done = YES;
return;
}
AVCaptureStillImageOutput* output = [AVCaptureStillImageOutput new];
[output setOutputSettings: @{(id)kCVPixelBufferPixelFormatTypeKey: @(k32BGRAPixelFormat)}];
captureSession = [AVCaptureSession new];
captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
[captureSession addInput: input];
[captureSession addOutput: output];
[captureSession startRunning];
AVCaptureConnection* connection = [output connectionWithMediaType: AVMediaTypeVideo];
[output captureStillImageAsynchronouslyFromConnection: connection completionHandler: ^(CMSampleBufferRef sampleBuffer, NSError* error) {
if (error) {
_error = error;
_result = nil;
}
else {
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer) {
CVBufferRetain(imageBuffer);
NSCIImageRep* imageRep = [NSCIImageRep imageRepWithCIImage: [CIImage imageWithCVImageBuffer: imageBuffer]];
_result = [[NSImage alloc] initWithSize: [imageRep size]];
[_result addRepresentation: imageRep];
CVBufferRelease(imageBuffer);
}
}
_done = YES;
}];
}
I hope this helps whoever has any problems in doing this same thing.
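One usage note: captureStillImageAsynchronouslyFromConnection: completes on a background queue, so the caller still has to wait for the result, just as with the QTKit delegate version. A minimal sketch of a call site, assuming the same _done/_result ivars as in the original code (the polling loop is for illustration only):
[self takePicture];
//Spin the run loop until the completion handler sets _done
while (!_done) {
[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.05]];
}
NSImage* picture = _result;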
When selecting a song from the user's library on iPhone or iPad devices running iOS 7, my app crashes.
The line it stops on is
FourCharCode formatID = audioDesc->mFormatID;
With an EXC_BAD_ACCESS code.
Checking the console for more info, this is the last line it logs:
appMake[8392:60b] Select music AssetUrl = (null)
Code below for the export session part:
- (void)exportAssetAsSourceFormat:(MPMediaItem *)item {
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc]init];
NSURL *assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
initWithAsset:songAsset
presetName:AVAssetExportPresetPassthrough];
NSArray *tracks = [songAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack *track = [tracks objectAtIndex:0];
id desc = [track.formatDescriptions objectAtIndex:0];
const AudioStreamBasicDescription *audioDesc = CMAudioFormatDescriptionGetStreamBasicDescription((CMAudioFormatDescriptionRef)desc);
FourCharCode formatID = audioDesc->mFormatID;
NSString *fileType = nil;
NSString *ex = nil;
switch (formatID) {
case kAudioFormatLinearPCM:
{
UInt32 flags = audioDesc->mFormatFlags;
if (flags & kAudioFormatFlagIsBigEndian) {
fileType = #"public.aiff-audio";
ex = #"aif";
} else {
fileType = #"com.microsoft.waveform-audio";
ex = #"wav";
}
}
break;
case kAudioFormatMPEGLayer3:
fileType = #"com.apple.quicktime-movie";
ex = #"mp3";
break;
case kAudioFormatMPEG4AAC:
fileType = #"com.apple.m4a-audio";
ex = #"m4a";
break;
case kAudioFormatAppleLossless:
fileType = #"com.apple.m4a-audio";
ex = #"m4a";
break;
default:
break;
}
exportSession.outputFileType = fileType;
NSString *exportPath = [[NSTemporaryDirectory() stringByAppendingPathComponent:[EXPORT_NAME stringByAppendingPathExtension:ex]] retain];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
exportSession.outputURL = [NSURL fileURLWithPath:exportPath];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (exportSession.status == AVAssetExportSessionStatusCompleted) {
NSLog(#"export session completed");
//return YES;
[self performSelectorOnMainThread:#selector(gotoMainView:)
withObject:[EXPORT_NAME stringByAppendingPathExtension:ex]
waitUntilDone:NO];
} else {
NSLog(#"export session error");
//return NO;
}
[exportSession release];
}];
[pool release];
}
Media picker code;
- (void) mediaPicker: (MPMediaPickerController *) mediaPicker didPickMediaItems: (MPMediaItemCollection *) mediaItemCollection
{
// Dismiss the media item picker.
[self dismissModalViewControllerAnimated: YES];
if ([mediaItemCollection count] < 1) {
return;
}
[selectedItem release];
selectedItem = [[[mediaItemCollection items] objectAtIndex:0] retain];
NSURL* filePath = [selectedItem valueForProperty: MPMediaItemPropertyAssetURL];
NSLog(#"Select music AssetUrl = %#", filePath);
[viewLoading setHidden:NO];
[self UnloadSound];
[NSThread detachNewThreadSelector:@selector(exportAssetAsSourceFormat:) toTarget:self withObject:selectedItem];
}
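A possible lead: the console line above shows MPMediaItemPropertyAssetURL returning (null), which can happen for DRM-protected or iCloud-only items. With a nil URL the asset ends up with no audio tracks, so objectAtIndex:0 and the audioDesc dereference will crash. A defensive sketch (illustrative only) for the top of exportAssetAsSourceFormat::
NSURL *assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];
if (assetURL == nil) {
//DRM-protected or cloud-only items have no local asset URL; bail out early
NSLog(@"No asset URL for item; cannot export");
return;
}
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
NSArray *tracks = [songAsset tracksWithMediaType:AVMediaTypeAudio];
if ([tracks count] == 0) {
NSLog(@"No audio tracks in asset");
return;
}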
I'm writing an MP4 video file with an AVAssetWriter, using an AVAssetWriterInputPixelBufferAdaptor.
The source is a video from a UIImagePickerController, either freshly captured from the camera or from the asset library. Quality right now is UIImagePickerControllerQualityTypeMedium.
Sometimes the writer fails. Its status is AVAssetWriterStatusFailed and the AVAssetWriter object's error property is:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed"
UserInfo=0xf5d8990 {NSLocalizedFailureReason=An unknown error occurred (-536870210),
NSUnderlyingError=0x4dd8e0 "The operation couldn’t be completed. (OSStatus error -536870210.)",
NSLocalizedDescription=The operation could not be completed
The error occurs approximately 20% of the times the code is run. It seems to fail more frequently on iPhone 4 / 4S than on iPhone 5.
It also occurs more frequently if the source video quality is higher.
Using UIImagePickerControllerQualityTypeLow the error doesn't happen so often.
Using UIImagePickerControllerQualityTypeHigh, the error happens a little more frequently.
I have also noticed something else:
It seems to come in waves. When it fails, the following runs will often fail too, even though I delete the app and reinstall it. That leaves me wondering whether my program leaks some memory, and whether that memory stays alive even if the app gets killed (is that even possible?).
Here is the code I use to render my video:
- (void)writeVideo
{
offlineRenderingInProgress = YES;
/* --- Writer Setup --- */
[locationQueue cancelAllOperations];
[self stopWithoutRewinding];
NSError *writerError = nil;
BOOL success;
success = [[NSFileManager defaultManager] removeItemAtURL:self.outputURL error:nil];
// DLog(@"Url: %@, success: %i, error: %@", self.outputURL, success, fileError);
writer = [AVAssetWriter assetWriterWithURL:self.outputURL fileType:(NSString *)kUTTypeQuickTimeMovie error:&writerError];
//writer.shouldOptimizeForNetworkUse = NO;
if (writerError) {
DLog(#"Writer error: %#", writerError);
return;
}
float bitsPerPixel;
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions((__bridge CMVideoFormatDescriptionRef)([readerVideoOutput.videoTracks[0] formatDescriptions][0]));
int numPixels = dimensions.width * dimensions.height;
int bitsPerSecond;
// Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
if ( numPixels < (640 * 480) )
bitsPerPixel = 4.05; // This bitrate matches the quality produced by AVCaptureSessionPresetMedium or Low.
else
bitsPerPixel = 11.4; // This bitrate matches the quality produced by AVCaptureSessionPresetHigh.
bitsPerSecond = numPixels * bitsPerPixel;
NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey,
[NSNumber numberWithInteger:videoSize.height], AVVideoHeightKey,
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInteger:30], AVVideoMaxKeyFrameIntervalKey,
nil], AVVideoCompressionPropertiesKey,
nil];
writerVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings];
writerVideoInput.transform = movie.preferredTransform;
writerVideoInput.expectsMediaDataInRealTime = YES;
[writer addInput:writerVideoInput];
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
writerPixelAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerVideoInput
sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
BOOL couldStart = [writer startWriting];
if (!couldStart) {
DLog(#"Could not start AVAssetWriter!");
abort = YES;
[locationQueue cancelAllOperations];
return;
}
[self configureFilters];
CIContext *offlineRenderContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @NO}];
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
if (!self.canEdit) {
[self createVideoReaderWithAsset:movie timeRange:CMTimeRangeFromTimeToTime(kCMTimeZero, kCMTimePositiveInfinity) forOfflineRender:YES];
} else {
[self createVideoReaderWithAsset:movie timeRange:CMTimeRangeWithNOVideoRangeInDuration(self.thumbnailEditView.range, movie.duration) forOfflineRender:YES];
}
CMTime startOffset = reader.timeRange.start;
DLog(#"startOffset: %llu", startOffset.value);
[self.thumbnailEditView removeFromSuperview];
// self.thumbnailEditView = nil;
[glLayer removeFromSuperlayer];
glLayer = nil;
[playerView removeFromSuperview];
playerView = nil;
glContext = nil;
[writerVideoInput requestMediaDataWhenReadyOnQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0) usingBlock:^{
@try {
BOOL didWriteSomething = NO;
DLog(@"Preparing to write...");
while ([writerVideoInput isReadyForMoreMediaData]) {
if (abort) {
NSLog(#"Abort == YES");
[locationQueue cancelAllOperations];
[writerVideoInput markAsFinished];
videoConvertCompletionBlock(NO, writer.error.localizedDescription);
}
if (writer.status == AVAssetWriterStatusFailed) {
DLog(@"Writer.status: AVAssetWriterStatusFailed, error: %@", writer.error);
DLog(@"Source file exists: %i", [[NSFileManager defaultManager] fileExistsAtPath:movie.URL.relativePath]); //log before bailing out so this line is reachable
[[NSUserDefaults standardUserDefaults] setObject:[NSNumber numberWithInt:1] forKey:@"QualityOverride"];
[[NSUserDefaults standardUserDefaults] synchronize];
abort = YES;
[locationQueue cancelAllOperations];
videoConvertCompletionBlock(NO, writer.error.localizedDescription);
return;
}
DLog(#"Writing started...");
CMSampleBufferRef buffer = nil;
if (reader.status != AVAssetReaderStatusUnknown) {
if (reader.status == AVAssetReaderStatusReading) {
buffer = [readerVideoOutput copyNextSampleBuffer];
if (didWriteSomething == NO) {
DLog(#"Copying sample buffers...");
}
}
if (!buffer) {
[writerVideoInput markAsFinished];
DLog(#"Finished...");
CGColorSpaceRelease(colorSpace);
[self offlineRenderingDidFinish];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[writer finishWriting];
if (writer.error != nil) {
DLog(#"Error: %#", writer.error);
} else {
DLog(#"Succes!");
}
if (writer.status == AVAssetWriterStatusCompleted) {
videoConvertCompletionBlock(YES, nil);
}
else {
abort = YES;
videoConvertCompletionBlock(NO, writer.error.localizedDescription);
}
});
return;
}
didWriteSomething = YES;
}
else {
DLog(#"Still waiting...");
//Reader just needs a moment to get ready...
continue;
}
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(buffer);
if (pixelBuffer == NULL) {
DLog(#"Pixelbuffer == NULL");
continue;
}
//DLog(#"Sample call back! Pixelbuffer: %lu", CVPixelBufferGetHeight(pixelBuffer));
//NSDictionary *options = [NSDictionary dictionaryWithObject:(__bridge id)CGColorSpaceCreateDeviceRGB() forKey:kCIImageColorSpace];
CIImage *ciimage = [CIImage imageWithCVPixelBuffer:pixelBuffer options:nil];
CIImage *outputImage = [self filteredImageWithImage:ciimage];
CVPixelBufferRef outPixelBuffer = NULL;
CVReturn status;
CFDictionaryRef empty; // empty value for attr value.
CFMutableDictionaryRef attrs;
empty = CFDictionaryCreate(kCFAllocatorDefault, // our empty IOSurface properties dictionary
NULL,
NULL,
0,
&kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
attrs = CFDictionaryCreateMutable(kCFAllocatorDefault,
1,
&kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
CFDictionarySetValue(attrs,
kCVPixelBufferIOSurfacePropertiesKey,
empty);
CFDictionarySetValue(attrs,
kCVPixelBufferCGImageCompatibilityKey,
(__bridge const void *)([NSNumber numberWithBool:YES]));
CFDictionarySetValue(attrs,
kCVPixelBufferCGBitmapContextCompatibilityKey,
(__bridge const void *)([NSNumber numberWithBool:YES]));
status = CVPixelBufferCreate(kCFAllocatorDefault, ciimage.extent.size.width, ciimage.extent.size.height, kCVPixelFormatType_32BGRA, attrs, &outPixelBuffer);
//DLog(#"Output image size: %f, %f, pixelbuffer height: %lu", outputImage.extent.size.width, outputImage.extent.size.height, CVPixelBufferGetHeight(outPixelBuffer));
if (status != kCVReturnSuccess) {
DLog(#"Couldn't allocate output pixelBufferRef!");
continue;
}
[offlineRenderContext render:outputImage toCVPixelBuffer:outPixelBuffer bounds:outputImage.extent colorSpace:colorSpace];
CMTime currentSourceTime = CMSampleBufferGetPresentationTimeStamp(buffer);
CMTime currentTime = CMTimeSubtract(currentSourceTime, startOffset);
CMTime duration = reader.timeRange.duration;
if (CMTIME_IS_POSITIVE_INFINITY(duration)) {
duration = movie.duration;
}
CMTime durationConverted = CMTimeConvertScale(duration, currentTime.timescale, kCMTimeRoundingMethod_Default);
float durationFloat = (float)durationConverted.value;
float progress = ((float) currentTime.value) / durationFloat;
//DLog(#"duration : %f, progress: %f", durationFloat, progress);
[self updateOfflineRenderProgress:progress];
if (pixelBuffer != NULL && writerVideoInput.readyForMoreMediaData) {
[writerPixelAdaptor appendPixelBuffer:outPixelBuffer withPresentationTime:currentTime];
} else {
continue;
}
if (writer.status == AVAssetWriterStatusWriting) {
DLog(#"Writer.status: AVAssetWriterStatusWriting");
}
CFRelease(buffer);
CVPixelBufferRelease(outPixelBuffer);
}
}
#catch (NSException *exception) {
DLog(#"Catching exception: %#", exception);
}
}];
}
OK, I think I solved it myself. The culprit was this line:
[writerVideoInput requestMediaDataWhenReadyOnQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0) usingBlock:^{ ....
The global queue I was passing is a concurrent queue. This allows a new callback to be made before the previous one is finished. The asset writer is not designed to be written to from more than one thread at a time.
Creating and using a new serial queue seems to remedy the problem:
assetWriterQueue = dispatch_queue_create("AssetWriterQueue", DISPATCH_QUEUE_SERIAL);
[writerVideoInput requestMediaDataWhenReadyOnQueue:assetWriterQueue usingBlock:^{...
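For context, here is a minimal sketch of the whole drain loop running on that serial queue, assuming a single video input fed from an AVAssetReader output as in the question:
dispatch_queue_t assetWriterQueue = dispatch_queue_create("AssetWriterQueue", DISPATCH_QUEUE_SERIAL);
[writerVideoInput requestMediaDataWhenReadyOnQueue:assetWriterQueue usingBlock:^{
//All appends now happen on one serial queue, so they can never overlap
while ([writerVideoInput isReadyForMoreMediaData]) {
CMSampleBufferRef buffer = [readerVideoOutput copyNextSampleBuffer];
if (buffer == NULL) {
[writerVideoInput markAsFinished]; //source exhausted
break;
}
[writerVideoInput appendSampleBuffer:buffer];
CFRelease(buffer); //copyNextSampleBuffer returns a +1 buffer
}
}];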
In my app I have a Place entity that contains review and featured objects. At the minute, my app constantly refreshes these every time I parse my XML data, which is fine; however, I need to delete the review and featured objects before they are re-parsed. My code is as follows, and I believe I need to delete the objects in the parseXML method at the top:
-(void)parseXML:(NSString*)xml{
TBXML * tbxml = [TBXML tbxmlWithXMLString:xml];
if(tbxml.rootXMLElement){
[self traverseElement:tbxml.rootXMLElement];
if(self.tempItem){
NSLog(#"Ending Application");
[self configurePlaceChildrenAndSave];
[self startGeoCodingQueue];
}
}
}
-(void)fireGeocoder:(BSForwardGeocoder*)g{
NSLog(#"Begining FGC for %# (%#)",g.searchQuery,g.metaData);
[g findLocation];
}
-(void)startGeoCodingQueue{
[[NSUserDefaults standardUserDefaults] setValue:[NSNumber numberWithBool:YES] forKey:kGeoCoderInProgress];
int i = 0;
BSForwardGeocoder * fgc;
NSArray * a = [CoreDataBasicService fetchResultsForEnity:#"Place" andSortDiscriptor:#"name" Ascending:YES];
for (Place * p in a) {
if(p.latitude && p.longitude)continue;//skip geocoding if location data exists
fgc = [[BSForwardGeocoder alloc] initWithDelegate:self];
fgc.metaData = p.name;
fgc.searchQuery = p.postcode;
NSLog(#"gc:%i-%i",i,[a count]);
if([a count] == i+1){
fgc.last = YES;
}
float delay = (float)i*kGeoCoderDelay;
[self performSelector:@selector(fireGeocoder:) withObject:[fgc autorelease] afterDelay:delay];
fgc = nil;
i++;
}
[[NSUserDefaults standardUserDefaults] setValue:[NSNumber numberWithInt:i] forKey:kGeoCoderCompletedKey];
}
-(void)finishedGeocoding{
[[NSUserDefaults standardUserDefaults] setValue:[NSDate date] forKey:kGeoCoderlastRanKey];
[[NSUserDefaults standardUserDefaults] setValue:[NSNumber numberWithBool:NO] forKey:kGeoCoderInProgress];
[[NSNotificationCenter defaultCenter] postNotificationName:kGeoCoderNotificationName object:nil];
}
-(void)forwardGeocoderError:(BSForwardGeocoder *)geocoder errorMessage:(NSString *)errorMessage{
NSLog(#"EX ERROR: GEOCODER FAILED - %#",errorMessage);
if(geocoder.last)[self finishedGeocoding];
}
- (void)forwardGeocoderFoundLocation:(BSForwardGeocoder*)geocoder
{
if(geocoder.status == G_GEO_SUCCESS)
{
BSKmlResult *result = [geocoder.results lastObject];
if(!result)return;
//[self.fowardGeocodingQueue setObject:[NSString stringWithString:value] forKey:self.tempItem.name];
Place * p = [CoreDataBasicService fetchPlaceNamed:geocoder.metaData];
p.latitude = [NSNumber numberWithFloat:result.latitude];
p.longitude = [NSNumber numberWithFloat:result.longitude];
[CoreDataBasicService saveChanges];
//NSLog(#"%# - %f,%f",p2.name,p2.latitude,p2.longitude);
NSLog(#"completed Foward geocoding for '%#' (%#) [%f,%f]",geocoder.metaData,geocoder.searchQuery,[p.latitude floatValue],[p.longitude floatValue]);
if(geocoder.last)[self finishedGeocoding];
}
else {
NSString *message = #"";
switch (geocoder.status) {
case G_GEO_BAD_KEY:
message = #"The API key is invalid.";
break;
case G_GEO_UNKNOWN_ADDRESS:
message = [NSString stringWithFormat:#"Could not find %#", geocoder.searchQuery];
break;
case G_GEO_TOO_MANY_QUERIES:
message = #"Too many queries has been made for this API key.";
break;
case G_GEO_SERVER_ERROR:
message = #"Server error, please try again.";
break;
default:
break;
}
NSLog(#"ERROR: GEOCODER FAILED - %#",message);
// UIAlertView *alert = [[UIAlertView alloc] initWithTitle:#"Information"
// message:message
// delegate:nil
// cancelButtonTitle:#"OK"
// otherButtonTitles: nil];
// [alert show];
// [alert release];
}
}
-(void)configurePlaceChildrenAndSave{
if(self.tempReviewArray.count==0 && self.tempReview)[self.tempReviewArray addObject:self.tempReview];
if(self.tempFeatureArray.count==0 && self.tempFeature)[self.tempFeatureArray addObject:self.tempFeature];
[self.tempItem setReviews:self.tempReviewArray];
[self.tempItem setFeatured:self.tempFeatureArray];
self.tempReviewArray = nil;
self.tempReview = nil;
self.tempFeatureArray = nil;
self.tempFeature = nil;
[CoreDataBasicService saveChanges];
self.tempItem = nil;
}
-(NSString*)formatKeyForCoreData:(NSString*)elementKey{
return [elementKey stringByReplacingOccurrencesOfString:#"-" withString:#""];
}
-(NSDate*)convertStringDateToDate:(NSString*)stringDate{
NSDateFormatter * df = [[NSDateFormatter alloc] init];
NSLog(#"DATE:%#",stringDate);
[df setDateFormat:#"y-M-d'T'H:m:s'Z'"];
NSDate * d = [df dateFromString:stringDate];
[df release];
return d;
}
-(Place*)getPlaceForName:(NSString*)name{
NSPredicate * pred = [NSPredicate predicateWithFormat:@"name = %@",name];
NSArray * results = [CoreDataBasicService fetchResultsForEnity:@"Place" WithPredicate:pred andSortDiscriptor:@"identifier" Ascending:YES];
return [results lastObject];
}
-(void)traverseElement:(TBXMLElement *)element {
do {
// Display the name of the element
NSString * value = [TBXML textForElement:element];
NSLog(#"%#-'%#'",[TBXML elementName:element],value);
// Obtain first attribute from element
NSString * ele = [TBXML elementName:element];
if([ele isEqualToString:#"place"]){
if(self.tempItem){
//GEOCODER HERE
[self configurePlaceChildrenAndSave];
}
//CREATE NEW CD ITEM HER
if(dataBaseExits){
TBXMLElement * idElement = [TBXML childElementNamed:@"name" parentElement:element];
Place * p = [self getPlaceForName:[NSString stringWithFormat:@"%@",[TBXML textForElement:idElement]]];
if(p){
[self setTempItem:p];
TBXMLElement * reviewsElement = [TBXML childElementNamed:@"reviews" parentElement:element];
if(reviewsElement){
self.tempItem.reviews = nil;
[CoreDataBasicService saveChanges];
[self traverseElement:reviewsElement];
}
TBXMLElement * promosElement = [TBXML childElementNamed:@"featured-places" parentElement:element];
if(promosElement){
self.tempItem.featured = nil;
[CoreDataBasicService saveChanges];
[self traverseElement:promosElement];
}
[CoreDataBasicService saveChanges];
continue;
}