The code below compiles and runs, but the AVAssetWriterInputPixelBufferAdaptor returns NO when I try to append a pixel buffer. Any thoughts as to why?
- (void)performCompression
{
    CGSize size = CGSizeMake(480, 320);
    NSString *compressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    [self evaluateTrimmers];
    scrubTime = 0.0;
    currentFrameIndex = 0;
    NSError *error = nil;

    //----initialize compression engine
    self.videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:[compressionDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.m4v", [[NSUserDefaults standardUserDefaults] valueForKey:@"RecentFileName"]]]]
                                                 fileType:AVFileTypeQuickTimeMovie
                                                    error:&error];
    NSParameterAssert(videoWriter);
    if (error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
    //writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary] retain];
    //adaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput
    //                                          sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil]];

    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //---
    // insert demo debugging code to write the same image repeated as a movie
    CGImageRef theImage = [[UIImage imageNamed:@"Default-Landscape.png"] CGImage];
    CVPixelBufferRef buffer = NULL;
    buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    for (int i = 0; i < 120; i++)
    {
        if (adaptor.assetWriterInput.readyForMoreMediaData)
        {
            printf("inside for loop %i\n", i);
            CMTime frameTime = CMTimeMake(1, 20);
            CMTime lastTime = CMTimeMake(i, 20);
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);
            buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
            if (![adaptor appendPixelBuffer:buffer withPresentationTime:frameTime])
                NSLog(@"FAIL");
            CVBufferRelease(buffer);
        }
        else
        {
            printf("error\n");
            i--;
        }
    }
    NSLog(@"outside for loop");
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options, &pxbuffer);
    //CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4 * size.width, rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
I got it all working!
The problem I found was caused by an Apple bug: instead of polling readyForMoreMediaData in a loop, drive the appends from requestMediaDataWhenReadyOnQueue:usingBlock: like this:
[writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
    while ([writerInput isReadyForMoreMediaData])
    {
        if (++frame >= 120)
        {
            [writerInput markAsFinished];
            [videoWriter finishWriting];
            [videoWriter release];
            break;
        }
        CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
        if (buffer)
        {
            if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
                NSLog(@"FAIL");
            else
                NSLog(@"Success:%d", frame);
            CFRelease(buffer);
        }
    }
}];
Here is the sample code link: git@github.com:RudyAramayo/AVAssetWriterInputPixelBufferAdaptorSample.git
Here is the code you need:
- (void)testCompressionSession
{
    CGSize size = CGSizeMake(480, 320);
    NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
    NSError *error = nil;
    unlink([betaCompressionDirectory UTF8String]);

    //----initialize compression engine
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                     sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    if ([videoWriter canAddInput:writerInput])
        NSLog(@"I can add this input");
    else
        NSLog(@"I can't add this input");

    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //---
    // insert demo debugging code to write the same image repeated as a movie
    CGImageRef theImage = [[UIImage imageNamed:@"Lotus.png"] CGImage];

    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if (++frame >= 120)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }
            CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
            if (buffer)
            {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
                    NSLog(@"FAIL");
                else
                    NSLog(@"Success:%d", frame);
                CFRelease(buffer);
            }
        }
    }];
    NSLog(@"outside for loop");
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options, &pxbuffer);
    //CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4 * size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
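One caveat about the sample: the synchronous -finishWriting used above has since been deprecated in favor of the asynchronous -finishWritingWithCompletionHandler:. A minimal sketch of the replacement ending, using the same writerInput and videoWriter as above:

[writerInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
    // The output file is only guaranteed complete once this block runs.
    if (videoWriter.status == AVAssetWriterStatusCompleted)
        NSLog(@"finished writing");
    else
        NSLog(@"writing failed: %@", videoWriter.error);
}];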
Related
In my application I need to put an image on a video. The image and the video are the same size:
Video size is fixed = 1080 x 1080
Image size is fixed = 1080 x 1080
My problem is that the video is blurry when I share it to WhatsApp status. I have checked other applications as well, and their videos are not compressed nearly this much; in my case the quality comes out much lower, even though I am exporting the video with AVAssetExportPresetHighestQuality.
Please suggest a GitHub project that adds an image to a video.
URL path:
NSURL *filepath = [NSURL URLWithString:self.video_url];
Mix Composition:
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:filepath options:nil];
AVMutableComposition *mixComposition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
//If you need audio as well, add the asset track for audio here

CGSize firstvideoSize = clipVideoTrack.naturalSize;
CGSize videoSize = clipVideoTrack.naturalSize;
UIInterfaceOrientation mode = [self checkVideoTrack:videoAsset];
if (mode == UIInterfaceOrientationPortrait)
{
    if (firstvideoSize.height <= firstvideoSize.width)
    {
        videoSize.height = firstvideoSize.width;
        videoSize.width = firstvideoSize.height;
    }
}
else if (mode == UIInterfaceOrientationLandscapeLeft)
{
    if (firstvideoSize.height >= firstvideoSize.width)
    {
        videoSize.height = firstvideoSize.width;
        videoSize.width = firstvideoSize.height;
    }
}

[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
Create Image Layer:
UIImage *myImage = image;
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(0, 0, image.size.width, image.size.height); //Needed for proper display: with a zero-size frame you will not see the layer
aLayer.contentsGravity = kCAGravityResizeAspectFill;
aLayer.opacity = 1.0; //Feel free to alter the alpha here

CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];

AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
Instruction:
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject:instruction];

AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.videoComposition = videoComp;

NSURL *documentsDirectoryURL = [[NSFileManager defaultManager] URLForDirectory:NSDocumentDirectory inDomain:NSUserDomainMask appropriateForURL:nil create:NO error:nil];
NSString *stringPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:@"Videos"];
// "Videos" is your folder name
NSError *error = nil;
if (![[NSFileManager defaultManager] fileExistsAtPath:stringPath])
    [[NSFileManager defaultManager] createDirectoryAtPath:stringPath withIntermediateDirectories:NO attributes:nil error:&error];

NSString *saveFilePath = [NSString stringWithFormat:@"Videos/%@", [self generateFileNameWithExtension:@".mp4"]];
NSURL *exportUrl = [documentsDirectoryURL URLByAppendingPathComponent:saveFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportUrl.path])
{
    [[NSFileManager defaultManager] removeItemAtPath:exportUrl.path error:nil];
}

_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = exportUrl;

[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    switch (_assetExport.status)
    {
        case AVAssetExportSessionStatusUnknown:
            NSLog(@"AVAssetExportSessionStatusUnknown");
            break;
        case AVAssetExportSessionStatusWaiting:
            NSLog(@"AVAssetExportSessionStatusWaiting");
            break;
        case AVAssetExportSessionStatusExporting:
            NSLog(@"AVAssetExportSessionStatusExporting");
            break;
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"AVAssetExportSessionStatusCompleted");
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"AVAssetExportSessionStatusFailed");
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"AVAssetExportSessionStatusCancelled");
            break;
    }
    dispatch_async(dispatch_get_main_queue(), ^{
        [self saveVideoWithURL:exportUrl];
    });
}];
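Note that the handler above calls saveVideoWithURL: even when the export failed; guarding on the status avoids trying to save a file that was never produced. A small sketch:

if (_assetExport.status == AVAssetExportSessionStatusCompleted)
{
    dispatch_async(dispatch_get_main_queue(), ^{
        [self saveVideoWithURL:exportUrl];
    });
}
else
{
    NSLog(@"export did not complete: %@", _assetExport.error);
}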
Save Video With URL:
- (void)saveVideoWithURL:(NSURL *)filePath {
    [[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:filePath completionBlock:^(NSURL *assetURL, NSError *error) {
        if (assetURL) {
            [self.navigationController.view makeToast:@"Save video successfully in gallery"
                                             duration:1.0
                                             position:CSToastPositionBottom];
        } else {
            UIAlertController *alert = [UIAlertController
                                        alertControllerWithTitle:@"Error"
                                                         message:@"something went wrong"
                                                  preferredStyle:UIAlertControllerStyleAlert];
            UIAlertAction *yesButton = [UIAlertAction
                                        actionWithTitle:@"Ok"
                                                  style:UIAlertActionStyleDefault
                                                handler:^(UIAlertAction *action)
                                        {
                                        }];
            [alert addAction:yesButton];
            [self presentViewController:alert animated:YES completion:nil];
        }
    }];
}
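On the blur itself: AVAssetExportPresetHighestQuality leaves the bitrate up to the system, and WhatsApp recompresses whatever it receives, so starting from a higher, explicitly chosen bitrate can help. One option is to do the final encode with an AVAssetWriter and explicit compression properties instead of a preset. A minimal sketch of the output settings; the 10 Mbps figure is illustrative, not from the original code:

NSDictionary *compressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                  [NSNumber numberWithInt:10 * 1000 * 1000], AVVideoAverageBitRateKey,
                                  AVVideoProfileLevelH264HighAutoLevel, AVVideoProfileLevelKey, nil];
NSDictionary *writerSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                AVVideoCodecH264, AVVideoCodecKey,
                                [NSNumber numberWithInt:1080], AVVideoWidthKey,
                                [NSNumber numberWithInt:1080], AVVideoHeightKey,
                                compressionProps, AVVideoCompressionPropertiesKey, nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                     outputSettings:writerSettings];

With this route the frames would come from an AVAssetReader with the same videoComp set on an AVAssetReaderVideoCompositionOutput, rather than from AVAssetExportSession.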
I want to create a video from images taken from the gallery. I have stored the images in an array, and now I want to create a video from them at a size the user specifies. I have written the code, but it displays a black area if an image is smaller than the given size. Can anyone kindly help me solve this?
Any help will be appreciated.
The code I'm using to generate the video:
- (void)writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize)size
{
    NSLog(@"Write Started");
    NSError *error = nil;

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
                                                            error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                                           outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //Video encoding
    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage.
    int frameCount = 0;
    for (int i = 0; i < [arrSelectedImgs count]; i++)
    {
        //UIImage *img = [self scaleImage:[arrSelectedImgs objectAtIndex:i] toSize:size];
        buffer = [self pixelBufferFromCGImage:[[arrSelectedImgs objectAtIndex:i] CGImage] size:size];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30)
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData)
            {
                printf("appending %d attempt %d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)5);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                NSParameterAssert(bufferPool != NULL);
                [NSThread sleepForTimeInterval:0.05];
            }
            else
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok)
        {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
        CVBufferRelease(buffer);
    }

    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];

    [arrSelectedImgs removeAllObjects];
    NSLog(@"Write Ended at path == %@", path);
    [self createGIFFromVideoURL:[NSURL URLWithString:path]];
    [self viewMovieAtUrl:[NSURL URLWithString:path]];
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    status = status; //Silence the unused-variable warning when NSParameterAssert is compiled out.
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    //CGContextTranslateCTM(context, 0, CGImageGetHeight(image));
    //CGContextScaleCTM(context, 1.0, -1.0); //Flip vertically to account for different origin
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
Thanks in advance.
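One likely cause of the black area: pixelBufferFromCGImage:size: draws the image at its natural size into a buffer of the requested size, so a smaller image leaves the rest of the buffer black. A sketch of a replacement for the CGContextDrawImage call above that scales the image up to fill the buffer (aspect-fill; swap MAX for MIN to letterbox instead):

// Scale the image to cover the whole buffer, centered (overflow is cropped).
CGFloat imgW = CGImageGetWidth(image);
CGFloat imgH = CGImageGetHeight(image);
CGFloat scale = MAX(size.width / imgW, size.height / imgH);
CGRect fillRect = CGRectMake((size.width - imgW * scale) / 2.0,
                             (size.height - imgH * scale) / 2.0,
                             imgW * scale, imgH * scale);
CGContextDrawImage(context, fillRect, image);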
I have taken screenshots and stored their document-directory paths in an array. I want to create a video by passing this array. Here is my code for creating the video:
- (void)writeImagesAsMovie:(NSArray *)array toPath:(NSString *)path
{
    NSError *error1 = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    if ([fileMgr removeItemAtURL:[NSURL fileURLWithPath:path] error:&error1] != YES)
    {
        NSLog(@"Unable to delete file: %@", [error1 localizedDescription]);
    }

    UIImage *first = [UIImage imageWithContentsOfFile:[array objectAtIndex:0]];
    CGSize frameSize = first.size;

    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
                                                            error:&error];
    if (error)
    {
        NSLog(@"error creating AssetWriter: %@", [error description]);
    }

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *writerInput = [AVAssetWriterInput
                                       assetWriterInputWithMediaType:AVMediaTypeVideo
                                                      outputSettings:videoSettings];

    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32RGBA] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString *)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString *)kCVPixelBufferHeightKey];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:attributes];

    [videoWriter addInput:writerInput];

    // fixes all errors
    writerInput.expectsMediaDataInRealTime = YES;

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    int fps = 25;
    int cnt = 0;
    for (NSString *filename in array)
    {
        if (adaptor.assetWriterInput.readyForMoreMediaData)
        {
            cnt++;
            CMTime frameTime = CMTimeMake(1, fps);
            CMTime lastTime = CMTimeMake(cnt, fps);
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);
            UIImage *imgFrame = [UIImage imageWithContentsOfFile:filename];
            buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
            BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            if (result == NO) {
                NSLog(@"failed to append buffer");
                NSLog(@"The error is %@", [videoWriter error]);
            }
            if (buffer) {
                CVBufferRelease(buffer);
            }
        }
        else {
            NSLog(@"error");
            cnt--;
        }
    }

    // finish the session
    [writerInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
    }];
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);

    UIAlertView *saveAlert = [[UIAlertView alloc] initWithTitle:@"Complete" message:@"Finished making movie" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil, nil];
    [saveAlert show];
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)cgiImage {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(cgiImage),
                        CGImageGetHeight(cgiImage), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
                        &pxbuffer);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(cgiImage),
                                                 CGImageGetHeight(cgiImage), 8, 4 * CGImageGetWidth(cgiImage), rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);

    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, CGImageGetHeight(cgiImage));
    CGContextConcatCTM(context, flipVertical);
    CGAffineTransform flipHorizontal = CGAffineTransformMake(-1.0, 0.0, 0.0, 1.0, CGImageGetWidth(cgiImage), 0.0);
    CGContextConcatCTM(context, flipHorizontal);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(cgiImage),
                                           CGImageGetHeight(cgiImage)), cgiImage);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
This code creates the video, but the video is not right and all the pixels are blurred.
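One mismatch worth checking first: the adaptor's attributes request kCVPixelFormatType_32RGBA buffers, while pixelBufferFromCGImage: creates kCVPixelFormatType_32ARGB ones with an alpha-first bitmap layout. Disagreement between the declared and actual pixel format is a classic cause of scrambled-looking frames. A one-line sketch of making them agree (assuming ARGB throughout):

// Match the adaptor to the format pixelBufferFromCGImage: actually produces.
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB]
               forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];

Also, expectsMediaDataInRealTime = YES is meant for live capture; for an offline write like this, leaving it NO lets the writer manage its own buffering.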
In the .m file:
#import "ViewController.h"

@interface ViewController ()
@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
- (IBAction)createVideo:(id)sender
{
    NSError *error = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [NSHomeDirectory()
                                    stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file: %@", [error localizedDescription]);

    CGSize imageSize = CGSizeMake(400, 200);
    NSUInteger fps = 30;

    NSMutableArray *imageArray;
    imageArray = [[NSMutableArray alloc] initWithObjects:@"photo1.png", @"photo2.png", @"photo3.png", @"photo4.png", @"photo5.png", nil];
    NSArray *imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"png" inDirectory:nil];
    imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
    NSLog(@"-->imageArray.count= %lu", (unsigned long)imageArray.count);
    for (NSString *path in imagePaths)
    {
        [imageArray addObject:[UIImage imageWithContentsOfFile:path]];
    }

    NSLog(@"Start building video from defined frames.");

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
                                                            error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                                           outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    int frameCount = 0;
    double numberOfSecondsPerFrame = 6;
    double frameDuration = fps * numberOfSecondsPerFrame;

    NSLog(@"**************************************************");
    for (UIImage *img in imageArray)
    {
        buffer = [self pixelBufferFromCGImage:[img CGImage]];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                NSLog(@"Processing video frame (%d,%d)", frameCount, [imageArray count]);
                CMTime frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (!append_ok) {
                    NSError *error = videoWriter.error;
                    if (error != nil) {
                        NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                    }
                }
            }
            else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d, with error.\n", frameCount, j);
        }
        frameCount++;
    }
    NSLog(@"**************************************************");

    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];

    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
    NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"public.mpeg-4";
    _assetExport.outputURL = outputFileUrl;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        //[self saveVideoToAlbum:outputFilePath];
    }];

    NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    CGSize size = CGSizeMake(400, 200);
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer");
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipLast);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}

@end
In the .h file:
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreGraphics/CoreGraphics.h>
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>

@interface ViewController : UIViewController

- (IBAction)createVideo:(id)sender;

@end
And also add the following frameworks:
CoreMedia.framework
CoreVideo.framework
CoreGraphics.framework
AVFoundation.framework
QuartzCore.framework
I want to create a movie from an array of UIImages. I tried this code:
- (void)writeImageAsMovie:(UIImage *)image toPath:(NSString *)path size:(CGSize)size duration:(int)duration
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                            error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                                       outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //Write samples:
    CVPixelBufferRef buffer = [Utils pixelBufferFromCGImage:image.CGImage size:size];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration - 1, 2)];

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter endSessionAtSourceTime:CMTimeMake(duration, 2)];
    [videoWriter finishWriting];
}
But the output isn't OK.
The source image looks like this:
http://imageshack.us/photo/my-images/854/screenshot20120921at140.png/
And the video output is:
http://imageshack.us/photo/my-images/856/screenshot20120921at140.png/
Check the size you gave for the buffer.
Try this size and see:
buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage] size:CGSizeMake(480, 320)];
Try this:
https://github.com/meslater/MSImageMovieEncoder
I am trying to mix video coming from the camera with a static image (watermarking).
I have checked the questions and answers here and some examples, including Apple's WWDC AVEditDemo, and ended up with the following code.
Unfortunately, the exported video does not contain the layer with the image.
Any ideas?
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
    /// incoming video
    NSURL *videoURL = [info valueForKey:UIImagePickerControllerMediaURL];

    /// UIImage into CALayer
    UIImage *myImage = [UIImage imageNamed:@"m1h.png"];
    CALayer *aLayer = [CALayer layer];
    aLayer.contents = (id)myImage.CGImage;

    AVURLAsset *url = [AVURLAsset URLAssetWithURL:videoURL options:nil];
    AVMutableComposition *videoComposition = [AVMutableComposition composition];
    NSError *error;
    NSFileManager *fileManager = [NSFileManager defaultManager];

    AVMutableCompositionTrack *compositionVideoTrack = [videoComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[url tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [url duration]) ofTrack:clipVideoTrack atTime:kCMTimeZero error:&error];

    AVMutableVideoComposition *videoComp = [[AVMutableVideoComposition videoComposition] retain];
    videoComp.renderSize = CGSizeMake(640, 480);
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithAdditionalLayer:aLayer asTrackID:2];

    /// instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30));
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComp.instructions = [NSArray arrayWithObject:instruction];

    /// outputs
    NSString *filePath = nil;
    filePath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    filePath = [filePath stringByAppendingPathComponent:@"temp.mov"];
    NSLog(@"exporting to: %@", filePath);
    if ([fileManager fileExistsAtPath:filePath])
    {
        BOOL success = [fileManager removeItemAtPath:filePath error:&error];
        if (!success) NSLog(@"FM error: %@", [error localizedDescription]);
    }

    /// exporting
    AVAssetExportSession *exporter;
    exporter = [[AVAssetExportSession alloc] initWithAsset:videoComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.videoComposition = videoComp;
    exporter.outputURL = [NSURL fileURLWithPath:filePath];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    [statusLabel setText:@"processing..."];

    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (exporter.status) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"exporting failed");
                break;
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"exporting completed");
                UISaveVideoAtPathToSavedPhotosAlbum(filePath, self, @selector(video:didFinishSavingWithError:contextInfo:), NULL);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"export cancelled");
                break;
        }
    }];
}
After playing around I ended up with something like this in addition to the above code, also changing the method used to videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
Hope it helps somebody.
I got this to work! Here's the code. I didn't write most of it, I just tweaked some of it. The only issue is that the video itself comes out rotated to landscape when shot in portrait mode, and in landscape it comes out as portrait video, but the image is right side up!
CALayer *aLayer = [CALayer layer];
aLayer.frame = CGRectMake(5, 0, 320, 480);
aLayer.bounds = CGRectMake(5, 0, 320, 480);
aLayer.contents = (id)[UIImage imageNamed:@"image.png"].CGImage;
aLayer.opacity = 0.5;
aLayer.backgroundColor = [UIColor clearColor].CGColor;

NSURL *url = [NSURL fileURLWithPath:[urlsOfVideos objectAtIndex:self.pageControl.currentPage]];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];

cmp = [AVMutableComposition composition];
AVMutableCompositionTrack *trackA = [cmp addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *sourceVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[trackA insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:sourceVideoTrack atTime:kCMTimeZero error:nil];

animComp = [AVMutableVideoComposition videoComposition];
animComp.renderSize = CGSizeMake(320, 480);
animComp.frameDuration = CMTimeMake(1, 30);

CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, 320, 480);
videoLayer.frame = CGRectMake(0, 0, 320, 480);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
animComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:trackA];
//[layerInstruction setTrackID:2];
[layerInstruction setOpacity:1.0 atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
animComp.instructions = [NSArray arrayWithObject:instruction];

[self exportMovie:self];
And here is the exporting code:
- (IBAction)exportMovie:(id)sender {
    NSArray *docPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *tempPath = [docPaths objectAtIndex:0];
    NSLog(@"Temp Path: %@", tempPath);

    NSString *fileName = [NSString stringWithFormat:@"%@/output-anot.MOV", tempPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:fileName])
    {
        //NSError *ferror = nil;
        //BOOL success = [fileManager removeItemAtPath:fileName error:&ferror];
    }

    NSURL *exportURL = [NSURL fileURLWithPath:fileName];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:cmp presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = exportURL;
    exporter.videoComposition = animComp;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (exporter.status) {
            case AVAssetExportSessionStatusFailed: {
                NSLog(@"Fail");
                break;
            }
            case AVAssetExportSessionStatusCompleted: {
                NSLog(@"Success");
                break;
            }
            default:
                break;
        }
    }];
}
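On the rotation issue: the composition above ignores the source track's preferredTransform (iPhone cameras record portrait video as rotated landscape frames), so the frames come out sideways. A sketch of applying the transform on the layer instruction before exporting, using the same sourceVideoTrack, layerInstruction, and animComp as above; depending on the transform you may also need a translation so the rotated frames land at the origin:

CGAffineTransform t = sourceVideoTrack.preferredTransform;
[layerInstruction setTransform:t atTime:kCMTimeZero];

// A 90- or 270-degree rotation swaps the width and height of the rendered frames.
CGSize natural = sourceVideoTrack.naturalSize;
BOOL quarterTurn = (t.b != 0);
animComp.renderSize = quarterTurn ? CGSizeMake(natural.height, natural.width) : natural;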