In my application I need to put an image on a video. The image and the video are the same size.
Video size: fixed at 1080 x 1080
Image size: fixed at 1080 x 1080
My problem is that the video is blurry when I share it to WhatsApp status.
I have checked other applications as well; their videos are not compressed nearly as much, but in my case the quality loss is severe.
I am exporting the video with AVAssetExportPresetHighestQuality.
Please suggest any GitHub link or approach that adds an image to a video.
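For reference, since the fixed presets don't expose bitrate, one hedged option is re-encoding through AVAssetWriter with explicit H.264 settings; this sketch shows only the writer input, and the 6 Mbps target is an assumption, not a measured value:
// Sketch: explicit H.264 settings for an AVAssetWriterInput. AVAssetExportSession
// presets cannot set bitrate, but an AVAssetReader/AVAssetWriter re-encode can.
NSDictionary *compressionProps = @{ AVVideoAverageBitRateKey : @(6 * 1000 * 1000), // assumed ~6 Mbps target
                                    AVVideoProfileLevelKey   : AVVideoProfileLevelH264HighAutoLevel };
NSDictionary *videoSettings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                                 AVVideoWidthKey  : @1080,
                                 AVVideoHeightKey : @1080,
                                 AVVideoCompressionPropertiesKey : compressionProps };
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                     outputSettings:videoSettings];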
URL path:
NSURL *filepath = [NSURL URLWithString:self.video_url];
Mix Composition:
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:filepath options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
//If you need audio as well add the Asset Track for audio here
CGSize firstvideoSize = clipVideoTrack.naturalSize;
CGSize videoSize = clipVideoTrack.naturalSize;
UIInterfaceOrientation mode = [self checkVideoTrack:videoAsset];
if (mode == UIInterfaceOrientationPortrait)
{
if(firstvideoSize.height<=firstvideoSize.width)
{
videoSize.height=firstvideoSize.width;
videoSize.width=firstvideoSize.height;
}
}
else if(mode == UIInterfaceOrientationLandscapeLeft)
{
if(firstvideoSize.height>=firstvideoSize.width)
{
videoSize.height=firstvideoSize.width;
videoSize.width=firstvideoSize.height;
}
}
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
Create Image Layer:
UIImage *myImage = image;
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(0, 0, image.size.width, image.size.height); // A non-zero frame is needed; with a zero size the layer will not be visible
aLayer.contentsGravity = kCAGravityResizeAspectFill;
aLayer.opacity = 1.0; //Feel free to alter the alpha here
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
Instruction:
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.videoComposition = videoComp;
NSURL *documentsDirectoryURL = [[NSFileManager defaultManager] URLForDirectory:NSDocumentDirectory inDomain:NSUserDomainMask appropriateForURL:nil create:NO error:nil];
NSString *stringPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:@"Videos"];
// "Videos" is your folder name
NSError *error = nil;
if (![[NSFileManager defaultManager] fileExistsAtPath:stringPath])
[[NSFileManager defaultManager] createDirectoryAtPath:stringPath withIntermediateDirectories:NO attributes:nil error:&error];
NSString *saveFilePath = [NSString stringWithFormat:@"Videos/%@", [self generateFileNameWithExtension:@".mp4"]];
NSURL *exportUrl = [documentsDirectoryURL URLByAppendingPathComponent:saveFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportUrl.path])
{
[[NSFileManager defaultManager] removeItemAtPath:exportUrl.path error:nil];
}
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = exportUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (_assetExport.status)
{
    case AVAssetExportSessionStatusUnknown:
        NSLog(@"AVAssetExportSessionStatusUnknown");
        break;
    case AVAssetExportSessionStatusWaiting:
        NSLog(@"AVAssetExportSessionStatusWaiting");
        break;
    case AVAssetExportSessionStatusExporting:
        NSLog(@"AVAssetExportSessionStatusExporting");
        break;
    case AVAssetExportSessionStatusCompleted:
        NSLog(@"AVAssetExportSessionStatusCompleted");
        break;
    case AVAssetExportSessionStatusFailed:
        NSLog(@"AVAssetExportSessionStatusFailed");
        break;
    case AVAssetExportSessionStatusCancelled:
        NSLog(@"AVAssetExportSessionStatusCancelled");
        break;
}
dispatch_async(dispatch_get_main_queue(), ^{
[self saveVideoWithURL:exportUrl];
});
}];
Save Video With URL:
- (void)saveVideoWithURL:(NSURL *)filePath {
[[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:filePath completionBlock:^(NSURL *assetURL, NSError *error) {
if(assetURL) {
[self.navigationController.view makeToast:@"Video saved to gallery successfully"
duration:1.0
position:CSToastPositionBottom];
} else {
UIAlertController *alert = [UIAlertController
alertControllerWithTitle:@"Error"
message:@"Something went wrong"
preferredStyle:UIAlertControllerStyleAlert];
UIAlertAction *yesButton = [UIAlertAction
actionWithTitle:@"OK"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction * action)
{
}];
[alert addAction:yesButton];
[self presentViewController:alert animated:YES completion:nil];
}
}];
}
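As a side note, ALAssetsLibrary is deprecated in favor of the Photos framework; a minimal hedged equivalent of the save above (assuming #import <Photos/Photos.h> and photo-library permission, with filePath being the same method parameter) would be:
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    // Register the exported file as a new video asset in the user's library.
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:filePath];
} completionHandler:^(BOOL success, NSError *error) {
    dispatch_async(dispatch_get_main_queue(), ^{
        NSLog(@"saved: %d, error: %@", success, error);
    });
}];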
I have this code to add an overlay to my asset and export it to a URL:
AVAsset *videoAsset = editableAsset;
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
if ([videoAsset tracksWithMediaType:AVMediaTypeAudio].count != 0) {
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
}
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject] preferredTransform]];
CGSize sizeOfVideo = [videoAsset naturalSize];
CALayer *layerCa = [CALayer layer];
layerCa.contents = (id)[UIImage imageNamed:@"Overlay.png"].CGImage;
layerCa.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
layerCa.opacity = 1.0;
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:layerCa];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = sizeOfVideo;
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
NSString* docFolder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
NSString* outputPath = [docFolder stringByAppendingPathComponent:@"output.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
[[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:editableAsset presetName:AVAssetExportPresetHighestQuality];
exportSession.videoComposition = videoComposition;
exportSession.outputURL = [NSURL fileURLWithPath:outputPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
assetURL = [NSURL fileURLWithPath:outputPath];
});
}];
The strange thing: it tilts the video and I get a landscape video instead of portrait, but I don't have any code that changes the orientation. Can you explain this to me?
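A hedged guess at the cause: once a videoComposition is attached, the preferredTransform you set on the composition track is not applied for you, so the rotation has to go into the layer instruction itself; note also that the export session above is created with editableAsset rather than the mixComposition the instruction was built against. A sketch of the transform fix, reusing clipVideoTrack and layerInstruction from above:
// Carry the source track's rotation into the layer instruction, since the
// video composition does not honor preferredTransform by itself.
[layerInstruction setTransform:clipVideoTrack.preferredTransform atTime:kCMTimeZero];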
At the moment, whenever I export a video it appears to be rotated 90 degrees to the right, but I am not sure why. My current code is:
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:self.video options:nil];
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:clipVideoTrack
atTime:kCMTimeZero
error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
CGSize videoSize = [clipVideoTrack naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = [clipVideoTrack naturalSize];
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject:instruction];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
assetExport.videoComposition = videoComp;
NSString *videoName = @"output.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportURL = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.outputURL = exportURL;
assetExport.shouldOptimizeForNetworkUse = YES;
[assetExport exportAsynchronouslyWithCompletionHandler:^(void){
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:assetExport];
});
}];
I tried using the following code to transform it, and even though it technically does rotate the video, it shrinks the video into a square, and I don't know how to enlarge it to fit the entire screen:
CGAffineTransform rotation = CGAffineTransformMakeRotation(M_PI / 2);
CGAffineTransform translateToCenter = CGAffineTransformMakeTranslation(1000, 1000);
CGAffineTransform mixedTransform = CGAffineTransformConcat(rotation, translateToCenter);
[layerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
Any advice?
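One approach, sketched under the assumption that the source is a portrait clip stored landscape with a 90° preferredTransform: rotate, translate by the post-rotation width, and swap the render size instead of using the fixed (1000, 1000) translation:
// Rotate 90° and shift right by the post-rotation width so the frame stays
// on-screen, then render at the swapped (portrait) size.
CGSize naturalSize = clipVideoTrack.naturalSize; // e.g. 1920 x 1080 stored landscape
CGAffineTransform rotate = CGAffineTransformMakeRotation(M_PI_2);
CGAffineTransform shift = CGAffineTransformMakeTranslation(naturalSize.height, 0);
[layerInstruction setTransform:CGAffineTransformConcat(rotate, shift) atTime:kCMTimeZero];
videoComp.renderSize = CGSizeMake(naturalSize.height, naturalSize.width); // portrait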
I have taken screenshots and stored their Documents-directory paths in an array.
I want to create a video by passing this array. Here is my code for creating the video:
-(void)writeImagesAsMovie:(NSArray *)array toPath:(NSString *)path
{
NSError *error1 = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
if ([fileMgr removeItemAtURL:[NSURL fileURLWithPath:path] error:&error1]!=YES)
{
NSLog(#"Unable to delete file: %#", [error1 localizedDescription]);
}
UIImage *first = [UIImage imageWithContentsOfFile:[array objectAtIndex:0]];
CGSize frameSize = first.size;
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
if(error)
{
NSLog(#"error creating AssetWriter: %#",[error description]);
}
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32RGBA] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
[videoWriter addInput:writerInput];
// Avoids readyForMoreMediaData stalls in this simple synchronous loop
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
int fps = 25;
int cnt = 0;
for (NSString *filename in array)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
cnt++;
CMTime frameTime = CMTimeMake(1, fps);
CMTime lastTime = CMTimeMake(cnt, fps);
CMTime presentTime = CMTimeAdd(lastTime, frameTime);
UIImage *imgFrame=[UIImage imageWithContentsOfFile:filename];
buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
if (result == NO) {
NSLog(#"failed to append buffer");
NSLog(#"The error is %#", [videoWriter error]);
}
if(buffer) {
CVBufferRelease(buffer);
}
}
else {
NSLog(#"error");
cnt--;
}
}
// finish the session
[writerInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
    // Writing finishes asynchronously; release the pool and notify only now
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    dispatch_async(dispatch_get_main_queue(), ^{
        UIAlertView *saveAlert = [[UIAlertView alloc] initWithTitle:@"Complete" message:@"Finished making movie" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
        [saveAlert show];
    });
}];
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) cgiImage {
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(cgiImage),
CGImageGetHeight(cgiImage), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
&pxbuffer);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(cgiImage),
CGImageGetHeight(cgiImage), 8, 4*CGImageGetWidth(cgiImage), rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGAffineTransform flipVertical = CGAffineTransformMake(
1, 0, 0, -1, 0, CGImageGetHeight(cgiImage)
);
CGContextConcatCTM(context, flipVertical);
CGAffineTransform flipHorizontal = CGAffineTransformMake(
-1.0, 0.0, 0.0, 1.0, CGImageGetWidth(cgiImage), 0.0
);
CGContextConcatCTM(context, flipHorizontal);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(cgiImage),
CGImageGetHeight(cgiImage)), cgiImage);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
This code creates the video, but the output is not right: all the pixels look blurred.
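For what it's worth, two hedged suspects in the code above: the adaptor attributes ask for kCVPixelFormatType_32RGBA while the buffer is actually created as kCVPixelFormatType_32ARGB, and the bitmap context assumes 4 * width bytes per row even though CVPixelBuffer rows are often padded. A sketch of the safer context creation inside pixelBufferFromCGImage:
// Let the pixel buffer report its own geometry instead of assuming 4 * width.
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);
CGContextRef context = CGBitmapContextCreate(pxdata,
                                             CVPixelBufferGetWidth(pxbuffer),
                                             CVPixelBufferGetHeight(pxbuffer),
                                             8, bytesPerRow, rgbColorSpace,
                                             kCGImageAlphaNoneSkipFirst);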
In the .m file:
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
-(void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
}
-(void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
-(IBAction)createVideo:(id)sender
{
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(@"Unable to delete file: %@", [error localizedDescription]);
CGSize imageSize = CGSizeMake(400, 200);
NSUInteger fps = 30;
NSArray *imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"png" inDirectory:nil];
NSMutableArray *imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
for (NSString *path in imagePaths)
{
[imageArray addObject:[UIImage imageWithContentsOfFile:path]];
}
NSLog(@"-->imageArray.count = %lu", (unsigned long)imageArray.count);
NSLog(#"Start building video from defined frames.");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
NSLog(#"**************************************************");
for(UIImage * img in imageArray)
{
buffer = [self pixelBufferFromCGImage:[img CGImage]];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
    if (adaptor.assetWriterInput.readyForMoreMediaData) {
        NSLog(@"Processing video frame (%d, %lu)", frameCount, (unsigned long)[imageArray count]);
        CMTime frameTime = CMTimeMake((int64_t)(frameCount * frameDuration), (int32_t)fps);
        append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
        if (!append_ok) {
            NSError *error = videoWriter.error;
            if (error != nil) {
                NSLog(@"Unresolved error %@, %@.", error, [error userInfo]);
            }
        }
    }
    else {
        printf("adaptor not ready %d, %d\n", frameCount, j);
        [NSThread sleepForTimeInterval:0.1];
    }
    j++;
}
if (!append_ok) {
    printf("error appending image %d after %d attempts\n", frameCount, j);
}
frameCount++;
}
NSLog(#"**************************************************");
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeMPEG4; // "public.mpeg-4"
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
//[self saveVideoToAlbum:outputFilePath];
}
];
NSLog(#"DONE.....outputFilePath--->%#", outputFilePath);
}
-(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{
CGSize size = CGSizeMake(400, 200);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
if (status != kCVReturnSuccess){
NSLog(#"Failed to create pixel buffer");
}
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,(CGBitmapInfo)
kCGImageAlphaNoneSkipLast);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
@end
In the .h file:
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreGraphics/CoreGraphics.h>
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
@interface ViewController : UIViewController
-(IBAction)createVideo:(id)sender;
@end
Also add the following frameworks:
CoreMedia.framework
CoreVideo.framework
CoreGraphics.framework
AVFoundation.framework
QuartzCore.framework
I'm trying to export a .mov file from a source video created by UIImagePickerController. The problem is that the output file AVAssetExportSession creates is only 668 bytes. Why is it failing? My code:
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
NSURL *imagePickerVideoURL = [info objectForKey:UIImagePickerControllerMediaURL];
NSString *filename = @"vid1.mov";
AVAsset *video = [AVAsset assetWithURL:imagePickerVideoURL];
AVAssetExportSession *exportSession
= [AVAssetExportSession exportSessionWithAsset:video presetName:AVAssetExportPresetMediumQuality];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.outputURL = [pathToSavedVideosDirectory URLByAppendingPathComponent:filename];
NSLog(#"processing video...: %#", exportSession);
[exportSession exportAsynchronouslyWithCompletionHandler:^{
NSLog(#"done processing video!");
}];
}
I'm going to guess it's because exportSession was not retained in memory, so it gets deallocated after didFinishPickingMediaWithInfo: completes (before the export session's completion handler runs).
Store exportSession in a @property, or change your completion handler to capture a reference to exportSession like so:
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (AVAssetExportSessionStatusCompleted == exportSession.status)
{
NSLog(#"done processing video!");
}
}];
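The @property route would look something like this (the exportSession property name is illustrative):
// Keep a strong reference so ARC does not deallocate the session mid-export.
@property (nonatomic, strong) AVAssetExportSession *exportSession;

// ... later, in didFinishPickingMediaWithInfo:
self.exportSession = [AVAssetExportSession exportSessionWithAsset:video
                                                       presetName:AVAssetExportPresetMediumQuality];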
You need to follow these steps to export a video file with AVAssetExportSession.
Start with the asset
Put in your fileURL:
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:fileURL options:nil];
Create AVMutableComposition
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
Create AVMutableVideoComposition, with an animation layer if you want one
AVMutableVideoComposition* videoComp = [[AVMutableVideoComposition videoComposition] retain];
videoComp.renderSize = CGSizeMake(videoSize.width, videoSize.height);
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.renderScale = 1.0;
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
The animation layers go here if you need them; otherwise pass nil for the animationTool:
CGSize videoSize = [videoAsset naturalSize];
//layer mixing
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
Add an animation to your chosen layer, the one you added as a sublayer of parentLayer (see the sketch below).
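For example, a hedged fade-in sketch; overlayLayer stands in for whatever sublayer you attached to parentLayer, and beginTime must be AVCoreAnimationBeginTimeAtZero rather than 0.0 inside an export:
CABasicAnimation *fade = [CABasicAnimation animationWithKeyPath:@"opacity"];
fade.fromValue = @0.0;
fade.toValue = @1.0;
fade.duration = 1.0;
fade.beginTime = AVCoreAnimationBeginTimeAtZero; // 0.0 would be treated as "now"
fade.removedOnCompletion = NO;
fade.fillMode = kCAFillModeForwards;
[overlayLayer addAnimation:fade forKey:@"fadeIn"]; // overlayLayer is your own sublayer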
Then add an instruction to the video composition with the animation layer and the time duration.
AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
Now create the AVAssetExportSession using the AVMutableComposition and AVMutableVideoComposition:
_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.videoComposition = videoComp;
//Create a temporary path for the exported video file; .mov matches the QuickTime movie output type used below.
NSString *videoName = @"demo.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
Set the URL and some required properties on the export session:
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
Start the export session and perform your action when it reports completion:
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (_assetExport.status)
{
    case AVAssetExportSessionStatusUnknown:
        NSLog(@"Unknown");
        break;
    case AVAssetExportSessionStatusWaiting:
        NSLog(@"Waiting");
        break;
    case AVAssetExportSessionStatusExporting:
        NSLog(@"Exporting");
        break;
    case AVAssetExportSessionStatusFailed:
        NSLog(@"exporting failed");
        break;
    case AVAssetExportSessionStatusCompleted:
        NSLog(@"exporting completed");
        [self performSelectorOnMainThread:@selector(completeVideoExport) withObject:nil waitUntilDone:YES];
        break;
    case AVAssetExportSessionStatusCancelled:
        NSLog(@"export cancelled");
        break;
}
// Release the session, then clear the (MRC) reference
[_assetExport release];
_assetExport = nil;
}
];
If you need anything more, let me know.
The exportSession has an "error" property that you can check to determine what the error is. A lot of times the errors aren't particularly helpful but it's at least a place to start.
Note that I've seen issues with .mov files, where codecs in them aren't supported by AVFoundation.
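A minimal failure check might look like:
if (exportSession.status == AVAssetExportSessionStatusFailed) {
    NSLog(@"export failed: %@", exportSession.error);
}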
For debugging, try this...
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
    dispatch_async(dispatch_get_main_queue(), ^{
        NSLog(@"exportSessionMetaData: %@", exportSession.metadata);
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            NSError *dataReadingError = nil;
            NSData *videoData = [NSData dataWithContentsOfURL:[pathToSavedVideosDirectory URLByAppendingPathComponent:filename]
                                                      options:NSDataReadingMapped
                                                        error:&dataReadingError];
            if (videoData != nil) {
                // You got video data, do your work here...
            } else {
                NSLog(@"Failed to load the video data. ERROR: %@", dataReadingError);
            }
        }
    });
}];
Check out TSLibraryImport - the import code works.