AVMutableVideoComposition sometimes won't play video - objective-c

I'm using this short snippet to set up my video. For some unknown reason, sometimes the video simply won't show up, while for other videos it works perfectly.
// Grab the first video track of the asset (crashes if the asset has none).
let videoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
let composition: AVMutableComposition = AVMutableComposition()
let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
var videoSize: CGSize = videoTrack.naturalSize
let isPortrait_: Bool = self.isVideoPortrait(asset)
if isPortrait_ {
NSLog("video is portrait ")
// Portrait assets report a landscape naturalSize plus a rotation transform,
// so width/height are swapped for rendering.
videoSize = CGSizeMake(videoSize.height, videoSize.width)
}
composition.naturalSize = videoSize
videoComposition.renderSize = videoSize
// videoComposition.renderSize = videoTrack.naturalSize; //
videoComposition.frameDuration = CMTimeMake(1, 30)
// NOTE(review): preferredTrackID is hard-coded to 1; kCMPersistentTrackID_Invalid
// is the conventional value that lets AVFoundation pick an ID — confirm intent.
let compositionVideoTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: 1)
// NOTE(review): videoTrack.timeRange may not start at kCMTimeZero for some
// assets; an instruction built from it can then fail to cover the composition
// timeline from time zero, which is a known cause of a blank player — verify
// against the videos that fail.
let timeRange = videoTrack.timeRange
do {
try compositionVideoTrack.insertTimeRange(timeRange, ofTrack: videoTrack, atTime: kCMTimeZero)
} catch {
print("error")
}
// NOTE(review): the layer instruction is built from the SOURCE asset's track,
// not from compositionVideoTrack that was just populated above — when a video
// composition is applied to `composition`, instructions usually must reference
// the composition's own track; confirm this with the non-playing videos.
let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
layerInst.setTransform(videoTrack.preferredTransform, atTime: kCMTimeZero)
let inst: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
inst.timeRange = timeRange
inst.layerInstructions = [layerInst]
videoComposition.instructions = [inst]
let playerItem = AVPlayerItem(asset: composition)
playerItem.videoComposition = videoComposition
Though for some videos, it simply won't show up.
Any suggestions? Thanks!!

Hello, I had relatively similar code; hope this helps you figure out your problem.
/// Concatenates the given movie files end-to-end and exports the result
/// as an MP4 in the temporary directory.
/// - Parameters:
///   - URLS: file URLs of the clips to merge, in playback order.
///   - callback: invoked on the main queue with either an error or the
///     URL of the exported file.
class func MergeVideosSequentially(URLS : [NSURL], callback : (error : ErrorType? , outURL : NSURL?) -> Void){
    let composition = AVMutableComposition()
    // One mutable video track and one audio track receive all clips back to back.
    let videoTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let audioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    var cursorTime = kCMTimeZero
    for URL in URLS {
        let asset = AVAsset(URL: URL)
        // BUGFIX: the original force-unwrapped `.first!` on both track arrays,
        // which crashes for any clip without an audio (or video) track.
        // Missing tracks are now simply skipped for that clip.
        let assetVideoTrack = asset.tracksWithMediaType(AVMediaTypeVideo).first
        let assetAudioTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first
        let range = CMTimeRangeMake(kCMTimeZero, asset.duration)
        do {
            if let assetVideoTrack = assetVideoTrack {
                try videoTrack.insertTimeRange(range, ofTrack: assetVideoTrack, atTime: cursorTime)
            }
            if let assetAudioTrack = assetAudioTrack {
                try audioTrack.insertTimeRange(range, ofTrack: assetAudioTrack, atTime: cursorTime)
            }
        } catch {
            print(error)
        }
        // Advance the insertion point by the clip's full duration.
        cursorTime = CMTimeAdd(cursorTime, asset.duration)
    }
    // Build a unique output path in the temp directory.
    let directory = NSTemporaryDirectory()
    let dateFormatter = NSDateFormatter()
    dateFormatter.dateStyle = .LongStyle
    dateFormatter.timeStyle = .ShortStyle
    let date = dateFormatter.stringFromDate(NSDate())
    let savePath = "\(directory)/mergedVideo-\(date).mp4"
    let url = NSURL(fileURLWithPath: savePath)
    // BUGFIX: AVAssetExportSession's initializer is failable; the original
    // force-unwrapped `exporter!` throughout.
    guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
        callback(error: CCMovieWriterError.CannotMergeClips, outURL: nil)
        return
    }
    exporter.outputURL = url
    exporter.shouldOptimizeForNetworkUse = true
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
        let outputURL = exporter.outputURL
        // BUGFIX: the original delivered success on the main queue but failure
        // on the export session's callback queue; both paths now hop to main.
        dispatch_async(dispatch_get_main_queue(), { () -> Void in
            switch exporter.status {
            case .Completed :
                callback(error: nil, outURL: outputURL)
            default:
                callback(error: CCMovieWriterError.CannotMergeClips, outURL: nil)
            }
        })
    })
}

Related

Hyperloop module LFLiveKit

I have an issue with native issues in my hyperloop module for LFLiveKit, looking for some advice and help.
I only see a red screen showing.
I also get the alerts for the permissions.
Here is my code:
// Hyperloop requires for the native classes used below.
var UIScreen = require('UIKit/UIScreen'),
UIViewController = require('UIKit/UIViewController'),
UIView = require('UIKit/UIView'),
UIColor = require('UIKit/UIColor'),
CGPointMake = require('CoreGraphics').CGPointMake,
CGRectMake = require('CoreGraphics').CGRectMake,
NSBundle = require('Foundation/NSBundle'),
NSURL = require('Foundation/NSURL'),
NSData = require('Foundation/NSData'),
AVPlayer = require('AVFoundation/AVPlayer'),
AVPlayerLayer = require('AVFoundation/AVPlayerLayer'),
// NOTE(review): UIColor is required a second time here (already declared above).
UIColor = require('UIKit/UIColor'),
NSString = require("Foundation/NSString"),
UIImage = require('UIKit/UIImage'),
UIImageView = require('UIKit/UIImageView'),
LFLiveKit = require('LFLiveKit/LFLiveKit'),
LFLiveAudioConfiguration = require("LFLiveKit/LFLiveAudioConfiguration"),
LFLiveVideoConfiguration = require("LFLiveKit/LFLiveVideoConfiguration"),
LFLiveSession = require("LFLiveKit/LFLiveSession"),
LFLiveStreamInfo = require("LFLiveKit/LFLiveStreamInfo"),
LFLiveVideoQuality = require("LFLiveKit").LFLiveVideoQuality,
AVCaptureDevice = require("AVFoundation/AVCaptureDevice"),
AVMediaTypeVideo = require("AVFoundation").AVMediaTypeVideo,
AVMediaTypeAudio = require("AVFoundation").AVMediaTypeAudio,
TiApp = require('Titanium/TiApp');
// NOTE(review): `config` is never used; `audioConfiguration` on the next line
// repeats the same call.
var config = LFLiveAudioConfiguration.defaultConfiguration();
var audioConfiguration = LFLiveAudioConfiguration.defaultConfiguration();
var videoConfiguration = LFLiveVideoConfiguration.defaultConfiguration();
var viewController = UIViewController.alloc().init();
var session = LFLiveSession.alloc().init();
session.audioConfiguration = audioConfiguration;
session.videoConfiguration = videoConfiguration;
// Full-screen frame for the preview view.
var bounds = UIScreen.mainScreen.bounds;
var frame = CGRectMake(0, 0, bounds.size.width, bounds.size.height);
// NOTE(review): despite its name, this only READS the current authorization
// status; it never calls requestAccessForMediaType, so camera permission is
// never actually requested by this script — likely why only the red
// background view is visible.
function requestAccessForVideo() {
Ti.API.info("requestAccessForVideo");
var status = AVCaptureDevice.authorizationStatusForMediaType(AVMediaTypeVideo);
Ti.API.info(status);
}
// Ask Titanium for microphone permission, then continue on the audio path.
Ti.Media.requestAudioRecorderPermissions(function(e) {
if (e.success) {
requestAccessForAudio();
alert('You dont denied permission');
} else {
alert('You denied permission');
}
});
// Logs the audio authorization status and starts the live session.
function requestAccessForAudio() {
Ti.API.info("requestAccessForAudio");
var status = AVCaptureDevice.authorizationStatusForMediaType(AVMediaTypeAudio);
Ti.API.info(status);
startLive();
}
requestAccessForVideo();
viewController.view.frame = frame;
// Red is the fallback color seen when the LFLiveKit preview never renders.
viewController.view.backgroundColor = UIColor.redColor;
session.preView = viewController.view;
// Starts streaming to the hard-coded RTMP test endpoint.
function startLive() {
session.running = true;
// NOTE(review): `streamInfo` is assigned without var/let — it leaks into the
// global scope.
streamInfo = new LFLiveStreamInfo();
streamInfo.url = "rtmp://live.hkstv.hk.lxdns.com:1935/live/stream153";
session.startLive(streamInfo);
Ti.API.info("STARTED");
// TiApp.app().showModalController(viewController, true);
}
$.index.add(viewController.view);
$.index.open();

How to merge one PDF into another PDF file in Swift 3.0?

I am using this code, but pdfDocumentRef3 is coming back nil, so it's crashing.
Please let me know where I am going wrong.
// Build the source and destination paths in the caches directory.
// BUGFIX: the interpolation backslashes were missing — "(cachesDirectoryPath)"
// and "(self.airportMainId)" were literal text, so the paths never pointed at
// real files. L162 was also missing its closing quote.
let cachesDirectoryPath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true)[0]
let filePath1 = "\(cachesDirectoryPath)/ShowPdf_\(self.airportMainId).pdf"
let filePath2 = "\(cachesDirectoryPath)/ImagePdf_\(self.airportMainId).pdf"
let filePath3 = cachesDirectoryPath.appending("/Combine_\(self.airportMainId).pdf")
self.mergeTwoPDF(filePath1: filePath1, filePath2: filePath2, combinePath: filePath3 )
/// Appends every page of `filePath2` after the pages of `filePath1` and
/// writes the combined document to `combinePath`.
/// - Parameters:
///   - filePath1: path of the first (existing) PDF.
///   - filePath2: path of the second (existing) PDF.
///   - combinePath: path where the merged PDF is created.
func mergeTwoPDF(filePath1: String, filePath2: String, combinePath: String) {
    let pdfURL1 = NSURL(fileURLWithPath: filePath1)
    let pdfURL2 = NSURL(fileURLWithPath: filePath2)
    let pdfURL3 = NSURL(fileURLWithPath: combinePath)
    // BUGFIX: the output must NOT be opened with CGPDFDocument — the combined
    // file does not exist yet, so that call returned nil and the force-unwrap
    // crashed. A PDF *writing* context is created on the destination URL instead.
    guard let pdfDocumentRef1 = CGPDFDocument(pdfURL1 as CFURL),
          let pdfDocumentRef2 = CGPDFDocument(pdfURL2 as CFURL),
          let writeContext = CGContext(pdfURL3 as CFURL, mediaBox: nil, nil) else {
        print("mergeTwoPDF: could not open the source PDFs or create \(combinePath)")
        return
    }
    // Copy the pages of both documents, in order, into the output context.
    // BUGFIX: CGPDFDocument pages are 1-based; the original 0..<count loops
    // asked for page 0 (nil -> crash) and never copied the last page.
    for documentRef in [pdfDocumentRef1, pdfDocumentRef2] {
        for index in 0..<documentRef.numberOfPages {
            guard let page = documentRef.page(at: index + 1) else { continue }
            var mediaBox = page.getBoxRect(.mediaBox)
            writeContext.beginPage(mediaBox: &mediaBox)
            writeContext.drawPDFPage(page)
            writeContext.endPage()
        }
    }
    writeContext.closePDF()
}
pdfDocumentRef3 is nil because the file does not exist.
You should use:
UIGraphicsBeginPDFContextToFile(combinePath, CGRect.zero, nil)
let writeContext = UIGraphicsGetCurrentContext()

How to access the media library with Swift code?

I have to display the title of the song I selected, and I do get the title (there is no need to play the song in the music library). My question is: I want to send the selected song (encoded) to a PHP server, then display the songs in a table view from the server and play them. I want to find the correct path to the iTunes music library so that I can encode the file easily.
/// MPMediaPickerController delegate callback: takes the first picked media
/// item and prepares an AVAssetExportSession to export it as .m4a into the
/// Documents directory for later upload.
/// NOTE(review): this snippet appears truncated — the function's closing
/// brace is not visible below, and `mimeType1` is referenced without being
/// defined anywhere in it, so it will not compile as-is.
func mediaPicker(mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection) {
let documentsUrl = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first!
do {
// Get the directory contents urls (including subfolders urls)
let directoryContents = try NSFileManager.defaultManager().contentsOfDirectoryAtURL( documentsUrl, includingPropertiesForKeys: nil, options: [])
// NOTE(review): filters on "m4a" although the variables say mp3 — confirm intent.
// Both results are unused below.
let mp3Files = directoryContents.filter{ $0.pathExtension == "m4a" }
let mp3FileNames = mp3Files.flatMap({$0.URLByDeletingPathExtension?.lastPathComponent})
} catch let error as NSError {
// NOTE(review): the directory-listing error is silently swallowed here.
}
// Build "<Documents>/aFileName" (unused after the prints below).
let name = "/aFileName"
var filePath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true).first
print("\nfilePath: \(filePath)")
filePath = filePath!.stringByAppendingString(name)
print("\nfilePath: \(filePath)")
var filePathURL = NSURL.fileURLWithPath(filePath!)
print("\nfilePathURL: \(filePathURL)")
// Only the first picked item is handled.
let item: MPMediaItem = mediaItemCollection.items[0]
print(item)
// NOTE(review): force-unwrap — MPMediaItemPropertyAssetURL is nil for
// DRM-protected / cloud items, which crashes here.
exportFiles = (item.valueForProperty(MPMediaItemPropertyAssetURL) as? NSURL)!
print(exportFiles)
// Export the ipod library as .m4a file to local directory for remote upload
let exportSession = AVAssetExportSession(asset: AVAsset(URL: exportFiles), presetName: AVAssetExportPresetAppleM4A)
print(exportFiles)
exportSession?.shouldOptimizeForNetworkUse = true
print(exportSession)
playingMusictitle = item.valueForProperty(MPMediaItemPropertyTitle) as? String ?? "Now Playing..."
print("\(exportFiles), title : \(title) ")
// Strip the ipod-library scheme to get at the "id=" query parameter.
let str = exportFiles.absoluteString
let str2 = str!.stringByReplacingOccurrencesOfString("ipod-library://item/item", withString: "")
print(str2)
let arr = str2.componentsSeparatedByString("?")
// NOTE(review): `mimeType1` is not defined in this snippet (presumably meant
// to come from `arr`) — confirm against the original source.
mimeType = mimeType1.stringByReplacingOccurrencesOfString("id=", withString: "")
// NOTE(review): this reads the LYRICS property into a variable named `path`.
let path = item.valueForProperty(MPMediaItemPropertyLyrics) as? String ?? ""
print(path)
exportSession?.outputFileType = AVFileTypeAppleM4A
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
// Timestamped output name so repeated exports don't collide.
let format = NSDateFormatter()
format.dateFormat="yyyy-MM-dd-HH-mm-ss"
audioname = "\(playingMusictitle)-\(format.stringFromDate(NSDate())).m4a"
print(audioname)
let documentsDirectoryy = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
self.soundFileURL = documentsDirectoryy.URLByAppendingPathComponent(audioname)
// NOTE(review): fileExistsAtPath expects a filesystem path; absoluteString
// includes the "file://" scheme — .path is the conventional accessor.
if NSFileManager.defaultManager().fileExistsAtPath(soundFileURL.absoluteString!) {
// probably won't happen. want to do something about it?
print("soundfile \(soundFileURL.absoluteString) exists")
}
Due to copyright protection you can only play songs from the music library; you can't access the raw audio file, I guess.

Uploading Photos Swift/iOS

Sorry for my English I'll do my best.
I have an issue trying to upload photos from the user's library.
First, I get user's photo with this method
/// Walks the photo library newest-first and uploads every asset created
/// after appDelegate.dateLastUpload.
func grabPhotos(){
let imgManager = PHImageManager.defaultManager()
let requestOptions = PHImageRequestOptions()
requestOptions.synchronous = false
requestOptions.deliveryMode = .FastFormat
let fetchOptions = PHFetchOptions()
// Newest assets first.
fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
if let fetchResult : PHFetchResult = PHAsset.fetchAssetsWithMediaType(.Image, options: fetchOptions){
if fetchResult.count > 0{
for i in 0..<fetchResult.count{
let asset = fetchResult.objectAtIndex(i) as! PHAsset
// NOTE(review): creationDate and dateLastUpload are force-unwrapped; either
// being nil crashes here — confirm both are always set before this runs.
if NSComparisonResult.OrderedSame == asset.creationDate!.compare(self.appDelegate.dateLastUpload!){
print("meme date")
}
// Asset is older than the last upload: skip silently.
else if NSComparisonResult.OrderedAscending == asset.creationDate!.compare(self.appDelegate.dateLastUpload!){
}
else {
// NOTE(review): full-size image data is requested for every matching asset
// inside this tight loop, and each result is held until its upload finishes.
// With high-quality originals this accumulates unbounded memory and looks
// like the source of the NSAllocateMemoryPages crash — consider requesting
// and uploading one asset at a time, or resizing before upload.
imgManager.requestImageDataForAsset(asset, options: requestOptions, resultHandler: { (data, string, orientation, objects) in
self.Upload((UIImage(data: data!)?.CGImage)! , nomImage: "\(asset.creationDate)" )
})
}
}
}
else{
print("you got no photos")
}
}
}
as you can see, each time I get a photo I want to upload it to my server.
the upload part works well.
Here is the upload method
/// POSTs `image` (JPEG-encoded at quality 0.8) to the local upload endpoint
/// as a multipart/form-data request, using `nomImage` as the file name.
/// - Parameters:
///   - image: the CGImage to upload.
///   - nomImage: name given to the uploaded file part.
func clickUpload(image:CGImage,nomImage : String){
    let url = NSURL(string: "http://192.168.1.20:1993/upload")
    let image_photo = UIImage(CGImage: image)
    let request = NSMutableURLRequest(URL: url!)
    request.HTTPMethod = "POST"
    let boundary = generateBoundaryString()
    //define the multipart request type
    request.setValue("multipart/form-data; boundary=\(boundary)", forHTTPHeaderField: "Content-Type")
    // BUGFIX: `var image_data` was never mutated — bound with `let`.
    if let image_data = UIImageJPEGRepresentation(image_photo,0.8){
        let body = NSMutableData()
        let fname = nomImage
        let mimetype = "image/jpg"
        //define the data post parameter
        body.appendData("--\(boundary)\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("Content-Disposition:multipart/form-data; name=\"test\"\r\n\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("enctype=\"multipart/form-data".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("hi\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("--\(boundary)\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("Content-Disposition:form-data; name=\"file\"; filename=\"\(fname)\"\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("Content-Type: \(mimetype)\r\n\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData(image_data)
        body.appendData("\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("--\(boundary)--\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        //request.setValue("multipart/form-data", forHTTPHeaderField: "content-Type")
        request.HTTPBody = body
        let session = NSURLSession.sharedSession()
        let task = session.dataTaskWithRequest(request) {
            (
            let data, let response, let error) in
            // Bail out on transport errors or a missing response.
            guard let _:NSData = data, let _:NSURLResponse = response where error == nil else {
                print("error")
                return
            }
            let dataString = NSString(data: data!, encoding: NSUTF8StringEncoding)
            print(dataString)
        }
        task.resume()
    }
    else {
        // BUGFIX: the original line read `print(« data nil")` — a stray French
        // quotation mark pasted in place of the opening '"', which does not
        // compile.
        print("data nil")
    }
}
Now problems come... It works well if I upload photos with reduced size, but I want to upload them in HighQualityFormat.
I got 170 photos on my device, and it uploads approximately 80 photos before crashing with this message
Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: 'NSAllocateMemoryPages(1504802) failed'
Could you guys help me to solve it or give me another way to achieve this?
Thank you all.

MPNowPlayingInfoCenter : What is the best way to set MPMediaItemArtwork from an Url?

All methods I found to set MPMediaItemArtwork of MPNowPlayingInfoCenter are with local images.
MPMediaItemArtwork *albumArt = [[MPMediaItemArtwork alloc] initWithImage: [UIImage imageNamed:#"myimage"];
But I need to set this from an imageURL
Currently i use this...
UIImage *artworkImage = [UIImage imageWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:self.currentTrack.imageUrl]]];
MPMediaItemArtwork *albumArt = [[MPMediaItemArtwork alloc] initWithImage: artworkImage];
[self.payingInfoCenter setValue:albumArt forKey:MPMediaItemPropertyArtwork];
Any idea?
That's my best solution:
// Downloads the artwork at imageUrl on a background queue and replaces the
// Now Playing info with a dictionary containing only that artwork.
- (void)updateControlCenterImage:(NSURL *)imageUrl
{
dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
dispatch_async(queue, ^{
// NOTE(review): a fresh dictionary is built here, so any title/artist values
// previously set on the info center are discarded (see the warning that
// follows this snippet).
NSMutableDictionary *songInfo = [NSMutableDictionary dictionary];
// Synchronous download; acceptable because we are off the main queue.
UIImage *artworkImage = [UIImage imageWithData:[NSData dataWithContentsOfURL:imageUrl]];
if(artworkImage)
{
MPMediaItemArtwork *albumArt = [[MPMediaItemArtwork alloc] initWithImage: artworkImage];
[songInfo setValue:albumArt forKey:MPMediaItemPropertyArtwork];
}
// NOTE(review): nowPlayingInfo is assigned from this background queue;
// consider hopping back to the main queue — confirm against the
// MPNowPlayingInfoCenter documentation.
MPNowPlayingInfoCenter *infoCenter = [MPNowPlayingInfoCenter defaultCenter];
infoCenter.nowPlayingInfo = songInfo;
});
}
Warning: if you've already set the MPNowPlayingInfoCenter's nowPlayingInfo, read the existing dictionary first and merge into it, or all the other values will be overridden.
// Download the artwork off the main thread, then publish the Now Playing
// metadata back on the main queue.
let mpic = MPNowPlayingInfoCenter.default()
DispatchQueue.global().async {
    // Bail out early unless we have a valid URL whose contents decode to an image.
    guard
        let urlString = yourUrlString,
        let url = URL(string: urlString),
        let data = try? Data(contentsOf: url),
        let image = UIImage(data: data)
    else { return }
    let artwork = MPMediaItemArtwork(boundsSize: image.size) { _ in image }
    DispatchQueue.main.async {
        mpic.nowPlayingInfo = [
            MPMediaItemPropertyTitle: "Title",
            MPMediaItemPropertyArtist: "Artist",
            MPMediaItemPropertyArtwork: artwork
        ]
    }
}
That worked for me in iOS 11, swift 4
Here's a function that sets up a media session with image on the lock screen and control center:
(This code was a modified version of #NickDK's answer)
/// Publishes title/artist/artwork metadata to the lock screen and Control
/// Center. The artwork is downloaded off the main thread; the info dictionary
/// is assigned on the main queue.
/// - Parameters:
///   - title: track title shown on the lock screen.
///   - albumArtwork: URL string of the artwork image.
///   - artist: artist name.
///   - isExplicit: flags the item as explicit.
///   - rate: unused by the body (the rate is read from soundManager instead);
///     given a default so the call sites shown below, which omit it, compile.
///   - duration: unused by the body (duration is read from soundManager);
///     defaulted for the same reason.
func setupNowPlaying(title: String, albumArtwork: String, artist:String, isExplicit: Bool, rate: Float = 0, duration: Any = 0) {
    // BUGFIX: the original force-unwrapped URL(string:)! and crashed on a
    // malformed artwork string.
    guard let url = URL(string: albumArtwork) else { return }
    let mpic = MPNowPlayingInfoCenter.default()
    DispatchQueue.global().async {
        if let data = try? Data(contentsOf: url), let image = UIImage(data: data) {
            let artwork = MPMediaItemArtwork(boundsSize: image.size, requestHandler: { (_ size : CGSize) -> UIImage in
                return image
            })
            DispatchQueue.main.async {
                mpic.nowPlayingInfo = [
                    MPMediaItemPropertyTitle: title,
                    MPMediaItemPropertyArtist: artist,
                    MPMediaItemPropertyArtwork: artwork,
                    MPMediaItemPropertyIsExplicit: isExplicit,
                    MPNowPlayingInfoPropertyPlaybackRate: soundManager.audioPlayer?.rate ?? 0,
                    MPMediaItemPropertyPlaybackDuration: CMTimeGetSeconds(soundManager.audioPlayer?.currentItem?.asset.duration ?? CMTime(seconds: 0, preferredTimescale: 0))
                ]
            }
        }
    }
}
Usage:
// Example call.
// NOTE(review): the setupNowPlaying declared above also takes `rate:` and
// `duration:` with no default values, so this call as written will not
// compile unless defaults are added to the function — confirm the signature
// in your copy of the code.
setupNowPlaying(
title: "Pull up at the mansion",
albumArtwork: "https://static.wixstatic.com/media/89b4e7_5f29de0db68c4d888065b0f03d393050~mv2.png/v1/fill/w_512,h_512/ImageTitle.png",
artist: "DJ bon26",
isExplicit: true
)
Full usage:
import MediaPlayer
/// Owns the AVPlayer used for remote audio playback.
class SoundManager : ObservableObject {
    /// The player for the most recently loaded sound, if any.
    var audioPlayer: AVPlayer?

    /// Replaces the current player with one for `sound`; does nothing when
    /// the string is not a valid URL.
    func playSound(sound: String) {
        guard let url = URL(string: sound) else { return }
        audioPlayer = AVPlayer(url: url)
    }
}
/// Play/pause toggle that streams a remote MP3 and publishes lock-screen
/// metadata while playing.
struct ContentView: View {
    // BUGFIX: the property wrappers were written with '#' ("#State",
    // "#StateObject"), which is not valid Swift; they must be '@State' and
    // '@StateObject'.
    @State var song1 = false
    @StateObject private var soundManager = SoundManager()

    var body: some View {
        Image(systemName: song1 ? "pause.circle.fill" : "play.circle.fill")
            .font(.system(size: 25))
            .padding(.trailing)
            .onTapGesture {
                playSound(
                    url: "https://static.wixstatic.com/mp3/0fd70b_8d4e15117ff0458792a6a901c6dddc6b.mp3",
                    title: "Pull up at the mansion",
                    albumArtwork: "https://static.wixstatic.com/media/89b4e7_5f29de0db68c4d888065b0f03d393050~mv2.png/v1/fill/w_512,h_512/ImageTitle.png",
                    artist: "DJ bon26",
                    isExplicit: true
                )
            }
    }

    /// Toggles playback of `url`; while playing, also pushes title/artist/
    /// artwork to the Now Playing info center.
    func playSound(url: String, title: String, albumArtwork: String, artist: String, isExplicit: Bool) {
        do {
            // Configure the shared audio session for background-capable playback.
            try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options:
                .init(rawValue: 0))
            try AVAudioSession.sharedInstance().setActive(true)
            soundManager.playSound(sound: url)
            song1.toggle()
            if song1 {
                soundManager.audioPlayer?.play()
                // NOTE(review): this call omits `rate:`/`duration:`; it only
                // compiles if setupNowPlaying declares defaults for them —
                // confirm the signature in your copy of the code.
                setupNowPlaying(
                    title: title,
                    albumArtwork: albumArtwork,
                    artist: artist,
                    isExplicit: isExplicit
                )
                UIApplication.shared.beginReceivingRemoteControlEvents()
                MPNowPlayingInfoCenter.default().playbackState = .playing
            } else {
                soundManager.audioPlayer?.pause()
            }
        } catch {
            print("Something came up")
        }
    }
}
I believe this is very useful,
Brendan Okey-iwobi