Does anyone know the solution to this? I can hear the video's audio, but I can't see the video in the simulator.
func playinitiate() {
    // assumes "Video.mov" is bundled with the app target
    let locationVideo = URL(fileURLWithPath: Bundle.main.path(forResource: "Video", ofType: "mov")!)
    let video = AVPlayer(url: locationVideo)
    let videoLayer = AVPlayerLayer(player: video)
    videoLayer.frame = specialView.bounds
    specialView.layer.addSublayer(videoLayer)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    video.play()
    print("Player is running")
}
I have to display the title of the song I selected, and I do get the title (there is no need to play the song in the music library). My question is: I want to send the selected song (encoded) to a PHP server, then display the songs from the server in a table view and play them. I want to find the correct path for the iTunes music library item so that I can encode it easily.
func mediaPicker(mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection) {
    let documentsUrl = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first!
    do {
        // Get the directory contents URLs (including subfolder URLs)
        let directoryContents = try NSFileManager.defaultManager().contentsOfDirectoryAtURL(documentsUrl, includingPropertiesForKeys: nil, options: [])
        let m4aFiles = directoryContents.filter { $0.pathExtension == "m4a" }
        let m4aFileNames = m4aFiles.flatMap { $0.URLByDeletingPathExtension?.lastPathComponent }
        print(m4aFileNames)
    } catch let error as NSError {
        print(error.localizedDescription)
    }

    let name = "/aFileName"
    var filePath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true).first
    print("\nfilePath: \(filePath)")
    filePath = filePath!.stringByAppendingString(name)
    print("\nfilePath: \(filePath)")
    let filePathURL = NSURL.fileURLWithPath(filePath!)
    print("\nfilePathURL: \(filePathURL)")

    let item: MPMediaItem = mediaItemCollection.items[0]
    print(item)

    // DRM-protected items return nil for MPMediaItemPropertyAssetURL
    exportFiles = (item.valueForProperty(MPMediaItemPropertyAssetURL) as? NSURL)!
    print(exportFiles)

    // Export the iPod library item as an .m4a file to a local directory for remote upload
    let exportSession = AVAssetExportSession(asset: AVAsset(URL: exportFiles), presetName: AVAssetExportPresetAppleM4A)
    exportSession?.shouldOptimizeForNetworkUse = true
    print(exportSession)

    playingMusictitle = item.valueForProperty(MPMediaItemPropertyTitle) as? String ?? "Now Playing..."
    print("\(exportFiles), title: \(playingMusictitle)")

    let str = exportFiles.absoluteString
    let str2 = str!.stringByReplacingOccurrencesOfString("ipod-library://item/item", withString: "")
    print(str2)
    let arr = str2.componentsSeparatedByString("?")
    let mimeType1 = arr.last ?? ""   // the query component of the asset URL (the "id=..." part)
    mimeType = mimeType1.stringByReplacingOccurrencesOfString("id=", withString: "")

    let lyrics = item.valueForProperty(MPMediaItemPropertyLyrics) as? String ?? ""
    print(lyrics)

    exportSession?.outputFileType = AVFileTypeAppleM4A

    let format = NSDateFormatter()
    format.dateFormat = "yyyy-MM-dd-HH-mm-ss"
    audioname = "\(playingMusictitle)-\(format.stringFromDate(NSDate())).m4a"
    print(audioname)

    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    self.soundFileURL = documentsDirectory.URLByAppendingPathComponent(audioname)

    // Use .path, not .absoluteString, when checking the file system
    if NSFileManager.defaultManager().fileExistsAtPath(soundFileURL.path!) {
        // probably won't happen. want to do something about it?
        print("soundfile \(soundFileURL.path!) exists")
    }
}
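For completeness, the code above configures the export session but never gives it an output URL or starts the export. A minimal sketch of how it would typically be finished (an assumption about the intended flow, placed at the end of the method once soundFileURL has been set):

exportSession?.outputURL = soundFileURL
exportSession?.exportAsynchronouslyWithCompletionHandler {
    if exportSession?.status == .Completed {
        // the .m4a now exists at soundFileURL and can be uploaded to the PHP server
        print("export finished: \(self.soundFileURL)")
    } else {
        print("export failed: \(exportSession?.error)")
    }
}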
Due to copyright protection, I guess you can only play songs from the music library; you can't access the raw audio file. Sorry for my English, I'll do my best.
I have an issue trying to upload photos from the user's library.
First, I get the user's photos with this method:
func grabPhotos() {
    let imgManager = PHImageManager.defaultManager()

    let requestOptions = PHImageRequestOptions()
    requestOptions.synchronous = false
    requestOptions.deliveryMode = .FastFormat

    let fetchOptions = PHFetchOptions()
    fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]

    if let fetchResult: PHFetchResult = PHAsset.fetchAssetsWithMediaType(.Image, options: fetchOptions) {
        if fetchResult.count > 0 {
            for i in 0..<fetchResult.count {
                let asset = fetchResult.objectAtIndex(i) as! PHAsset
                if NSComparisonResult.OrderedSame == asset.creationDate!.compare(self.appDelegate.dateLastUpload!) {
                    print("same date")
                }
                else if NSComparisonResult.OrderedAscending == asset.creationDate!.compare(self.appDelegate.dateLastUpload!) {
                    // photo is older than the last upload date, nothing to do
                }
                else {
                    imgManager.requestImageDataForAsset(asset, options: requestOptions, resultHandler: { (data, string, orientation, objects) in
                        self.clickUpload((UIImage(data: data!)?.CGImage)!, nomImage: "\(asset.creationDate)")
                    })
                }
            }
        }
        else {
            print("you got no photos")
        }
    }
}
As you can see, each time I get a photo I want to upload it to my server.
The upload part works well.
Here is the upload method:
func clickUpload(image: CGImage, nomImage: String) {
    let url = NSURL(string: "http://192.168.1.20:1993/upload")
    let image_photo = UIImage(CGImage: image)

    let request = NSMutableURLRequest(URL: url!)
    request.HTTPMethod = "POST"

    let boundary = generateBoundaryString()
    // define the multipart request type
    request.setValue("multipart/form-data; boundary=\(boundary)", forHTTPHeaderField: "Content-Type")

    if let image_data = UIImageJPEGRepresentation(image_photo, 0.8) {
        let body = NSMutableData()
        let fname = nomImage
        let mimetype = "image/jpg"

        // define the data post parameter
        body.appendData("--\(boundary)\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("Content-Disposition:multipart/form-data; name=\"test\"\r\n\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("enctype=\"multipart/form-data".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("hi\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)

        body.appendData("--\(boundary)\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("Content-Disposition:form-data; name=\"file\"; filename=\"\(fname)\"\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("Content-Type: \(mimetype)\r\n\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData(image_data)
        body.appendData("\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)
        body.appendData("--\(boundary)--\r\n".dataUsingEncoding(NSUTF8StringEncoding)!)

        // request.setValue("multipart/form-data", forHTTPHeaderField: "content-Type")
        request.HTTPBody = body

        let session = NSURLSession.sharedSession()
        let task = session.dataTaskWithRequest(request) { (data, response, error) in
            guard let _: NSData = data, let _: NSURLResponse = response where error == nil else {
                print("error")
                return
            }
            let dataString = NSString(data: data!, encoding: NSUTF8StringEncoding)
            print(dataString)
        }
        task.resume()
    }
    else {
        print("data nil")
    }
}
Now the problems come... It works well if I upload the photos at a reduced size, but I want to upload them in HighQualityFormat.
I have 170 photos on my device, and it uploads approximately 80 photos before crashing with this message:
Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: 'NSAllocateMemoryPages(1504802) failed'
Could you guys help me to solve it or give me another way to achieve this?
Thank you all.
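The NSAllocateMemoryPages failure suggests the full-resolution JPEG data for many photos is alive in memory at once: requestImageDataForAsset delivers the complete image data, and every call to clickUpload builds another in-memory NSMutableData body while dozens of data tasks are still in flight. A sketch of one way around it (an assumption, not a tested fix): write each multipart body to a temporary file and let NSURLSession stream it from disk, and upload the photos one after another instead of starting all 170 tasks at the same time. The helper name below is hypothetical; the request is the same NSMutableURLRequest built in clickUpload.

// Hypothetical helper: stream the already-built multipart body from disk instead of keeping it in memory.
func uploadBodyFromDisk(body: NSData, request: NSURLRequest) {
    let tmpURL = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent(NSUUID().UUIDString)
    body.writeToURL(tmpURL, atomically: true)
    let task = NSURLSession.sharedSession().uploadTaskWithRequest(request, fromFile: tmpURL) { data, response, error in
        // clean up the temporary file once the upload has finished
        _ = try? NSFileManager.defaultManager().removeItemAtURL(tmpURL)
        if error != nil { print(error) }
    }
    task.resume()
}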
I'm using this short snippet to set up my video. For some unknown reason, some videos simply won't show up, while others play perfectly.
let videoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
let composition: AVMutableComposition = AVMutableComposition()
let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
var videoSize: CGSize = videoTrack.naturalSize
let isPortrait_: Bool = self.isVideoPortrait(asset)
if isPortrait_ {
NSLog("video is portrait ")
videoSize = CGSizeMake(videoSize.height, videoSize.width)
}
composition.naturalSize = videoSize
videoComposition.renderSize = videoSize
// videoComposition.renderSize = videoTrack.naturalSize; //
videoComposition.frameDuration = CMTimeMake(1, 30)
let compositionVideoTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: 1)
let timeRange = videoTrack.timeRange
do {
    try compositionVideoTrack.insertTimeRange(timeRange, ofTrack: videoTrack, atTime: kCMTimeZero)
} catch {
    print("error")
}
let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
layerInst.setTransform(videoTrack.preferredTransform, atTime: kCMTimeZero)
let inst: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
inst.timeRange = timeRange
inst.layerInstructions = [layerInst]
videoComposition.instructions = [inst]
let playerItem = AVPlayerItem(asset: composition)
playerItem.videoComposition = videoComposition
Though for some videos, it simply won't show them.
Any suggestions? Thanks!
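One thing worth checking in the snippet above (an assumption, not a confirmed cause): the time ranges of a video composition's instructions must start at zero and cover the whole composition, but inst.timeRange is set to videoTrack.timeRange, and for some assets that track range does not start at kCMTimeZero even though the track was inserted at zero. When that happens the instruction no longer covers the composition and the frames render black. A small sketch of the guarded version:

// Make the instruction cover the composition from time zero,
// independent of where the source track's own time range starts.
inst.timeRange = CMTimeRangeMake(kCMTimeZero, timeRange.duration)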
Hello, I had relatively similar code; hope this helps you figure out your problem.
class func MergeVideosSequentially(URLS: [NSURL], callback: (error: ErrorType?, outURL: NSURL?) -> Void) {
    let composition = AVMutableComposition()

    // video and audio tracks of the output composition
    let videoTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let audioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)

    var cursorTime = kCMTimeZero

    for URL in URLS {
        let asset = AVAsset(URL: URL)
        let assetVideoTrack = asset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
        let assetAudioTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first! as AVAssetTrack
        let duration = CMTimeRangeMake(kCMTimeZero, asset.duration)
        do {
            try videoTrack.insertTimeRange(duration, ofTrack: assetVideoTrack, atTime: cursorTime)
            try audioTrack.insertTimeRange(duration, ofTrack: assetAudioTrack, atTime: cursorTime)
        } catch {
            print(error)
        }
        cursorTime = CMTimeAdd(cursorTime, asset.duration)
    }

    let directory = NSTemporaryDirectory()
    let dateFormatter = NSDateFormatter()
    dateFormatter.dateStyle = .LongStyle
    dateFormatter.timeStyle = .ShortStyle
    let date = dateFormatter.stringFromDate(NSDate())
    let savePath = "\(directory)/mergedVideo-\(date).mp4"
    let url = NSURL(fileURLWithPath: savePath)

    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
    exporter!.outputURL = url
    exporter!.shouldOptimizeForNetworkUse = true
    exporter!.outputFileType = AVFileTypeMPEG4
    exporter!.exportAsynchronouslyWithCompletionHandler({ () -> Void in
        let outputURL = exporter!.outputURL
        switch exporter!.status {
        case .Completed:
            dispatch_async(dispatch_get_main_queue(), { () -> Void in
                callback(error: nil, outURL: outputURL)
            })
        default:
            callback(error: CCMovieWriterError.CannotMergeClips, outURL: nil)
        }
    })
}
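A hypothetical call site, just to show how the callback is consumed (MergeHelper stands in for whatever class declares the method, and the clip URLs are placeholders):

let clips = [NSURL(fileURLWithPath: "/path/to/clip1.mp4"),
             NSURL(fileURLWithPath: "/path/to/clip2.mp4")]
MergeHelper.MergeVideosSequentially(clips) { error, outURL in
    if let outURL = outURL {
        print("merged video written to \(outURL)")
    } else {
        print("merge failed: \(error)")
    }
}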
func downloadPDF() {
    // Running operations that take a long time in a background thread is recommended
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), { () -> Void in
        // Get the PDF data from the URL
        let url = self.webview.request?.URL
        let pdfURL = url?.absoluteString
        let pdfData = NSData(contentsOfURL: NSURL(string: pdfURL!)!)!

        // Store the data locally as a PDF file in the Documents directory
        let documentsDirPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true).first as? String
        localPdfPath = documentsDirPath.stringByAppendingPathComponent(pdfURL!.lastPathComponent)
        pdfData.writeToFile(localPdfPath, atomically: true)

        // UI related stuff should be called in the main thread.
        dispatch_async(dispatch_get_main_queue(), { () -> Void in
            self.openIniBooks()
            self.stopActivityIndicator()
        })
    })
}
Saving a PDF file to iBooks was working fine until the upgrade to Xcode 7. Now I'm getting an error:
Downcast from String? to String only unwraps optionals... for this line:
let documentsDirPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true).first as? String
Not sure how to fix that. I tried:
let documentsDirPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as String
But I got a new error. Any help would be appreciated.
Try this:
let documentsDirPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true).first as String!
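In Xcode 7, NSSearchPathForDirectoriesInDomains returns [String], so .first is already a String? and only needs unwrapping rather than a cast. If the force-unwrapped cast feels risky, a sketch of a safer alternative inside the background block:

guard let documentsDirPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true).first else { return }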
I use AVPlayer to play a video link and add an observer to update a progress bar. But with videos longer than 10 seconds, my app crashes. Below is my code.
Play video button pressed:
@IBAction func playVideoButtonPressed(sender: AnyObject) {
    if let contentUrl = curVideoModel?.video_Content_Url {
        player = AVPlayer(URL: NSURL(string: contentUrl)!)
        player?.actionAtItemEnd = AVPlayerActionAtItemEnd.None

        playerLayer = AVPlayerLayer(player: player)
        playerLayer!.frame = CGRectMake(0, 0, self.imvVideoThumbnail.frame.size.width, self.imvVideoThumbnail.frame.size.height)
        playerLayer!.backgroundColor = UIColor.blackColor().CGColor
        playerLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.playvideoHolderView.layer.addSublayer(playerLayer!)

        NSNotificationCenter.defaultCenter().addObserver(self, selector: "itemDidFinishPlaying:", name: AVPlayerItemDidPlayToEndTimeNotification, object: player?.currentItem)
        player?.addPeriodicTimeObserverForInterval(CMTimeMakeWithSeconds(1.0/60.0, Int32(NSEC_PER_SEC)), queue: nil, usingBlock: { (time) -> Void in
            self.updateProgressbar()
        })
        player!.play()
    }
}
The callback function when the video finishes playing:
func itemDidFinishPlaying(notification: NSNotification) {
    NSNotificationCenter.defaultCenter().removeObserver(self, name: AVPlayerItemDidPlayToEndTimeNotification, object: player?.currentItem)
    self.progressBar.progress = 0
    if notification.name == AVPlayerItemDidPlayToEndTimeNotification {
        if player != nil {
            player?.pause()
        }
        if playerLayer != nil {
            playerLayer!.removeFromSuperlayer()
        }
    }
}
And the function to update the progress bar:
func updateProgressbar() {
    var duration: Double
    var time: Double
    if player != nil {
        duration = CMTimeGetSeconds(player!.currentItem.duration)
        time = CMTimeGetSeconds(player!.currentTime())
    } else {
        duration = 0
        time = 0
    }
    self.lblTotalTime.text = "\(Int(duration))"
    self.lblCurrentTime.text = "\(Int(time))"
    self.progressBar.progress = Float(time / duration)
}
P.S.: It crashes on a real device (iPod touch, iOS 8.3), but plays OK on the simulator.
EDIT: Maybe Peter's comment is right. I check if player!.currentItem.duration.value > 0 before getting duration = CMTimeGetSeconds(player!.currentItem.duration). Then the crash is fixed.
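For reference, a sketch of the guarded update described in the EDIT (the item's duration is indefinite until it has loaded, CMTimeGetSeconds of an indefinite time is NaN, and converting NaN to Int traps at runtime; property names are the same as above):

func updateProgressbar() {
    var duration: Double = 0
    var time: Double = 0
    // only read the duration once it is known, otherwise it is indefinite (NaN seconds)
    if player != nil && player!.currentItem.duration.value > 0 {
        duration = CMTimeGetSeconds(player!.currentItem.duration)
        time = CMTimeGetSeconds(player!.currentTime())
    }
    self.lblTotalTime.text = "\(Int(duration))"
    self.lblCurrentTime.text = "\(Int(time))"
    self.progressBar.progress = duration > 0 ? Float(time / duration) : 0
}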