AVFoundation: Take High Quality Still Image in Video-Session - objective-c

I am analysing live images in a capture session of type AVMediaTypeVideo. How can I capture a high-quality still image (not the low-resolution sample buffer) at certain events?
var videoCaptureDevice: AVCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
var device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
var previewLayer: AVCaptureVideoPreviewLayer?
var captureSession = AVCaptureSession()
let cameraOutput = AVCapturePhotoOutput()

// called in viewDidLoad
private func setupCamera() {
    let input = try? AVCaptureDeviceInput(device: videoCaptureDevice)
    captureSession.sessionPreset = AVCaptureSessionPreset640x480
    if let input = input, self.captureSession.canAddInput(input) {
        self.captureSession.addInput(input)
    }
    self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    let videoDataOutput = AVCaptureVideoDataOutput()
    if self.captureSession.canAddOutput(videoDataOutput) {
        self.captureSession.addOutput(videoDataOutput)
        videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
    }
}

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    // sampleBuffer is analysed
    // if the result is positive, I would like to take a high-quality picture
}

Here's a little snippet that I use in my apps. I hope it will be helpful:
private func takePhoto() -> Void
{
    if let videoConnection = stillImageOutput!.connection(withMediaType: AVMediaTypeVideo)
    {
        videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait
        stillImageOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
            guard let buffer = sampleBuffer else
            {
                return
            }
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
            let dataProvider = CGDataProvider(data: imageData as! CFData)
            let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!,
                                     decode: nil,
                                     shouldInterpolate: true,
                                     intent: CGColorRenderingIntent.defaultIntent)
            // The image that was taken
            let image: UIImage = UIImage(cgImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.right)
            // Stop capturing images
            self.captureSession!.stopRunning()
        })
    }
}
If you don't set the sessionPreset property of your captureSession variable, its value defaults to AVCaptureSessionPresetHigh. The only preset that is higher than this is AVCaptureSessionPresetPhoto.
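Note that AVCaptureStillImageOutput was deprecated in iOS 10 in favor of AVCapturePhotoOutput, which the question already declares as cameraOutput but never wires up. A minimal sketch of that route, assuming these methods live in the same view controller as the question's code and that the controller conforms to AVCapturePhotoCaptureDelegate (iOS 11+ API):

// Add the photo output next to the video data output during setup.
private func setupPhotoOutput() {
    if captureSession.canAddOutput(cameraOutput) {
        captureSession.addOutput(cameraOutput)
        cameraOutput.isHighResolutionCaptureEnabled = true
    }
}

// Call this when the sample-buffer analysis reports a positive result.
private func takeHighQualityPhoto() {
    let settings = AVCapturePhotoSettings()
    settings.isHighResolutionPhotoEnabled = true
    cameraOutput.capturePhoto(with: settings, delegate: self)
}

// MARK: - AVCapturePhotoCaptureDelegate
func photoOutput(_ output: AVCapturePhotoOutput,
                 didFinishProcessingPhoto photo: AVCapturePhoto,
                 error: Error?) {
    guard error == nil,
          let data = photo.fileDataRepresentation(),
          let image = UIImage(data: data) else { return }
    // `image` is the full-quality still; hand it off here.
    _ = image
}

Keep in mind that with the session preset left at AVCaptureSessionPreset640x480 the photo is still limited; switching the session to the photo preset before capturing gives the full sensor resolution.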

Related

Playing WAV data with AVAudioEngine

Currently, I'm getting an EXC_BAD_ACCESS error on the audio thread, and I'm trying to deduce what is going wrong.
When converting .wav file data from Data to an AVAudioPCMBuffer, do I need to strip the RIFF header first?
import AVFoundation
public class Player : NSObject {
    let engine = AVAudioEngine()

    public override init() {
        super.init()
        do {
            let _ = engine.mainMixerNode
            try engine.start()
        } catch {
            print("Player error: \(error)")
        }
    }

    @objc public func play(_ data: Data) {
        let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 2, interleaved: true)!
        let buffer = data.toPCMBuffer(format: format)!
        let player = AVAudioPlayerNode()
        engine.attach(player)
        engine.connect(player, to: engine.mainMixerNode, format: nil)
        player.scheduleBuffer(buffer, at: nil, completionCallbackType: .dataPlayedBack) { callbackType in
            // Nothing in here.
        }
        player.play()
    }
}
Here's the toPCMBuffer extension:
// Taken from: https://stackoverflow.com/a/52731480/2228559
extension Data {
    func toPCMBuffer(format: AVAudioFormat) -> AVAudioPCMBuffer? {
        let streamDesc = format.streamDescription.pointee
        let frameCapacity = UInt32(count) / streamDesc.mBytesPerFrame
        guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCapacity) else { return nil }
        buffer.frameLength = buffer.frameCapacity
        let audioBuffer = buffer.audioBufferList.pointee.mBuffers
        withUnsafeBytes { (rawBuffer: UnsafeRawBufferPointer) in
            guard let source = rawBuffer.baseAddress else { return }
            audioBuffer.mData?.copyMemory(from: source, byteCount: Int(audioBuffer.mDataByteSize))
        }
        return buffer
    }
}
Note: I cannot use AVAudioFile because the .wav file data is loaded over-the-wire.
I don't know the root cause, but my Mac crashes if I play interleaved AVAudioPCMBuffers, and I get garbled audio if they're not float data, so you could convert to non-interleaved float data:
@objc public func play(_ data: Data) {
    let sampleRate: Double = 48000
    let interleavedFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRate, channels: 2, interleaved: true)!
    let interleavedBuffer = data.toPCMBuffer(format: interleavedFormat)!
    let nonInterleavedFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: sampleRate, channels: 2, interleaved: false)!
    let nonInterleavedBuffer = AVAudioPCMBuffer(pcmFormat: nonInterleavedFormat, frameCapacity: interleavedBuffer.frameCapacity)!
    nonInterleavedBuffer.frameLength = interleavedBuffer.frameLength
    let converter = AVAudioConverter(from: interleavedFormat, to: nonInterleavedFormat)!
    try! converter.convert(to: nonInterleavedBuffer, from: interleavedBuffer)
    let player = AVAudioPlayerNode()
    engine.attach(player)
    engine.connect(player, to: engine.mainMixerNode, format: nil)
    player.scheduleBuffer(nonInterleavedBuffer, at: nil, completionCallbackType: .dataPlayedBack) { callbackType in
        // Nothing in here.
    }
    player.play()
}
extension Data {
    func toPCMBuffer(format: AVAudioFormat) -> AVAudioPCMBuffer? {
        assert(format.isInterleaved)
        let streamDesc = format.streamDescription.pointee
        let frameCapacity = UInt32(count) / streamDesc.mBytesPerFrame
        guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCapacity) else { return nil }
        buffer.frameLength = buffer.frameCapacity
        let b = UnsafeMutableBufferPointer(start: buffer.int16ChannelData![0], count: buffer.stride * Int(frameCapacity))
        let bytesCopied = self.copyBytes(to: b)
        assert(bytesCopied == count)
        return buffer
    }
}
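On the RIFF header question from the original post: toPCMBuffer treats every byte it receives as a PCM sample, so header bytes would be copied into the buffer and played as noise; skipping the header first avoids that. A minimal, hypothetical helper, assuming a canonical 44-byte header (real files can carry extra chunks, in which case you would scan for the "data" chunk instead):

// Hypothetical helper: strip a canonical 44-byte RIFF/WAVE header so only
// raw PCM samples are passed on to toPCMBuffer(format:).
func pcmPayload(fromWAVData data: Data) -> Data {
    let canonicalHeaderSize = 44
    guard data.count > canonicalHeaderSize else { return data }
    return data.subdata(in: canonicalHeaderSize..<data.count)
}

// Usage with the Player class above:
// player.play(pcmPayload(fromWAVData: wavData))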

Animate the drawing of an MKPolyline on MKMapView

I’ve got an MKMapView to animate a line by adding a line, removing it, adding a minor segment and re-adding it to the map. However, this puts a lot of overhead on the phone and doesn’t look the best. I noticed Google Maps and Uber have cleanly animated lines for showing routes that run smoothly no matter what the length or route type. Does anyone have any suggestions for a solution which is less energy-draining and looks cleaner?
Thanks, SebO.
An array of coordinates will be needed. If you only have the beginning and end coordinates, get the array of coordinates using the code below:
func getPointsOnRoute(from: CLLocation?, to: CLLocation?, on mapView: MKMapView?) -> [CLLocation]? {
    // A lower number gives a smoother animation, but results in more overlays.
    let NUMBER_OF_PIXELS_TO_SKIP: Int = 120
    var ret = [Any]()

    var fromPoint: CGPoint? = nil
    if let aCoordinate = from?.coordinate {
        fromPoint = mapView?.convert(aCoordinate, toPointTo: mapView)
    }
    var toPoint: CGPoint? = nil
    if let aCoordinate = to?.coordinate {
        toPoint = mapView?.convert(aCoordinate, toPointTo: mapView)
    }

    // getAllPoints(from:to:) and point(toLocation:from:) are helper methods from the
    // original answer (pixel interpolation and point-to-coordinate conversion); they
    // are not shown here.
    let allPixels = getAllPoints(from: fromPoint!, to: toPoint!)
    var i = 0
    while i < (allPixels?.count)! {
        let pointVal = allPixels![i] as? NSValue
        ret.append(point(toLocation: mapView, from: (pointVal?.cgPointValue)!)!)
        i += NUMBER_OF_PIXELS_TO_SKIP
    }
    ret.append(point(toLocation: mapView, from: toPoint!)!)
    return ret as? [CLLocation]
}
Once you have the array of coordinates, add rendering of the overlay in the MKMapViewDelegate method mapView(_:rendererFor:).
func mapView(_ mapView: MKMapView, rendererFor overlay: MKOverlay) -> MKOverlayRenderer {
    guard let polyline = overlay as? MKPolyline else {
        return MKOverlayRenderer()
    }
    let polylineRenderer = MKPolylineRenderer(overlay: polyline)
    polylineRenderer.strokeColor = .black
    polylineRenderer.lineWidth = 2
    return polylineRenderer
}
mapView.addOverlay(polyline) // add it to mapview
Then render the polyline in small segments to create the animation effect:
var drawingTimer: Timer?

// Somewhere in your view controller:
func animate(route: [CLLocationCoordinate2D], duration: TimeInterval, completion: (() -> Void)?) {
    guard route.count > 0 else { return }
    var currentStep = 1
    let totalSteps = route.count
    let stepDrawDuration = duration / TimeInterval(totalSteps)
    var previousSegment: MKPolyline?

    drawingTimer = Timer.scheduledTimer(withTimeInterval: stepDrawDuration, repeats: true) { [weak self] timer in
        guard let self = self else {
            // Invalidate animation if we can't retain self.
            timer.invalidate()
            completion?()
            return
        }
        if let previous = previousSegment {
            // Remove the last drawn segment if needed.
            self.mapView.removeOverlay(previous)
            previousSegment = nil
        }
        guard currentStep < totalSteps else {
            // If this is the last animation step...
            let finalPolyline = MKPolyline(coordinates: route, count: route.count)
            self.mapView.addOverlay(finalPolyline)
            // Assign the final polyline instance to the class property.
            self.polyline = finalPolyline
            timer.invalidate()
            completion?()
            return
        }
        // Animation step.
        // The current segment to draw consists of a coordinate array from 0 to 'currentStep', taken from the route.
        let subCoordinates = Array(route.prefix(upTo: currentStep))
        let currentSegment = MKPolyline(coordinates: subCoordinates, count: subCoordinates.count)
        self.mapView.addOverlay(currentSegment)

        previousSegment = currentSegment
        currentStep += 1
    }
}
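For completeness, a minimal hypothetical call site; routeLocations is assumed to be the [CLLocation] array returned by getPointsOnRoute(from:to:on:), and the view controller is assumed to own mapView and the polyline property referenced above:

// Convert CLLocation values to coordinates and animate the route over 3 seconds.
let routeCoordinates = routeLocations.map { $0.coordinate }
animate(route: routeCoordinates, duration: 3.0) {
    print("Route animation finished")
}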

Record and Resample to aac format with AVAudioEngine, AVAudioSession setCategory AVAudioSessionCategoryRecord?

I already found example code for recording with AVAudioEngine and successfully output a 22050 Hz AAC audio file.
After reviewing the code, there is a problem: I have to set the category to AVAudioSessionCategoryPlayAndRecord instead of AVAudioSessionCategoryRecord.
How can I change the code to use AVAudioSessionCategoryRecord without PlayAndRecord?
(In this case I should use AVAudioSessionCategoryRecord, right?)
If I set the category to AVAudioSessionCategoryRecord, I get an error:
AVAudioEngineGraph.mm:1070: Initialize: required condition is false:
IsFormatSampleRateAndChannelCountValid(outputHWFormat)
Terminating app due to uncaught exception
'com.apple.coreaudio.avfaudio', reason: 'required condition is false:
IsFormatSampleRateAndChannelCountValid(outputHWFormat)'
There is an example of using the microphone with AVAudioSessionCategoryRecord; it seems to me that we can record only with inputNode.
https://developer.apple.com/library/prerelease/content/samplecode/SpeakToMe/Introduction/Intro.html
The following code works now:
import UIKit
import AVFoundation

class ViewController: UIViewController {

    let audioEngine = AVAudioEngine()

    lazy var inputNode : AVAudioInputNode = {
        return self.audioEngine.inputNode!
    }()

    let sampleRate = Double(22050)

    lazy var outputFormat : AVAudioFormat = {
        return AVAudioFormat(commonFormat: self.inputNode.outputFormatForBus(0).commonFormat,
                             sampleRate: self.sampleRate,
                             channels: AVAudioChannelCount(1),
                             interleaved: false)
    }()

    let converterNode = AVAudioMixerNode()

    lazy var aacFileURL : NSURL = {
        let dirPath = NSURL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
        let filePath = dirPath.URLByAppendingPathComponent("rec.aac")
        return filePath
    }()

    lazy var outputAACFile : AVAudioFile = {
        var settings = self.outputFormat.settings
        settings[AVFormatIDKey] = NSNumber(unsignedInt: kAudioFormatMPEG4AAC)
        return try! AVAudioFile(forWriting: self.aacFileURL,
                                settings: settings)
    }()

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)

        let audioSession = AVAudioSession.sharedInstance()
        try! audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
        try! audioSession.setActive(true)

        self.audioEngine.attachNode(converterNode)
        self.audioEngine.connect(inputNode,
                                 to: converterNode,
                                 format: inputNode.outputFormatForBus(0))
        self.audioEngine.connect(converterNode,
                                 to: self.audioEngine.mainMixerNode,
                                 format: outputFormat)
        converterNode.volume = 0

        converterNode.installTapOnBus(0, bufferSize: 1024, format: converterNode.outputFormatForBus(0)) { (buffer, when) in
            try! self.outputAACFile.writeFromBuffer(buffer)
        }

        try! self.audioEngine.start()
    }
}
Related links:
How can I specify the format of AVAudioEngine Mic-Input?
Recording audio file using AVAudioEngine
The AVAudioMixerNode is not capable of converting between non-compressed and compressed formats. For that you need the AVAudioConverter.
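For reference, a rough sketch of what an AVAudioConverter-based tap might look like, written in current Swift syntax rather than the Swift 2 of the question; the 22050 Hz mono target mirrors the question, and the packet capacity of 8 is an arbitrary choice for the sketch:

import AVFoundation

// Tap the input node and compress each PCM buffer to AAC with AVAudioConverter,
// so nothing has to be routed through mainMixerNode or the output hardware.
let engine = AVAudioEngine()
let input = engine.inputNode
let inputFormat = input.outputFormat(forBus: 0)

// Target format: 22050 Hz mono AAC.
var aacDesc = AudioStreamBasicDescription(mSampleRate: 22050,
                                          mFormatID: kAudioFormatMPEG4AAC,
                                          mFormatFlags: 0,
                                          mBytesPerPacket: 0,
                                          mFramesPerPacket: 0,
                                          mBytesPerFrame: 0,
                                          mChannelsPerFrame: 1,
                                          mBitsPerChannel: 0,
                                          mReserved: 0)
let aacFormat = AVAudioFormat(streamDescription: &aacDesc)!
let converter = AVAudioConverter(from: inputFormat, to: aacFormat)!

input.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { pcmBuffer, _ in
    let compressed = AVAudioCompressedBuffer(format: aacFormat,
                                             packetCapacity: 8,
                                             maximumPacketSize: converter.maximumOutputPacketSize)
    var consumed = false
    var error: NSError?
    let status = converter.convert(to: compressed, error: &error) { _, outStatus in
        // Feed the tapped buffer exactly once per conversion call.
        if consumed {
            outStatus.pointee = .noDataNow
            return nil
        }
        consumed = true
        outStatus.pointee = .haveData
        return pcmBuffer
    }
    guard status != .error else { return }
    // `compressed` now holds AAC packets; append them to your output
    // (for example an AVAudioFile created with AAC settings).
}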

How to recover PDFs from .writeToFile in Swift

I'm saving a PDF using .writeToFile but I don't know how to read it back.
This is how I save it:
self.pdfData.writeToURL(NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first!.URLByAppendingPathComponent("testgogo.pdf"), atomically: true) // what it is saved as
self.pdfData.writeToFile("tessst.pdf", atomically: false)
print(NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first!.path!)

var pdfData: NSData {
    let result = NSMutableData()
    UIGraphicsBeginPDFContextToData(result, frame, nil)
    guard let context = UIGraphicsGetCurrentContext()
        else { return result }
    UIGraphicsBeginPDFPage()
    layer.renderInContext(context)
    UIGraphicsEndPDFContext()
    return result
}
How can I fetch it back later on?
Here is an example of how you could do it in Swift 2.x.
It uses NSData(contentsOfFile: myFilePath) to load the file.
The example uses a PNG file.
Directly from my Playground:
import UIKit
/*
* Creates an UIImage from a UIView
*/
func createImage(fromView view: UIView) -> UIImage {
    UIGraphicsBeginImageContext(view.frame.size)
    let context = UIGraphicsGetCurrentContext()
    view.layer.renderInContext(context!)
    let image = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return image
}
/*
 * Finds the path in the Documents folder
 */
func createMyFilePath(forFileName fileName: String) -> String? {
    let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
    if let path = paths.first {
        return path + "/\(fileName)"
    }
    return nil
}
/*
* Main behaviour
*/
// ImageView with Label on it
let imageView = UIImageView(image: UIImage(named: "borat"))
let label = UILabel(frame: imageView.frame)
label.font = UIFont(name: "helvetica", size: 40)
label.text = "Great Success!"
imageView.addSubview(label)
// Find the path where to save
guard let myFilePath = createMyFilePath(forFileName: "borat-with-label.png") else {
    print("Cannot generate file path ☹️")
    exit(0)
}
// Use this to check in finder where your file is saved
print(myFilePath)
// Transform the imageView in UIImage to save it
let imageToSave = createImage(fromView: imageView)
// Get the image as data
guard let imageToSaveAsData = UIImagePNGRepresentation(imageToSave) else {
    print("Cannot transform image to data ☹️")
    exit(1)
}
// Save to Disk!
do {
    try imageToSaveAsData.writeToFile(myFilePath, options: .DataWritingAtomic)
} catch {
    print("Error, cannot write to the location \(myFilePath)")
}
// Load from Disk!
let loadedImageData = NSData(contentsOfFile: myFilePath)
// Check the data is the same
if loadedImageData == imageToSaveAsData {
    print("✌️")
}
// Have a look at the loaded image!
UIImage(data: loadedImageData!)
You will need to remember the URL where you saved the image/pdf that you are storing.
In order to get it back, you can use the NSData class to get the contents of the file at that url.
dataWithContentsOfURL:(NSURL *)aURL is a good place to start.
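A minimal sketch of that, in the same Swift 2.x style as the rest of this question, assuming the file was written to the Documents directory as "testgogo.pdf" as in the code above:

// Rebuild the same Documents-directory URL used when writing, then load the bytes back.
let pdfURL = NSFileManager.defaultManager()
    .URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first!
    .URLByAppendingPathComponent("testgogo.pdf")
if let savedData = NSData(contentsOfURL: pdfURL) {
    print("Loaded \(savedData.length) bytes of PDF data")
}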
It looks like the problem might be that you are trying to load the PDF file as an image, which won't work. Try this method:
if let pdfURL = NSBundle.mainBundle().URLForResource("myPDF", withExtension: "pdf", subdirectory: nil, localization: nil),
   data = NSData(contentsOfURL: pdfURL),
   baseURL = pdfURL.URLByDeletingLastPathComponent {
    let webView = UIWebView(frame: CGRectMake(20, 20, self.view.frame.size.width - 40, self.view.frame.size.height - 40))
    webView.loadData(data, MIMEType: "application/pdf", textEncodingName: "", baseURL: baseURL)
    self.view.addSubview(webView)
}
P.S. I got this code from here: How to Load Local PDF in UIWebView in Swift
Using a slightly modified version of the code in this other answer, I have the following code:
class ViewController: UIViewController {

    @IBOutlet weak var webView: UIWebView!

    lazy var documentsPath = {
        return NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
    }()

    let fileName = "file.pdf"

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        createPDF()
        loadPDF()
    }

    func createPDF() {
        let html = "<b>Hello <i>World!</i></b> <p>Generate PDF file from HTML in Swift</p>"
        let fmt = UIMarkupTextPrintFormatter(markupText: html)

        // 2. Assign print formatter to UIPrintPageRenderer
        let render = UIPrintPageRenderer()
        render.addPrintFormatter(fmt, startingAtPageAtIndex: 0)

        // 3. Assign paperRect and printableRect
        let page = CGRect(x: 0, y: 0, width: 595.2, height: 841.8) // A4, 72 dpi
        let printable = CGRectInset(page, 0, 0)
        render.setValue(NSValue(CGRect: page), forKey: "paperRect")
        render.setValue(NSValue(CGRect: printable), forKey: "printableRect")

        // 4. Create PDF context and draw
        let pdfData = NSMutableData()
        UIGraphicsBeginPDFContextToData(pdfData, CGRectZero, nil)
        for i in 1...render.numberOfPages() {
            UIGraphicsBeginPDFPage()
            let bounds = UIGraphicsGetPDFContextBounds()
            render.drawPageAtIndex(i - 1, inRect: bounds)
        }
        UIGraphicsEndPDFContext()

        // 5. Save PDF file
        pdfData.writeToFile("\(documentsPath)/\(fileName)", atomically: true)
    }

    func loadPDF() {
        let filePath = "\(documentsPath)/\(fileName)"
        let url = NSURL(fileURLWithPath: filePath)
        let urlRequest = NSURLRequest(URL: url)
        webView.loadRequest(urlRequest)
    }
}
This code works, it creates a PDF file and then loads the same PDF file into a webView. The only thing that I changed was to create a lazy variable that returns the documents directory and I use a constant for the file path.
You should be able to use the same methods to save and retrieve your PDF file.

iOS8 video dimension, CMVideoDimensions returns 0,0

In iOS 8 the dimensions returned are 0,0:
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
This was working on iOS 7, so how can I find the supported video dimensions? I need to know the video aspect ratio.
You need to wait for the AVCaptureInputPortFormatDescriptionDidChangeNotification:
- (void)avCaptureInputPortFormatDescriptionDidChangeNotification:(NSNotification *)notification {
    AVCaptureInput *input = [self.recorder.captureSession.inputs objectAtIndex:0];
    AVCaptureInputPort *port = [input.ports objectAtIndex:0];
    CMFormatDescriptionRef formatDescription = port.formatDescription;
    if (formatDescription) {
        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
        if ((dimensions.width == 0) || (dimensions.height == 0)) {
            return;
        }
        CGFloat aspect = (CGFloat)dimensions.width / (CGFloat)dimensions.height;
        if (floor(NSFoundationVersionNumber) > NSFoundationVersionNumber_iOS_7_1) {
            // Since iOS 8 the aspect ratio is inverted.
            // Remove this check if iOS 7 does not need to be supported.
            aspect = 1.f / aspect;
        }
    }
}
Provided you're tracking the device being used, you can access the current format from activeFormat: https://developer.apple.com/library/ios/documentation/AVFoundation/Reference/AVCaptureDevice_Class/index.html#//apple_ref/occ/instp/AVCaptureDevice/activeFormat
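For example, a small sketch of that in Swift, assuming a default video capture device is available:

// Read the currently active format's dimensions straight from the capture device.
if let device = AVCaptureDevice.default(for: .video) {
    let dimensions = CMVideoFormatDescriptionGetDimensions(device.activeFormat.formatDescription)
    let aspect = Double(dimensions.width) / Double(dimensions.height)
    print("Active format: \(dimensions.width)x\(dimensions.height), aspect ratio \(aspect)")
}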
I recently ran into this particular issue; here's a Swift 5 version for those who need it too:
import Foundation
import AVFoundation

class MySessionManager: NSObject {

    static let notificationName = "AVCaptureInputPortFormatDescriptionDidChangeNotification"

    let session: AVCaptureSession
    var videoCaptureDimensions: CMVideoDimensions?

    init(session: AVCaptureSession) {
        self.session = session
        super.init()
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(formatDescription(didChange:)),
            name: .init(Self.notificationName),
            object: nil
        )
    }

    deinit { NotificationCenter.default.removeObserver(self) }

    @objc func formatDescription(didChange notification: NSNotification) {
        guard
            let input = session.inputs.first,
            let port = input.ports.first,
            let formatDesc = port.formatDescription
        else { return }

        var dimensions = CMVideoFormatDescriptionGetDimensions(formatDesc)
        // ... perform any necessary dimension adjustments ...
        videoCaptureDimensions = dimensions
    }
}
}