USB device send/receive data - objective-c

I have implemented a function to detect a USB device. It works, and now I need to send and read data.
I looked through a lot of Objective-C sources and found only one good article in Apple's documentation that describes how to send a packet to a USB device:
IOReturn WriteToDevice(IOUSBDeviceInterface **dev, UInt16 deviceAddress,
                       UInt16 length, UInt8 writeBuffer[])
{
    IOUSBDevRequest request;
    request.bmRequestType = USBmakebmRequestType(kUSBOut, kUSBVendor,
                                                 kUSBDevice);
    request.bRequest = 0xa0;
    request.wValue = deviceAddress;
    request.wIndex = 0;
    request.wLength = length;
    request.pData = writeBuffer;
    return (*dev)->DeviceRequest(dev, &request);
}
But I couldn't find out how to create and send that data from Swift. The struct in Swift looks like this:
public struct IOUSBDevRequest {
    public var bmRequestType: UInt8
    public var bRequest: UInt8
    public var wValue: UInt16
    public var wIndex: UInt16
    public var wLength: UInt16
    public var pData: UnsafeMutableRawPointer!
    public var wLenDone: UInt32
    public init()
    public init(bmRequestType: UInt8, bRequest: UInt8, wValue: UInt16, wIndex: UInt16, wLength: UInt16, pData: UnsafeMutableRawPointer!, wLenDone: UInt32)
}
I can't figure out what the pData and wLenDone parameters are.
This is the data I need to send:
{
    'direction': 'in',
    'recipient': 'device',
    'requestType': 'standard',
    'request': 6,
    'value': 0x300,
    'index': 0,
    'length': 255
}
The next question is: how can I receive data? I know the answer is in this article, but I can't convert it to Swift.
Here is what I managed to convert to Swift 3. My class detects the USB device and gets its configuration:
class DFUDevice: NSObject {
let vendorId = 0x0483
let productId = 0xdf11
static let sharedInstance = DFUDevice()
var deviceName:String = ""
private func deviceAdded(iterator: io_iterator_t) {
var plugInInterfacePtrPtr: UnsafeMutablePointer<UnsafeMutablePointer<IOCFPlugInInterface>?>?
var deviceInterfacePtrPtr: UnsafeMutablePointer<UnsafeMutablePointer<IOUSBDeviceInterface>?>?
var configPtr:IOUSBConfigurationDescriptorPtr?
var score: Int32 = 0
while case let usbDevice = IOIteratorNext(iterator), usbDevice != 0 {
// io_name_t imports to Swift as a tuple (Int8, ..., Int8) of 128 elements,
// although in device_types.h it's defined:
// typedef char io_name_t[128];
var deviceNameCString: [CChar] = [CChar](repeating: 0, count: 128)
let deviceNameResult = IORegistryEntryGetName(usbDevice, &deviceNameCString)
if(deviceNameResult != kIOReturnSuccess) {
print("Error getting device name")
}
self.deviceName = String.init(cString: &deviceNameCString)
print("usb Device Name: \(deviceName)")
// Get plugInInterface for current USB device
let plugInInterfaceResult = IOCreatePlugInInterfaceForService(
usbDevice,
kIOUSBDeviceUserClientTypeID,
kIOCFPlugInInterfaceID,
&plugInInterfacePtrPtr,
&score)
// dereference pointer for the plug in interface
guard plugInInterfaceResult == kIOReturnSuccess,
let plugInInterface = plugInInterfacePtrPtr?.pointee?.pointee else {
print("Unable to get Plug-In Interface")
continue
}
// use plug in interface to get a device interface
let deviceInterfaceResult = withUnsafeMutablePointer(to: &deviceInterfacePtrPtr) {
$0.withMemoryRebound(to: Optional<LPVOID>.self, capacity: 1) {
plugInInterface.QueryInterface(
plugInInterfacePtrPtr,
CFUUIDGetUUIDBytes(kIOUSBDeviceInterfaceID),
$0)
}
}
// dereference pointer for the device interface
guard deviceInterfaceResult == kIOReturnSuccess,
let deviceInterface = deviceInterfacePtrPtr?.pointee?.pointee else {
print("Unable to get Device Interface")
continue
}
var ret = deviceInterface.USBDeviceOpen(deviceInterfacePtrPtr)
if (ret == kIOReturnSuccess)
{
// set first configuration as active
ret = deviceInterface.GetConfigurationDescriptorPtr(deviceInterfacePtrPtr, 0, &configPtr)
if (ret != kIOReturnSuccess)
{
print("Could not set active configuration (error: %x)\n", ret);
continue
}
guard let config = configPtr?.pointee else {
continue
}
if config.bLength > 0 {
//HERE I NEED SEND DATA
} else {
print("ConfigurationDescriptor not valid")
}
print(config.bLength)
}
else if (ret == kIOReturnExclusiveAccess)
{
// this is not a problem as we can still do some things
}
else
{
print("Could not open device (error: %x)\n", ret)
continue
}
IOObjectRelease(usbDevice)
}
}
func initUsb() {
var matchedIterator:io_iterator_t = 0
var removalIterator:io_iterator_t = 0
let notifyPort:IONotificationPortRef = IONotificationPortCreate(kIOMasterPortDefault)
IONotificationPortSetDispatchQueue(notifyPort, DispatchQueue(label: "IODetector"))
let matchingDict = IOServiceMatching(kIOUSBDeviceClassName)
as NSMutableDictionary
matchingDict[kUSBVendorID] = NSNumber(value: self.vendorId)
matchingDict[kUSBProductID] = NSNumber(value: self.productId)
let matchingCallback:IOServiceMatchingCallback = { (userData, iterator) in
let this = Unmanaged<DFUDevice>
.fromOpaque(userData!).takeUnretainedValue()
this.deviceAdded(iterator: iterator)
this.connected(iterator: iterator)
}
let removalCallback: IOServiceMatchingCallback = {
(userData, iterator) in
let this = Unmanaged<DFUDevice>
.fromOpaque(userData!).takeUnretainedValue()
this.disconnected(iterator: iterator)
}
let selfPtr = Unmanaged.passUnretained(self).toOpaque()
IOServiceAddMatchingNotification(notifyPort, kIOFirstMatchNotification, matchingDict, matchingCallback, selfPtr, &matchedIterator)
IOServiceAddMatchingNotification(notifyPort, kIOTerminatedNotification, matchingDict, removalCallback, selfPtr, &removalIterator)
self.deviceAdded(iterator: matchedIterator)
self.disconnected(iterator: removalIterator)
RunLoop.current.run()
}
}
I call it like this:
let DFUDeviceDaemon = Thread(target: DFUDevice.sharedInstance, selector:#selector(DFUDevice.initUsb), object: nil)
DFUDeviceDaemon.start()
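One caveat: Thread(target:selector:) resolves initUsb through the Objective-C runtime, so in Swift 4 and later the method must be exposed explicitly (in Swift 3 an NSObject subclass still exposes it implicitly):
@objc func initUsb() {
    // body unchanged from above
}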

The article you reference has a function called WriteToDevice. One of its parameters is
UInt8 writeBuffer[]
This writeBuffer, the data that you want to send, is a C array of bytes:
uint8_t msgLength = 3;
uint8_t writeBuffer[msgLength];
writeBuffer[0] = 0x41; // ASCII 'A'
writeBuffer[1] = 0x42; // ASCII 'B'
writeBuffer[2] = 0x43; // ASCII 'C'
What bytes do you need to send? That really depends on the device at the other end -- the technical data from the manufacturer should tell you that.
To pass the C array through pData -- which is a raw pointer to the bytes -- you can wrap it in NSData first:
NSData *data = [NSData dataWithBytes:&writeBuffer length:3];
The wLength field is what I called msgLength, 3: C arrays have no knowledge of their own length, so most functions require the length as a separate parameter. wLenDone is the counterpart on the way out -- the request fills it in with the number of bytes actually transferred.
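In Swift terms, mapped onto the request dictionary from the question, that would look roughly like this (a sketch: request 6 is the standard GET_DESCRIPTOR, wValue 0x300 selects string descriptor 0, and the buffer name is mine):
var responseBuffer = [UInt8](repeating: 0, count: 255)
var request = IOUSBDevRequest(
    bmRequestType: 0x80,     // 'in' | 'standard' | 'device'
    bRequest: 6,             // GET_DESCRIPTOR
    wValue: 0x300,           // descriptor type 3 (string) in the high byte, index 0
    wIndex: 0,
    wLength: 255,            // capacity of responseBuffer
    pData: &responseBuffer,  // raw pointer to the caller-owned buffer
    wLenDone: 0)             // output: overwritten with the bytes actually transferred
// then: deviceInterface.DeviceRequest(deviceInterfacePtrPtr, &request)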
As for receiving data, I would guess that happens in the matchingCallback: you use the iterator to receive the bytes and then parse them.
ANSWER TO COMMENT:
I'm not familiar with C#, and I'm no expert at this stuff, but maybe this will help:
var package = new UsbSetupPacket(
    (byte)(UsbCtrlFlags.Direction_In |
           UsbCtrlFlags.Recipient_Device |
           UsbCtrlFlags.RequestType_Standard), // Index
    6,              // length of data, second phase
    0x200,          // Request
    0,              // RequestType
    (short)length); // Value
A few observations: I know nothing of C#, but shouldn't the package be typed as a struct? RequestType is 0, so you will receive no response -- is that what you want? Or did you want to send UsbCtrlFlags.RequestType_Standard as the fourth parameter? And why send the length as the value?
Anyway, what you do now is send the package to the USB device and see what happens.

After a lot of questions on Stack Overflow and a lot of reading, I figured it out.
First, define the constants and the helper that the Swift importer doesn't bring over:
import Foundation
import IOKit
import IOKit.usb
import IOKit.usb.IOUSBLib
//from IOUSBLib.h
let kIOUSBDeviceUserClientTypeID = CFUUIDGetConstantUUIDWithBytes(nil,
0x9d, 0xc7, 0xb7, 0x80, 0x9e, 0xc0, 0x11, 0xD4,
0xa5, 0x4f, 0x00, 0x0a, 0x27, 0x05, 0x28, 0x61)
let kIOUSBDeviceInterfaceID = CFUUIDGetConstantUUIDWithBytes(nil,
0x5c, 0x81, 0x87, 0xd0, 0x9e, 0xf3, 0x11, 0xD4,
0x8b, 0x45, 0x00, 0x0a, 0x27, 0x05, 0x28, 0x61)
//from IOCFPlugin.h
let kIOCFPlugInInterfaceID = CFUUIDGetConstantUUIDWithBytes(nil,
0xC2, 0x44, 0xE8, 0x58, 0x10, 0x9C, 0x11, 0xD4,
0x91, 0xD4, 0x00, 0x50, 0xE4, 0xC6, 0x42, 0x6F)
/*!
 @defined USBmakebmRequestType
 @discussion Macro to encode the bmRequestType field of a Device Request. It is used when constructing an IOUSBDevRequest.
 */
func USBmakebmRequestType(direction: Int, type: Int, recipient: Int) -> UInt8 {
    return UInt8((direction & kUSBRqDirnMask) << kUSBRqDirnShift) |
           UInt8((type & kUSBRqTypeMask) << kUSBRqTypeShift) |
           UInt8(recipient & kUSBRqRecipientMask)
}
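As a sanity check, here is what the helper produces for two combinations used later in this post (the shift and mask constants come from IOKit's USB headers):
// IN (1 << 7) | class (1 << 5) | interface (1) == 0xA1, the literal 161 used in upload() below
let classIn = USBmakebmRequestType(direction: kUSBIn, type: kUSBClass, recipient: kUSBInterface)
// IN (1 << 7) | standard (0) | device (0) == 0x80
let standardIn = USBmakebmRequestType(direction: kUSBIn, type: kUSBStandard, recipient: kUSBDevice)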
Then create our class:
extension Notification.Name {
static let dfuDeviceConnected = Notification.Name("DFUDeviceConnected")
static let dfuDeviceDisconnected = Notification.Name("DFUDeviceDisconnected")
}
class DFUDevice: NSObject {
let vendorId = 0x0483
let productId = 0xdf11
static let sharedInstance = DFUDevice()
var deviceInterfacePtrPtr: UnsafeMutablePointer<UnsafeMutablePointer<IOUSBDeviceInterface>?>?
var plugInInterfacePtrPtr: UnsafeMutablePointer<UnsafeMutablePointer<IOCFPlugInInterface>?>?
var interfacePtrPtr:UnsafeMutablePointer<UnsafeMutablePointer<IOUSBInterfaceInterface>?>?
private func rawDeviceAdded(iterator: io_iterator_t) {
var score:Int32 = 0
var kr:Int32 = 0
while case let usbDevice = IOIteratorNext(iterator), usbDevice != 0 {
// io_name_t imports to Swift as a tuple (Int8, ..., Int8) of 128 elements,
// although in device_types.h it's defined:
// typedef char io_name_t[128];
var deviceNameCString: [CChar] = [CChar](repeating: 0, count: 128)
let deviceNameResult = IORegistryEntryGetName(usbDevice, &deviceNameCString)
if(deviceNameResult != kIOReturnSuccess) {
print("Error getting device name")
}
let deviceName = String.init(cString: &deviceNameCString)
print("usb Device Name: \(deviceName)")
// Get plugInInterface for current USB device
let plugInInterfaceResult = IOCreatePlugInInterfaceForService(
usbDevice,
kIOUSBDeviceUserClientTypeID,
kIOCFPlugInInterfaceID,
&plugInInterfacePtrPtr,
&score)
// USB device object is no longer needed.
IOObjectRelease(usbDevice)
// Dereference pointer for the plug-in interface
guard plugInInterfaceResult == kIOReturnSuccess,
let plugInInterface = plugInInterfacePtrPtr?.pointee?.pointee else {
print("Unable to get Plug-In Interface")
continue
}
// use plug in interface to get a device interface
let deviceInterfaceResult = withUnsafeMutablePointer(to: &deviceInterfacePtrPtr) {
$0.withMemoryRebound(to: Optional<LPVOID>.self, capacity: 1) {
plugInInterface.QueryInterface(
plugInInterfacePtrPtr,
CFUUIDGetUUIDBytes(kIOUSBDeviceInterfaceID),
$0)
}
}
// dereference pointer for the device interface
guard deviceInterfaceResult == kIOReturnSuccess,
let deviceInterface = deviceInterfacePtrPtr?.pointee?.pointee else {
print("Unable to get Device Interface")
continue
}
kr = deviceInterface.USBDeviceOpen(deviceInterfacePtrPtr)
if (kr == kIOReturnExclusiveAccess)
{
// this is not a problem, as we can still do some things
}
else if (kr != kIOReturnSuccess)
{
print("Could not open device (error: \(kr))")
continue
}
self.connected()
}
}
private func rawDeviceRemoved(iterator: io_iterator_t) {
var kr:Int32 = 0
while case let usbDevice = IOIteratorNext(iterator), usbDevice != 0 {
// USB device object is no longer needed.
kr = IOObjectRelease(usbDevice)
if (kr != kIOReturnSuccess)
{
print("Couldn’t release raw device object (error: \(kr))")
continue
}
self.disconnected()
}
}
func getStatus() throws -> [UInt8] {
guard let deviceInterface = self.deviceInterfacePtrPtr?.pointee?.pointee else {
throw DFUDeviceError.DeviceInterfaceNotFound
}
var kr:Int32 = 0
let length:Int = 6
var requestPtr:[UInt8] = [UInt8](repeating: 0, count: length)
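// NOTE: the type/recipient arguments below look transposed. For a DFU class
// request the canonical encoding is USBmakebmRequestType(direction: kUSBIn,
// type: kUSBClass, recipient: kUSBInterface), i.e. 0xA1 -- the literal 161
// used in upload() further down. Also, wLenDone is an output parameter: the
// kernel overwrites it with the number of bytes actually transferred.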
var request = IOUSBDevRequest(bmRequestType: USBmakebmRequestType(direction: kUSBIn, type: kUSBDevice, recipient: kUSBStandard),
bRequest: DFUREQUEST.GETSTATUS.rawValue,
wValue: 0,
wIndex: 0,
wLength: UInt16(length),
pData: &requestPtr,
wLenDone: 255)
kr = deviceInterface.DeviceRequest(self.deviceInterfacePtrPtr, &request)
if (kr != kIOReturnSuccess) {
throw DFUDeviceError.RequestError(desc: "Get device status request error: \(kr)")
}
return requestPtr
}
private func configureDevice() -> Int32 {
var kr:Int32 = 0
guard let deviceInterface = deviceInterfacePtrPtr?.pointee?.pointee else {
print("Unable to get Device Interface")
return -1
}
var numConfig:UInt8 = 0
kr = deviceInterface.GetNumberOfConfigurations(deviceInterfacePtrPtr, &numConfig)
if numConfig == 0 {
print("Device Number Of Configurations: 0")
return -1
}
var configPtr:IOUSBConfigurationDescriptorPtr?
// set first configuration as active
kr = deviceInterface.GetConfigurationDescriptorPtr(deviceInterfacePtrPtr, 0, &configPtr)
if (kr != kIOReturnSuccess)
{
print("Couldn’t get configuration descriptor for index (error: %x)\n", kr);
return -1
}
guard let config = configPtr?.pointee else {
return -1
}
//Set the device’s configuration. The configuration value is found in
//the bConfigurationValue field of the configuration descriptor
kr = deviceInterface.SetConfiguration(deviceInterfacePtrPtr, config.bConfigurationValue)
if (kr != kIOReturnSuccess)
{
print("Couldn’t set configuration to value (error: %x)\n", kr);
return -1
}
return kIOReturnSuccess
}
func connected() {
NotificationCenter.default.post(name: .dfuDeviceConnected, object: nil)
globalLogPost("DFU device has been device connected")
}
func disconnected() {
NotificationCenter.default.post(name: .dfuDeviceDisconnected, object: nil)
globalLogPost("DFU device has been disconnected")
}
func initUsb() {
var matchedIterator:io_iterator_t = 0
var removalIterator:io_iterator_t = 0
let notifyPort:IONotificationPortRef = IONotificationPortCreate(kIOMasterPortDefault)
IONotificationPortSetDispatchQueue(notifyPort, DispatchQueue(label: "IODetector"))
let matchingDict = IOServiceMatching(kIOUSBDeviceClassName)
as NSMutableDictionary
matchingDict[kUSBVendorID] = NSNumber(value: self.vendorId)
matchingDict[kUSBProductID] = NSNumber(value: self.productId)
let matchingCallback:IOServiceMatchingCallback = { (userData, iterator) in
let this = Unmanaged<DFUDevice>
.fromOpaque(userData!).takeUnretainedValue()
this.rawDeviceAdded(iterator: iterator)
}
let removalCallback: IOServiceMatchingCallback = {
(userData, iterator) in
let this = Unmanaged<DFUDevice>
.fromOpaque(userData!).takeUnretainedValue()
this.rawDeviceRemoved(iterator: iterator)
}
let selfPtr = Unmanaged.passUnretained(self).toOpaque()
IOServiceAddMatchingNotification(notifyPort, kIOFirstMatchNotification, matchingDict, matchingCallback, selfPtr, &matchedIterator)
IOServiceAddMatchingNotification(notifyPort, kIOTerminatedNotification, matchingDict, removalCallback, selfPtr, &removalIterator)
self.rawDeviceAdded(iterator: matchedIterator)
self.rawDeviceRemoved(iterator: removalIterator)
RunLoop.current.run()
}
}
Take a look at the getStatus method, where I create a USB request and send it to the device; the device's answer then arrives in requestPtr:[UInt8]. Thank you for helping, guys.
We can use our device pointer anywhere in the project, for example:
func upload(value:UInt16, length:UInt16) throws -> [UInt8] {
guard let deviceInterface = DFUDevice.sharedInstance.deviceInterfacePtrPtr?.pointee?.pointee else {
throw DFUDeviceError.DeviceInterfaceNotFound
}
var kr:Int32 = 0
var requestPtr:[UInt8] = [UInt8](repeating: 0, count: Int(length))
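// 161 == 0xA1 == USBmakebmRequestType(direction: kUSBIn, type: kUSBClass,
// recipient: kUSBInterface): an IN, class-specific request addressed to the
// interface, which is what the DFU spec prescribes for DFU_UPLOAD.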
var request = IOUSBDevRequest(bmRequestType: 161,
bRequest: DFUREQUEST.UPLOAD.rawValue,
wValue: value,
wIndex: 0,
wLength: length,
pData: &requestPtr,
wLenDone: 255)
kr = deviceInterface.DeviceRequest(DFUDevice.sharedInstance.deviceInterfacePtrPtr, &request)
if (kr != kIOReturnSuccess) {
throw DFUDeviceError.RequestError(desc: "Upload request error: \(kr), request data: \(request)")
}
return requestPtr
}
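The DFUREQUEST and DFUDeviceError types used above aren't shown in this post. A minimal sketch of what they could look like, with the request codes taken from the USB DFU 1.1 specification:
enum DFUREQUEST: UInt8 {
    case DETACH = 0     // DFU_DETACH
    case DNLOAD = 1     // DFU_DNLOAD
    case UPLOAD = 2     // DFU_UPLOAD
    case GETSTATUS = 3  // DFU_GETSTATUS
    case CLRSTATUS = 4  // DFU_CLRSTATUS
    case GETSTATE = 5   // DFU_GETSTATE
    case ABORT = 6      // DFU_ABORT
}
enum DFUDeviceError: Error {
    case DeviceInterfaceNotFound
    case RequestError(desc: String)
}
With those in place, a call site is simply let status = try DFUDevice.sharedInstance.getStatus().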

Related

Type of expression is ambiguous without more context

I'm trying to convert the Objective-C code mentioned here -- https://gist.github.com/Coeur/1409855/f6df10c79f8cdd0fcb2a0735b99f4b3a74b9f954 -- to Swift.
The code I've written so far in Swift:
class func getMacAddress(_ ifName: String?) -> String? {
var mgmtInfoBase = [Int32](repeating: 0, count: 6)
var msgBuffer: Int8? = nil
var length: size_t
var macAddress = [UInt8](repeating: 0, count: 6)
var interfaceMsgStruct: if_msghdr?
var socketStruct: sockaddr_dl?
var errorFlag: String? = nil
// Setup the management Information Base (mib)
mgmtInfoBase[0] = Int32(Int(CTL_NET)) // Request network subsystem
mgmtInfoBase[1] = Int32(Int(AF_ROUTE)) // Routing table info
mgmtInfoBase[2] = 0
mgmtInfoBase[3] = Int32(Int(AF_LINK)) // Request link layer information
mgmtInfoBase[4] = Int32(Int(NET_RT_IFLIST)) // Request all configured interfaces
mgmtInfoBase[5] = Int32(if_nametoindex(ifName?.utf8CString)) //ERROR: Type of expression is ambiguous without more context
// With all configured interfaces requested, get handle index
if ( mgmtInfoBase[5] == 0) {
errorFlag = "if_nametoindex failure"
} else {
// Get the size of the data available (store in len)
if sysctl(&mgmtInfoBase, 6, nil, &length, nil, 0) < 0 {
errorFlag = "sysctl mgmtInfoBase failure"
} else {
// Alloc memory based on above call
if (msgBuffer = Int8((length))) == nil {
errorFlag = "buffer allocation failure"
} else {
// Get system information, store in buffer
if sysctl(&mgmtInfoBase, 6, &msgBuffer, &length, nil, 0) < 0 {
errorFlag = "sysctl msgBuffer failure"
}
}
}
}
// Before going any further...
if errorFlag != nil {
// Release the buffer memory
if (msgBuffer != nil) {
free(&msgBuffer)
}
return nil
}
// Map msgbuffer to interface message structure
interfaceMsgStruct = msgBuffer as? if_msghdr
// Map to link-level socket structure
socketStruct = (interfaceMsgStruct + 1) as? sockaddr_dl // ERROR: Cannot convert value of type 'if_msghdr?' to expected argument type 'Int'
// Copy link layer address data in socket structure to an array
if socketStruct == nil {
return nil
}
memcpy(&macAddress, socketStruct.sdl_data + socketStruct.sdl_nlen, 6) // ERROR: Type of expression is ambiguous without more context
// Read from char array into a string object, into traditional Mac address format
let macAddressString = String(format: "%02X:%02X:%02X:%02X:%02X:%02X", macAddress[0], macAddress[1], macAddress[2], macAddress[3], macAddress[4], macAddress[5])
// Release the buffer memory
free(&msgBuffer)
return macAddressString
}
I'm getting the errors that I've mentioned. I searched and tried every possible thing and read articles from the documentation, but I still couldn't get rid of these errors. Please help.
The definitions of the functions for which I'm getting errors, as declared in Darwin.posix.net.if:
public func if_nametoindex(_: UnsafePointer<Int8>!) -> UInt32
public struct if_msghdr {
public var ifm_msglen: UInt16 /* to skip non-understood messages */
public var ifm_version: UInt8 /* future binary compatability */
public var ifm_type: UInt8 /* message type */
public var ifm_addrs: Int32 /* like rtm_addrs */
public var ifm_flags: Int32 /* value of if_flags */
public var ifm_index: UInt16 /* index for associated ifp */
public var ifm_data: if_data /* statistics and other data about if */
public init()
public init(ifm_msglen: UInt16, ifm_version: UInt8, ifm_type: UInt8, ifm_addrs: Int32, ifm_flags: Int32, ifm_index: UInt16, ifm_data: if_data)
}
public func memcpy(_ __dst: UnsafeMutableRawPointer!, _ __src: UnsafeRawPointer!, _ __n: Int) -> UnsafeMutableRawPointer!
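For what it's worth, the three errors all come from treating Swift pointers like C ones. A minimal sketch of the trouble spots (not a drop-in replacement; names are mine): a Swift String can be passed directly where UnsafePointer<Int8> is expected, the sysctl buffer can be a plain [UInt8], and the struct mapping is done by rebinding raw memory rather than with as? casts:
import Foundation
import Darwin

func macAddress(ofInterface ifName: String) -> String? {
    // A Swift String bridges automatically to UnsafePointer<Int8> in a call:
    let index = if_nametoindex(ifName)
    guard index != 0 else { return nil }
    var mib: [Int32] = [CTL_NET, AF_ROUTE, 0, AF_LINK, NET_RT_IFLIST, Int32(index)]
    var length: size_t = 0
    // First call asks for the size; second call fills a plain [UInt8] buffer.
    guard sysctl(&mib, 6, nil, &length, nil, 0) >= 0 else { return nil }
    var buffer = [UInt8](repeating: 0, count: length)
    guard sysctl(&mib, 6, &buffer, &length, nil, 0) >= 0 else { return nil }
    return buffer.withUnsafeBytes { raw -> String? in
        guard let base = raw.baseAddress else { return nil }
        // The sockaddr_dl sits right behind the if_msghdr in the same buffer;
        // rebind the raw bytes instead of using as? casts.
        let sdl = (base + MemoryLayout<if_msghdr>.size).assumingMemoryBound(to: sockaddr_dl.self)
        let nlen = Int(sdl.pointee.sdl_nlen)
        // sdl_data holds the interface name followed by the MAC bytes
        // (this sketch assumes both fit in the declared sdl_data array).
        return withUnsafeBytes(of: sdl.pointee.sdl_data) { data in
            String(format: "%02X:%02X:%02X:%02X:%02X:%02X",
                   data[nlen], data[nlen + 1], data[nlen + 2],
                   data[nlen + 3], data[nlen + 4], data[nlen + 5])
        }
    }
}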

Cannot create logical device only in debug mode

I'm getting VK_ERROR_FEATURE_NOT_PRESENT (-8), even though I'm using vkGetPhysicalDeviceFeatures to query the supported features.
My Code:
std::vector<VkDeviceQueueCreateInfo> LogicalDevice::CreateDeviceQueueCreateInfos(QueueFamilies queueFamilies)
{
std::vector uniqueQueueFamilies = queueFamilies.GetUniqueQueueFamilies();
std::vector<VkDeviceQueueCreateInfo> queueCreateInfos;
for (auto queueFamily : uniqueQueueFamilies)
{
const int countOfQueues = queueFamily.CountOfQueues;
std::vector<float> queuePriorities(countOfQueues);
for (int indexOfPriority = 0; indexOfPriority < countOfQueues; indexOfPriority++)
{
queuePriorities[indexOfPriority] = 1.0f - ( (float) indexOfPriority / countOfQueues);
}
VkDeviceQueueCreateInfo queueCreateInfo{};
queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
queueCreateInfo.queueFamilyIndex = queueFamily.Index.value();
queueCreateInfo.queueCount = queueFamily.CountOfQueues;
queueCreateInfo.flags = queueFamily.Flags;
queueCreateInfo.pQueuePriorities = queuePriorities.data();
queueCreateInfos.push_back(queueCreateInfo);
}
return queueCreateInfos;
}
VkDeviceCreateInfo LogicalDevice::GetDeviceCreateInfo(std::vector<VkDeviceQueueCreateInfo> deviceQueueCreateInfos, VkPhysicalDevice physicalDevice)
{
VkPhysicalDeviceFeatures deviceFeatures{};
vkGetPhysicalDeviceFeatures(physicalDevice, &deviceFeatures);
VkDeviceCreateInfo deviceCreateInfo{};
deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
deviceCreateInfo.queueCreateInfoCount = static_cast<uint32_t>(deviceQueueCreateInfos.size());
deviceCreateInfo.pQueueCreateInfos = deviceQueueCreateInfos.data();
deviceCreateInfo.pEnabledFeatures = &deviceFeatures;
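// NB: deviceFeatures is local to this function, so pEnabledFeatures ends up
// pointing at dead stack memory once GetDeviceCreateInfo returns (see the
// answer below).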
return deviceCreateInfo;
}
void LogicalDevice::Initialize(VkPhysicalDevice physicalDevice, VkSurfaceKHR surfaceForPickingPhysicalDevice)
{
m_queueFamilies = QueueFamilies::GetQueueFamilies(physicalDevice, surfaceForPickingPhysicalDevice);
std::vector<VkDeviceQueueCreateInfo> deviceQueueCreateInfos = CreateDeviceQueueCreateInfos(m_queueFamilies);
VkDeviceCreateInfo deviceCreateInfo = GetDeviceCreateInfo(deviceQueueCreateInfos, physicalDevice);
VkResult result = vkCreateDevice(physicalDevice, &deviceCreateInfo, nullptr, &m_vulkanDevice);
if (result != VK_SUCCESS)
{
throw new std::runtime_error("Cannot create logical device.");
}
}
The deviceFeatures variable that you read the features into, and which the create-info structure points at, is local to GetDeviceCreateInfo. It has gone out of scope by the time you call vkCreateDevice, which results in undefined behavior. You're probably getting random junk at device creation time instead, which causes that error.

Playing WAV data with AVAudioEngine

Currently, I'm getting an EXC_BAD_ACCESS error on the audio thread, and I'm trying to deduce what is going wrong.
When converting .wav file data from Data to an AVAudioPCMBuffer, do I need to strip the RIFF header first?
import AVFoundation
public class Player : NSObject {
let engine = AVAudioEngine()
public override init() {
super.init()
do {
let _ = engine.mainMixerNode
try engine.start()
} catch {
print("Player error: \(error)")
}
}
@objc public func play(_ data: Data) {
let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 2, interleaved: true)!
let buffer = data.toPCMBuffer(format: format)!
let player = AVAudioPlayerNode()
engine.attach(player)
engine.connect(player, to: engine.mainMixerNode, format: nil)
player.scheduleBuffer(buffer, at: nil, completionCallbackType: .dataPlayedBack) {
callbackType in
// Nothing in here.
}
player.play()
}
}
Here's the toPCMBuffer extension:
// Taken from: https://stackoverflow.com/a/52731480/2228559
extension Data {
func toPCMBuffer(format: AVAudioFormat) -> AVAudioPCMBuffer? {
let streamDesc = format.streamDescription.pointee
let frameCapacity = UInt32(count) / streamDesc.mBytesPerFrame
guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCapacity) else { return nil }
buffer.frameLength = buffer.frameCapacity
let audioBuffer = buffer.audioBufferList.pointee.mBuffers
withUnsafeBytes { addr in
audioBuffer.mData?.copyMemory(from: addr, byteCount: Int(audioBuffer.mDataByteSize))
}
return buffer
}
}
Note: I cannot use AVAudioFile because the .wav file data is loaded over-the-wire.
IDK, but my Mac crashes if I play interleaved AVAudioPCMBuffers, and I get garbled audio if they're not float data, so you could convert to non-interleaved float data:
@objc public func play(_ data: Data) {
let sampleRate: Double = 48000
let interleavedFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRate, channels: 2, interleaved: true)!
let interleavedBuffer = data.toPCMBuffer(format: interleavedFormat)!
let nonInterleavedFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: sampleRate, channels: 2, interleaved: false)!
let nonInterleavedBuffer = AVAudioPCMBuffer(pcmFormat: nonInterleavedFormat, frameCapacity: interleavedBuffer.frameCapacity)!
nonInterleavedBuffer.frameLength = interleavedBuffer.frameLength
let converter = AVAudioConverter(from: interleavedFormat, to: nonInterleavedFormat)!
try! converter.convert(to: nonInterleavedBuffer, from: interleavedBuffer)
let player = AVAudioPlayerNode()
engine.attach(player)
engine.connect(player, to: engine.mainMixerNode, format: nil)
player.scheduleBuffer(nonInterleavedBuffer, at: nil, completionCallbackType: .dataPlayedBack) {
callbackType in
// Nothing in here.
}
player.play()
}
extension Data {
func toPCMBuffer(format: AVAudioFormat) -> AVAudioPCMBuffer? {
assert(format.isInterleaved)
let streamDesc = format.streamDescription.pointee
let frameCapacity = UInt32(count) / streamDesc.mBytesPerFrame
guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCapacity) else { return nil }
buffer.frameLength = buffer.frameCapacity
let b = UnsafeMutableBufferPointer(start: buffer.int16ChannelData![0], count: buffer.stride * Int(frameCapacity))
let bytesCopied = self.copyBytes(to: b)
assert(bytesCopied == count)
return buffer
}
}
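As for the RIFF question in the post: yes, the bytes handed to toPCMBuffer should be only the sample data, so the header has to go. A sketch that locates the "data" chunk instead of assuming a fixed 44-byte header (helper name is mine; a canonical file with a single "data" chunk is assumed):
extension Data {
    // Returns the PCM payload of a RIFF/WAVE blob, or nil if no "data" chunk is found.
    func wavDataChunk() -> Data? {
        let dataID = Data([0x64, 0x61, 0x74, 0x61]) // ASCII "data"
        guard let idRange = range(of: dataID), endIndex - idRange.upperBound >= 4 else { return nil }
        let s = idRange.upperBound
        // A 4-byte little-endian chunk size follows the chunk id:
        let size = Int(self[s]) | Int(self[s + 1]) << 8 | Int(self[s + 2]) << 16 | Int(self[s + 3]) << 24
        let start = s + 4
        return subdata(in: start ..< Swift.min(endIndex, start + size))
    }
}
You would then pass the returned payload, not the whole file blob, to toPCMBuffer.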

Example of detecting USB device with VID and PID

I have a USB device that will use neither serial nor HID communication. It is in DFU mode. How can I detect it, for a start? I have read a lot of articles and looked at examples, but they did not help me.
As I understand it, I should use the IOKit.usb library for this. It would be great if someone could show me how to detect this device :(
Thanks for the answers, I built my own solution:
class DFUDevice: NSObject {
let vendorId = 0x0483
let productId = 0xdf11
static let sharedInstance = DFUDevice()
private func reloadMonitor(iterator:io_iterator_t) {
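// Draining the iterator is what arms (and re-arms) the notification:
// until every pending io_object_t has been consumed, no further
// callbacks are delivered for it.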
repeat {
let nextService = IOIteratorNext(iterator)
guard nextService != 0 else { break }
IOObjectRelease(nextService)
} while (true)
}
func connected(iterator:io_iterator_t) {
self.reloadMonitor(iterator: iterator)
}
func disconnected(iterator:io_iterator_t) {
self.reloadMonitor(iterator: iterator)
}
func initUsb() {
var matchedIterator:io_iterator_t = 0
var removalIterator:io_iterator_t = 0
let notifyPort:IONotificationPortRef = IONotificationPortCreate(kIOMasterPortDefault)
IONotificationPortSetDispatchQueue(notifyPort, DispatchQueue(label: "IODetector"))
let matchingDict = IOServiceMatching(kIOUSBDeviceClassName)
as NSMutableDictionary
matchingDict[kUSBVendorID] = NSNumber(value: self.vendorId)
matchingDict[kUSBProductID] = NSNumber(value: self.productId)
let matchingCallback:IOServiceMatchingCallback = { (userData, iterator) in
let this = Unmanaged<DFUDevice>
.fromOpaque(userData!).takeUnretainedValue()
this.connected(iterator: iterator)
}
let removalCallback: IOServiceMatchingCallback = {
(userData, iterator) in
let this = Unmanaged<DFUDevice>
.fromOpaque(userData!).takeUnretainedValue()
this.disconnected(iterator: iterator)
};
let selfPtr = Unmanaged.passUnretained(self).toOpaque()
IOServiceAddMatchingNotification(notifyPort, kIOFirstMatchNotification, matchingDict, matchingCallback, selfPtr, &matchedIterator)
IOServiceAddMatchingNotification(notifyPort, kIOTerminatedNotification, matchingDict, removalCallback, selfPtr, &removalIterator)
if matchedIterator != 0 {
self.connected(iterator: matchedIterator)
matchedIterator = 0
}
if removalIterator != 0 {
self.reloadMonitor(iterator: removalIterator)
removalIterator = 0
}
self.reloadMonitor(iterator: matchedIterator)
self.reloadMonitor(iterator: removalIterator)
RunLoop.current.run();
}
}
To run it:
let DFUDeviceDaemon = Thread(target: DFUDevice.sharedInstance, selector:#selector(DFUDevice.initUsb), object: nil)
DFUDeviceDaemon.start()

Record rtsp stream with ffmpeg in iOS

I've followed iFrameExtractor to successfully stream RTSP in my Swift project. The project also has a recording function: it basically uses avformat_write_header, av_interleaved_write_frame and av_write_trailer to save the RTSP source into an mp4 file.
When I use this project on my device, the RTSP streaming works fine, but the recording function always generates a blank mp4 file with no image and no sound.
Could anyone tell me which step I missed?
I'm using an iPhone 5 with iOS 9.1 and Xcode 7.1.1.
The ffmpeg is version 2.8.3, compiled following CompilationGuide – FFmpeg.
Following is the sample code in this project.
The function that generates every frame:
-(BOOL)stepFrame {
// AVPacket packet;
int frameFinished=0;
static bool bFirstIFrame=false;
static int64_t vPTS=0, vDTS=0, vAudioPTS=0, vAudioDTS=0;
while(!frameFinished && av_read_frame(pFormatCtx, &packet)>=0) {
// Is this a packet from the video stream?
if(packet.stream_index==videoStream) {
// 20130525 albert.liao modified start
// Initialize a new format context for writing file
if(veVideoRecordState!=eH264RecIdle)
{
switch(veVideoRecordState)
{
case eH264RecInit:
{
if ( !pFormatCtx_Record )
{
int bFlag = 0;
//NSString *videoPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/test.mp4"];
NSString *videoPath = @"/Users/liaokuohsun/iFrameTest.mp4";
const char *file = [videoPath UTF8String];
pFormatCtx_Record = avformat_alloc_context();
bFlag = h264_file_create(file, pFormatCtx_Record, pCodecCtx, pAudioCodecCtx,/*fps*/0.0, packet.data, packet.size );
if(bFlag==true)
{
veVideoRecordState = eH264RecActive;
fprintf(stderr, "h264_file_create success\n");
}
else
{
veVideoRecordState = eH264RecIdle;
fprintf(stderr, "h264_file_create error\n");
}
}
}
//break;
case eH264RecActive:
{
if((bFirstIFrame==false) &&(packet.flags&AV_PKT_FLAG_KEY)==AV_PKT_FLAG_KEY)
{
bFirstIFrame=true;
vPTS = packet.pts ;
vDTS = packet.dts ;
#if 0
NSRunLoop *pRunLoop = [NSRunLoop currentRunLoop];
[pRunLoop addTimer:RecordingTimer forMode:NSDefaultRunLoopMode];
#else
[NSTimer scheduledTimerWithTimeInterval:5.0//2.0
target:self
selector:@selector(StopRecording:)
userInfo:nil
repeats:NO];
#endif
}
// Record audio when 1st i-Frame is obtained
if(bFirstIFrame==true)
{
if ( pFormatCtx_Record )
{
#if PTS_DTS_IS_CORRECT==1
packet.pts = packet.pts - vPTS;
packet.dts = packet.dts - vDTS;
#endif
h264_file_write_frame( pFormatCtx_Record, packet.stream_index, packet.data, packet.size, packet.dts, packet.pts);
}
else
{
NSLog(#"pFormatCtx_Record no exist");
}
}
}
break;
case eH264RecClose:
{
if ( pFormatCtx_Record )
{
h264_file_close(pFormatCtx_Record);
#if 0
// 20130607 Test
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^(void)
{
ALAssetsLibrary *library = [[ALAssetsLibrary alloc]init];
NSString *filePathString = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/test.mp4"];
NSURL *filePathURL = [NSURL fileURLWithPath:filePathString isDirectory:NO];
if(1)// ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:filePathURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:filePathURL completionBlock:^(NSURL *assetURL, NSError *error){
if (error) {
// TODO: error handling
NSLog(#"writeVideoAtPathToSavedPhotosAlbum error");
} else {
// TODO: success handling
NSLog(#"writeVideoAtPathToSavedPhotosAlbum success");
}
}];
}
[library release];
});
#endif
vPTS = 0;
vDTS = 0;
vAudioPTS = 0;
vAudioDTS = 0;
pFormatCtx_Record = NULL;
NSLog(#"h264_file_close() is finished");
}
else
{
NSLog(#"fc no exist");
}
bFirstIFrame = false;
veVideoRecordState = eH264RecIdle;
}
break;
default:
if ( pFormatCtx_Record )
{
h264_file_close(pFormatCtx_Record);
pFormatCtx_Record = NULL;
}
NSLog(#"[ERROR] unexpected veVideoRecordState!!");
veVideoRecordState = eH264RecIdle;
break;
}
}
// Decode video frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
}
else if(packet.stream_index==audioStream)
{
// 20131024 albert.liao modfied start
static int vPktCount=0;
BOOL bIsAACADTS = FALSE;
int ret = 0;
if(aPlayer.vAACType == eAAC_UNDEFINED)
{
tAACADTSHeaderInfo vxAACADTSHeaderInfo = {0};
bIsAACADTS = [AudioUtilities parseAACADTSHeader:(uint8_t *)packet.data ToHeader:&vxAACADTSHeaderInfo];
}
@synchronized(aPlayer)
{
if(aPlayer==nil)
{
aPlayer = [[AudioPlayer alloc]initAudio:nil withCodecCtx:(AVCodecContext *) pAudioCodecCtx];
NSLog(#"aPlayer initAudio");
if(bIsAACADTS)
{
aPlayer.vAACType = eAAC_ADTS;
//NSLog(#"is ADTS AAC");
}
}
else
{
if(vPktCount<5) // The voice is listened once image is rendered
{
vPktCount++;
}
else
{
if([aPlayer getStatus]!=eAudioRunning)
{
dispatch_async(dispatch_get_main_queue(), ^(void) {
@synchronized(aPlayer)
{
NSLog(@"aPlayer start play");
[aPlayer Play];
}
});
}
}
}
};
@synchronized(aPlayer)
{
int ret = 0;
ret = [aPlayer putAVPacket:&packet];
if(ret <= 0)
NSLog(@"Put Audio Packet Error!!");
}
// 20131024 albert.liao modfied end
if(bFirstIFrame==true)
{
switch(veVideoRecordState)
{
case eH264RecActive:
{
if ( pFormatCtx_Record )
{
h264_file_write_audio_frame(pFormatCtx_Record, pAudioCodecCtx, packet.stream_index, packet.data, packet.size, packet.dts, packet.pts);
}
else
{
NSLog(#"pFormatCtx_Record no exist");
}
}
}
}
}
else
{
//fprintf(stderr, "packet len=%d, Byte=%02X%02X%02X%02X%02X\n",\
packet.size, packet.data[0],packet.data[1],packet.data[2],packet.data[3], packet.data[4]);
}
// 20130525 albert.liao modified end
}
return frameFinished!=0;
}
avformat_write_header:
int h264_file_create(const char *pFilePath, AVFormatContext *fc, AVCodecContext *pCodecCtx,AVCodecContext *pAudioCodecCtx, double fps, void *p, int len )
{
int vRet=0;
AVOutputFormat *of=NULL;
AVStream *pst=NULL;
AVCodecContext *pcc=NULL, *pAudioOutputCodecContext=NULL;
avcodec_register_all();
av_register_all();
av_log_set_level(AV_LOG_VERBOSE);
if(!pFilePath)
{
fprintf(stderr, "FilePath no exist");
return -1;
}
if(!fc)
{
fprintf(stderr, "AVFormatContext no exist");
return -1;
}
fprintf(stderr, "file=%s\n",pFilePath);
// Create container
of = av_guess_format( 0, pFilePath, 0 );
fc->oformat = of;
strcpy( fc->filename, pFilePath );
// Add video stream
pst = avformat_new_stream( fc, 0 );
vVideoStreamIdx = pst->index;
fprintf(stderr,"Video Stream:%d",vVideoStreamIdx);
pcc = pst->codec;
avcodec_get_context_defaults3( pcc, AVMEDIA_TYPE_VIDEO );
// Save the stream as origin setting without convert
pcc->codec_type = pCodecCtx->codec_type;
pcc->codec_id = pCodecCtx->codec_id;
pcc->bit_rate = pCodecCtx->bit_rate;
pcc->width = pCodecCtx->width;
pcc->height = pCodecCtx->height;
if(fps==0)
{
double fps=0.0;
AVRational pTimeBase;
pTimeBase.num = pCodecCtx->time_base.num;
pTimeBase.den = pCodecCtx->time_base.den;
fps = 1.0/ av_q2d(pCodecCtx->time_base)/ FFMAX(pCodecCtx->ticks_per_frame, 1);
fprintf(stderr,"fps_method(tbc): 1/av_q2d()=%g",fps);
pcc->time_base.num = 1;
pcc->time_base.den = fps;
}
else
{
pcc->time_base.num = 1;
pcc->time_base.den = fps;
}
// reference ffmpeg\libavformat\utils.c
// For SPS and PPS in avcC container
pcc->extradata = malloc(sizeof(uint8_t)*pCodecCtx->extradata_size);
memcpy(pcc->extradata, pCodecCtx->extradata, pCodecCtx->extradata_size);
pcc->extradata_size = pCodecCtx->extradata_size;
// For Audio stream
if(pAudioCodecCtx)
{
AVCodec *pAudioCodec=NULL;
AVStream *pst2=NULL;
pAudioCodec = avcodec_find_encoder(AV_CODEC_ID_AAC);
// Add audio stream
pst2 = avformat_new_stream( fc, pAudioCodec );
vAudioStreamIdx = pst2->index;
pAudioOutputCodecContext = pst2->codec;
avcodec_get_context_defaults3( pAudioOutputCodecContext, pAudioCodec );
fprintf(stderr,"Audio Stream:%d",vAudioStreamIdx);
fprintf(stderr,"pAudioCodecCtx->bits_per_coded_sample=%d",pAudioCodecCtx->bits_per_coded_sample);
pAudioOutputCodecContext->codec_type = AVMEDIA_TYPE_AUDIO;
pAudioOutputCodecContext->codec_id = AV_CODEC_ID_AAC;
// Copy the codec attributes
pAudioOutputCodecContext->channels = pAudioCodecCtx->channels;
pAudioOutputCodecContext->channel_layout = pAudioCodecCtx->channel_layout;
pAudioOutputCodecContext->sample_rate = pAudioCodecCtx->sample_rate;
pAudioOutputCodecContext->bit_rate = 12000;//pAudioCodecCtx->sample_rate * pAudioCodecCtx->bits_per_coded_sample;
pAudioOutputCodecContext->bits_per_coded_sample = pAudioCodecCtx->bits_per_coded_sample;
pAudioOutputCodecContext->profile = pAudioCodecCtx->profile;
//FF_PROFILE_AAC_LOW;
// pAudioCodecCtx->bit_rate;
// AV_SAMPLE_FMT_U8P, AV_SAMPLE_FMT_S16P
//pAudioOutputCodecContext->sample_fmt = AV_SAMPLE_FMT_FLTP;//pAudioCodecCtx->sample_fmt;
pAudioOutputCodecContext->sample_fmt = pAudioCodecCtx->sample_fmt;
//pAudioOutputCodecContext->sample_fmt = AV_SAMPLE_FMT_U8;
pAudioOutputCodecContext->sample_aspect_ratio = pAudioCodecCtx->sample_aspect_ratio;
pAudioOutputCodecContext->time_base.num = pAudioCodecCtx->time_base.num;
pAudioOutputCodecContext->time_base.den = pAudioCodecCtx->time_base.den;
pAudioOutputCodecContext->ticks_per_frame = pAudioCodecCtx->ticks_per_frame;
pAudioOutputCodecContext->frame_size = 1024;
fprintf(stderr,"profile:%d, sample_rate:%d, channles:%d", pAudioOutputCodecContext->profile, pAudioOutputCodecContext->sample_rate, pAudioOutputCodecContext->channels);
AVDictionary *opts = NULL;
av_dict_set(&opts, "strict", "experimental", 0);
if (avcodec_open2(pAudioOutputCodecContext, pAudioCodec, &opts) < 0) {
fprintf(stderr, "\ncould not open codec\n");
}
av_dict_free(&opts);
#if 0
// For Audio, this part is no need
if(pAudioCodecCtx->extradata_size!=0)
{
NSLog(#"extradata_size !=0");
pAudioOutputCodecContext->extradata = malloc(sizeof(uint8_t)*pAudioCodecCtx->extradata_size);
memcpy(pAudioOutputCodecContext->extradata, pAudioCodecCtx->extradata, pAudioCodecCtx->extradata_size);
pAudioOutputCodecContext->extradata_size = pAudioCodecCtx->extradata_size;
}
else
{
// For WMA test only
pAudioOutputCodecContext->extradata_size = 0;
NSLog(#"extradata_size ==0");
}
#endif
}
if(fc->oformat->flags & AVFMT_GLOBALHEADER)
{
pcc->flags |= CODEC_FLAG_GLOBAL_HEADER;
pAudioOutputCodecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
if ( !( fc->oformat->flags & AVFMT_NOFILE ) )
{
vRet = avio_open( &fc->pb, fc->filename, AVIO_FLAG_WRITE );
if(vRet!=0)
{
fprintf(stderr,"avio_open(%s) error", fc->filename);
}
}
// dump format in console
av_dump_format(fc, 0, pFilePath, 1);
vRet = avformat_write_header( fc, NULL );
if(vRet==0)
return 1;
else
return 0;
}
av_interleaved_write_frame:
void h264_file_write_frame(AVFormatContext *fc, int vStreamIdx, const void* p, int len, int64_t dts, int64_t pts )
{
AVStream *pst = NULL;
AVPacket pkt;
if ( 0 > vVideoStreamIdx )
return;
// may be audio or video
pst = fc->streams[ vStreamIdx ];
// Init packet
av_init_packet( &pkt );
if(vStreamIdx ==vVideoStreamIdx)
{
pkt.flags |= ( 0 >= getVopType( p, len ) ) ? AV_PKT_FLAG_KEY : 0;
//pkt.flags |= AV_PKT_FLAG_KEY;
pkt.stream_index = pst->index;
pkt.data = (uint8_t*)p;
pkt.size = len;
pkt.dts = AV_NOPTS_VALUE;
pkt.pts = AV_NOPTS_VALUE;
// TODO: mark or unmark the log
//fprintf(stderr, "dts=%lld, pts=%lld\n",dts,pts);
// av_write_frame( fc, &pkt );
}
av_interleaved_write_frame( fc, &pkt );
}
av_write_trailer:
void h264_file_close(AVFormatContext *fc)
{
if ( !fc )
return;
av_write_trailer( fc );
if ( fc->oformat && !( fc->oformat->flags & AVFMT_NOFILE ) && fc->pb )
avio_close( fc->pb );
av_free( fc );
}
Thanks.
It looks like you're using the same AVFormatContext for both the input and the output?
In the line
pst = fc->streams[ vStreamIdx ];
you're assigning the AVStream* from the AVFormatContext connected to your input (the RTSP stream). But later on you're trying to write the packet back to the very same context with av_interleaved_write_frame( fc, &pkt );. I kind of think of a context as a file, which has helped me navigate this type of thing better. I do something identical to what you're doing (though not on iOS) where I use a separate AVFormatContext for each of the input (RTSP stream) and the output (mp4 file). If I'm correct, I think what you need to do is initialize a separate output AVFormatContext and write your packets to that one.
The following code (without error checking everything) is what I do to take an AVFormatContext * output_format_context = NULL and the AVFormatContext * input_format_context that I had associated with the RTSP stream and write from one to the other. This is after I have fetched a packet, etc., which in your case it looks like you're populating (I just take the packet from av_read_frame and re-package it).
This is code that could go in your write-frame function (but it also includes the writing of the header).
AVFormatContext * output_format_context;
AVStream * in_stream_2;
AVStream * out_stream_2;
// Allocate the context with the output file
avformat_alloc_output_context2(&output_format_context, NULL, NULL, out_filename.c_str());
// Point to AVOutputFormat * output_format for manipulation
output_format = output_format_context->oformat;
// Loop through all streams
for (i = 0; i < input_format_context->nb_streams; i++) {
// Create a pointer to the input stream that was allocated earlier in the code
AVStream *in_stream = input_format_context->streams[i];
// Create a pointer to a new stream that will be part of the output
AVStream *out_stream = avformat_new_stream(output_format_context, in_stream->codec->codec);
// Set time_base of the new output stream to equal the input stream one since I'm not changing anything (can avoid but get a deprecation warning)
out_stream->time_base = in_stream->time_base;
// This is the non-deprecated way of copying all the parameters from the input stream into the output stream since everything stays the same
avcodec_parameters_from_context(out_stream->codecpar, in_stream->codec);
// I don't remember what this is for :)
out_stream->codec->codec_tag = 0;
// This just sets a flag from the format context to the stream relating to the header
if (output_format_context->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
// Check the NOFILE flag and open the output file context (previously the output file was only associated with the format context; now it is actually opened).
if (!(output_format->flags & AVFMT_NOFILE))
avio_open(&output_format_context->pb, out_filename.c_str(), AVIO_FLAG_WRITE);
// Write the header (not sure if this is always needed, but for h264 I believe it is).
avformat_write_header(output_format_context,NULL);
// Re-getting the appropriate stream that was populated above (this should allow for both audio/video)
in_stream_2 = input_format_context->streams[packet.stream_index];
out_stream_2 = output_format_context->streams[packet.stream_index];
// Rescaling pts and dts, duration and pos - you would do as you need in your code.
packet.pts = av_rescale_q_rnd(packet.pts, in_stream_2->time_base, out_stream_2->time_base, (AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts, in_stream_2->time_base, out_stream_2->time_base, (AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration, in_stream_2->time_base, out_stream_2->time_base);
packet.pos = -1;
// The first packet of my stream always gives me negative dts/pts, so this just protects that first one for my purposes. You probably don't need it.
if (packet.dts < 0) packet.dts = 0;
if (packet.pts < 0) packet.pts = 0;
// Finally write the frame
av_interleaved_write_frame(output_format_context, &packet);
// ....
// Write trailer, close/cleanup... etc
// ....
This code is fairly bare-bones and doesn't include the setup (which it sounds like you're doing correctly anyway). I would also imagine this code could be cleaned up and tweaked for your purposes, but it works for me to re-write the RTSP stream into a file (in my case many files, but code not shown).
The code is C code, so you might need some minor tweaks to make it Swift compatible (for some of the library function calls, maybe). I think overall it should be compatible, though.
Hopefully this helps point you in the right direction. It was cobbled together thanks to several sample-code sources (I don't remember where), along with warning prompts from the libraries themselves.