Resolving SRV records with iOS SDK - objective-c

I want to resolve DNS SRV records using the iOS SDK.
I've already tried Apple's high-level Bonjour APIs, but they're not what I need, so now I'm using DNS-SD.
void *processQueryForSRVRecord(void *record) {
    DNSServiceRef sdRef;
    int context; // placeholder context handed to the callback

    printf("Setting up query for record: %s\n", (const char *)record);
    DNSServiceQueryRecord(&sdRef, 0, 0, (const char *)record, kDNSServiceType_SRV, kDNSServiceClass_IN, callback, &context);
    printf("Processing query for record: %s\n", (const char *)record);
    DNSServiceProcessResult(sdRef);
    printf("Deallocating query for record: %s\n", (const char *)record);
    DNSServiceRefDeallocate(sdRef);
    return NULL;
}
This works as long as it is given a valid SRV record (for example, _xmpp-server._tcp.gmail.com), but when the record is mistyped, DNSServiceProcessResult(sdRef) goes into an infinite loop.
Is there a way to stop DNSServiceProcessResult, or must I cancel the thread calling it?

Use good old select(). This is what I have at the moment:
- (void)updateDnsRecords
{
    if (self.dnsUpdatePending == YES)
    {
        return;
    }
    else
    {
        self.dnsUpdatePending = YES;
    }

    NSLog(@"DNS update");
    DNSServiceRef sdRef;
    DNSServiceErrorType err;

    const char* host = [self.dnsHost UTF8String];
    if (host != NULL)
    {
        NSTimeInterval remainingTime = self.dnsUpdateTimeout;
        NSDate* startTime = [NSDate date];

        err = DNSServiceQueryRecord(&sdRef, 0, 0,
                                    host,
                                    kDNSServiceType_SRV,
                                    kDNSServiceClass_IN,
                                    processDnsReply,
                                    &remainingTime);

        // This is necessary so we don't hang forever if there are no results
        int dns_sd_fd = DNSServiceRefSockFD(sdRef);
        int nfds = dns_sd_fd + 1;
        fd_set readfds;
        int result;

        while (remainingTime > 0)
        {
            FD_ZERO(&readfds);
            FD_SET(dns_sd_fd, &readfds);

            struct timeval tv;
            tv.tv_sec = (time_t)remainingTime;
            tv.tv_usec = (remainingTime - tv.tv_sec) * 1000000;

            result = select(nfds, &readfds, (fd_set*)NULL, (fd_set*)NULL, &tv);
            if (result == 1)
            {
                if (FD_ISSET(dns_sd_fd, &readfds))
                {
                    err = DNSServiceProcessResult(sdRef);
                    if (err != kDNSServiceErr_NoError)
                    {
                        NSLog(@"There was an error reading the DNS SRV records.");
                        break;
                    }
                }
            }
            else if (result == 0)
            {
                NSLog(@"DNS SRV select() timed out");
                break;
            }
            else
            {
                if (errno == EINTR)
                {
                    NSLog(@"DNS SRV select() interrupted, retry.");
                }
                else
                {
                    NSLog(@"DNS SRV select() returned %d errno %d %s.", result, errno, strerror(errno));
                    break;
                }
            }

            NSTimeInterval elapsed = [[NSDate date] timeIntervalSinceDate:startTime];
            remainingTime -= elapsed;
        }

        DNSServiceRefDeallocate(sdRef);
    }
}
static void processDnsReply(DNSServiceRef sdRef,
                            DNSServiceFlags flags,
                            uint32_t interfaceIndex,
                            DNSServiceErrorType errorCode,
                            const char* fullname,
                            uint16_t rrtype,
                            uint16_t rrclass,
                            uint16_t rdlen,
                            const void* rdata,
                            uint32_t ttl,
                            void* context)
{
    NSTimeInterval* remainingTime = (NSTimeInterval*)context;

    // If a timeout occurs the value of the errorCode argument will be
    // kDNSServiceErr_Timeout.
    if (errorCode != kDNSServiceErr_NoError)
    {
        return;
    }

    // The flags argument will have the kDNSServiceFlagsAdd bit set if the
    // callback is being invoked when a record is received in response to
    // the query.
    //
    // If the kDNSServiceFlagsAdd bit is clear then the callback is being
    // invoked because the record has expired, in which case the ttl argument
    // will be 0.
    //
    // Once no more replies are queued (kDNSServiceFlagsMoreComing clear),
    // zero the remaining time so the select() loop above stops.
    if ((flags & kDNSServiceFlagsMoreComing) == 0)
    {
        *remainingTime = 0;
    }

    // Record parsing code below was copied from Apple's SRVResolver sample.
    //
    // dns_parse_resource_record() expects a resource record laid out as it
    // would be in a DNS packet, so rebuild one: a one-byte empty name, the
    // type, the class, a placeholder TTL (the real TTL arrives in the ttl
    // parameter), the rdata length, and finally the rdata itself.
    NSMutableData * rrData = [NSMutableData data];
    dns_resource_record_t * rr;
    uint8_t u8;
    uint16_t u16;
    uint32_t u32;

    u8 = 0;
    [rrData appendBytes:&u8 length:sizeof(u8)];
    u16 = htons(kDNSServiceType_SRV);
    [rrData appendBytes:&u16 length:sizeof(u16)];
    u16 = htons(kDNSServiceClass_IN);
    [rrData appendBytes:&u16 length:sizeof(u16)];
    u32 = htonl(666); // placeholder TTL; the parser doesn't use it meaningfully
    [rrData appendBytes:&u32 length:sizeof(u32)];
    u16 = htons(rdlen);
    [rrData appendBytes:&u16 length:sizeof(u16)];
    [rrData appendBytes:rdata length:rdlen];

    rr = dns_parse_resource_record([rrData bytes], (uint32_t)[rrData length]);

    // If the parse is successful, add the results.
    if (rr != NULL)
    {
        NSString *target = [NSString stringWithCString:rr->data.SRV->target
                                              encoding:NSASCIIStringEncoding];
        if (target != nil)
        {
            uint16_t priority = rr->data.SRV->priority;
            uint16_t weight   = rr->data.SRV->weight;
            uint16_t port     = rr->data.SRV->port;

            // You'll have to replace this with your own handling method.
            [[FailoverWebInterface sharedInterface] addDnsServer:target priority:priority weight:weight port:port ttl:ttl];
        }
        dns_free_resource_record(rr);
    }
}
Here's the Apple SRVResolver sample from which I took the resource-record parsing.
That Apple sample mentions that it may block forever, yet strangely enough suggests using an NSTimer if you want to add a timeout yourself. I think using select() is a much better way.
One to-do remains: flushing the cache with DNSServiceReconfirmRecord. I won't do that now.
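For reference, a sketch of what that call might look like; this is my addition, untested, and based only on the DNSServiceReconfirmRecord declaration in dns_sd.h. The fullname, rrtype, rrclass, rdlen and rdata values are exactly the ones delivered to processDnsReply:
// Hypothetical sketch: ask mDNSResponder to re-verify a possibly stale cached record.
DNSServiceErrorType reconfirmErr = DNSServiceReconfirmRecord(kDNSServiceFlagsForce,
                                                             0, // all interfaces
                                                             fullname,
                                                             rrtype,
                                                             rrclass,
                                                             rdlen,
                                                             rdata);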
Be aware that this code works, but I'm still testing it.
You need to add libresolv.dylib to your Xcode project's 'Linked Frameworks and Libraries'.
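One practical note that goes beyond the answer above: the select() loop still blocks whichever thread calls it for up to dnsUpdateTimeout seconds, so you would typically kick the update off on a background queue, for example:
// Run the blocking lookup off the main thread so the UI stays responsive.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    [self updateDnsRecords];
});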

Related

calling Objective C and C from Swift passing callback function

I am trying to call the HappyTime onvif library from Swift.
I have the library linked into my project and I am able to call some simple functions, but I am having trouble getting the syntax right in the call that passes my callback function.
Here is the Swift code:
func discoverCameras()
{
    HappyInterface.sharedInstance().startProb()
    //this line gives syntax error
    HappyInterface.sharedInstance().setProbeCB(cameraDiscovered)
}

func cameraDiscovered(cameraFound:UnsafeMutablePointer<DEVICE_BINFO>)
{
    table.reloadData()
}
My setProbeCB call gives this error:
Cannot convert value of type '(UnsafeMutablePointer<DEVICE_BINFO>) -> ()' to expected argument type 'UnsafeMutablePointer<onvif_probe_cb>' (aka 'UnsafeMutablePointer<CFunctionPointer<(UnsafeMutablePointer<DEVICE_BINFO>, UnsafeMutablePointer<()>) -> ()>>')
Here is the Obj-C implementation:
- (void) setProbeCB:(onvif_probe_cb *)cb {
    set_probe_cb(*cb, 0);
}
This is the Obj-C header:
- (void) setProbeCB:(onvif_probe_cb *)cb;
This is the C header:
#ifndef __H_ONVIF_PROBE_H__
#define __H_ONVIF_PROBE_H__
#include "onvif.h"
typedef void (* onvif_probe_cb)(DEVICE_BINFO * p_res, void * pdata);
#ifdef __cplusplus
extern "C" {
#endif
ONVIF_API void set_probe_cb(onvif_probe_cb cb, void * pdata);
ONVIF_API void set_probe_interval(int interval);
ONVIF_API int start_probe(int interval);
ONVIF_API void stop_probe();
ONVIF_API void send_probe_req();
#ifdef __cplusplus
}
#endif
#endif // __H_ONVIF_PROBE_H__
This is the C code:
/***************************************************************************************/
#define MAX_PROBE_FD 8
/***************************************************************************************/
onvif_probe_cb g_probe_cb = 0;
void * g_probe_cb_data = 0;
pthread_t g_probe_thread = 0;
int g_probe_fd[MAX_PROBE_FD];
int g_probe_interval = 30;
BOOL g_probe_running = FALSE;
/***************************************************************************************/
int onvif_probe_init(unsigned int ip)
{
int opt = 1;
SOCKET fd;
struct sockaddr_in addr;
struct ip_mreq mcast;
fd = socket(AF_INET, SOCK_DGRAM, 0);
if(fd < 0)
{
log_print(LOG_ERR, "socket SOCK_DGRAM error!\n");
return -1;
}
addr.sin_family = AF_INET;
addr.sin_port = htons(3702);
addr.sin_addr.s_addr = ip;
if (bind(fd, (struct sockaddr *)&addr, sizeof(addr)) == -1)
{
// if port 3702 already occupied, only receive unicast message
addr.sin_port = 0;
if (bind(fd, (struct sockaddr *)&addr, sizeof(addr)) == -1)
{
closesocket(fd);
log_print(LOG_ERR, "bind error! %s\n", sys_os_get_socket_error());
return -1;
}
}
/* reuse socket addr */
if (setsockopt(fd, SOL_SOCKET, SO_REUSEADDR, (char*)&opt, sizeof(opt)))
{
log_print(LOG_WARN, "setsockopt SO_REUSEADDR error!\n");
}
memset(&mcast, 0, sizeof(mcast));
mcast.imr_multiaddr.s_addr = inet_addr("239.255.255.250");
mcast.imr_interface.s_addr = ip;
if (setsockopt(fd, IPPROTO_IP, IP_ADD_MEMBERSHIP, (char*)&mcast, sizeof(mcast)) < 0)
{
#if __WIN32_OS__
if(setsockopt(fd, IPPROTO_IP, 5, (char*)&mcast, sizeof(mcast)) < 0)
#endif
{
closesocket(fd);
log_print(LOG_ERR, "setsockopt IP_ADD_MEMBERSHIP error! %s\n", sys_os_get_socket_error());
return -1;
}
}
return fd;
}
char probe_req1[] =
"<?xml version=\"1.0\" encoding=\"utf-8\"?>"
"<Envelope xmlns:tds=\"http://www.onvif.org/ver10/device/wsdl\" xmlns=\"http://www.w3.org/2003/05/soap-envelope\">"
"<Header>"
"<wsa:MessageID xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/08/addressing\">uuid:%s</wsa:MessageID>"
"<wsa:To xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/08/addressing\">urn:schemas-xmlsoap-org:ws:2005:04:discovery</wsa:To>"
"<wsa:Action xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/08/addressing\">http://schemas.xmlsoap.org/ws/2005/04/discovery/Probe</wsa:Action>"
"</Header>"
"<Body>"
"<Probe xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" xmlns=\"http://schemas.xmlsoap.org/ws/2005/04/discovery\">"
"<Types>tds:Device</Types>"
"<Scopes />"
"</Probe>"
"</Body>"
"</Envelope>";
char probe_req2[] =
"<?xml version=\"1.0\" encoding=\"utf-8\"?>"
"<Envelope xmlns:dn=\"http://www.onvif.org/ver10/network/wsdl\" xmlns=\"http://www.w3.org/2003/05/soap-envelope\">"
"<Header>"
"<wsa:MessageID xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/08/addressing\">uuid:%s</wsa:MessageID>"
"<wsa:To xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/08/addressing\">urn:schemas-xmlsoap-org:ws:2005:04:discovery</wsa:To>"
"<wsa:Action xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/08/addressing\">http://schemas.xmlsoap.org/ws/2005/04/discovery/Probe</wsa:Action>"
"</Header>"
"<Body>"
"<Probe xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" xmlns=\"http://schemas.xmlsoap.org/ws/2005/04/discovery\">"
"<Types>dn:NetworkVideoTransmitter</Types>"
"<Scopes />"
"</Probe>"
"</Body>"
"</Envelope>";
int onvif_probe_req_tx(int fd)
{
int len;
int rlen;
char * p_bufs = NULL;
struct sockaddr_in addr;
int buflen = 10*1024;
p_bufs = (char *)malloc(buflen);
if (NULL == p_bufs)
{
return -1;
}
memset(p_bufs, 0, buflen);
sprintf(p_bufs, probe_req1, onvif_uuid_create());
memset(&addr, 0, sizeof(addr));
addr.sin_family = AF_INET;
addr.sin_addr.s_addr = inet_addr("239.255.255.250");
addr.sin_port = htons(3702);
len = strlen(p_bufs);
rlen = sendto(fd, p_bufs, len, 0, (struct sockaddr *)&addr, sizeof(struct sockaddr_in));
if (rlen != len)
{
log_print(LOG_ERR, "onvif_probe_req_tx::rlen = %d,slen = %d\r\n", rlen, len);
}
usleep(1000);
memset(p_bufs, 0, buflen);
sprintf(p_bufs, probe_req2, onvif_uuid_create());
len = strlen(p_bufs);
rlen = sendto(fd, p_bufs, len, 0, (struct sockaddr *)&addr, sizeof(struct sockaddr_in));
if (rlen != len)
{
log_print(LOG_ERR, "onvif_probe_req_tx::rlen = %d,slen = %d\r\n", rlen, len);
}
free(p_bufs);
return rlen;
}
BOOL onvif_parse_device_binfo(XMLN * p_node, DEVICE_BINFO * p_res)
{
XMLN * p_EndpointReference;
XMLN * p_Types;
XMLN * p_XAddrs;
p_EndpointReference = xml_node_soap_get(p_node, "EndpointReference");
if (p_EndpointReference)
{
XMLN * p_Address = xml_node_soap_get(p_EndpointReference, "Address");
if (p_Address && p_Address->data)
{
strncpy(p_res->EndpointReference, p_Address->data, sizeof(p_res->EndpointReference)-1);
}
}
p_Types = xml_node_soap_get(p_node, "Types");
if (p_Types && p_Types->data)
{
p_res->type = parse_DeviceType(p_Types->data);
}
p_XAddrs = xml_node_soap_get(p_node, "XAddrs");
if (p_XAddrs && p_XAddrs->data)
{
parse_XAddr(p_XAddrs->data, &p_res->XAddr);
if (p_res->XAddr.host[0] == '\0' || p_res->XAddr.port == 0)
{
return FALSE;
}
}
else
{
return FALSE;
}
return TRUE;
}
BOOL onvif_probe_res(XMLN * p_node, DEVICE_BINFO * p_res)
{
XMLN * p_body = xml_node_soap_get(p_node, "Body");
if (p_body)
{
XMLN * p_ProbeMatches = xml_node_soap_get(p_body, "ProbeMatches");
if (p_ProbeMatches)
{
XMLN * p_ProbeMatch = xml_node_soap_get(p_ProbeMatches, "ProbeMatch");
while (p_ProbeMatch && soap_strcmp(p_ProbeMatch->name, "ProbeMatch") == 0)
{
if (onvif_parse_device_binfo(p_ProbeMatch, p_res))
{
if (g_probe_cb)
{
g_probe_cb(p_res, g_probe_cb_data);
}
}
p_ProbeMatch = p_ProbeMatch->next;
}
}
else
{
XMLN * p_Hello = xml_node_soap_get(p_body, "Hello");
if (p_Hello)
{
if (onvif_parse_device_binfo(p_Hello, p_res))
{
if (g_probe_cb)
{
g_probe_cb(p_res, g_probe_cb_data);
}
}
}
}
}
return TRUE;
}
int onvif_probe_net_rx()
{
int i;
int ret;
int maxfd = 0;
int fd = 0;
char rbuf[10*1024];
fd_set fdread;
struct timeval tv = {1, 0};
FD_ZERO(&fdread);
for (i = 0; i < MAX_PROBE_FD; i++)
{
if (g_probe_fd[i] > 0)
{
FD_SET(g_probe_fd[i], &fdread);
if (g_probe_fd[i] > maxfd)
{
maxfd = g_probe_fd[i];
}
}
}
ret = select(maxfd+1, &fdread, NULL, NULL, &tv);
if (ret == 0) // Time expired
{
return 0;
}
for (i = 0; i < MAX_PROBE_FD; i++)
{
if (g_probe_fd[i] > 0 && FD_ISSET(g_probe_fd[i], &fdread))
{
int rlen;
int addr_len;
struct sockaddr_in addr;
unsigned int src_ip;
unsigned int src_port;
XMLN * p_node;
fd = g_probe_fd[i];
addr_len = sizeof(struct sockaddr_in);
rlen = recvfrom(fd, rbuf, sizeof(rbuf), 0, (struct sockaddr *)&addr, (socklen_t*)&addr_len);
if (rlen <= 0)
{
log_print(LOG_ERR, "onvif_probe_net_rx::rlen = %d, fd = %d\r\n", rlen, fd);
continue;
}
src_ip = addr.sin_addr.s_addr;
src_port = addr.sin_port;
p_node = xxx_hxml_parse(rbuf, rlen);
if (p_node == NULL)
{
log_print(LOG_ERR, "onvif_probe_net_rx::hxml parse err!!!\r\n");
}
else
{
DEVICE_BINFO res;
memset(&res, 0, sizeof(DEVICE_BINFO));
onvif_probe_res(p_node, &res);
}
xml_node_del(p_node);
}
}
return 1;
}
void * onvif_probe_thread(void * argv)
{
int count = 0;
int i = 0;
int j = 0;
for (; i < get_if_nums() && j < MAX_PROBE_FD; i++, j++)
{
unsigned int ip = get_if_ip(i);
if (ip != 0 && ip != inet_addr("127.0.0.1"))
{
g_probe_fd[j] = onvif_probe_init(ip);
}
}
for (i = 0; i < MAX_PROBE_FD; i++)
{
if (g_probe_fd[i] > 0)
{
onvif_probe_req_tx(g_probe_fd[i]);
}
}
while (g_probe_running)
{
if (onvif_probe_net_rx() == 0)
{
count++;
}
if (count >= g_probe_interval)
{
count = 0;
for (i = 0; i < MAX_PROBE_FD; i++)
{
if (g_probe_fd[i] > 0)
{
onvif_probe_req_tx(g_probe_fd[i]);
}
}
}
usleep(1000);
}
g_probe_thread = 0;
return NULL;
}
ONVIF_API void set_probe_cb(onvif_probe_cb cb, void * pdata)
{
g_probe_cb = cb;
g_probe_cb_data = pdata;
}
ONVIF_API void send_probe_req()
{
int i;
for (i = 0; i < MAX_PROBE_FD; i++)
{
if (g_probe_fd[i] > 0)
{
onvif_probe_req_tx(g_probe_fd[i]);
}
}
}
ONVIF_API void set_probe_interval(int interval)
{
g_probe_interval = interval;
if (g_probe_interval < 10)
{
g_probe_interval = 30;
}
}
ONVIF_API int start_probe(int interval)
{
g_probe_running = TRUE;
set_probe_interval(interval);
g_probe_thread = sys_os_create_thread((void *)onvif_probe_thread, NULL);
if (g_probe_thread)
{
return 0;
}
return -1;
}
ONVIF_API void stop_probe()
{
int i;
g_probe_running = FALSE;
while (g_probe_thread)
{
usleep(1000);
}
for (i = 0; i < MAX_PROBE_FD; i++)
{
if (g_probe_fd[i] > 0)
{
closesocket(g_probe_fd[i]);
g_probe_fd[i] = 0;
}
}
}
Here is what the DEVICE_BINFO struct looks like:
typedef struct
{
    int type;                    // device type
    char EndpointReference[100];
    onvif_XAddr XAddr;           // xaddr; includes port, host, url
} DEVICE_BINFO;
One thing that should be fixed is a mismatch in the number of arguments to the callback. Swift calls the Objective-C setProbeCB() method, giving it a pointer to the cameraDiscovered() function, which takes a single argument. setProbeCB() then hands the function pointer to the C set_probe_cb() function, which expects a pointer to a function taking two arguments. (The corrected two-argument signature is shown in main.swift below.)
Another observation is that setProbeCB() could just take onvif_probe_cb instead of onvif_probe_cb* and then call C code simply as set_probe_cb(cb, 0). However, I don't think it makes much difference.
Also, I think the question could have been distilled to a smaller size.
The following is a simplified example based on your original code. It shows how to implement a callback in Swift and have C code call it, but the real fun starts when passing data via callback parameters and return values. It gets very tricky very fast, and that's why the example doesn't show how to deal with DEVICE_BINFO in Swift code. It's a topic in its own right.
The key to using (Objective-)C functions and types in Swift is figuring out how they are imported into Swift. For example, to find out how onvif_probe_cb is imported, type it on a line in the Swift code, place the cursor in it, and Quick Help will show you this:
Declaration: typealias onvif_probe_cb = (UnsafeMutablePointer<DEVICE_BINFO>, UnsafeMutablePointer<Void>) -> Void
Declared in: clib.h
That tells us the parameter and return types to use in our Swift implementation of the callback.
The example is by no means production quality: there are all kinds of things that can go haywire in terms of memory management etc. Please see the code comments for additional info.
First, here is the C code header (clib.h):
#ifndef clib_h
#define clib_h

#include <stdio.h>

typedef struct {
    char hostname[50];
    int32_t port;
    char url[200];
} onvif_XAddr;

typedef struct
{
    int type;                    // device type
    char EndpointReference[100];
    onvif_XAddr XAddr;           // xaddr; includes port, host, url
} DEVICE_BINFO;

/**
 * This is the typedef of the function pointer to be used for our callback.
 * The function takes a pointer to DEVICE_BINFO and a pointer to some arbitrary
 * data meaningful to the code that provides the callback implementation. It will
 * be NULL in this example.
 */
typedef void (* onvif_probe_cb)(DEVICE_BINFO * p_res, void * pdata);

/**
 * A function to set the callback.
 */
void set_probe_cb(onvif_probe_cb cb, void * pdata);

/**
 * This is a function that calls the callback.
 */
void find_device();

#endif /* clib_h */
Here is the rest of our C source (clib.c):
#include "clib.h"
#include <string.h>

onvif_probe_cb gCB = 0;    // global variable to store the callback pointer
void * gUserData = 0;      // global variable to store pointer to user data
DEVICE_BINFO gDeviceInfo;  // global variable to store device info struct

void find_device() {
    // Set up gDeviceInfo...
    gDeviceInfo.XAddr.port = 1234;
    strcpy(gDeviceInfo.XAddr.hostname, "myhost");
    strcpy(gDeviceInfo.XAddr.url, "http://junk.com");
    gDeviceInfo.type = 777;
    // ... and, if a callback is available, call it with the device info
    if (gCB) gCB(&gDeviceInfo, gUserData);
    else puts("No callback available");
}

void set_probe_cb(onvif_probe_cb cb, void * pdata) {
    gCB = cb;
    gUserData = pdata;
}
Here is the Objective-C wrapper header (oclib.h):
#ifndef oclib_h
#define oclib_h

#import "clib.h"
#import <Foundation/Foundation.h>

/**
 * Interface of an Objective-C wrapper around the C code in clib.*. We could have
 * gone straight to C from Swift, but I'm trying to keep the example close to the
 * code in the question. Also, this extra Objective-C layer could be helpful in
 * translating data structures, such as DEVICE_BINFO, between C and Swift, since
 * Objective-C plays much nicer with C data types. This is no surprise: any C code
 * is valid Objective-C (Objective-C is a strict superset of C).
 */
@interface MyWrapper : NSObject

-(id)init;
// Please note: this one takes a single argument, while the C function it wraps
// takes 2; see the implementation.
-(void) setProbeCB:(onvif_probe_cb) cb;
-(void) findDevice;

@end

#endif /* oclib_h */
And the wrapper implementation (oclib.m):
#import "oclib.h"

/**
 * Implementation of our Objective-C wrapper.
 */
@implementation MyWrapper

-(id)init { return self; }

-(void) setProbeCB:(onvif_probe_cb) cb {
    // We don't want anything other than device info to be passed back and
    // forth via the callback, so this wrapper function takes a single argument
    // and passes 0 as the 2nd argument to the wrapped C function.
    set_probe_cb(cb, 0);
}

-(void) findDevice {
    find_device();
}

@end
Finally, here is the Swift code that implements the callback (main.swift):
var w : MyWrapper = MyWrapper()

/**
 * This is the callback implementation in Swift. We don't use the 2nd argument, userData,
 * but it still has to be present to satisfy the way the callback function pointer is
 * specified in C code.
 */
func cameraDiscovered( info : UnsafeMutablePointer<DEVICE_BINFO>, userData : UnsafeMutablePointer<Void>) {
    print("Called the Swift callback!")
    let devInfo : DEVICE_BINFO = info.memory;
    print( "The device type is \(devInfo.type)")
    print( "The device port is \(devInfo.XAddr.port)")
}

// Provide the callback to C code via Objective-C
w.setProbeCB(cameraDiscovered)
// ... and call a function that will cause the C code to invoke the callback.
w.findDevice()
The bridging header just has #import "oclib.h", thus exposing the contents of both the C and Objective-C headers to Swift.
The expected output:
Called the Swift callback!
The device type is 777
The device port is 1234

Set and get kIsStationery bit using NSURL

I'm porting some old FSRef code to use NSURL, NSFileManager and friends. Everything works except setting and getting the Finder kIsStationery bit on the file.
Is there a way to do this without falling back on deprecated FSRef methods?
So after a bit of research, here is the answer for setting and getting the stationery bit:
struct FileInfoBuf
{
    u_int32_t info_length;
    union
    {
        u_int32_t padding[8];
        struct
        {
            u_int32_t type;
            u_int32_t creator;
            u_int16_t fdFlags;
            u_int16_t location;
            u_int32_t padding[4];
        } info;
    } data;
};
bool IsStationeryPad(const std::string& path)
{
    attrlist attrList;
    FileInfoBuf fileInfo;

    attrList.bitmapcount = ATTR_BIT_MAP_COUNT;
    attrList.reserved = 0;
    attrList.commonattr = ATTR_CMN_FNDRINFO;
    attrList.volattr = 0;
    attrList.dirattr = 0;
    attrList.fileattr = 0;
    attrList.forkattr = 0;

    if (getattrlist(path.c_str(), &attrList, &fileInfo, sizeof(fileInfo), FSOPT_NOFOLLOW) == noErr)
    {
        return (CFSwapInt16BigToHost(fileInfo.data.info.fdFlags) & kIsStationery);
    }
    return false;
}
void SetStationeryPad(const std::string& path, bool isStationery)
{
    OSErr err = noErr;
    attrlist attrList;
    FileInfoBuf fileInfo;

    attrList.bitmapcount = ATTR_BIT_MAP_COUNT;
    attrList.reserved = 0;
    attrList.commonattr = ATTR_CMN_FNDRINFO;
    attrList.volattr = 0;
    attrList.dirattr = 0;
    attrList.fileattr = 0;
    attrList.forkattr = 0;

    err = getattrlist(path.c_str(), &attrList, &fileInfo, sizeof(fileInfo), FSOPT_NOFOLLOW);
    if (err == noErr)
    {
        // Set or clear the bit as requested (the Finder flags are big-endian on disk).
        if (isStationery)
            fileInfo.data.info.fdFlags |= CFSwapInt16HostToBig(kIsStationery);
        else
            fileInfo.data.info.fdFlags &= ~CFSwapInt16HostToBig(kIsStationery);
        setattrlist(path.c_str(), &attrList, &fileInfo.data, sizeof(fileInfo.data), FSOPT_NOFOLLOW);
    }
}
Note that there is no real error checking in this code. Also, applications probably shouldn't be setting this bit; it's really a user decision and should be controlled through the Finder.

Record rtsp stream with ffmpeg in iOS

I've followed iFrameExtractor to successfully stream RTSP in my Swift project. The project also has a recording function: it basically uses avformat_write_header, av_interleaved_write_frame and av_write_trailer to save the RTSP source into an mp4 file.
When I use this project on my device, the RTSP streaming works fine, but the recording function always generates a blank mp4 file with no image and no sound.
Could anyone tell me what step I'm missing?
I'm using an iPhone 5 with iOS 9.1 and Xcode 7.1.1. ffmpeg is version 2.8.3, compiled following the FFmpeg CompilationGuide.
Below is the sample code in this project.
The function that generates every frame:
-(BOOL)stepFrame {
// AVPacket packet;
int frameFinished=0;
static bool bFirstIFrame=false;
static int64_t vPTS=0, vDTS=0, vAudioPTS=0, vAudioDTS=0;
while(!frameFinished && av_read_frame(pFormatCtx, &packet)>=0) {
// Is this a packet from the video stream?
if(packet.stream_index==videoStream) {
// 20130525 albert.liao modified start
// Initialize a new format context for writing file
if(veVideoRecordState!=eH264RecIdle)
{
switch(veVideoRecordState)
{
case eH264RecInit:
{
if ( !pFormatCtx_Record )
{
int bFlag = 0;
//NSString *videoPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/test.mp4"];
NSString *videoPath = @"/Users/liaokuohsun/iFrameTest.mp4";
const char *file = [videoPath UTF8String];
pFormatCtx_Record = avformat_alloc_context();
bFlag = h264_file_create(file, pFormatCtx_Record, pCodecCtx, pAudioCodecCtx,/*fps*/0.0, packet.data, packet.size );
if(bFlag==true)
{
veVideoRecordState = eH264RecActive;
fprintf(stderr, "h264_file_create success\n");
}
else
{
veVideoRecordState = eH264RecIdle;
fprintf(stderr, "h264_file_create error\n");
}
}
}
//break;
case eH264RecActive:
{
if((bFirstIFrame==false) &&(packet.flags&AV_PKT_FLAG_KEY)==AV_PKT_FLAG_KEY)
{
bFirstIFrame=true;
vPTS = packet.pts ;
vDTS = packet.dts ;
#if 0
NSRunLoop *pRunLoop = [NSRunLoop currentRunLoop];
[pRunLoop addTimer:RecordingTimer forMode:NSDefaultRunLoopMode];
#else
[NSTimer scheduledTimerWithTimeInterval:5.0//2.0
target:self
selector:@selector(StopRecording:)
userInfo:nil
repeats:NO];
#endif
}
// Record audio when 1st i-Frame is obtained
if(bFirstIFrame==true)
{
if ( pFormatCtx_Record )
{
#if PTS_DTS_IS_CORRECT==1
packet.pts = packet.pts - vPTS;
packet.dts = packet.dts - vDTS;
#endif
h264_file_write_frame( pFormatCtx_Record, packet.stream_index, packet.data, packet.size, packet.dts, packet.pts);
}
else
{
NSLog(@"pFormatCtx_Record no exist");
}
}
}
break;
case eH264RecClose:
{
if ( pFormatCtx_Record )
{
h264_file_close(pFormatCtx_Record);
#if 0
// 20130607 Test
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^(void)
{
ALAssetsLibrary *library = [[ALAssetsLibrary alloc]init];
NSString *filePathString = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/test.mp4"];
NSURL *filePathURL = [NSURL fileURLWithPath:filePathString isDirectory:NO];
if(1)// ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:filePathURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:filePathURL completionBlock:^(NSURL *assetURL, NSError *error){
if (error) {
// TODO: error handling
NSLog(@"writeVideoAtPathToSavedPhotosAlbum error");
} else {
// TODO: success handling
NSLog(@"writeVideoAtPathToSavedPhotosAlbum success");
}
}];
}
[library release];
});
#endif
vPTS = 0;
vDTS = 0;
vAudioPTS = 0;
vAudioDTS = 0;
pFormatCtx_Record = NULL;
NSLog(@"h264_file_close() is finished");
}
else
{
NSLog(@"fc no exist");
}
bFirstIFrame = false;
veVideoRecordState = eH264RecIdle;
}
break;
default:
if ( pFormatCtx_Record )
{
h264_file_close(pFormatCtx_Record);
pFormatCtx_Record = NULL;
}
NSLog(@"[ERROR] unexpected veVideoRecordState!!");
veVideoRecordState = eH264RecIdle;
break;
}
}
// Decode video frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
}
else if(packet.stream_index==audioStream)
{
// 20131024 albert.liao modified start
static int vPktCount=0;
BOOL bIsAACADTS = FALSE;
int ret = 0;
if(aPlayer.vAACType == eAAC_UNDEFINED)
{
tAACADTSHeaderInfo vxAACADTSHeaderInfo = {0};
bIsAACADTS = [AudioUtilities parseAACADTSHeader:(uint8_t *)packet.data ToHeader:&vxAACADTSHeaderInfo];
}
@synchronized(aPlayer)
{
if(aPlayer==nil)
{
aPlayer = [[AudioPlayer alloc]initAudio:nil withCodecCtx:(AVCodecContext *) pAudioCodecCtx];
NSLog(@"aPlayer initAudio");
if(bIsAACADTS)
{
aPlayer.vAACType = eAAC_ADTS;
//NSLog(@"is ADTS AAC");
}
}
else
{
if(vPktCount<5) // delay audio until the first image has been rendered
{
vPktCount++;
}
else
{
if([aPlayer getStatus]!=eAudioRunning)
{
dispatch_async(dispatch_get_main_queue(), ^(void) {
@synchronized(aPlayer)
{
NSLog(@"aPlayer start play");
[aPlayer Play];
}
});
}
}
}
};
@synchronized(aPlayer)
{
int ret = 0;
ret = [aPlayer putAVPacket:&packet];
if(ret <= 0)
NSLog(@"Put Audio Packet Error!!");
}
// 20131024 albert.liao modified end
if(bFirstIFrame==true)
{
switch(veVideoRecordState)
{
case eH264RecActive:
{
if ( pFormatCtx_Record )
{
h264_file_write_audio_frame(pFormatCtx_Record, pAudioCodecCtx, packet.stream_index, packet.data, packet.size, packet.dts, packet.pts);
}
else
{
NSLog(@"pFormatCtx_Record no exist");
}
}
}
}
}
else
{
//fprintf(stderr, "packet len=%d, Byte=%02X%02X%02X%02X%02X\n",\
packet.size, packet.data[0],packet.data[1],packet.data[2],packet.data[3], packet.data[4]);
}
// 20130525 albert.liao modified end
}
return frameFinished!=0;
}
avformat_write_header:
int h264_file_create(const char *pFilePath, AVFormatContext *fc, AVCodecContext *pCodecCtx,AVCodecContext *pAudioCodecCtx, double fps, void *p, int len )
{
int vRet=0;
AVOutputFormat *of=NULL;
AVStream *pst=NULL;
AVCodecContext *pcc=NULL, *pAudioOutputCodecContext=NULL;
avcodec_register_all();
av_register_all();
av_log_set_level(AV_LOG_VERBOSE);
if(!pFilePath)
{
fprintf(stderr, "FilePath no exist");
return -1;
}
if(!fc)
{
fprintf(stderr, "AVFormatContext no exist");
return -1;
}
fprintf(stderr, "file=%s\n",pFilePath);
// Create container
of = av_guess_format( 0, pFilePath, 0 );
fc->oformat = of;
strcpy( fc->filename, pFilePath );
// Add video stream
pst = avformat_new_stream( fc, 0 );
vVideoStreamIdx = pst->index;
fprintf(stderr,"Video Stream:%d",vVideoStreamIdx);
pcc = pst->codec;
avcodec_get_context_defaults3( pcc, AVMEDIA_TYPE_VIDEO );
// Save the stream as origin setting without convert
pcc->codec_type = pCodecCtx->codec_type;
pcc->codec_id = pCodecCtx->codec_id;
pcc->bit_rate = pCodecCtx->bit_rate;
pcc->width = pCodecCtx->width;
pcc->height = pCodecCtx->height;
if(fps==0)
{
double fps=0.0;
AVRational pTimeBase;
pTimeBase.num = pCodecCtx->time_base.num;
pTimeBase.den = pCodecCtx->time_base.den;
fps = 1.0/ av_q2d(pCodecCtx->time_base)/ FFMAX(pCodecCtx->ticks_per_frame, 1);
fprintf(stderr,"fps_method(tbc): 1/av_q2d()=%g",fps);
pcc->time_base.num = 1;
pcc->time_base.den = fps;
}
else
{
pcc->time_base.num = 1;
pcc->time_base.den = fps;
}
// reference ffmpeg\libavformat\utils.c
// For SPS and PPS in avcC container
pcc->extradata = malloc(sizeof(uint8_t)*pCodecCtx->extradata_size);
memcpy(pcc->extradata, pCodecCtx->extradata, pCodecCtx->extradata_size);
pcc->extradata_size = pCodecCtx->extradata_size;
// For Audio stream
if(pAudioCodecCtx)
{
AVCodec *pAudioCodec=NULL;
AVStream *pst2=NULL;
pAudioCodec = avcodec_find_encoder(AV_CODEC_ID_AAC);
// Add audio stream
pst2 = avformat_new_stream( fc, pAudioCodec );
vAudioStreamIdx = pst2->index;
pAudioOutputCodecContext = pst2->codec;
avcodec_get_context_defaults3( pAudioOutputCodecContext, pAudioCodec );
fprintf(stderr,"Audio Stream:%d",vAudioStreamIdx);
fprintf(stderr,"pAudioCodecCtx->bits_per_coded_sample=%d",pAudioCodecCtx->bits_per_coded_sample);
pAudioOutputCodecContext->codec_type = AVMEDIA_TYPE_AUDIO;
pAudioOutputCodecContext->codec_id = AV_CODEC_ID_AAC;
// Copy the codec attributes
pAudioOutputCodecContext->channels = pAudioCodecCtx->channels;
pAudioOutputCodecContext->channel_layout = pAudioCodecCtx->channel_layout;
pAudioOutputCodecContext->sample_rate = pAudioCodecCtx->sample_rate;
pAudioOutputCodecContext->bit_rate = 12000;//pAudioCodecCtx->sample_rate * pAudioCodecCtx->bits_per_coded_sample;
pAudioOutputCodecContext->bits_per_coded_sample = pAudioCodecCtx->bits_per_coded_sample;
pAudioOutputCodecContext->profile = pAudioCodecCtx->profile;
//FF_PROFILE_AAC_LOW;
// pAudioCodecCtx->bit_rate;
// AV_SAMPLE_FMT_U8P, AV_SAMPLE_FMT_S16P
//pAudioOutputCodecContext->sample_fmt = AV_SAMPLE_FMT_FLTP;//pAudioCodecCtx->sample_fmt;
pAudioOutputCodecContext->sample_fmt = pAudioCodecCtx->sample_fmt;
//pAudioOutputCodecContext->sample_fmt = AV_SAMPLE_FMT_U8;
pAudioOutputCodecContext->sample_aspect_ratio = pAudioCodecCtx->sample_aspect_ratio;
pAudioOutputCodecContext->time_base.num = pAudioCodecCtx->time_base.num;
pAudioOutputCodecContext->time_base.den = pAudioCodecCtx->time_base.den;
pAudioOutputCodecContext->ticks_per_frame = pAudioCodecCtx->ticks_per_frame;
pAudioOutputCodecContext->frame_size = 1024;
fprintf(stderr,"profile:%d, sample_rate:%d, channles:%d", pAudioOutputCodecContext->profile, pAudioOutputCodecContext->sample_rate, pAudioOutputCodecContext->channels);
AVDictionary *opts = NULL;
av_dict_set(&opts, "strict", "experimental", 0);
if (avcodec_open2(pAudioOutputCodecContext, pAudioCodec, &opts) < 0) {
fprintf(stderr, "\ncould not open codec\n");
}
av_dict_free(&opts);
#if 0
// For Audio, this part is no need
if(pAudioCodecCtx->extradata_size!=0)
{
NSLog(@"extradata_size !=0");
pAudioOutputCodecContext->extradata = malloc(sizeof(uint8_t)*pAudioCodecCtx->extradata_size);
memcpy(pAudioOutputCodecContext->extradata, pAudioCodecCtx->extradata, pAudioCodecCtx->extradata_size);
pAudioOutputCodecContext->extradata_size = pAudioCodecCtx->extradata_size;
}
else
{
// For WMA test only
pAudioOutputCodecContext->extradata_size = 0;
NSLog(@"extradata_size ==0");
}
#endif
}
if(fc->oformat->flags & AVFMT_GLOBALHEADER)
{
pcc->flags |= CODEC_FLAG_GLOBAL_HEADER;
pAudioOutputCodecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
if ( !( fc->oformat->flags & AVFMT_NOFILE ) )
{
vRet = avio_open( &fc->pb, fc->filename, AVIO_FLAG_WRITE );
if(vRet!=0)
{
fprintf(stderr,"avio_open(%s) error", fc->filename);
}
}
// dump format in console
av_dump_format(fc, 0, pFilePath, 1);
vRet = avformat_write_header( fc, NULL );
if(vRet==0)
return 1;
else
return 0;
}
av_interleaved_write_frame:
void h264_file_write_frame(AVFormatContext *fc, int vStreamIdx, const void* p, int len, int64_t dts, int64_t pts )
{
AVStream *pst = NULL;
AVPacket pkt;
if ( 0 > vVideoStreamIdx )
return;
// may be audio or video
pst = fc->streams[ vStreamIdx ];
// Init packet
av_init_packet( &pkt );
if(vStreamIdx ==vVideoStreamIdx)
{
pkt.flags |= ( 0 >= getVopType( p, len ) ) ? AV_PKT_FLAG_KEY : 0;
//pkt.flags |= AV_PKT_FLAG_KEY;
pkt.stream_index = pst->index;
pkt.data = (uint8_t*)p;
pkt.size = len;
pkt.dts = AV_NOPTS_VALUE;
pkt.pts = AV_NOPTS_VALUE;
// TODO: mark or unmark the log
//fprintf(stderr, "dts=%lld, pts=%lld\n",dts,pts);
// av_write_frame( fc, &pkt );
}
av_interleaved_write_frame( fc, &pkt );
}
av_write_trailer:
void h264_file_close(AVFormatContext *fc)
{
if ( !fc )
return;
av_write_trailer( fc );
if ( fc->oformat && !( fc->oformat->flags & AVFMT_NOFILE ) && fc->pb )
avio_close( fc->pb );
av_free( fc );
}
Thanks.
It looks like you're using the same AVFormatContext for both the input and the output?
In the line
pst = fc->streams[ vStreamIdx ];
you're assigning the AVStream* from the AVFormatContext connected to your input (the RTSP stream), but later on you're trying to write the packet back to the same context with av_interleaved_write_frame( fc, &pkt );. I kind of think of a context as a file, which has helped me navigate this type of thing better. I do something identical to what you're doing (not iOS though), where I use a separate AVFormatContext for each of the input (RTSP stream) and the output (mp4 file). If I'm correct, I think what you need to do is initialize a separate output AVFormatContext and write the packets to that instead.
The following code (without error checking) is what I use to take an AVFormatContext * output_format_context = NULL and the AVFormatContext * input_format_context that I had associated with the RTSP stream, and write from one to the other. This happens after I have fetched a packet, etc., which in your case it looks like you're already doing (I just take the packet from av_read_frame and re-package it).
This is code that could be in your write-frame function (but it also includes the writing of the header).
AVFormatContext * output_format_context;
AVStream * in_stream_2;
AVStream * out_stream_2;

// Allocate the context with the output file
avformat_alloc_output_context2(&output_format_context, NULL, NULL, out_filename.c_str());

// Point to AVOutputFormat * output_format for manipulation
output_format = output_format_context->oformat;

// Loop through all streams
for (i = 0; i < input_format_context->nb_streams; i++) {
    // Create a pointer to the input stream that was allocated earlier in the code
    AVStream *in_stream = input_format_context->streams[i];

    // Create a pointer to a new stream that will be part of the output
    AVStream *out_stream = avformat_new_stream(output_format_context, in_stream->codec->codec);

    // Set time_base of the new output stream equal to the input stream's, since I'm not
    // changing anything (this can be avoided, but you get a deprecation warning)
    out_stream->time_base = in_stream->time_base;

    // This is the non-deprecated way of copying all the parameters from the input stream
    // into the output stream, since everything stays the same
    avcodec_parameters_from_context(out_stream->codecpar, in_stream->codec);

    // I don't remember what this is for :)
    out_stream->codec->codec_tag = 0;

    // This just sets a flag from the format context on the stream relating to the header
    if (output_format_context->oformat->flags & AVFMT_GLOBALHEADER)
        out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}

// Check the NOFILE flag and open the output file context (previously the output file was
// only associated with the format context; now it is actually opened)
if (!(output_format->flags & AVFMT_NOFILE))
    avio_open(&output_format_context->pb, out_filename.c_str(), AVIO_FLAG_WRITE);

// Write the header (not sure if this is always needed, but for h264 I believe it is)
avformat_write_header(output_format_context, NULL);

// Re-get the appropriate streams that were populated above (this should allow for both audio/video)
in_stream_2 = input_format_context->streams[packet.stream_index];
out_stream_2 = output_format_context->streams[packet.stream_index];

// Rescale pts and dts, duration and pos - you would do this as you need in your code.
packet.pts = av_rescale_q_rnd(packet.pts, in_stream_2->time_base, out_stream_2->time_base, (AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts, in_stream_2->time_base, out_stream_2->time_base, (AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration, in_stream_2->time_base, out_stream_2->time_base);
packet.pos = -1;

// The first packet of my stream always gives me negative dts/pts, so this just protects
// that first one for my purposes. You probably don't need it.
if (packet.dts < 0) packet.dts = 0;
if (packet.pts < 0) packet.pts = 0;

// Finally write the frame
av_interleaved_write_frame(output_format_context, &packet);

// ....
// Write trailer, close/cleanup... etc
// ....
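For completeness, the close/cleanup step hinted at above would look roughly like this; a sketch mirroring the h264_file_close() from the question, to be run once the last packet has been written:
// Finalize the file: write the trailer, close the output and free the context.
av_write_trailer(output_format_context);
if (!(output_format_context->oformat->flags & AVFMT_NOFILE))
    avio_closep(&output_format_context->pb);
avformat_free_context(output_format_context);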
This code is fairly bare-bones and doesn't include the setup (which it sounds like you're doing correctly anyway). I would also imagine this code could be cleaned up and tweaked for your purposes, but it works for me for re-writing the RTSP stream into a file (in my case many files, but code not shown).
The code is C code, so you might need minor tweaks to make it Swift compatible (for some of the library function calls, maybe). I think overall it should be compatible though.
Hopefully this helps point you in the right direction. It was cobbled together thanks to several sample code sources (I don't remember where), along with warning prompts from the libraries themselves.

Getting several datagrams at a time by UDP

The client (iOS) sends a message, the server checks it and answers, and iOS displays the answer.
There are two types of answer. One is just a single answer: the server sends it only once.
The other is a little different: the server sends 20 messages.
When the server sends one message, I can process it well. It's not difficult.
The problem is with the second type:
I tried two ways of getting the data.
First, I used a simple CFSocket example with some modification. When it gets one message, it works well. When it gets 20 messages, it stops with an error. It says "Program received signal 'SIGABRT'".
//main code
// Note: per the CFSocket documentation, kCFSocketReadCallBack and
// kCFSocketDataCallBack are mutually exclusive; requesting both, as below,
// is one likely source of trouble here.
CFSocketRef ref = CFSocketCreate(kCFAllocatorDefault, PF_INET, SOCK_DGRAM, 0,
                                 kCFSocketReadCallBack|kCFSocketDataCallBack|kCFSocketConnectCallBack|kCFSocketWriteCallBack,
                                 CFSockCallBack, NULL);

struct sockaddr_in theName;
struct hostent *hp;

theName.sin_port = htons(5003);
theName.sin_family = AF_INET;
hp = gethostbyname(IPADDRESS);
if( hp == NULL ) {
    return;
}
memcpy( &theName.sin_addr.s_addr, hp->h_addr_list[PORT_NUM], hp->h_length );

CFDataRef addressData = CFDataCreate( NULL, (const UInt8 *)&theName, sizeof( struct sockaddr_in ) );
CFSocketConnectToAddress(ref, addressData, 30);

CFRunLoopSourceRef FrameRunLoopSource = CFSocketCreateRunLoopSource(NULL, ref, 0);
CFRunLoopAddSource(CFRunLoopGetCurrent(), FrameRunLoopSource, kCFRunLoopCommonModes);

//Callback Method
void CFSockCallBack (
    CFSocketRef s,
    CFSocketCallBackType callbackType,
    CFDataRef address,
    const void *data,
    void *info
) {
    NSLog(@"callback!");

    if(callbackType == kCFSocketDataCallBack) {
        [lock lock];
        NSLog(@"has data");
        const UInt8 * d = CFDataGetBytePtr((CFDataRef)data);
        int len = (int)CFDataGetLength((CFDataRef)data);
        for(int i = 0; i < len; i++) {
            // NSLog(@"%c", *(d+i));
        }
        //Data processing area
        [lock unlock];
    }

    if(callbackType == kCFSocketReadCallBack) {
        NSLog(@"to read");
        char buf[100] = {0};
        int sock = CFSocketGetNative(s);
        NSLog(@"to read");
        NSLog(@"read:%ld", (long)recv(sock, buf, 100, 0));
        NSLog(@"%s", buf);
    }

    if(callbackType == kCFSocketWriteCallBack) {
        NSLog(@"to write");
        char sendbuf[100] = {0x53, 0x4D, 0x49, 0x43, 0x2};
        //strcpy(sendbuf,"GET / HTTP/1.0\r\n\r\n");
        //NSLog(@"%c",0x53);
        CFDataRef dt = CFDataCreate(NULL, (const UInt8 *)sendbuf, 5);
        // Note: the 4th argument of CFSocketSendData is a timeout in seconds,
        // not a length; the data length comes from the CFData itself.
        CFSocketSendData(s, NULL, dt, strlen(sendbuf));
    }

    if(callbackType == kCFSocketConnectCallBack) {
        NSLog(@"connected");
    }
}
The second way is using CocoaAsyncSocket. It seems to work well, but sometimes it stops with an error. I used GCDAsyncUDPSocket; it stops randomly with signal "EXC_BAD_ACCESS".
I got this working on Android, so I think there must be some other way possible.
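No answer was recorded for this question, but one way to narrow the problem down is to take CFSocket out of the picture and read the 20 datagrams with a plain blocking recvfrom() loop on a background thread. This is a sketch, not a drop-in fix; the port number and buffer size are assumptions. If this receives all 20 replies cleanly, the crash lies in the callback/threading setup rather than in the network traffic:
#include <arpa/inet.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <unistd.h>

// Minimal blocking receive loop: bind a UDP socket and read datagrams one by
// one. Each recvfrom() call returns exactly one datagram.
void receive_replies(unsigned short port, int expected)
{
    int fd = socket(AF_INET, SOCK_DGRAM, 0);
    if (fd < 0) { perror("socket"); return; }

    struct sockaddr_in addr;
    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    addr.sin_port = htons(port);
    addr.sin_addr.s_addr = htonl(INADDR_ANY);
    if (bind(fd, (struct sockaddr *)&addr, sizeof(addr)) < 0) {
        perror("bind");
        close(fd);
        return;
    }

    char buf[2048]; // assumed to be larger than any single reply
    for (int i = 0; i < expected; i++) {
        ssize_t len = recvfrom(fd, buf, sizeof(buf), 0, NULL, NULL);
        if (len < 0) { perror("recvfrom"); break; }
        printf("datagram %d: %zd bytes\n", i + 1, len);
    }
    close(fd);
}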

Getting the MAC Address in Objective-C

How do I get the MAC address of the computer in Objective-C? I was using the following code, but it started crashing once I switched to the LLVM compiler. Can anyone tell me how to fix this code or give me new code that works? I found a way to do it in 10.6+, but I need it to work with 10.5 too.
void GetHWAddresses()
{
    struct ifconf ifc;
    struct ifreq *ifr;
    int i, sockfd;
    char buffer[BUFFERSIZE], *cp, *cplim;
    char temp[80];

    for (i = 0; i < MAXADDRS; ++i)
    {
        hw_addrs[i] = NULL;
    }

    sockfd = socket(AF_INET, SOCK_DGRAM, 0);
    if (sockfd < 0)
    {
        perror("socket failed");
        return;
    }

    ifc.ifc_len = BUFFERSIZE;
    ifc.ifc_buf = buffer;

    if (ioctl(sockfd, SIOCGIFCONF, (char *)&ifc) < 0)
    {
        perror("ioctl error");
        close(sockfd);
        return;
    }

    ifr = ifc.ifc_req;
    cplim = buffer + ifc.ifc_len;

    for (cp = buffer; cp < cplim; )
    {
        ifr = (struct ifreq *)cp;
        if (ifr->ifr_addr.sa_family == AF_LINK)
        {
            struct sockaddr_dl *sdl = (struct sockaddr_dl *)&ifr->ifr_addr;
            int a, b, c, d, e, f;
            int i;

            strcpy(temp, (char *)ether_ntoa(LLADDR(sdl)));
            sscanf(temp, "%x:%x:%x:%x:%x:%x", &a, &b, &c, &d, &e, &f);
            sprintf(temp, "%02X:%02X:%02X:%02X:%02X:%02X", a, b, c, d, e, f);

            for (i = 0; i < MAXADDRS; ++i)
            {
                if ((if_names[i] != NULL) && (strcmp(ifr->ifr_name, if_names[i]) == 0))
                {
                    if (hw_addrs[i] == NULL)
                    {
                        hw_addrs[i] = (char *)malloc(strlen(temp) + 1);
                        strcpy(hw_addrs[i], temp);
                        break;
                    }
                }
            }
        }
        cp += sizeof(ifr->ifr_name) + max(sizeof(ifr->ifr_addr), ifr->ifr_addr.sa_len);
    }

    close(sockfd);
}
Apple actually has some example code for getting the MAC address from the IO Registry.
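If the ioctl()-based walk keeps crashing under LLVM, an alternative that avoids the manual buffer arithmetic is getifaddrs(), which is available on both 10.5 and 10.6. The sketch below is my suggestion rather than Apple's sample, and the interface name "en0" is an assumption; adjust for the interface you care about:
#include <ifaddrs.h>
#include <net/if_dl.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/types.h>

// Print the MAC address of the named interface using getifaddrs().
// AF_LINK entries carry the link-layer (hardware) address.
int PrintMACAddress(const char *ifname)
{
    struct ifaddrs *addrs, *cur;
    if (getifaddrs(&addrs) != 0) return -1;

    for (cur = addrs; cur != NULL; cur = cur->ifa_next) {
        if (cur->ifa_addr && cur->ifa_addr->sa_family == AF_LINK &&
            strcmp(cur->ifa_name, ifname) == 0) {
            struct sockaddr_dl *sdl = (struct sockaddr_dl *)cur->ifa_addr;
            const unsigned char *mac = (const unsigned char *)LLADDR(sdl);
            if (sdl->sdl_alen == 6)
                printf("%02X:%02X:%02X:%02X:%02X:%02X\n",
                       mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
        }
    }
    freeifaddrs(addrs);
    return 0;
}

Usage would be something like PrintMACAddress("en0").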