Intel Skeletal Hand Tracking Library with Kinect

I've found this: http://software.intel.com/en-us/articles/the-intel-skeletal-hand-tracking-library-experimental-release
I think it is a good one. The problem is that I don't want to buy the "Creative Interactive Gesture Camera"; instead I want to use my Kinect.
It's not so easy, and I need your help. I started with Init().
This is the original function:
inline bool Tracker::Init()
{
    if(PXCSession_Create(session.ReleaseRef()) < PXC_STATUS_NO_ERROR || !session.IsValid()) return false;
    for(int i=0; ; ++i) // For valid capture contexts
    {
        PXCSession::ImplDesc desc, filter = { PXCSession::IMPL_GROUP_SENSOR, PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE };
        if(session->QueryImpl(&filter, i, &desc) < PXC_STATUS_NO_ERROR) break;
        if(session->CreateImpl(&desc, PXCCapture::CUID, (void**)capture.ReleaseRef()) < PXC_STATUS_NO_ERROR || !capture.IsValid()) continue;
        for(int j=0; ; ++j) // For valid devices
        {
            PXCCapture::DeviceInfo dinfo;
            if(capture->QueryDevice(j, &dinfo) < PXC_STATUS_NO_ERROR) break;
            if(capture->CreateDevice(j, device.ReleaseRef()) < PXC_STATUS_NO_ERROR || !device.IsValid()) continue;
            for(int k=0; ; ++k) // For valid video streams
            {
                PXCCapture::Device::StreamInfo sinfo;
                if(device->QueryStream(k, &sinfo) < PXC_STATUS_NO_ERROR) break;
                if(sinfo.cuid != PXCCapture::VideoStream::CUID || device->CreateStream(k, PXCCapture::VideoStream::CUID, (void**)stream.ReleaseRef()) < PXC_STATUS_NO_ERROR || !device.IsValid()) continue;
                for (int m=0; ; ++m) // For depth buffer profiles of at least 60 FPS
                {
                    PXCCapture::VideoStream::ProfileInfo pinfo;
                    if(stream->QueryProfile(m, &pinfo) < PXC_STATUS_NO_ERROR) break;
                    if(pinfo.imageInfo.format != PXCImage::IMAGE_TYPE_DEPTH || pinfo.frameRateMin.numerator / pinfo.frameRateMin.denominator < 60 || stream->SetProfile(&pinfo) < PXC_STATUS_NO_ERROR) continue;
                    // If we can read at least one frame
                    stream->ReadStreamAsync(image.ReleaseRef(), sp.ReleaseRef());
                    if(sp && sp->Synchronize() >= PXC_STATUS_NO_ERROR)
                    {
                        // Obtain useful properties and reserve room for local copies of depth and color images
                        dimx = pinfo.imageInfo.width; dimy = pinfo.imageInfo.height;
                        PXCPointF32 flen; device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &flen);
                        fovx = atan(dimx / (flen.x*2))*2; fovy = atan(dimy / (flen.y*2))*2;
                        color = new unsigned char[dimx*dimy*3];
                        depth = new unsigned short[dimx*dimy];
                        // Initialize tracking library
                        tracker = hsklCreateTracker(HSKL_COORDS_X_RIGHT_Y_DOWN_Z_FWD, HSKL_API_VERSION);
                        hsklSetSensorProperties(tracker, HSKL_SENSOR_CREATIVE, dimx, dimy, fovx, fovy);
                        return true;
                    }
                }
                stream.ReleaseRef();
            }
            device.ReleaseRef();
        }
        capture.ReleaseRef();
    }
    return false;
}
And this one is mine:
inline bool Tracker::Init()
{
    if(PXCSession_Create(session.ReleaseRef()) < PXC_STATUS_NO_ERROR || !session.IsValid()) return false;
    for(int i=0; ; ++i) // For valid capture contexts
    {
        PXCSession::ImplDesc desc, filter = { PXCSession::IMPL_GROUP_SENSOR, PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE };
        if(session->QueryImpl(&filter, i, &desc) < PXC_STATUS_NO_ERROR) break;
        if(session->CreateImpl(&desc, PXCCapture::CUID, (void**)capture.ReleaseRef()) < PXC_STATUS_NO_ERROR || !capture.IsValid()) continue;
        for(int j=0; ; ++j) // For valid devices
        {
            PXCCapture::DeviceInfo dinfo;
            if(capture->QueryDevice(j, &dinfo) < PXC_STATUS_NO_ERROR) break;
            if(capture->CreateDevice(j, device.ReleaseRef()) < PXC_STATUS_NO_ERROR || !device.IsValid()) continue;
            for(int k=0; ; ++k) // For valid video streams
            {
                PXCCapture::Device::StreamInfo sinfo;
                if(device->QueryStream(k, &sinfo) < PXC_STATUS_NO_ERROR) break;
                if(sinfo.cuid != PXCCapture::VideoStream::CUID || device->CreateStream(k, PXCCapture::VideoStream::CUID, (void**)stream.ReleaseRef()) < PXC_STATUS_NO_ERROR || !device.IsValid()) continue;
                for (int m=0; ; ++m) // For depth buffer profiles of at least 60 FPS
                {
                    PXCCapture::VideoStream::ProfileInfo pinfo;
                    //if(stream->QueryProfile(m, &pinfo) < PXC_STATUS_NO_ERROR) break;
                    //if(pinfo.imageInfo.format != PXCImage::IMAGE_TYPE_DEPTH || pinfo.frameRateMin.numerator / pinfo.frameRateMin.denominator < 60 || stream->SetProfile(&pinfo) < PXC_STATUS_NO_ERROR) continue;
                    //Sleep(1000);
                    dimx = (int)K.m_colorWidth;  // Kinect image size instead of the PXC profile's
                    dimy = (int)K.m_colorHeight;
                    fovx = 0.99; // hard-coded FOV in radians (see the sketch below)
                    fovy = 0.75;
                    // If we can read at least one frame
                    stream->ReadStreamAsync(image.ReleaseRef(), sp.ReleaseRef());
                    if((sp && sp->Synchronize() >= PXC_STATUS_NO_ERROR) || true) // forced true: no PXC frame will ever arrive
                    {
                        // Obtain useful properties and reserve room for local copies of depth and color images
                        //dimx = pinfo.imageInfo.width; dimy = pinfo.imageInfo.height;
                        //PXCPointF32 flen; device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &flen);
                        //fovx = atan(dimx / (flen.x*2))*2; fovy = atan(dimy / (flen.y*2))*2;
                        color = new unsigned char[dimx*dimy*3];
                        depth = new unsigned short[dimx*dimy];
                        // Initialize tracking library
                        tracker = hsklCreateTracker(HSKL_COORDS_X_RIGHT_Y_DOWN_Z_FWD, HSKL_API_VERSION);
                        hsklSetSensorProperties(tracker, HSKL_SENSOR_IDEAL, dimx, dimy, fovx, fovy);
                        return true;
                    }
                }
                stream.ReleaseRef();
            }
            device.ReleaseRef();
        }
        capture.ReleaseRef();
    }
    return false;
}
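For reference, the 0.99 and 0.75 I hard-coded above are roughly the Kinect depth camera's field of view (about 57° x 43°) expressed in radians. If your Kinect wrapper exposes the depth intrinsics, you could derive the FOV with the same math the commented-out PXC code uses; a minimal sketch, where getDepthFocalLengthX/Y are hypothetical accessors standing in for whatever your K wrapper actually provides:

// Sketch: derive the field of view from the depth camera's focal length in
// pixels, mirroring the commented-out PXC focal-length code above.
// getDepthFocalLengthX/Y are hypothetical -- substitute your wrapper's API.
float flenx = K.getDepthFocalLengthX();
float fleny = K.getDepthFocalLengthY();
fovx = atan(dimx / (flenx * 2)) * 2; // horizontal FOV in radians
fovy = atan(dimy / (fleny * 2)) * 2; // vertical FOV in radians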
And this is the original Update() function:
inline void Tracker::Update()
{
    if(sp && sp->Synchronize() >= PXC_STATUS_NO_ERROR)
    {
        PXCImage::ImageData depthData; image->AcquireAccess(PXCImage::ACCESS_READ, &depthData);
        memcpy_s(depth, sizeof(unsigned short)*dimx*dimy, depthData.planes[0], sizeof(unsigned short)*dimx*dimy);
        const unsigned short * conf = reinterpret_cast<const unsigned short *>(depthData.planes[1]);
        for(int i=0; i<dimx*dimy; ++i) color[3*i+2] = color[3*i+1] = color[3*i] = conf[i]>>2; // Can we just use IR here?
        hsklTrackOneFrame(tracker, depth, conf); // Pass data to tracking library
        image->ReleaseAccess(&depthData);
    }
    stream->ReadStreamAsync(image.ReleaseRef(), sp.ReleaseRef());
}
And mine:
inline void Tracker::Update()
{
    if((sp && sp->Synchronize() >= PXC_STATUS_NO_ERROR) || true)
    {
        Mat matd(dimy, dimx, CV_16UC1);
        Mat matir(dimy, dimx, CV_16UC1);
        K.getDepth(&matd);
        matir = K.getIR();
        double min, max;
        cv::minMaxLoc(matir, &min, &max);
        Mat exit(dimy, dimx, CV_8UC1);
        matir.convertTo(exit, CV_8U, pow(2.0, 8.0)/max);
        memcpy_s(depth, sizeof(unsigned short)*dimx*dimy, (unsigned short*)(matd.ptr()), sizeof(unsigned short)*dimx*dimy);
        const unsigned short * conf = reinterpret_cast<const unsigned short *>(matir.ptr());
        //PXCImage::ImageData depthData; image->AcquireAccess(PXCImage::ACCESS_READ, &depthData);
        //memcpy_s(depth, sizeof(unsigned short)*dimx*dimy, depthData.planes[0], sizeof(unsigned short)*dimx*dimy);
        //const unsigned short * conf = reinterpret_cast<const unsigned short *>(depthData.planes[1]);
        // Can we just use IR here?
        for(int i=0; i<dimx*dimy; ++i) {
            color[3*i+2] = color[3*i+1] = color[3*i] = exit.at<unsigned char>(i);
        }
        hsklTrackOneFrame(tracker, depth, conf); // Pass data to tracking library
        //image->ReleaseAccess(&depthData);
    }
    stream->ReadStreamAsync(image.ReleaseRef(), sp.ReleaseRef());
}
If I try to run the program there are no errors, but it doesn't track my hand. Any suggestions? Or is there somebody who is interested and wants to share the project with me?
Thanks
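One thing worth checking, though I haven't verified it (this is an assumption about the Kinect SDK, not something from the hskl docs): with the Microsoft Kinect SDK v1, a raw 16-bit depth pixel packs the depth in millimeters into the high 13 bits and the player index into the low 3 bits, whereas the PXC depth plane the tracker was written against holds plain millimeters. If K.getDepth() returns raw packed values, unpacking before the memcpy_s in Update() might matter:

// Sketch: unpack Microsoft Kinect SDK v1 raw depth (high 13 bits = depth
// in mm, low 3 bits = player index) into plain millimeters -- this mirrors
// the SDK's NuiDepthPixelToDepth macro. Assumes matd holds raw 16-bit values.
const unsigned short * raw = reinterpret_cast<const unsigned short *>(matd.ptr());
for (int i = 0; i < dimx * dimy; ++i)
    depth[i] = raw[i] >> 3; // drop the 3-bit player index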

Related

Arduino: read two MQTT topics and store each topic's payload in a different variable

I'm new to Arduino!
I have two topics on an MQTT server, "arduino/temp" and "arduino/humid".
On the Arduino, I want to store the payloads of these topics ("arduino/temp" and "arduino/humid") in two different variables.
My code is here:
char myTemp[5];
char myHum[5];

void callback(char *topic, byte *payload, unsigned int length) {
    Serial.print("Message arrived in topic: ");
    Serial.println(topic);
    // if(strcmp(topic, "arduino/temp") == 0) {
    if (String(topic) == "arduino/temp") {
        Serial.println(strcmp(topic, "arduino/temp") == 0);
        Serial.print("Message:");
        for (int i = 0; i < length; i++) {
            Serial.print((char) payload[i]);
            myTemp[i] = (char) payload[i];
        }
        Serial.println();
        Serial.println(myTemp);
        Serial.println("-----------------------");
    }
    else {
        Serial.println(strcmp(topic, "arduino/humid") == 0);
        Serial.print("Message:");
        for (int i = 0; i < length; i++) {
            Serial.print((char) payload[i]);
            myHum[i] = (char) payload[i];
        }
        Serial.println();
        Serial.println(myHum);
        Serial.println("-----------------------");
    }
}
I get the correct payload, but I can't assign it correctly to a variable.
This is the result:
13:08:34.741 -> Message arrived in topic: arduino/temp
13:08:34.741 -> 1
13:08:34.741 -> Message: 21.30
13:08:34.741 -> 21.30
13:08:34.741 -> -----------------------
13:08:35.670 -> Message arrived in topic: arduino/humid
13:08:35.670 -> 1
13:08:35.670 -> Message: 43.30
13:08:35.670 -> 43.3021.30
13:08:35.670 -> -----------------------
Can you help me, please?
A quick fix is adding one more char to both arrays and setting its value to '\0'. The character you are missing is called the null terminator; it lets the println() function know where your string ends.
char myTemp[6];
char myHum[6];

void callback(char *topic, byte *payload, unsigned int length) {
    Serial.print("Message arrived in topic: ");
    Serial.println(topic);
    if (String(topic) == "arduino/temp") {
        Serial.println(strcmp(topic, "arduino/temp") == 0);
        Serial.print("Message:");
        for (int i = 0; i < length; i++) {
            Serial.print((char) payload[i]);
            myTemp[i] = (char) payload[i];
        }
        myTemp[5] = '\0';
        Serial.println();
        Serial.println(myTemp);
        Serial.println("-----------------------");
    }
    else {
        Serial.println(strcmp(topic, "arduino/humid") == 0);
        Serial.print("Message:");
        for (int i = 0; i < length; i++) {
            Serial.print((char) payload[i]);
            myHum[i] = (char) payload[i];
        }
        myHum[5] = '\0';
        Serial.println();
        Serial.println(myHum);
        Serial.println("-----------------------");
    }
}
Note: you can also test by adding the extra char only and not setting its value, as some compilers will zero-initialize it by default.
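A slightly more defensive variant of the same fix (my own tweak, not part of the answer above) terminates at the actual payload length, so a shorter payload doesn't leave stale characters from the previous message in the buffer:

// Null-terminate at the received length rather than a fixed index.
// The arrays are 6 bytes, so clamp to 5 to leave room for '\0'.
if (length > 5) length = 5;
for (unsigned int i = 0; i < length; i++) {
    myTemp[i] = (char) payload[i];
}
myTemp[length] = '\0';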

Dijkstra's algorithm in Vue

I am trying to implement Dijkstra's pathfinding algorithm in Vue. I am following the pseudocode from https://medium.com/@nicholas.w.swift/easy-dijkstras-pathfinding-324a51eeb0f. This is what I have come up with so far; however, I am struggling to translate the pseudocode into Vue/JS. How would I be able to check every node and pop the visited nodes off the list?
Dijkstra() {
    this.unexploredset = [];
    for (let i = 0; i < 16; i++){
        for (let j = 0; j < 16; j++){
            this.nodes[i][j].position = '∞';
            this.nodes[i][j].distance = '∞';
            if(this.nodes[i][j].hasWall == false){
                this.unexploredset.push(this.nodes[i][j])
            }
        }
    }
    let current = this.nodes[3][4];
    let goal = this.nodes[14][14];
    for(let i = 0; i < 255; i++) {
        for (let k = 0; k < 4; k++) {
            if (current.distance <= current.neighbours[k].distance && current.unvisited == true)
            {
                current.unvisited = false;
                let temp = current.neighbours[k];
                current = temp
                this.unexploredset.pop(current);
                current = temp
                if (current == goal)
                {
                    console.log("found");
                    break
                }
                console.log(this.unexploredset.length)
            }
        }
    }
}
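No answer was posted here, but the step the pseudocode describes is: repeatedly take the unexplored node with the smallest distance, remove ("pop") it from the set, and relax its neighbours' distances. A minimal sketch of that loop, written in C++ for concreteness (the Node fields mirror the question's grid; mapping it back onto the Vue method is mechanical):

#include <vector>
#include <algorithm>
#include <limits>

struct Node {
    int distance = std::numeric_limits<int>::max(); // stands in for '∞'
    std::vector<Node*> neighbours;                  // up to 4 grid neighbours
};

// Take the unexplored node with the smallest distance and remove it --
// this is the "pop the visited node off the list" step from the question.
Node* extractMin(std::vector<Node*>& unexplored) {
    auto it = std::min_element(unexplored.begin(), unexplored.end(),
        [](Node* a, Node* b) { return a->distance < b->distance; });
    Node* n = *it;
    unexplored.erase(it);
    return n;
}

void dijkstra(std::vector<Node*>& unexplored, Node* start, Node* goal) {
    start->distance = 0;
    while (!unexplored.empty()) {
        Node* current = extractMin(unexplored);
        if (current == goal) return; // found
        for (Node* nb : current->neighbours) // relax: unit edge weights here
            nb->distance = std::min(nb->distance, current->distance + 1);
    }
}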

mupdf renders jpeg2000 without color?

I am working on an Android project that uses vudroid, which in turn uses MuPDF version 0.5.
Vudroid removed the original OpenJPEG support of MuPDF, so I have ported MuPDF 1.5's OpenJPEG support.
But I have run into a new problem: the color information in the jpx image is gone. (Screenshots of the desired effect and of my actual effect omitted.)
The ported load-jpx code:
#include "fitz.h"
#include "mupdf.h"

/* Without the definition of OPJ_STATIC, compilation fails on windows
 * due to the use of __stdcall. We believe it is required on some
 * linux toolchains too. */
#define OPJ_STATIC
#ifndef _MSC_VER
#define OPJ_HAVE_STDINT_H
#endif

#include <openjpeg.h>

static void fz_opj_error_callback(const char *msg, void *client_data)
{
    //fz_context *ctx = (fz_context *)client_data;
    //fz_warn(ctx, "openjpeg error: %s", msg);
}

static void fz_opj_warning_callback(const char *msg, void *client_data)
{
    //fz_context *ctx = (fz_context *)client_data;
    //fz_warn(ctx, "openjpeg warning: %s", msg);
}

static void fz_opj_info_callback(const char *msg, void *client_data)
{
    /* fz_warn("openjpeg info: %s", msg); */
}

typedef struct stream_block_s
{
    unsigned char *data;
    int size;
    int pos;
} stream_block;

static OPJ_SIZE_T fz_opj_stream_read(void * p_buffer, OPJ_SIZE_T p_nb_bytes, void * p_user_data)
{
    stream_block *sb = (stream_block *)p_user_data;
    int len;

    len = sb->size - sb->pos;
    if (len < 0)
        len = 0;
    if (len == 0)
        return (OPJ_SIZE_T)-1; /* End of file! */
    if ((OPJ_SIZE_T)len > p_nb_bytes)
        len = p_nb_bytes;
    memcpy(p_buffer, sb->data + sb->pos, len);
    sb->pos += len;
    return len;
}

static OPJ_OFF_T fz_opj_stream_skip(OPJ_OFF_T skip, void * p_user_data)
{
    stream_block *sb = (stream_block *)p_user_data;

    if (skip > sb->size - sb->pos)
        skip = sb->size - sb->pos;
    sb->pos += skip;
    return sb->pos;
}

static OPJ_BOOL fz_opj_stream_seek(OPJ_OFF_T seek_pos, void * p_user_data)
{
    stream_block *sb = (stream_block *)p_user_data;

    if (seek_pos > sb->size)
        return OPJ_FALSE;
    sb->pos = seek_pos;
    return OPJ_TRUE;
}

fz_error
fz_load_jpx(pdf_image* img, unsigned char *data, int size, fz_colorspace *defcs, int indexed)
{
    //fz_pixmap *img;
    opj_dparameters_t params;
    opj_codec_t *codec;
    opj_image_t *jpx;
    opj_stream_t *stream;
    fz_colorspace *colorspace;
    unsigned char *p;
    OPJ_CODEC_FORMAT format;
    int a, n, w, h, depth, sgnd;
    int x, y, k, v;
    stream_block sb;

    if (size < 2)
        fz_throw("not enough data to determine image format");

    /* Check for SOC marker -- if found we have a bare J2K stream */
    if (data[0] == 0xFF && data[1] == 0x4F)
        format = OPJ_CODEC_J2K;
    else
        format = OPJ_CODEC_JP2;

    opj_set_default_decoder_parameters(&params);
    if (indexed)
        params.flags |= OPJ_DPARAMETERS_IGNORE_PCLR_CMAP_CDEF_FLAG;

    codec = opj_create_decompress(format);
    opj_set_info_handler(codec, fz_opj_info_callback, 0);
    opj_set_warning_handler(codec, fz_opj_warning_callback, 0);
    opj_set_error_handler(codec, fz_opj_error_callback, 0);
    if (!opj_setup_decoder(codec, &params))
    {
        fz_throw("j2k decode failed");
    }

    stream = opj_stream_default_create(OPJ_TRUE);
    sb.data = data;
    sb.pos = 0;
    sb.size = size;

    opj_stream_set_read_function(stream, fz_opj_stream_read);
    opj_stream_set_skip_function(stream, fz_opj_stream_skip);
    opj_stream_set_seek_function(stream, fz_opj_stream_seek);
    opj_stream_set_user_data(stream, &sb);
    /* Set the length to avoid an assert */
    opj_stream_set_user_data_length(stream, size);

    if (!opj_read_header(stream, codec, &jpx))
    {
        opj_stream_destroy(stream);
        opj_destroy_codec(codec);
        fz_throw("Failed to read JPX header");
    }

    if (!opj_decode(codec, stream, jpx))
    {
        opj_stream_destroy(stream);
        opj_destroy_codec(codec);
        opj_image_destroy(jpx);
        fz_throw("Failed to decode JPX image");
    }

    opj_stream_destroy(stream);
    opj_destroy_codec(codec);

    /* jpx should never be NULL here, but check anyway */
    if (!jpx)
        fz_throw("opj_decode failed");

    pdf_logimage("opj_decode succeeded");

    for (k = 1; k < (int)jpx->numcomps; k++)
    {
        if (!jpx->comps[k].data)
        {
            opj_image_destroy(jpx);
            fz_throw("image components are missing data");
        }
        if (jpx->comps[k].w != jpx->comps[0].w)
        {
            opj_image_destroy(jpx);
            fz_throw("image components have different width");
        }
        if (jpx->comps[k].h != jpx->comps[0].h)
        {
            opj_image_destroy(jpx);
            fz_throw("image components have different height");
        }
        if (jpx->comps[k].prec != jpx->comps[0].prec)
        {
            opj_image_destroy(jpx);
            fz_throw("image components have different precision");
        }
    }

    n = jpx->numcomps;
    w = jpx->comps[0].w;
    h = jpx->comps[0].h;
    depth = jpx->comps[0].prec;
    sgnd = jpx->comps[0].sgnd;

    if (jpx->color_space == OPJ_CLRSPC_SRGB && n == 4) { n = 3; a = 1; }
    else if (jpx->color_space == OPJ_CLRSPC_SYCC && n == 4) { n = 3; a = 1; }
    else if (n == 2) { n = 1; a = 1; }
    else if (n > 4) { n = 4; a = 1; }
    else { a = 0; }

    if (defcs)
    {
        if (defcs->n == n)
        {
            colorspace = defcs;
        }
        else
        {
            fz_warn("jpx file and dict colorspaces do not match");
            defcs = NULL;
        }
    }
    if (!defcs)
    {
        switch (n)
        {
        case 1: colorspace = pdf_devicegray; break;
        case 3: colorspace = pdf_devicergb; break;
        case 4: colorspace = pdf_devicecmyk; break;
        }
    }

    //error = fz_new_pixmap(&img, colorspace, w, h);
    //if (error)
    //    return error;

    pdf_logimage("colorspace handled\n");

    int bpc = 1;
    if (colorspace) {
        bpc = 1 + colorspace->n;
    }
    pdf_logimage("w = %d, bpc = %d, h = %d\n", w, bpc, h);
    img->samples = fz_newbuffer(w * bpc * h);
    //opj_image_destroy(jpx);
    //fz_throw("out of memory loading jpx");

    p = (unsigned char*)img->samples->bp; /* was (char*), which mismatches p's type */
    pdf_logimage("start to deal with samples");
    for (y = 0; y < h; y++)
    {
        for (x = 0; x < w; x++)
        {
            for (k = 0; k < n + a; k++)
            {
                v = jpx->comps[k].data[y * w + x];
                if (sgnd)
                    v = v + (1 << (depth - 1));
                if (depth > 8)
                    v = v >> (depth - 8);
                *p++ = v;
            }
            if (!a)
                *p++ = 255;
        }
    }
    img->samples->wp = p;
    pdf_logimage("start to deal with samples succeeded");

    opj_image_destroy(jpx);

    // if (a)
    // {
    //     if (n == 4)
    //     {
    //         fz_pixmap *tmp = fz_new_pixmap(ctx, fz_device_rgb(ctx), w, h);
    //         fz_convert_pixmap(ctx, tmp, img);
    //         fz_drop_pixmap(ctx, img);
    //         img = tmp;
    //     }
    //     fz_premultiply_pixmap(ctx, img);
    // }

    return fz_okay;
}
The render code:
JNIEXPORT jbyteArray JNICALL Java_org_vudroid_pdfdroid_codec_PdfPage_drawPage
    (JNIEnv *env, jclass clazz, jlong dochandle, jlong pagehandle)
{
    renderdocument_t *doc = (renderdocument_t*) dochandle;
    renderpage_t *page = (renderpage_t*) pagehandle;
    //DEBUG("PdfView(%p).drawpage(%p, %p)", this, doc, page);
    fz_error error;
    fz_matrix ctm;
    fz_irect viewbox;
    fz_pixmap *pixmap;
    jfloat *matrix;
    jint *viewboxarr;
    jint *dimen;
    jint *buffer;
    int length, val;
    pixmap = nil;

    /* initialize parameter arrays for MuPDF */
    ctm.a = 1;
    ctm.b = 0;
    ctm.c = 0;
    ctm.d = 1;
    ctm.e = 0;
    ctm.f = 0;

    // matrix = (*env)->GetPrimitiveArrayCritical(env, matrixarray, 0);
    // ctm.a = matrix[0];
    // ctm.b = matrix[1];
    // ctm.c = matrix[2];
    // ctm.d = matrix[3];
    // ctm.e = matrix[4];
    // ctm.f = matrix[5];
    // (*env)->ReleasePrimitiveArrayCritical(env, matrixarray, matrix, 0);
    // DEBUG("Matrix: %f %f %f %f %f %f",
    //     ctm.a, ctm.b, ctm.c, ctm.d, ctm.e, ctm.f);

    // viewboxarr = (*env)->GetPrimitiveArrayCritical(env, viewboxarray, 0);
    // viewbox.x0 = viewboxarr[0];
    // viewbox.y0 = viewboxarr[1];
    // viewbox.x1 = viewboxarr[2];
    // viewbox.y1 = viewboxarr[3];
    // (*env)->ReleasePrimitiveArrayCritical(env, viewboxarray, viewboxarr, 0);
    // DEBUG("Viewbox: %d %d %d %d",
    //     viewbox.x0, viewbox.y0, viewbox.x1, viewbox.y1);
    viewbox.x0 = 0;
    viewbox.y0 = 0;
    viewbox.x1 = 595;
    viewbox.y1 = 841;

    /* do the rendering */
    DEBUG("doing the rendering...");
    //buffer = (*env)->GetPrimitiveArrayCritical(env, bufferarray, 0);

    // do the actual rendering:
    error = fz_rendertree(&pixmap, doc->rast, page->page->tree,
                          ctm, viewbox, 1);

    /* evil magic: we transform the rendered image's byte order */
    int x, y;
    if (bmpdata)
        fz_free(bmpdata);
    bmpstride = ((pixmap->w * 3 + 3) / 4) * 4;
    bmpdata = fz_malloc(pixmap->h * bmpstride);
    DEBUG("inside drawpage, bmpstride = %d, pixmap->w = %d, pixmap->h = %d\n", bmpstride, pixmap->w, pixmap->h);
    if (!bmpdata)
        return NULL; /* was a bare return, invalid in a function returning jbyteArray */

    for (y = 0; y < pixmap->h; y++)
    {
        unsigned char *p = bmpdata + y * bmpstride;
        unsigned char *s = pixmap->samples + y * pixmap->w * 4;
        for (x = 0; x < pixmap->w; x++)
        {
            p[x * 3 + 0] = s[x * 4 + 3];
            p[x * 3 + 1] = s[x * 4 + 2];
            p[x * 3 + 2] = s[x * 4 + 1];
        }
    }

    FILE* fp = fopen("/sdcard/drawpage", "wb");
    fwrite(bmpdata, pixmap->h * bmpstride, 1, fp);
    fclose(fp);

    jbyteArray array = (*env)->NewByteArray(env, pixmap->h * bmpstride);
    (*env)->SetByteArrayRegion(env, array, 0, pixmap->h * bmpstride, bmpdata);

    // if(!error) {
    //     DEBUG("Converting image buffer pixel order");
    //     length = pixmap->w * pixmap->h;
    //     unsigned int *col = pixmap->samples;
    //     int c = 0;
    //     for(val = 0; val < length; val++) {
    //         col[val] = ((col[val] & 0xFF000000) >> 24) |
    //                    ((col[val] & 0x00FF0000) >> 8) |
    //                    ((col[val] & 0x0000FF00) << 8);
    //     }
    //     winconvert(pixmap);
    // }
    // (*env)->ReleasePrimitiveArrayCritical(env, bufferarray, buffer, 0);

    fz_free(pixmap);

    if (error) {
        DEBUG("error!");
        throw_exception(env, "error rendering page");
    }

    DEBUG("PdfView.drawPage() done");
    return array;
}
I have compared the jpx output samples to those from MuPDF 1.5 on Windows and they are the same, but the colorspace of the original jpx is gone.
Could you help me get the colorspace back?
It seems you are trying to use an old version of MuPDF with some bits pulled in from a more recent version. To be honest, that's hardly likely to work. I would also guess that it's not the OpenJPEG library causing your problem, since the image appears, but converted to grayscale.
Have you tried opening the file in the current version of MuPDF? Does it work?
If so, then it seems to me your correct approach should be to use the current code, not try to bolt pieces onto an older version.
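If you do keep the ported code, one cheap sanity check (a suggestion of mine, not from the answer above) is to log what OpenJPEG actually returned right after opj_decode, so you can tell whether the components were already collapsed before the sample-copy loop runs:

/* Sketch: log the decoded image's layout using the file's own pdf_logimage.
   If numcomps is already 1 (or color_space is not OPJ_CLRSPC_SRGB/SYCC),
   the color was lost inside the decoder, not in the copy loop. */
pdf_logimage("jpx: numcomps=%d color_space=%d w=%d h=%d prec=%d\n",
             (int)jpx->numcomps, (int)jpx->color_space,
             (int)jpx->comps[0].w, (int)jpx->comps[0].h,
             (int)jpx->comps[0].prec);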

How to read an Audio Queue Services buffer byte by byte?

I am recording sound from the mic input using Audio Queue Services.
-(void)startRecording{
    [self setupAudioFormat:&recordState.dataFormat];
    recordState.currentPacket = 0;
    OSStatus status;
    status = AudioQueueNewInput(&recordState.dataFormat,
                                AudioInputCallback,
                                &recordState,
                                CFRunLoopGetCurrent(),
                                kCFRunLoopCommonModes,
                                0,
                                &recordState.queue);
    if (status == 0)
    {
        // Prime recording buffers with empty data
        for (int i = 0; i < NUM_BUFFERS; i++)
        {
            NSLog(@"buf in");
            AudioQueueAllocateBuffer(recordState.queue, 16000, &recordState.buffers[i]);
            AudioQueueEnqueueBuffer(recordState.queue, recordState.buffers[i], 0, NULL);
        }
        status = AudioFileCreateWithURL(fileURL,
                                        kAudioFileAIFFType,
                                        &recordState.dataFormat,
                                        kAudioFileFlags_EraseFile,
                                        &recordState.audioFile);
        if (status == 0)
        {
            recordState.recording = true;
            status = AudioQueueStart(recordState.queue, NULL);
            if (status == 0)
            {
                NSLog(@"Recording");
            }
        }
    }
    if (status != 0)
    {
        //[self stopRecording];
        NSLog(@"recording failed");
    }
}
And in the callback:
void AudioInputCallback(void * inUserData,
                        AudioQueueRef inAQ,
                        AudioQueueBufferRef inBuffer,
                        const AudioTimeStamp * inStartTime,
                        UInt32 inNumberPacketDescriptions,
                        const AudioStreamPacketDescription * inPacketDescs)
{
    RecordState * recordState = (RecordState*)inUserData;
    if (!recordState->recording)
    {
        printf("Not recording, returning\n");
    }

    // if (inNumberPacketDescriptions == 0 && recordState->dataFormat.mBytesPerPacket != 0)
    // {
    //     inNumberPacketDescriptions = inBuffer->mAudioDataByteSize / recordState->dataFormat.mBytesPerPacket;
    // }

    /*
    int sampleCount = recordState->buffers[0]->mAudioDataBytesCapacity / sizeof (AUDIO_DATA_TYPE_FORMAT);
    NSLog(@"sample count = %i",sampleCount);
    AUDIO_DATA_TYPE_FORMAT *p = (AUDIO_DATA_TYPE_FORMAT*)recordState->buffers[0]->mAudioData;
    for (int i = 0; i < sampleCount; i++) {
        if (p[i] > 1000) {
            NSLog(@"%hd",p[i]);
        }
    }*/

    printf("Writing buffer %lld\n", recordState->currentPacket);
    OSStatus status = AudioFileWritePackets(recordState->audioFile,
                                            false,
                                            inBuffer->mAudioDataByteSize,
                                            inPacketDescs,
                                            recordState->currentPacket,
                                            &inNumberPacketDescriptions,
                                            inBuffer->mAudioData);
    if (status == 0)
    {
        recordState->buffers[0] = nil;
        recordState->currentPacket += inNumberPacketDescriptions;
    }
    AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}
Here I want to read the recorded buffer. Is it possible to get something like this:
short[] buffer = ?; // here should be the audio buffer converted to some structure (short[] is just an example)
Then I would like to read every element of this structure:
for (int i = 0; i < sizeOfBuffer; i++) {
    bufferVal = buffer[i];
}
In short, how do I handle the buffer while recording?
Thanks.
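For what it's worth, the commented-out block in the callback is close to one plausible approach: read from the inBuffer handed to the callback rather than recordState->buffers[0]. A sketch, assuming the queue was set up for 16-bit linear PCM (so AUDIO_DATA_TYPE_FORMAT would be SInt16):

// Inside AudioInputCallback, before re-enqueueing inBuffer: interpret the
// raw bytes as 16-bit PCM samples (assumes the AudioStreamBasicDescription
// was configured for SInt16 linear PCM).
const SInt16 *samples = (const SInt16 *)inBuffer->mAudioData;
int sampleCount = inBuffer->mAudioDataByteSize / sizeof(SInt16);
for (int i = 0; i < sampleCount; i++) {
    SInt16 bufferVal = samples[i]; // each element of the "short[] buffer"
    // ... inspect or copy bufferVal here ...
}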

Getting the MAC Address in Objective-C

How do I get the MAC address of the computer in Objective-C? I was using the following code but it started crashing once I switched to using the LLVM compiler. Can anyone tell me how to fix this code or give me new code that works? I found a way to do it in 10.6+, but I need it to work with 10.5 too.
void GetHWAddresses()
{
    struct ifconf ifc;
    struct ifreq *ifr;
    int i, sockfd;
    char buffer[BUFFERSIZE], *cp, *cplim;
    char temp[80];

    for (i=0; i<MAXADDRS; ++i)
    {
        hw_addrs[i] = NULL;
    }

    sockfd = socket(AF_INET, SOCK_DGRAM, 0);
    if (sockfd < 0)
    {
        perror("socket failed");
        return;
    }

    ifc.ifc_len = BUFFERSIZE;
    ifc.ifc_buf = buffer;
    if (ioctl(sockfd, SIOCGIFCONF, (char *)&ifc) < 0)
    {
        perror("ioctl error");
        close(sockfd);
        return;
    }

    ifr = ifc.ifc_req;
    cplim = buffer + ifc.ifc_len;
    for (cp=buffer; cp < cplim; )
    {
        ifr = (struct ifreq *)cp;
        if (ifr->ifr_addr.sa_family == AF_LINK)
        {
            struct sockaddr_dl *sdl = (struct sockaddr_dl *)&ifr->ifr_addr;
            int a,b,c,d,e,f;
            int i;

            strcpy(temp, (char *)ether_ntoa(LLADDR(sdl)));
            sscanf(temp, "%x:%x:%x:%x:%x:%x", &a, &b, &c, &d, &e, &f);
            sprintf(temp, "%02X:%02X:%02X:%02X:%02X:%02X",a,b,c,d,e,f);

            for (i=0; i<MAXADDRS; ++i)
            {
                if ((if_names[i] != NULL) && (strcmp(ifr->ifr_name, if_names[i]) == 0))
                {
                    if (hw_addrs[i] == NULL)
                    {
                        hw_addrs[i] = (char *)malloc(strlen(temp)+1);
                        strcpy(hw_addrs[i], temp);
                        break;
                    }
                }
            }
        }
        cp += sizeof(ifr->ifr_name) + max(sizeof(ifr->ifr_addr), ifr->ifr_addr.sa_len);
    }
    close(sockfd);
}
Apple actually has some example code for getting the MAC address from the IO registry.
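That sample is Apple's GetPrimaryMACAddress example. Condensed, its core looks roughly like this; treat it as a sketch of the sample rather than drop-in code:

#include <IOKit/IOKitLib.h>
#include <IOKit/network/IOEthernetInterface.h>
#include <IOKit/network/IOEthernetController.h>
#include <CoreFoundation/CoreFoundation.h>

// Sketch of Apple's GetPrimaryMACAddress sample: find the primary Ethernet
// interface in the IO registry and read kIOMACAddress from its parent
// controller. Returns true on success.
static bool GetPrimaryMACAddress(UInt8 mac[6])
{
    bool ok = false;
    CFMutableDictionaryRef matching = IOServiceMatching(kIOEthernetInterfaceClass);
    if (!matching) return false;

    // Restrict the match to the built-in (primary) interface.
    CFMutableDictionaryRef props = CFDictionaryCreateMutable(kCFAllocatorDefault, 0,
        &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(props, CFSTR(kIOPrimaryInterface), kCFBooleanTrue);
    CFDictionarySetValue(matching, CFSTR(kIOPropertyMatchKey), props);
    CFRelease(props);

    io_iterator_t iter;
    if (IOServiceGetMatchingServices(kIOMasterPortDefault, matching, &iter) != KERN_SUCCESS)
        return false;

    io_object_t service;
    while ((service = IOIteratorNext(iter)) != 0)
    {
        io_object_t controller; // the MAC address lives on the parent controller
        if (IORegistryEntryGetParentEntry(service, kIOServicePlane, &controller) == KERN_SUCCESS)
        {
            CFTypeRef data = IORegistryEntryCreateCFProperty(controller,
                CFSTR(kIOMACAddress), kCFAllocatorDefault, 0);
            if (data)
            {
                CFDataGetBytes((CFDataRef)data, CFRangeMake(0, 6), mac);
                CFRelease(data);
                ok = true;
            }
            IOObjectRelease(controller);
        }
        IOObjectRelease(service);
    }
    IOObjectRelease(iter);
    return ok;
}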