ESP32-CAM returning pictures with a grey band on them at resolutions higher than VGA - file-upload

I am using an ESP32-CAM (with an OV2640 camera) to send pictures to Google Drive via an HTTPS POST request to a Google Apps Script. The photo payload is base64-encoded. The photo is uploaded and fully visible.
But at resolutions higher than VGA, the pictures are incomplete: a large part of each image is a single color band. In Google Drive that band is grey, but when I download the photo, the color of the band changes.
I checked whether the frame buffer might be too small by calling psramFound(), but that returned 1.
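For reference, a minimal sketch of how the actual free heap and PSRAM can be checked at runtime (ESP.getFreeHeap() and ESP.getFreePsram() come from the ESP32 Arduino core):
// Print how much heap and PSRAM is actually free; useful to call before and
// after building the base64/URL-encoded payload at UXGA resolution.
void printMemoryStats() {
  Serial.printf("psramFound(): %d\n", psramFound());
  Serial.printf("Free heap:  %u bytes\n", (unsigned)ESP.getFreeHeap());
  Serial.printf("Free PSRAM: %u bytes\n", (unsigned)ESP.getFreePsram());
}
Calling this before and after the base64 string is built in saveCapturedImage() below would show whether memory actually runs out at UXGA.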
Photo uploaded to Google Drive: https://i.stack.imgur.com/bJwBq.jpg
Any idea how I can upload the photos correctly at the maximum resolution allowed by the board (UXGA, 1600 x 1200)?
Thanks for the help! :D
The code I am using (Arduino IDE 1.8.13):
#include <WiFi.h>
#include <WiFiClientSecure.h>
#include "soc/soc.h"
#include "soc/rtc_cntl_reg.h"
#include "Base64.h"
#include "esp_camera.h"
const char* ssid = "***"; //your network SSID
const char* password = "***"; //your network password
const char* myDomain = "script.google.com";
String myScript = "/macros/s/********/exec"; //Replace with your own url
String myFilename = "filename=ESP32-CAM.jpg";
String mimeType = "&mimetype=image/jpeg";
String myImage = "&data=";
int waitingTime = 30000; //Wait 30 seconds to google response.
#define PWDN_GPIO_NUM 32
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 0
#define SIOD_GPIO_NUM 26
#define SIOC_GPIO_NUM 27
#define Y9_GPIO_NUM 35
#define Y8_GPIO_NUM 34
#define Y7_GPIO_NUM 39
#define Y6_GPIO_NUM 36
#define Y5_GPIO_NUM 21
#define Y4_GPIO_NUM 19
#define Y3_GPIO_NUM 18
#define Y2_GPIO_NUM 5
#define VSYNC_GPIO_NUM 25
#define HREF_GPIO_NUM 23
#define PCLK_GPIO_NUM 22
void setup()
{
WRITE_PERI_REG(RTC_CNTL_BROWN_OUT_REG, 0);
Serial.begin(115200);
delay(10);
WiFi.mode(WIFI_STA);
Serial.println("");
Serial.print("Connecting to ");
Serial.println(ssid);
WiFi.begin(ssid, password);
while (WiFi.status() != WL_CONNECTED) {
Serial.print(".");
delay(500);
}
Serial.println("");
Serial.println("STAIP address: ");
Serial.println(WiFi.localIP());
Serial.println("");
camera_config_t config;
config.ledc_channel = LEDC_CHANNEL_0;
config.ledc_timer = LEDC_TIMER_0;
config.pin_d0 = Y2_GPIO_NUM;
config.pin_d1 = Y3_GPIO_NUM;
config.pin_d2 = Y4_GPIO_NUM;
config.pin_d3 = Y5_GPIO_NUM;
config.pin_d4 = Y6_GPIO_NUM;
config.pin_d5 = Y7_GPIO_NUM;
config.pin_d6 = Y8_GPIO_NUM;
config.pin_d7 = Y9_GPIO_NUM;
config.pin_xclk = XCLK_GPIO_NUM;
config.pin_pclk = PCLK_GPIO_NUM;
config.pin_vsync = VSYNC_GPIO_NUM;
config.pin_href = HREF_GPIO_NUM;
config.pin_sscb_sda = SIOD_GPIO_NUM;
config.pin_sscb_scl = SIOC_GPIO_NUM;
config.pin_pwdn = PWDN_GPIO_NUM;
config.pin_reset = RESET_GPIO_NUM;
config.xclk_freq_hz = 20000000;
config.pixel_format = PIXFORMAT_JPEG;
if(psramFound()){
config.frame_size = FRAMESIZE_UXGA; // FRAMESIZE_ + QVGA|CIF|VGA|SVGA|XGA|SXGA|UXGA
config.jpeg_quality = 10; //10-63 lower number means higher quality
config.fb_count = 2;
} else {
config.frame_size = FRAMESIZE_SVGA;
config.jpeg_quality = 12;
config.fb_count = 1;
}
esp_err_t err = esp_camera_init(&config);
if (err != ESP_OK) {
Serial.printf("Camera init failed with error 0x%x", err);
delay(1000);
ESP.restart();
}
sensor_t * s = esp_camera_sensor_get();
s->set_brightness(s, 0); // -2 to 2
s->set_contrast(s, 0); // -2 to 2
s->set_saturation(s, 0); // -2 to 2
s->set_special_effect(s, 0); // 0 to 6 (0 - No Effect, 1 - Negative, 2 - Grayscale, 3 - Red Tint, 4 - Green Tint, 5 - Blue Tint, 6 - Sepia)
s->set_whitebal(s, 1); // 0 = disable , 1 = enable
s->set_awb_gain(s, 1); // 0 = disable , 1 = enable
s->set_wb_mode(s, 0); // 0 to 4 - if awb_gain enabled (0 - Auto, 1 - Sunny, 2 - Cloudy, 3 - Office, 4 - Home)
s->set_exposure_ctrl(s, 1); // 0 = disable , 1 = enable
s->set_aec2(s, 0); // 0 = disable , 1 = enable
s->set_ae_level(s, 0); // -2 to 2
s->set_aec_value(s, 300); // 0 to 1200
s->set_gain_ctrl(s, 1); // 0 = disable , 1 = enable
s->set_agc_gain(s, 0); // 0 to 30
s->set_gainceiling(s, (gainceiling_t)0); // 0 to 6
s->set_bpc(s, 0); // 0 = disable , 1 = enable
s->set_wpc(s, 1); // 0 = disable , 1 = enable
s->set_raw_gma(s, 1); // 0 = disable , 1 = enable
s->set_lenc(s, 1); // 0 = disable , 1 = enable
s->set_hmirror(s, 0); // 0 = disable , 1 = enable
s->set_vflip(s, 0); // 0 = disable , 1 = enable
s->set_dcw(s, 1); // 0 = disable , 1 = enable
s->set_colorbar(s, 0); // 0 = disable , 1 = enable
s->set_framesize(s, FRAMESIZE_UXGA);
}
boolean enviar = true;
void loop() {
//if(enviar) {
saveCapturedImage();
enviar = false;
delay(60000);
//}
}
void saveCapturedImage() {
Serial.println("Connect to " + String(myDomain));
WiFiClientSecure client;
if (client.connect(myDomain, 443)) {
Serial.println("Connection successful");
camera_fb_t * fb = NULL;
fb = esp_camera_fb_get();
if(!fb) {
Serial.println("Camera capture failed");
delay(1000);
ESP.restart();
return;
}
char *input = (char *)fb->buf;
char output[base64_enc_len(3)];
String imageFile = "";
for (int i=0;i<fb->len;i++) {
base64_encode(output, (input++), 3);
if (i%3==0) imageFile += urlencode(String(output));
}
String Data = myFilename+mimeType+myImage;
esp_camera_fb_return(fb);
Serial.println("Send a captured image to Google Drive.");
client.println("POST " + myScript + " HTTP/1.1");
client.println("Host: " + String(myDomain));
client.println("Content-Length: " + String(Data.length()+imageFile.length()));
client.println("Content-Type: application/x-www-form-urlencoded");
client.println();
client.print(Data);
int Index;
for (Index = 0; Index < imageFile.length(); Index = Index+1000) {
client.print(imageFile.substring(Index, Index+1000));
}
Serial.println("Waiting for response.");
long int StartTime=millis();
while (!client.available()) {
Serial.print(".");
delay(100);
if ((StartTime+waitingTime) < millis()) {
Serial.println();
Serial.println("No response.");
//If you have no response, maybe need a greater value of waitingTime
break;
}
}
Serial.println();
while (client.available()) {
Serial.print(char(client.read()));
}
} else {
Serial.println("Connected to " + String(myDomain) + " failed.");
}
client.stop();
}
//https://github.com/zenmanenergy/ESP8266-Arduino-Examples/
String urlencode(String str)
{
String encodedString="";
char c;
char code0;
char code1;
char code2;
for (int i =0; i < str.length(); i++){
c=str.charAt(i);
if (c == ' '){
encodedString+= '+';
} else if (isalnum(c)){
encodedString+=c;
} else{
code1=(c & 0xf)+'0';
if ((c & 0xf) >9){
code1=(c & 0xf) - 10 + 'A';
}
c=(c>>4)&0xf;
code0=c+'0';
if (c > 9){
code0=c - 10 + 'A';
}
code2='\0';
encodedString+='%';
encodedString+=code0;
encodedString+=code1;
//encodedString+=code2;
}
yield();
}
return encodedString;
}

Related

Error in uploading sensor data to Azure IoT Central

I have watched this video and tried to implement the same project using all the required components, but I keep getting errors when connecting to the portal and am not getting output from the sensors either.
This is the code that I have been using:
#include "DHT.h" // including the library of DHT11 temperature and humidity sensor
#include <ESP8266WiFi.h>
#define DHTTYPE DHT11
#include "D:/ARDUINO/ESP8266/ESP8266/src/iotc/common/string_buffer.h"
#include "D:/ARDUINO/ESP8266/ESP8266/src/iotc/iot"
#include "D:/ARDUINO/ESP8266/ESP8266/src/connection.h"
#define dht_dpin 12 // creating the object sensor on pin 'D12'
DHT dht(dht_dpin, DHTTYPE);
#define WIFI_SSID "<ENTER WIFI SSID>"
#define WIFI_PASSWORD "<ENTER WIFI PASSWORD>"
const char* SCOPE_ID = "<ENTER SCOPE ID>";
const char* DEVICE_ID = "<ENTER DEVICE ID>";
const char* DEVICE_KEY = "<ENTER DEVICE KEY>";
int echoPin = D6;
int trigPin = D8;
int pingTravelTime;
float pingTravelDistance;
float distanceToTarget;
float speedOfsound;
void on_event(IOTContext ctx, IOTCallbackInfo* callbackInfo);
void on_event(IOTContext ctx, IOTCallbackInfo* callbackInfo) {
// ConnectionStatus
if (strcmp(callbackInfo->eventName, "ConnectionStatus") == 0) {
LOG_VERBOSE("Is connected ? %s (%d)",
callbackInfo->statusCode == IOTC_CONNECTION_OK ? "YES" : "NO",
callbackInfo->statusCode);
isConnected = callbackInfo->statusCode == IOTC_CONNECTION_OK;
return;
}
// payload buffer doesn't have a null ending.
// add null ending in another buffer before print
AzureIOT::StringBuffer buffer;
if (callbackInfo->payloadLength > 0) {
buffer.initialize(callbackInfo->payload, callbackInfo->payloadLength);
}
LOG_VERBOSE("- [%s] event was received. Payload => %s\n",
callbackInfo->eventName, buffer.getLength() ? *buffer : "EMPTY");
if (strcmp(callbackInfo->eventName, "Command") == 0) {
LOG_VERBOSE("- Command name was => %s\r\n", callbackInfo->tag);
}
dht.begin();
}
void setup() {
Serial.begin(9600);
connect_wifi(WIFI_SSID, WIFI_PASSWORD);
connect_client(SCOPE_ID, DEVICE_ID, DEVICE_KEY);
if (context != NULL) {
lastTick = 0; // set timer in the past to enable first telemetry a.s.a.p
}
pinMode(trigPin,OUTPUT);
pinMode(echoPin,INPUT);
}
void loop() {
digitalWrite(trigPin,LOW);
delayMicroseconds(10);
digitalWrite(trigPin,HIGH);
delayMicroseconds(10);
digitalWrite(trigPin,LOW);
float h = dht.readHumidity();
float t = dht.readTemperature();
// Reading the temperature in Celsius degrees and store in the t variable
// Reading the humidity index and store in the h variable
pingTravelTime = pulseIn(echoPin,HIGH);
delay(25);
pingTravelDistance = (pingTravelTime*330*100)/(1000000);
speedOfsound = (pingTravelDistance*1000000)/pingTravelTime;
distanceToTarget = pingTravelDistance/2;
if (isConnected) {
unsigned long ms = millis();
if (ms - lastTick > 10000) { // send telemetry every 10 seconds
char msg[64] = {0};
int pos = 0, errorCode = 0;
lastTick = ms;
if (loopId++ % 4 == 0) { // send telemetry
pos = snprintf(msg, sizeof(msg) - 1, "{\"Temperature\": %f}",
t);
errorCode = iotc_send_telemetry(context, msg, pos);
pos = snprintf(msg, sizeof(msg) - 1, "{\"Humidity\":%f}",
h);
errorCode = iotc_send_telemetry(context, msg, pos);
pos = snprintf(msg, sizeof(msg) - 1, "{\"Distance\":%f}",
distanceToTarget);
errorCode = iotc_send_telemetry(context, msg, pos);
pos = snprintf(msg, sizeof(msg) - 1, "{\"Speed\":%f}",
speedOfsound);
errorCode = iotc_send_telemetry(context, msg, pos);
} else { // send property
}
msg[pos] = 0;
if (errorCode != 0) {
LOG_ERROR("Sending message has failed with error code %d", errorCode);
}
}
iotc_do_work(context); // do background work for iotc
} else {
iotc_free_context(context);
context = NULL;
connect_client(SCOPE_ID, DEVICE_ID, DEVICE_KEY);
}
delay(50);
}
I tried to implement this project for my thesis but am not getting results, since the connection is not being established.
These are the errors I was getting:
ERROR: couldn't fetch the time from NTP. - -
X - Error at connection.h:32
Error # tcp_connect. Code 1 -
ERROR: Client was not connected. - -
iot.dps : getting auth... -
iotc.dps : getting operation id... -
ERROR: DPS endpoint PUT call has failed.
This is the GitHub link for downloading the required header files for connection establishment.
I could reproduce the same error on my NodeMCU. Please find the image below for reference.
If you let the code run and observe the NodeMCU serial monitor, it will occasionally print additional information, as shown in the image below.
The error message in my case indicates an authorization issue: I had provided the wrong primary key in the code. To be able to connect to the device on Azure IoT Central, make sure the device is not set to be simulated when you create it.
Once you have created the device from the template, navigate to the device and click Connect to get the details that need to be entered in the code. The image below is attached for reference.
Here is the code snippet I used to generate the data:
#include <ESP8266WiFi.h>
#include "src/iotc/common/string_buffer.h"
#include "src/iotc/iotc.h"
#include "DHT.h"
#define DHTPIN 2
#define DHTTYPE DHT11 // DHT 11
#define WIFI_SSID "<SSID>"
#define WIFI_PASSWORD "<WIFIPASSWORD>"
const char *SCOPE_ID = "<value 2 from above image>";
const char *DEVICE_ID = "<value 3 from above image>";
const char *DEVICE_KEY = "<value 4 from above image>";
DHT dht(DHTPIN, DHTTYPE);
void on_event(IOTContext ctx, IOTCallbackInfo *callbackInfo);
#include "src/connection.h"
void on_event(IOTContext ctx, IOTCallbackInfo *callbackInfo)
{
// ConnectionStatus
if (strcmp(callbackInfo->eventName, "ConnectionStatus") == 0)
{
LOG_VERBOSE("Is connected ? %s (%d)",
callbackInfo->statusCode == IOTC_CONNECTION_OK ? "YES" : "NO",
callbackInfo->statusCode);
isConnected = callbackInfo->statusCode == IOTC_CONNECTION_OK;
return;
}
// payload buffer doesn't have a null ending.
// add null ending in another buffer before print
AzureIOT::StringBuffer buffer;
if (callbackInfo->payloadLength > 0)
{
buffer.initialize(callbackInfo->payload, callbackInfo->payloadLength);
}
LOG_VERBOSE("- [%s] event was received. Payload => %s\n",
callbackInfo->eventName, buffer.getLength() ? *buffer : "EMPTY");
if (strcmp(callbackInfo->eventName, "Command") == 0)
{
LOG_VERBOSE("- Command name was => %s\r\n", callbackInfo->tag);
}
}
void setup()
{
Serial.begin(9600);
connect_wifi(WIFI_SSID, WIFI_PASSWORD);
connect_client(SCOPE_ID, DEVICE_ID, DEVICE_KEY);
if (context != NULL)
{
lastTick = 0; // set timer in the past to enable first telemetry a.s.a.p
}
dht.begin();
}
void loop()
{
float h = dht.readHumidity();
float t = dht.readTemperature();
if (isConnected)
{
unsigned long ms = millis();
if (ms - lastTick > 10000)
{ // send telemetry every 10 seconds
char msg[64] = {0};
int pos = 0, errorCode = 0;
lastTick = ms;
if (loopId++ % 2 == 0)
{ // send telemetry
pos = snprintf(msg, sizeof(msg) - 1, "{\"Temperature\": %f}",
t);
errorCode = iotc_send_telemetry(context, msg, pos);
pos = snprintf(msg, sizeof(msg) - 1, "{\"Humidity\":%f}",
h);
errorCode = iotc_send_telemetry(context, msg, pos);
}
else
{ // send property
}
msg[pos] = 0;
if (errorCode != 0)
{
LOG_ERROR("Sending message has failed with error code %d", errorCode);
}
}
iotc_do_work(context); // do background work for iotc
}
else
{
iotc_free_context(context);
context = NULL;
connect_client(SCOPE_ID, DEVICE_ID, DEVICE_KEY);
}
}
Here are the temperature and humidity values generated by the code.
The data generated the following graph in Azure IoT Central.
Please note that I used a DHT11 sensor connected to GPIO 2 (D4) on my NodeMCU board, with Arduino IDE version 1.8.19 and ESP8266 board package version 2.7.4.

Why is the result of GetPageLabels different from Adobe Acrobat?

I edited the page numbers of a PDF in Adobe Acrobat X Pro.
Test PDF
Result:
1-FrontCover
2-FrontFold
3-i
4-ii
5-iii
6-1
7-2
8-3
9-4
10-5
11-BackFold
12-BackCover
But this result from GetPageLabels is wrong:
page number:
0-FrontCover1
1-FrontFold1
2-FrontFoldi
3-FrontFoldii
4-FrontFoldiii
5-FrontFold1
6-FrontFold2
7-FrontFold3
8-FrontFold4
9-FrontFold5
10-BackFold1
11-BackCover1
C# Code:
objLabels = PdfPageLabels.GetPageLabels(objReader);
TextBox1.Text += "page number:" + Environment.NewLine;
if (objLabels != null) {
for (i = 0; i <= objLabels.Length - 1; i++) {
TextBox1.Text += i + "-" + objLabels[i] + Environment.NewLine;
}
}
How can I get the correct result, like Adobe Acrobat X Pro?
There is a small bug in PdfPageLabels.GetPageLabels(PdfReader). When encountering a new page label dictionary without a P (prefix) entry, it does not reset the current prefix value:
int pagecount = 1;
String prefix = "";
char type = 'D';
for (int i = 0; i < n; i++) {
if (numberTree.ContainsKey(i)) {
PdfDictionary d = (PdfDictionary)PdfReader.GetPdfObjectRelease(numberTree[i]);
if (d.Contains(PdfName.ST)) {
pagecount = ((PdfNumber)d.Get(PdfName.ST)).IntValue;
}
else {
pagecount = 1;
}
if (d.Contains(PdfName.P)) {
prefix = ((PdfString)d.Get(PdfName.P)).ToUnicodeString();
}
if (d.Contains(PdfName.S)) {
type = ((PdfName)d.Get(PdfName.S)).ToString()[1];
}
else {
type = 'e';
}
}
...
}
You can fix this by adding the following else clause to the if in question:
if (d.Contains(PdfName.P)) {
prefix = ((PdfString)d.Get(PdfName.P)).ToUnicodeString();
}
else
{
prefix = "";
}
With this change I get:
page number:
0 - FrontCover
1 - FrontFold
2 - i
3 - ii
4 - iii
5 - 1
6 - 2
7 - 3
8 - 4
9 - 5
10 - BackFold
11 - BackCover
PS: The same issue is present in the Java version of iText; tested with ReadPageLabels.java.
Thank you for helping to solve my problem.
Here is my complete program.
public string[] ReadPageLabel(PdfReader objReader, int intPageCount)
{
PdfDictionary objDictionary ;
Dictionary<int, PdfObject> objTree ;
string[] arrLabels ;
int i ;
char chrLabelKind ;
string strLabelPrefix ;
int intLableNumber ;
//PdfPageLabels is wrong
//arrLabels = PdfPageLabels.GetPageLabels(objReader)
arrLabels = new string[intPageCount];
if (objReader.Catalog.Get(PdfName.PAGELABELS) != null) {
objTree = PdfNumberTree.ReadTree((PdfDictionary)PdfReader.GetPdfObjectRelease(objReader.Catalog.Get(PdfName.PAGELABELS)));
chrLabelKind = 'D';
strLabelPrefix = "";
intLableNumber = 1;
for (i = 0; i <= intPageCount - 1; i++) {
if (objTree.ContainsKey(i)) { //if reset page number
objDictionary = (PdfDictionary)PdfReader.GetPdfObjectRelease(objTree[i]);
//PdfName.S:Number Kind
if (objDictionary.Contains(PdfName.S)) {
chrLabelKind = ((PdfName)objDictionary.Get(PdfName.S)).ToString()[1];
//PdfName.S: /R, /r, /A, /a, /e, /D; ToString()[1] gets the character at index 1
} else {
chrLabelKind = 'e';
}
//PdfName.P:Prefix
if (objDictionary.Contains(PdfName.P)) {
strLabelPrefix = ((PdfString)objDictionary.Get(PdfName.P)).ToUnicodeString();
} else {
strLabelPrefix = "";
}
//PdfName.ST:Start Number
if (objDictionary.Contains(PdfName.ST)) {
intLableNumber = ((PdfNumber)objDictionary.Get(PdfName.ST)).IntValue;
} else {
intLableNumber = 1;
}
}
switch (chrLabelKind) {
case 'R':
//I,II,III
arrLabels[i] = strLabelPrefix + factories.RomanNumberFactory.GetUpperCaseString(intLableNumber);
break;
case 'r':
//i,ii,iii
arrLabels[i] = strLabelPrefix + factories.RomanNumberFactory.GetLowerCaseString(intLableNumber);
break;
case 'A':
//A,B,C
arrLabels[i] = strLabelPrefix + factories.RomanAlphabetFactory.GetUpperCaseString(intLableNumber);
break;
case 'a':
//a,b,c
arrLabels[i] = strLabelPrefix + factories.RomanAlphabetFactory.GetLowerCaseString(intLableNumber);
break;
case 'e':
//no number kind
arrLabels[i] = strLabelPrefix;
break;
default:
//1,2,3
arrLabels[i] = strLabelPrefix + intLableNumber;
break;
}
intLableNumber += 1;
}
} else {
for (i = 0; i <= intPageCount - 1; i++) {
arrLabels[i] = (i + 1).ToString();
}
}
return arrLabels;
}

MuPDF renders JPEG2000 but loses color?

I am working on an Android project that uses VuDroid, which in turn uses MuPDF version 0.5.
VuDroid removed MuPDF's original OpenJPEG support, so I ported the OpenJPEG support from MuPDF version 1.5.
But I have encountered a new problem: the color information in the JPX image is gone. The desired effect:
My effect:
The ported load-jpx code:
#include "fitz.h"
#include "mupdf.h"
/* Without the definition of OPJ_STATIC, compilation fails on windows
* due to the use of __stdcall. We believe it is required on some
* linux toolchains too. */
#define OPJ_STATIC
#ifndef _MSC_VER
#define OPJ_HAVE_STDINT_H
#endif
#include <openjpeg.h>
static void fz_opj_error_callback(const char *msg, void *client_data)
{
//fz_context *ctx = (fz_context *)client_data;
//fz_warn(ctx, "openjpeg error: %s", msg);
}
static void fz_opj_warning_callback(const char *msg, void *client_data)
{
//fz_context *ctx = (fz_context *)client_data;
//fz_warn(ctx, "openjpeg warning: %s", msg);
}
static void fz_opj_info_callback(const char *msg, void *client_data)
{
/* fz_warn("openjpeg info: %s", msg); */
}
typedef struct stream_block_s
{
unsigned char *data;
int size;
int pos;
} stream_block;
static OPJ_SIZE_T fz_opj_stream_read(void * p_buffer, OPJ_SIZE_T p_nb_bytes, void * p_user_data)
{
stream_block *sb = (stream_block *)p_user_data;
int len;
len = sb->size - sb->pos;
if (len < 0)
len = 0;
if (len == 0)
return (OPJ_SIZE_T)-1; /* End of file! */
if ((OPJ_SIZE_T)len > p_nb_bytes)
len = p_nb_bytes;
memcpy(p_buffer, sb->data + sb->pos, len);
sb->pos += len;
return len;
}
static OPJ_OFF_T fz_opj_stream_skip(OPJ_OFF_T skip, void * p_user_data)
{
stream_block *sb = (stream_block *)p_user_data;
if (skip > sb->size - sb->pos)
skip = sb->size - sb->pos;
sb->pos += skip;
return sb->pos;
}
static OPJ_BOOL fz_opj_stream_seek(OPJ_OFF_T seek_pos, void * p_user_data)
{
stream_block *sb = (stream_block *)p_user_data;
if (seek_pos > sb->size)
return OPJ_FALSE;
sb->pos = seek_pos;
return OPJ_TRUE;
}
fz_error
fz_load_jpx(pdf_image* img, unsigned char *data, int size, fz_colorspace *defcs, int indexed)
{
//fz_pixmap *img;
opj_dparameters_t params;
opj_codec_t *codec;
opj_image_t *jpx;
opj_stream_t *stream;
fz_colorspace *colorspace;
unsigned char *p;
OPJ_CODEC_FORMAT format;
int a, n, w, h, depth, sgnd;
int x, y, k, v;
stream_block sb;
if (size < 2)
fz_throw("not enough data to determine image format");
/* Check for SOC marker -- if found we have a bare J2K stream */
if (data[0] == 0xFF && data[1] == 0x4F)
format = OPJ_CODEC_J2K;
else
format = OPJ_CODEC_JP2;
opj_set_default_decoder_parameters(&params);
if (indexed)
params.flags |= OPJ_DPARAMETERS_IGNORE_PCLR_CMAP_CDEF_FLAG;
codec = opj_create_decompress(format);
opj_set_info_handler(codec, fz_opj_info_callback, 0);
opj_set_warning_handler(codec, fz_opj_warning_callback, 0);
opj_set_error_handler(codec, fz_opj_error_callback, 0);
if (!opj_setup_decoder(codec, &params))
{
fz_throw("j2k decode failed");
}
stream = opj_stream_default_create(OPJ_TRUE);
sb.data = data;
sb.pos = 0;
sb.size = size;
opj_stream_set_read_function(stream, fz_opj_stream_read);
opj_stream_set_skip_function(stream, fz_opj_stream_skip);
opj_stream_set_seek_function(stream, fz_opj_stream_seek);
opj_stream_set_user_data(stream, &sb);
/* Set the length to avoid an assert */
opj_stream_set_user_data_length(stream, size);
if (!opj_read_header(stream, codec, &jpx))
{
opj_stream_destroy(stream);
opj_destroy_codec(codec);
fz_throw("Failed to read JPX header");
}
if (!opj_decode(codec, stream, jpx))
{
opj_stream_destroy(stream);
opj_destroy_codec(codec);
opj_image_destroy(jpx);
fz_throw("Failed to decode JPX image");
}
opj_stream_destroy(stream);
opj_destroy_codec(codec);
/* jpx should never be NULL here, but check anyway */
if (!jpx)
fz_throw("opj_decode failed");
pdf_logimage("opj_decode succeeded");
for (k = 1; k < (int)jpx->numcomps; k++)
{
if (!jpx->comps[k].data)
{
opj_image_destroy(jpx);
fz_throw("image components are missing data");
}
if (jpx->comps[k].w != jpx->comps[0].w)
{
opj_image_destroy(jpx);
fz_throw("image components have different width");
}
if (jpx->comps[k].h != jpx->comps[0].h)
{
opj_image_destroy(jpx);
fz_throw("image components have different height");
}
if (jpx->comps[k].prec != jpx->comps[0].prec)
{
opj_image_destroy(jpx);
fz_throw("image components have different precision");
}
}
n = jpx->numcomps;
w = jpx->comps[0].w;
h = jpx->comps[0].h;
depth = jpx->comps[0].prec;
sgnd = jpx->comps[0].sgnd;
if (jpx->color_space == OPJ_CLRSPC_SRGB && n == 4) { n = 3; a = 1; }
else if (jpx->color_space == OPJ_CLRSPC_SYCC && n == 4) { n = 3; a = 1; }
else if (n == 2) { n = 1; a = 1; }
else if (n > 4) { n = 4; a = 1; }
else { a = 0; }
if (defcs)
{
if (defcs->n == n)
{
colorspace = defcs;
}
else
{
fz_warn("jpx file and dict colorspaces do not match");
defcs = NULL;
}
}
if (!defcs)
{
switch (n)
{
case 1: colorspace = pdf_devicegray; break;
case 3: colorspace = pdf_devicergb; break;
case 4: colorspace = pdf_devicecmyk; break;
}
}
//error = fz_new_pixmap(&img, colorspace, w, h);
//if (error)
// return error;
pdf_logimage("colorspace handled\n");
int bpc = 1;
if (colorspace) {
bpc = 1 + colorspace->n;
};
pdf_logimage("w = %d, bpc = %d, h = %d\n", w, bpc, h);
img->samples = fz_newbuffer(w * bpc * h);
//opj_image_destroy(jpx);
//fz_throw("out of memory loading jpx");
p = (char*)img->samples->bp;
pdf_logimage("start to deal with samples");
for (y = 0; y < h; y++)
{
for (x = 0; x < w; x++)
{
for (k = 0; k < n + a; k++)
{
v = jpx->comps[k].data[y * w + x];
if (sgnd)
v = v + (1 << (depth - 1));
if (depth > 8)
v = v >> (depth - 8);
*p++ = v;
}
if (!a)
*p++ = 255;
}
}
img->samples->wp = p;
pdf_logimage("start to deal with samples succeeded");
opj_image_destroy(jpx);
// if (a)
// {
// if (n == 4)
// {
// fz_pixmap *tmp = fz_new_pixmap(ctx, fz_device_rgb(ctx), w, h);
// fz_convert_pixmap(ctx, tmp, img);
// fz_drop_pixmap(ctx, img);
// img = tmp;
// }
// fz_premultiply_pixmap(ctx, img);
// }
return fz_okay;
}
The render code:
JNIEXPORT jbyteArray JNICALL Java_org_vudroid_pdfdroid_codec_PdfPage_drawPage
(JNIEnv *env, jclass clazz, jlong dochandle, jlong pagehandle)
{
renderdocument_t *doc = (renderdocument_t*) dochandle;
renderpage_t *page = (renderpage_t*) pagehandle;
//DEBUG("PdfView(%p).drawpage(%p, %p)", this, doc, page);
fz_error error;
fz_matrix ctm;
fz_irect viewbox;
fz_pixmap *pixmap;
jfloat *matrix;
jint *viewboxarr;
jint *dimen;
jint *buffer;
int length, val;
pixmap = nil;
/* initialize parameter arrays for MuPDF */
ctm.a = 1;
ctm.b = 0;
ctm.c = 0;
ctm.d = 1;
ctm.e = 0;
ctm.f = 0;
// matrix = (*env)->GetPrimitiveArrayCritical(env, matrixarray, 0);
// ctm.a = matrix[0];
// ctm.b = matrix[1];
// ctm.c = matrix[2];
// ctm.d = matrix[3];
// ctm.e = matrix[4];
// ctm.f = matrix[5];
// (*env)->ReleasePrimitiveArrayCritical(env, matrixarray, matrix, 0);
// DEBUG("Matrix: %f %f %f %f %f %f",
// ctm.a, ctm.b, ctm.c, ctm.d, ctm.e, ctm.f);
// viewboxarr = (*env)->GetPrimitiveArrayCritical(env, viewboxarray, 0);
// viewbox.x0 = viewboxarr[0];
// viewbox.y0 = viewboxarr[1];
// viewbox.x1 = viewboxarr[2];
// viewbox.y1 = viewboxarr[3];
// (*env)->ReleasePrimitiveArrayCritical(env, viewboxarray, viewboxarr, 0);
// DEBUG("Viewbox: %d %d %d %d",
// viewbox.x0, viewbox.y0, viewbox.x1, viewbox.y1);
viewbox.x0 = 0;
viewbox.y0 = 0;
viewbox.x1 = 595;
viewbox.y1 = 841;
/* do the rendering */
DEBUG("doing the rendering...");
//buffer = (*env)->GetPrimitiveArrayCritical(env, bufferarray, 0);
// do the actual rendering:
error = fz_rendertree(&pixmap, doc->rast, page->page->tree,
ctm, viewbox, 1);
/* evil magic: we transform the rendered image's byte order
*/
int x, y;
if (bmpdata)
fz_free(bmpdata);
bmpstride = ((pixmap->w * 3 + 3) / 4) * 4;
bmpdata = fz_malloc(pixmap->h * bmpstride);
DEBUG("inside drawpage, bmpstride = %d, pixmap->w = %d, pixmap->h = %d\n", bmpstride, pixmap->w, pixmap->h);
if (!bmpdata)
return;
for (y = 0; y < pixmap->h; y++)
{
unsigned char *p = bmpdata + y * bmpstride;
unsigned char *s = pixmap->samples + y * pixmap->w * 4;
for (x = 0; x < pixmap->w; x++)
{
p[x * 3 + 0] = s[x * 4 + 3];
p[x * 3 + 1] = s[x * 4 + 2];
p[x * 3 + 2] = s[x * 4 + 1];
}
}
FILE* fp = fopen("/sdcard/drawpage", "wb");
fwrite(bmpdata, pixmap->h * bmpstride, 1, fp);
fclose(fp);
jbyteArray array = (*env)->NewByteArray(env, pixmap->h * bmpstride);
(*env)->SetByteArrayRegion(env, array, 0, pixmap->h * bmpstride, bmpdata);
// if(!error) {
// DEBUG("Converting image buffer pixel order");
// length = pixmap->w * pixmap->h;
// unsigned int *col = pixmap->samples;
// int c = 0;
// for(val = 0; val < length; val++) {
// col[val] = ((col[val] & 0xFF000000) >> 24) |
// ((col[val] & 0x00FF0000) >> 8) |
// ((col[val] & 0x0000FF00) << 8);
// }
// winconvert(pixmap);
// }
// (*env)->ReleasePrimitiveArrayCritical(env, bufferarray, buffer, 0);
fz_free(pixmap);
if (error) {
DEBUG("error!");
throw_exception(env, "error rendering page");
}
DEBUG("PdfView.drawPage() done");
return array;
}
I have compared the JPX output samples with those from MuPDF 1.5 on Windows and they are the same, but the colorspace of the original JPX is gone.
Could you help me get the colorspace back?
It seems you are trying to use an old version of MuPDF with some bits pulled in from a more recent version. To be honest, that's hardly likely to work. I would also guess that it's not the OpenJPEG library causing your problem, since the image appears, just converted to grayscale.
Have you tried opening the file in the current version of MuPDF? Does it work?
If so, then it seems to me the correct approach would be to use the current code, rather than trying to bolt pieces onto an older version.

Using x264 and librtmp to send live camera frames, but the Flash player can't show them

I am using x264 and librtmp to send my live camera frames, and everything seems right, but my web test Flash player can't show the correct video. Sometimes it seems correct, but when I click the play button again, it doesn't show any picture in the Flash player.
Here is my x264 config code:
x264_param_default_preset(&x264param, "ultrafast", "zerolatency");
x264param.i_threads = 2;
x264param.i_width = width;
x264param.i_height = height;
x264param.i_log_level = X264_LOG_DEBUG;
x264param.i_fps_num = x264param.i_timebase_num= fps;
x264param.i_fps_den = x264param.i_timebase_den=1;
x264param.i_frame_total = 0;
x264param.i_frame_reference =1;
//x264param.i_frame_reference = 2;
x264param.i_keyint_min = 25;
x264param.i_keyint_max = fps*3;
x264param.i_scenecut_threshold = 40;
x264param.b_deblocking_filter = 1;
x264param.b_cabac = 0;
x264param.analyse.i_trellis = 0;
x264param.analyse.b_chroma_me = 1;
x264param.vui.i_sar_width = 0;
x264param.vui.i_sar_height = 0;
x264param.i_bframe_bias = 0;
x264param.b_interlaced= 0;
x264param.analyse.i_subpel_refine = 6; /* 0..5 -> 1..6 */
x264param.analyse.i_me_method = X264_ME_DIA;//X264_ME_HEX?X264_ME_DIA
x264param.analyse.i_me_range = 16;
x264param.analyse.i_direct_mv_pred = X264_DIRECT_PRED_AUTO;
x264param.i_deblocking_filter_alphac0 = 0;
x264param.i_deblocking_filter_beta = 0;
//x264param.analyse.intra = X264_ANALYSE_I4x4;
x264param.analyse.intra = X264_ANALYSE_I4x4;// | X264_ANALYSE_PSUB16x16 | X264_ANALYSE_BSUB16x16;
x264param.analyse.inter = X264_ANALYSE_I4x4 | X264_ANALYSE_PSUB16x16 | X264_ANALYSE_BSUB16x16;
//edit 2014-7-28
x264param.analyse.b_transform_8x8 = 1;
//x264param.analyse.b_transform_8x8 = 0;
x264param.analyse.b_fast_pskip = 1;
x264param.i_bframe = 0;
//x264param.b_intra_refresh
x264param.analyse.b_weighted_bipred = 0;
//// Intra refres:
x264param.i_keyint_max = 250;
x264param.b_intra_refresh = 0;
////Rate control:
//x264param.rc.i_rc_method = X264_RC_CRF;
//Rate Control
x264param.rc.f_ip_factor = 1.4f;
x264param.rc.f_pb_factor = 1.3f;
x264param.rc.f_qcompress = 1.0;
x264param.rc.i_qp_min = 20;//20;
x264param.rc.i_qp_max = 32;
x264param.rc.i_qp_step = 1;
switch (0)
{
case 0: /* 1 PASS ABR */
x264param.rc.i_rc_method = X264_RC_ABR;
x264param.rc.i_bitrate = 300; // max = 5000
x264param.rc.b_mb_tree = 0;
break;
case 1: /* 1 PASS CQ */
x264param.rc.i_rc_method = X264_RC_CQP;
x264param.rc.i_qp_constant = 26;//10 - 51
break;
}
//For streaming:
x264param.b_repeat_headers = 1;
x264param.b_annexb = 1;
x264_param_apply_profile(&x264param, "baseline");
encoder = x264_encoder_open(&x264param);
x264_picture_init( &pic_in );
x264_picture_alloc(&pic_in, X264_CSP_I420, width, height);
pic_in.img.i_csp = X264_CSP_I420|X264_CSP_VFLIP;
pic_in.img.i_plane = 3;
pic_in.i_type = X264_TYPE_AUTO;
Sending To RTMP:
sws_scale(convertCtx,&a,&scribe,0,height, pic_in.img.plane, pic_in.img.i_stride);
int i_nal;
int i_frame_size = x264_encoder_encode( encoder, &nal, &i_nal, &pic_in, &pic_out );
if(i_frame_size <= 0){
printf("\t!!!FAILED encode frame \n");
}else{
for (int i = 0,last=0; i < i_nal;i++)
{
fwrite(nal[i].p_payload, 1, i_frame_size-last, fpw1);
if (nal[i].i_type == NAL_SPS) {
sps_len = nal[i].i_payload-4;
sps = new unsigned char[sps_len];
memcpy(sps,nal[i].p_payload+4,sps_len);
} else if (nal[i].i_type == NAL_PPS) {
pps_len = nal[i].i_payload-4;
pps = new unsigned char[sps_len];
memcpy(pps,nal[i].p_payload+4,pps_len);
send_video_sps_pps();
free(sps);
free(pps);
} else {
send_rtmp_video(nal[i].p_payload,i_frame_size-last);
break;
}
last += nal[i].i_payload;
}
}
Send PPS and SPS
void send_video_sps_pps(){
if(rtmp!= NULL){
RTMPPacket * packet;
unsigned char * body;
int i;
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE+1024);
memset(packet,0,RTMP_HEAD_SIZE);
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
body = (unsigned char *)packet->m_body;
i = 0;
body[i++] = 0x17;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
/*AVCDecoderConfigurationRecord*/
body[i++] = 0x01;
body[i++] = sps[1];
body[i++] = sps[2];
body[i++] = sps[3];
body[i++] = 0xff;
/*sps*/
body[i++] = 0xe1;
body[i++] = (sps_len >> 8) & 0xff;
body[i++] = sps_len & 0xff;
memcpy(&body[i],sps,sps_len);
i += sps_len;
/*pps*/
body[i++] = 0x01;
body[i++] = (pps_len >> 8) & 0xff;
body[i++] = (pps_len) & 0xff;
memcpy(&body[i],pps,pps_len);
i += pps_len;
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
packet->m_nBodySize = i;
packet->m_nChannel = 0x04;
packet->m_nTimeStamp = 0;
packet->m_hasAbsTimestamp = 0;
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
packet->m_nInfoField2 = rtmp->m_stream_id;
RTMP_SendPacket(rtmp,packet,TRUE);
free(packet);
rtmp_start_time = GetTickCount();
}else{
std::cout<<"RTMP is not ready"<<std::endl;
}
}
Send video Frame
void send_rtmp_video(unsigned char * buf,int len){
RTMPPacket * packet;
long timeoffset = GetTickCount() - rtmp_start_time;
int type = buf[0]&0x1f;
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE+len+9);
memset(packet,0,RTMP_HEAD_SIZE);
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
packet->m_nBodySize = len + 9;
/*send video packet*/
unsigned char *body = (unsigned char *)packet->m_body;
memset(body,0,len+9);
/*key frame*/
body[0] = 0x27;
if (type == NAL_SLICE_IDR) {
body[0] = 0x17;
}
body[1] = 0x01; /*nal unit*/
body[2] = 0x00;
body[3] = 0x00;
body[4] = 0x00;
body[5] = (len >> 24) & 0xff;
body[6] = (len >> 16) & 0xff;
body[7] = (len >> 8) & 0xff;
body[8] = (len ) & 0xff;
/*copy data*/
memcpy(&body[9],buf,len);
packet->m_hasAbsTimestamp = 0;
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
if(rtmp != NULL){
packet->m_nInfoField2 = rtmp->m_stream_id;
}
packet->m_nChannel = 0x04;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
packet->m_nTimeStamp = timeoffset;
if(rtmp != NULL){
RTMP_SendPacket(rtmp,packet,TRUE);
}
free(packet);
}
Try changing:
pps = new unsigned char[sps_len];
to:
pps = new unsigned char[pps_len];
The PPS buffer is currently allocated with the SPS length; if the PPS is ever longer than the SPS, the memcpy that follows writes past the end of the buffer (heap corruption). Either way, sizing each buffer with its own length is the safe thing to do.
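For context, a small self-contained helper shows the length bookkeeping this fix is about; copy_param_set() is a hypothetical name, not something from the question, and each buffer is sized with its own length and released with delete[] to match the new[] allocation:
#include <cstring>
// Hypothetical helper: copy an SPS or PPS out of an Annex B NAL unit,
// skipping the 4-byte 00 00 00 01 start code. The buffer is sized with
// its own length, which is exactly what the one-line fix above restores.
static unsigned char *copy_param_set(const unsigned char *payload, int payload_len, int *out_len)
{
    *out_len = payload_len - 4;
    unsigned char *buf = new unsigned char[*out_len];
    memcpy(buf, payload + 4, *out_len);
    return buf; // caller releases with delete[]
}
With such a helper, both branches become, for example, sps = copy_param_set(nal[i].p_payload, nal[i].i_payload, &sps_len); and the SPS and PPS lengths can no longer be mixed up.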

Arduino State Machine

This is a cross-post; someone answered my original question here.
I'm not sure how to go about executing the three functions I'm after (or how to introduce even more than three in the future).
I am simply trying to fade or blink the selected colour of an RGB LED (and perhaps introduce more functions in the future), where the RGB data comes from iOS and is sent to an RFduino BLE module.
This sends a "fade" string to the module, which is picked up by RFduinoBLE_onReceive(char *data, int len) on the Arduino end:
- (IBAction)fadeButtonPressed:(id)sender {
[rfduino send:[#"fade" dataUsingEncoding:NSUTF8StringEncoding]];
}
- (IBAction)blinkButtonPressed:(id)sender {
[rfduino send:[#"blink" dataUsingEncoding:NSUTF8StringEncoding]];
}
Selected Color:
- (void)setColor
{
NSLog(#"colors: RGB %f %f %f", red, green, blue);
UIColor *color = [UIColor colorWithRed:red green:green blue:blue alpha:1.0];
[colorSwatch setHighlighted:YES];
[colorSwatch setTintColor:color];
uint8_t tx[3] = { red * 255, green * 255, blue * 255 };
NSData *data = [NSData dataWithBytes:(void*)&tx length:3];
[rfduino send:data];
}
This is originally how I set the RGB colour:
void RFduinoBLE_onReceive (char *data, int len) {
if (len >= 3) {
// Get the RGB values.
uint8_t red = data[0];
uint8_t green = data[1];
uint8_t blue = data[2];
// Set PWM for each LED.
analogWrite(rgb2_pin, red);
analogWrite(rgb3_pin, green);
analogWrite(rgb4_pin, blue);
}
}
This is the answer that was provided, which now compiles on Arduino, but I have no idea how or where to actually execute my functions:
#include <RFduinoBLE.h>
int state;
char command;
String hexstring;
// RGB pins.
int redPin = 2;
int grnPin = 3;
int bluPin = 4;
void setup () {
state = 1;
pinMode(redPin, OUTPUT);
pinMode(grnPin, OUTPUT);
pinMode(bluPin, OUTPUT);
// This is the data we want to appear in the advertisement
// (the deviceName length plus the advertisement length must be <= 18 bytes.
RFduinoBLE.deviceName = "iOS";
RFduinoBLE.advertisementInterval = MILLISECONDS(300);
RFduinoBLE.txPowerLevel = -20;
RFduinoBLE.advertisementData = "rgb";
// Start the BLE stack.
RFduinoBLE.begin();
}
void loop () {
//RFduino_ULPDelay(INFINITE);
}
void processCommand (int command, String hex) {
// hex ?
// command ?
}
void RFduinoBLE_onReceive (char *data, int len) {
for (int i = 0; i < len; i++) {
stateMachine(data[i]);
}
}
void stateMachine (char data) {
switch (state) {
case 1:
if (data == 1) {
state = 2;
}
break;
case 2:
if (data == 'b' || data == 'f' || data == 'c') {
command = data;
hexstring = "";
state = 3;
} else if (data != 1) { // Stay in state 2 if we received another 0x01.
state = 1;
}
break;
case 3:
if ((data >= 'a' && data <= 'z') || (data >= '0' && data <= '9')) {
hexstring = hexstring + data;
if (hexstring.length() == 6) {
state = 4;
}
} else if (data == 1) {
state = 2;
} else {
state = 1;
}
break;
case 4:
if (data == 3) {
processCommand(command, hexstring);
state = 1;
} else if (data == 1) {
state = 2;
} else {
state = 1;
}
break;
}
}
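For reference, the state machine above expects each command framed as 0x01 (start), a command character ('b', 'f' or 'c'), six hex digits, and a closing 0x03; processCommand() is where the actual blink/fade/colour functions would then be called. A quick way to exercise it without the BLE link is to feed it a frame by hand (testStateMachine() is a hypothetical helper, not part of the answer):
// Feed one hand-built frame through the state machine: STX, the 'c' command,
// the colour ff8800 as six hex digits, then ETX.
void testStateMachine() {
  const char frame[] = "\x01" "c" "ff8800" "\x03";
  for (int i = 0; frame[i] != '\0'; i++) {
    stateMachine(frame[i]);
  }
}
After the closing 0x03, processCommand('c', "ff8800") is invoked, which is the hook for the real work.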
EDIT: Final code
#include <RFduinoBLE.h>
// State properties.
int state = 1;
char command;
String hexstring;
// RGB pins.
int redPin = 2;
int grnPin = 3;
int bluPin = 4;
// Setup function to set RGB pins to OUTPUT pins.
void setup () {
pinMode(redPin, OUTPUT);
pinMode(grnPin, OUTPUT);
pinMode(bluPin, OUTPUT);
// This is the data we want to appear in the advertisement
// (the deviceName length plus the advertisement length must be <= 18 bytes.
RFduinoBLE.deviceName = "iOS";
RFduinoBLE.advertisementInterval = MILLISECONDS(300);
RFduinoBLE.txPowerLevel = -20;
RFduinoBLE.advertisementData = "rgb";
// Start the BLE stack.
RFduinoBLE.begin();
}
void loop () {
switch (command) {
case 1:
// Blink.
break;
case 2:
// Fade.
break;
}
//RFduino_ULPDelay(INFINITE);
}
// Converts HEX as a String to actual HEX values.
// This is needed to properly convert the ASCII value to the hex
// value of each character.
byte getVal (char c) {
if (c >= '0' && c <= '9') return (byte)(c - '0');
else return (byte)(c - 'a' + 10);
}
// Process each function/command.
void processCommand (int command, String hex) {
switch (command) {
case 'b':
command = 1; // Set blink mode.
break;
case 'f':
command = 2; // Set fade mode.
break;
case 'c':
// We put together 2 characters as is
// done with HEX notation and set the color.
byte red = getVal(hex.charAt(1)) + (getVal(hex.charAt(0)) << 4);
byte green = getVal(hex.charAt(3)) + (getVal(hex.charAt(2)) << 4);
byte blue = getVal(hex.charAt(5)) + (getVal(hex.charAt(4)) << 4);
// Set the color.
setColor (red, green, blue);
break;
}
}
// Sets the color of each RGB pin.
void setColor (byte red, byte green, byte blue) {
analogWrite(redPin, red);
analogWrite(grnPin, green);
analogWrite(bluPin, blue);
}
// This function returns data from the radio.
void RFduinoBLE_onReceive (char *data, int len) {
for (int i = 0; i < len; i++) {
stateMachine(data[i]);
}
}
// Main state machine function, which processes
// data depending on the bytes received.
void stateMachine (char data) {
switch (state) {
case 1:
if (data == 1) {
state = 2;
}
break;
case 2:
if (data == 'b' || data == 'f' || data == 'c') {
command = data;
hexstring = "";
state = 3;
} else if (data != 1) { // Stay in state 2 if we received another 0x01.
state = 1;
}
break;
case 3:
if ((data >= 'a' && data <= 'z') || (data >= '0' && data <= '9')) {
hexstring = hexstring + data;
if (hexstring.length() == 6) {
state = 4;
}
} else if (data == 1) {
state = 2;
} else {
state = 1;
}
break;
case 4:
if (data == 3) {
processCommand(command, hexstring);
state = 1;
} else if (data == 1) {
state = 2;
} else {
state = 1;
}
break;
}
}
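A minimal, non-blocking sketch of one way the empty blink and fade cases in loop() above could be filled in; runEffects() and the lastRed/lastGreen/lastBlue globals are hypothetical names, and it assumes the 'c' branch of processCommand() also stores the colour it applies in those globals:
// Hypothetical globals: the last colour received, plus effect bookkeeping.
byte lastRed = 255, lastGreen = 255, lastBlue = 255;
unsigned long lastStep = 0;
bool ledOn = true;
int fadeLevel = 255;
int fadeDir = -5;
// Run the currently selected effect without blocking loop().
void runEffects() {
  unsigned long now = millis();
  switch (command) {
    case 1: // Blink: toggle the stored colour every 500 ms.
      if (now - lastStep >= 500) {
        lastStep = now;
        ledOn = !ledOn;
        setColor(ledOn ? lastRed : 0, ledOn ? lastGreen : 0, ledOn ? lastBlue : 0);
      }
      break;
    case 2: // Fade: ramp brightness up and down in small steps.
      if (now - lastStep >= 30) {
        lastStep = now;
        fadeLevel += fadeDir;
        if (fadeLevel <= 0 || fadeLevel >= 255) fadeDir = -fadeDir;
        setColor((lastRed * fadeLevel) / 255,
                 (lastGreen * fadeLevel) / 255,
                 (lastBlue * fadeLevel) / 255);
      }
      break;
  }
}
loop() would then call runEffects() in place of its empty switch.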
There is some code here that you can use to convert hex characters to a byte.
So you can add this to your existing code:
byte getVal(char c)
{
if (c >= '0' && c <= '9')
return (byte)(c - '0');
else
return (byte)(c - 'a' + 10);
}
void processCommand (int command, String hex)
{
switch (command) {
case 'b':
command = 1; // set blink mode
break;
case 'f':
command=2; // set fade mode
break;
case 'c':
byte red=getVal(hex.charAt(1)) + (getVal(hex.charAt(0)) << 4);
byte green=getVal(hex.charAt(3)) + (getVal(hex.charAt(2)) << 4);
byte blue=getVal(hex.charAt(5)) + (getVal(hex.charAt(4)) << 4);
setColor(red,green,blue);
}
}
void setColor(byte red,byte green,byte blue)
{
// Set PWM for each LED.
analogWrite(rgb2_pin, red);
analogWrite(rgb3_pin, green);
analogWrite(rgb4_pin, blue);
}
On the iOS side, you can use something like this:
-(void) sendCommand:(char)command arg1:(Byte)arg1 arg2:(Byte)arg2 arg3:(Byte) arg3 {
NSString *commandString=[NSString stringWithFormat:@"\001%c%02x%02x%02x\003",command,arg1,arg2,arg3];
NSData *commandData=[commandString dataUsingEncoding:NSASCIIStringEncoding];
[rfduino send:commandData];
}
- (IBAction)fadeButtonPressed:(id)sender {
[self sendCommand:'f' arg1:0 arg2:0 arg3:0];
}
- (IBAction)blinkButtonPressed:(id)sender {
[self sendCommand:'b' arg1:0 arg2:0 arg3:0];
}
- (void)setColor
{
NSLog(#"colors: RGB %f %f %f", red, green, blue);
UIColor *color = [UIColor colorWithRed:red green:green blue:blue alpha:1.0];
[colorSwatch setHighlighted:YES];
[colorSwatch setTintColor:color];
[self sendCommand:'c' arg1:red*255 arg2:green*255 arg3:blue*255];
}