Does anybody know why the REST API scan query returns data,
http://127.0.0.1:8080/ignite?cmd=qryscanexe&pageSize=5&cacheName=contact
Result:
{
"successStatus": 0,
"error": null,
"sessionToken": null,
"response": {
"items": [
{
"key": {
"id": 703896957108224
},
"value": {
"mergedId": null,
"priority": 0,
"identities": [
{
"id": "7",
"type": "idCard",
"dateCreated": 1652932875433,
"lastUpdated": 1652932875433
}
],
"followerIds": [],
"contactType": "LEADER",
"dateCreated": 1652932875433,
"lastUpdated": 1652932875433
}
}
],
"last": true,
"queryId": 2,
"fieldsMetadata": [
{
"schemaName": null,
"typeName": null,
"fieldName": "key",
"fieldTypeName": null
},
{
"schemaName": null,
"typeName": null,
"fieldName": "value",
"fieldTypeName": null
}
]
}
}
but the get command returns no result:
http://127.0.0.1:8080/ignite?cacheName=contact&cmd=get&keyType=ContactKey&key={"id":703896957108224}
Result:
{
"successStatus": 0,
"affinityNodeId": "ee5e4d0d-5c91-4b9d-b68f-5dfac2f45908",
"error": null,
"sessionToken": null,
"response": null
}
By default, Ignite REST supports Java built-in types for get/put operations, but it should be possible to implement custom serialization if needed. In general it's much better to use the Ignite thin clients; they are more functional than REST.
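For example (a hypothetical URL, assuming a cache whose keys are a built-in type such as long rather than a composite ContactKey), a get with keyType set to a built-in type can be addressed directly:
http://127.0.0.1:8080/ignite?cacheName=myLongKeyedCache&cmd=get&keyType=long&key=703896957108224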
I have a column named "sessions" in a table, which looks like this:
session
[ {"key": "hello", "value": {"string_value": null, "int_value": 1, "float_value": null, "double_value": null}} ]
[ {"key": "welocome", "value": {"string_value": "venue", "int_value": null, "float_value": null, "double_value": null}} ]
When I check the type of these row values it returns Nullable(String) instead of Array.
Is there a way to convert these row values to a valid array? Thank you.
I have nested data and want to update the value of a key inside an array, user_properties['first_open_time']['int_value']. I want to change this value to a static value, -1582243200000. A sample structure of the data is below:
[
{
"user_properties": [
{
"key": "ga_session_id",
"value": {
"string_value": null,
"int_value": "1582306435",
"float_value": null,
"double_value": null,
"set_timestamp_micros": "1582306435527000"
}
},
{
"key": "ga_session_number",
"value": {
"string_value": null,
"int_value": "1",
"float_value": null,
"double_value": null,
"set_timestamp_micros": "1582306435527000"
}
},
{
"key": "first_open_time",
"value": {
"string_value": null,
"int_value": "1582308000000",
"float_value": null,
"double_value": null,
"set_timestamp_micros": "1582306432489000"
}
}
]
}
]
You basically need to reconstruct the struct, changing what you want to change in the process:
-- CREATE TABLE `temp.firebase_sample`
-- AS (
--   SELECT * FROM `bingo-blast-174dd.analytics_151321511.events_20200225`
-- );
-- Rebuild the user_properties array: copy every field as-is and swap in the
-- new int_value only for the first_open_time key.
UPDATE `temp.firebase_sample` a
SET user_properties = (
  SELECT
    ARRAY_AGG(STRUCT(key,
      STRUCT(value.string_value,
        IF(key = 'first_open_time', 1582243200000, value.int_value),
        value.float_value,
        value.double_value,
        value.set_timestamp_micros)))
  FROM UNNEST(a.user_properties) x)
WHERE TRUE
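As a quick sanity check (a sketch, not part of the original answer, assuming the same temp.firebase_sample working copy), you can inspect the rewritten value afterwards:
-- Hypothetical follow-up query: list first_open_time values after the UPDATE.
SELECT up.key, up.value.int_value
FROM `temp.firebase_sample`, UNNEST(user_properties) AS up
WHERE up.key = 'first_open_time'
LIMIT 10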
I thought I'd be able to get what I needed by asking a simpler question referencing a simpler data example here, but I still need some help.
I'm pretty new to querying JSON-style data within BigQuery, and am having trouble with the analytics (events) data that Firebase dumps into BigQuery for me. The format of one row of data is below (with some fluff trimmed out).
{
"user_dim": {
"user_id": "some_identifier_here",
"user_properties": [
{
"key": "special_key1",
"val": {
"val": {
"str_val": "894",
"int_val": null
}
}
},
{
"key": "special_key2",
"val": {
"val": {
"str_val": "1",
"int_val": null
}
}
},
{
"key": "special_key3",
"val": {
"val": {
"str_val": "23",
"int_val": null
}
}
}
],
"device_info": {
"device_category": "mobile",
"mobile_brand_name": "Samsung",
"mobile_model_name": "model_phone"
},
"dt_a": "1470625311138000",
"dt_b": "1470620345566000"
},
"event_dim": [
{
"name": "user_engagement",
"params": [
{
"key": "firebase_event_origin",
"value": {
"string_value": "auto",
"int_value": null,
"float_value": null,
"double_value": null
}
},
{
"key": "engagement_time_msec",
"value": {
"string_value": null,
"int_value": "30006",
"float_value": null,
"double_value": null
}
}
],
"timestamp_micros": "1470675614434000",
"previous_timestamp_micros": "1470675551092000"
},
{
"name": "new_game",
"params": [
{
"key": "total_time",
"value": {
"string_value": "496048",
"int_value": null,
"float_value": null,
"double_value": null
}
},
{
"key": "armor",
"value": {
"string_value": "2",
"int_value": null,
"float_value": null,
"double_value": null
}
},
{
"key": "reason",
"value": {
"string_value": "power_up",
"int_value": null,
"float_value": null,
"double_value": null
}
}
],
"timestamp_micros": "1470675825988001",
"previous_timestamp_micros": "1470675282500001"
},
{
"name": "user_engagement",
"params": [
{
"key": "firebase_event_origin",
"value": {
"string_value": "auto",
"int_value": null,
"float_value": null,
"double_value": null
}
},
{
"key": "engagement_time_msec",
"value": {
"string_value": null,
"int_value": "318030",
"float_value": null,
"double_value": null
}
}
],
"timestamp_micros": "1470675972778002",
"previous_timestamp_micros": "1470675614434002"
},
{
"name": "won_game",
"params": [
{
"key": "total_time",
"value": {
"string_value": "497857",
"int_value": null,
"float_value": null,
"double_value": null
}
},
{
"key": "level",
"value": {
"string_value": null,
"int_value": "207",
"float_value": null,
"double_value": null
}
},
{
"key": "sword",
"value": {
"string_value": "iron",
"int_value": null,
"float_value": null,
"double_value": null
}
}
],
"timestamp_micros": "1470677171374007",
"previous_timestamp_micros": "1470671343784007"
}
]
}
Based on the answers to my original question I've been able to work just fine with the first part of the object user_dim. However, whenever I try similar approaches to the event_dim field (unnesting it) the queries fail with the message "Error: Scalar subquery produced more than one element." I have a suspicion this is due to the fact that event_dim is an array itself, and contains structs that have arrays in them as well.
If it helps, here is the basic query that is giving me the error, although it should be noted that I am quite out of my element working with this type of data in BQ and could be going completely off course:
SELECT
(SELECT name FROM UNNEST(event_dim) WHERE name = 'user_engagement') AS event_name
FROM
my_table;
The end result I am going for is a query that can turn a table containing many of these types of objects into a table that outputs one row per event in each object's event_dim array. For the example object above, I'd want it to output 4 rows where the first set of columns are identical and are just the metadata from user_dim. Then I'd like columns that I can explicitly define based on what I know will exist for each possible event (event_name, firebase_event_origin, engagement_time_msec, total_time, armor, reason, level, sword), filled with the value from that event parameter, or NULL if it doesn't exist.
Based on Mikhail's answer, but over an actual Firebase dataset:
SELECT
user_dim.app_info.app_instance_id,
timestamp_micros,
(SELECT value.int_value FROM UNNEST(dim.params) WHERE key = "level") AS level,
(SELECT value.int_value FROM UNNEST(dim.params) WHERE key = "coins") AS coins,
(SELECT value.int_value FROM UNNEST(dim.params) WHERE key = "powerups") AS powerups
FROM `dataset.table`, UNNEST(event_dim) AS dim
WHERE timestamp_micros=1464718937589000
(saving it here for future reference, and easier copy-pastability)
Hopefully the below can give you the next push:
WITH YourTable AS (
SELECT ARRAY[
STRUCT(
"user_engagement" AS name,
ARRAY<STRUCT<key STRING, val STRUCT<str_val STRING, int_val INT64>>>[
STRUCT("firebase_event_origin", STRUCT("auto", NULL)),
STRUCT("engagement_time_msec", STRUCT("30006", NULL))] AS params,
1470675614434000 AS TIMESTAMP_MICROS,
1470675551092000 AS previous_timestamp_micros
),
STRUCT(
"new_game" AS name,
ARRAY<STRUCT<key STRING, val STRUCT<str_val STRING, int_val INT64>>>[
STRUCT("total_time", STRUCT("496048", NULL)),
STRUCT("armor", STRUCT("2", NULL)),
STRUCT("reason", STRUCT("power_up", NULL))] AS params,
1470675825988001 AS TIMESTAMP_MICROS,
1470675282500001 AS previous_timestamp_micros
),
STRUCT(
"user_engagement" AS name,
ARRAY<STRUCT<key STRING, val STRUCT<str_val STRING, int_val INT64>>>[
STRUCT("firebase_event_origin", STRUCT("auto", NULL)),
STRUCT("engagement_time_msec", STRUCT("318030", NULL))] AS params,
1470675972778002 AS TIMESTAMP_MICROS,
1470675614434002 AS previous_timestamp_micros
),
STRUCT(
"won_game" AS name,
ARRAY<STRUCT<key STRING, val STRUCT<str_val STRING, int_val INT64>>>[
STRUCT("total_time", STRUCT("497857", NULL)),
STRUCT("level", STRUCT("207", NULL)),
STRUCT("sword", STRUCT("iron", NULL))] AS params,
1470677171374007 AS TIMESTAMP_MICROS,
1470671343784007 AS previous_timestamp_micros
)
] AS event_dim
)
SELECT
name,
(SELECT val.str_val FROM UNNEST(dim.params) WHERE key = "firebase_event_origin") AS firebase_event_origin,
(SELECT val.str_val FROM UNNEST(dim.params) WHERE key = "engagement_time_msec") AS engagement_time_msec,
(SELECT val.str_val FROM UNNEST(dim.params) WHERE key = "total_time") AS total_time,
(SELECT val.str_val FROM UNNEST(dim.params) WHERE key = "armor") AS armor,
(SELECT val.str_val FROM UNNEST(dim.params) WHERE key = "reason") AS reason,
(SELECT val.str_val FROM UNNEST(dim.params) WHERE key = "level") AS level,
(SELECT val.str_val FROM UNNEST(dim.params) WHERE key = "sword") AS sword
FROM YourTable, UNNEST(event_dim) AS dim
Below is my JSON. I'm trying to create a tree out of it.
I tried with the following snippet:
require(["dijit/Tree", "dojo/data/ItemFileReadStore", "dijit/tree/ForestStoreModel", "dojo/domReady!"],
function(Tree, ItemFileReadStore, ForestStoreModel){
var store = new ItemFileReadStore({
url: "/om/web/em/tree.json"
});
var treeModel = new ForestStoreModel({
store: store,
query: {id: 'aid'},
rootId: "PSS-32",
rootLabel: "P",
childrenAttrs: ['eqList']
});
var myTree = new Tree({
model: treeModel
}, "treeOne");
myTree.startup();
});
But this is giving me "error loading PSS010010026024 children" and "Cannot read property 'length' of undefined" errors. What should be specified in rootId, rootLabel and childrenAttrs?
[
{
"responseStatus": null,
"entityType": "NODE",
"aid": "p",
"id": "p",
"hsa": null,
"eqList":[ {
"responseStatus": null,
"EId": "5",
"EGroupId": "1006",
"aid": "p",
"additionalInfo": null,
"eqList": [
{
"responseStatus": null,
"EId": null,
"EGroupId": null,
"aid": null,
"additionalInfo": null,
"eqList": null,
"shelfType": null,
"isEqAvailable": null,
"id": null,
"entityType": null,
"hsa": null,
"Elist": null
}
],
"shelfType": null,
"isEqAvailable": null,
"id": "p/p",
"entityType": "E",
"hsa": "-",
"Elist": null
{
"responseStatus": null,
"EId": "5",
"EGroupId": "1006",
"aid": "p#OCS",
"EType": "1830pss-ocs",
"ERelease": "7.0",
"additionalInfo": null,
"eqList": [
{
"responseStatus": null,
"EId": null,
"EGroupId": null,
"aid": null,
"EType": null,
"ERelease": null,
"additionalInfo": null,
"eqList": null,
"shelfType": null,
"isEqAvailable": null,
"id": null,
"entityType": null,
"hsa": null,
"Elist": null
}
],
"shelfType": null,
"isEqAvailable": null,
"id": "p/p#OCS",
"entityType": "E",
"hsa": "-",
"Elist": null
}
]
}
]
The rootId attribute is the ID you want to give the root item that will be created (so that you can query for it later, or check whether your tree is at the top level). The rootLabel is what you want the label of the root item to be. The childrenAttrs is how you tell the Tree where a specific node's children are.
I'm not sure what you're trying to do in your code, though, since your data doesn't seem to have PSS010010026024 in it, but I would recommend checking out the API documentation for the ForestStoreModel here: http://dojotoolkit.org/api/?qs=1.9/dijit/tree/ForestStoreModel
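As a rough sketch (not from the original answer, and assuming you can reshape the service output), note that dojo/data/ItemFileReadStore expects an object with an items array rather than a bare top-level array, and leaf nodes are easier for the tree to walk when the children attribute is an empty array instead of null, e.g.:
{
  "identifier": "id",
  "label": "aid",
  "items": [
    {
      "id": "p",
      "aid": "p",
      "entityType": "NODE",
      "eqList": [
        { "id": "p/p", "aid": "p", "entityType": "E", "eqList": [] },
        { "id": "p/p#OCS", "aid": "p#OCS", "entityType": "E", "eqList": [] }
      ]
    }
  ]
}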