amcharts v3 add legend to radar chart - legend

I would like to add a legend to my radar chart, but I cannot seem to connect the legend data to the dataProvider data.
Here is my javascript code:
AmCharts.makeChart("360_radar_chartdiv",
{"type": "radar",
"dataProvider": [
{"competence": "test Ronald","user_15": 7.5,"user_16": 7.5,"user_18": 1,"user_23": 5},
{"competence": "Aansturen","user_15": 5,"user_16": 7,"user_18": 8,"user_23": 3}],
"valueAxes": [
{"axisTitleOffset": 20, "minimum": 0,"maximum": 10, "axisAlpha": 0.15}],
"startDuration": 0 ,
"graphs": [
{"balloonText": "[[value]]", "bullet": "round","valueField": "user_15"},
{"balloonText": "[[value]]", "bullet": "round","valueField": "user_16"},
{"balloonText": "[[value]]", "bullet": "round","valueField": "user_18"},
{"balloonText": "[[value]]", "bullet": "round","valueField": "user_23"}
],
"categoryField": "competence",
"legend": {"align": "center", "markerType": "circle", "position": "right","marginright": 80, "automargin": false, "combineLegend": true,
"data": [
{"title": "Beheer B van (Proeftuin)"},
{"title": "Acda W. (Willem)"},
{"title": "Baard B. (Bas)"},
{"title": "Bedrijf 1 - Jan Stucadoor"}]}
});

I have resolved this by putting the legend text in the graphs data:
"graphs": [
{"balloonText": "[[value]] - van Beheer B van (Proeftuin)", "bullet": "round","valueField": "user_25"},
{"balloonText": "[[value]] - van Acda W. (Willem)", "bullet": "round","valueField": "user_26"},
{"balloonText": "[[value]] - van Rutte M.E. (Mark)", "bullet": "round","valueField": "user_27"},
{"balloonText": "[[value]] - van Bedrijf 1 - Persoon 1", "bullet": "round","valueField": "user_33"}]

Related

prevent text from overlapping data points and other text

I'm trying to find an intelligent solution to how text / annotations are placed into a matplotlib plot so they don't overlap with the data point being annotated. Code snippet below. Apologies for the long dict at the top. So far I've found adjustText, which looks very promising, but I can't seem to get it working in this instance. The code below uses adjust_text(), but at the moment all the text is being placed together in one part of the ax and I don't understand why. If you run without adjust_text(), it places the text roughly where it should be, but the text overlaps the data points in places, which I want to avoid. Grateful for any help.
fig, ax = plt.subplots(figsize=(10, 8))
dl_data = {
"Center": {
"axis": (0, 0),
"tp": (0, 0),
"r": 21.37311395187889,
"colour": "#ffffff",
"text": "Center",
"fill": "solid",
"ec": "#808080",
"alignment": ("center", "center"),
},
"First": {
"r": 6.758772077825972,
"wlc": 45.681000000000004,
"text": "First",
"colour": "#FFFFFF",
"fill": "dashed",
"ec": "#808080",
"alignment": ("center", "center"),
"axis": (-68.82111180215705, -1.2642233142341064e-14),
"tp": (-68.82111180215705, -1.2642233142341064e-14),
},
"Second": {
"r": 18.979199140111263,
"wlc": 360.21000000000004,
"text": "Second",
"colour": "#FFFFFF",
"fill": "dashed",
"ec": "#808080",
"alignment": ("center", "center"),
"axis": (-34.41055590107855, 59.600831137357034),
"tp": (-34.41055590107855, 59.600831137357034),
},
"P1": {
"r": 4.779173568725037,
"wlc": 2.6,
"colour": "#92a700",
"text": "P1, £3",
"fill": "solid",
"ec": "#92a700",
"axis": (-80.83697480558055, -1.4849511367261418e-14),
"alignment": ("right", "top"),
"tp": (-87.6161483743056, -1.6094825349031936e-14),
},
"P2": {
"r": 4.779173568725037,
"wlc": 0,
"colour": "#ffba00",
"text": "P2 has a long\nName, £0\n\n",
"fill": "solid",
"ec": "#ffba00",
"axis": (-13.031791598544089, 30.17548646933409),
"alignment": ("left", "top"),
"tp": (-9.047093352116576, 24.691019844418072),
},
"P3": {
"r": 4.779173568725037,
"wlc": 0.21,
"colour": "#92a700",
"text": "P3 has a very,\nlong long long,\nname, £0 \n",
"fill": "solid",
"ec": "#92a700",
"axis": (-55.78932020361301, 30.175486469334082),
"alignment": ("right", "top"),
"tp": (-59.77401845004052, 24.691019844418065),
},
"P4": {
"r": 15.811388300841896,
"wlc": 250,
"colour": "#e77200",
"text": "P4 also\nhas a longish\nname, £250\n",
"fill": "solid",
"ec": "#e77200",
"axis": (-34.41055590107855, 95.97255740839438),
"alignment": ("center", "center"),
"tp": (-34.41055590107855, 113.78394570923628),
},
"P5": {
"r": 4.779173568725037,
"wlc": 6.6,
"colour": "#92a700",
"text": "P5 is medium,\n£7\n\n",
"fill": "solid",
"ec": "#92a700",
"axis": (-69.00212318005225, 70.8403126698613),
"alignment": ("right", "top"),
"tp": (-75.44950037768407, 72.9351925104148),
},
"P6": {
"r": 10.16857905510893,
"wlc": 103.4,
"colour": "#92a700",
"text": "P6 is a very long name\nlike P4 is also,\n£100\n",
"fill": "solid",
"ec": "#92a700",
"axis": (0.181011377895139, 70.8403126698613),
"alignment": ("left", "top"),
"tp": (11.754017782309209, 74.600610395285),
},
}
ts = []
x_list = []
y_list = []
for c in dl_data.keys():
circle = plt.Circle(
dl_data[c]["axis"], # x, y position
radius=dl_data[c]["r"],
fc=dl_data[c]["colour"], # face colour
ec=dl_data[c]["ec"], # edge colour
zorder=2,
)
ax.add_patch(circle)
x = dl_data[c]["axis"][0]
y = dl_data[c]["axis"][1]
text = dl_data[c]["text"]
if c in ["Center", "First", "Second"]:
pass
else:
ts.append(ax.text(x, y, dl_data[c]["text"]))
x_list.append(x)
y_list.append(y)
adjust_text(
ts,
x=x_list,
y=y_list,
force_points=0.1,
arrowprops=dict(arrowstyle="->", color="red"),
)
plt.axis("scaled")
plt.axis("off")
plt.show()
There are two issues:
adjust_text must be called after all drawing is completed, i.e. plt.axis("scaled") must come before adjust_text; see the docs:
Call adjust_text the very last, after all plotting (especially
anything that can change the axes limits) has been done.
You must pass your circles as additional objects to be avoided: add_objects=objects
ts = []
x_list = []
y_list = []
objects = []
for c in dl_data.keys():
circle = plt.Circle(
dl_data[c]["axis"], # x, y position
radius=dl_data[c]["r"],
fc=dl_data[c]["colour"], # face colour
ec=dl_data[c]["ec"], # edge colour
zorder=2,
)
objects.append(circle)
ax.add_patch(circle)
x = dl_data[c]["axis"][0]
y = dl_data[c]["axis"][1]
text = dl_data[c]["text"]
if c in ["Center", "First", "Second"]:
pass
else:
ts.append(ax.text(x, y, dl_data[c]["text"].strip()))
x_list.append(x)
y_list.append(y)
plt.axis("scaled")
plt.axis("off")
adjust_text(
ts,
add_objects=objects,
arrowprops=dict(arrowstyle="->", color="red"),
)
I couldn't manage to move the P6 text away from the green and orange circles, though.

Combining separate temporal measurement series

I have a data set that combines two temporal measurement series with one row per measurement
time: 1, measurement: a, value: 5
time: 2, measurement: b, value: false
time: 10, measurement: a, value: 2
time: 13, measurement: b, value: true
time: 20, measurement: a, value: 4
time: 24, measurement: b, value: true
time: 30, measurement: a, value: 6
time: 32, measurement: b, value: false
In a visualization using Vega-Lite, I'd like to combine the measurement series and encode measurements a and b in a single visualization, without simply layering their representations on a temporal axis, but instead representing their values in a single encoding spec.
either measurement a values need to be interpolated and added as a new value to rows of measurement b
eg:
time: 2, measurement: b, value: false, interpolatedMeasurementA: 4.6667
or the other way around, which leaves the question of how to interpolate a boolean. maybe closest value by time, or simpler: last value
eg:
time: 30, measurement: a, value: 6, lastValueMeasurementB: true
I suppose this could be done either query-side, in which case this question would be regarding the InfluxDB Flux query language,
or this could be done on the visualization side in which case this would be regarding vega-lite
There's not any true linear interpolation schemes built-in to Vega-Lite (though the loess transform comes close), but you can achieve roughly what you wish with a window transform.
Here is an example (view in editor):
{
"data": {
"values": [
{"time": 1, "measurement": "a", "value": 5},
{"time": 2, "measurement": "b", "value": false},
{"time": 10, "measurement": "a", "value": 2},
{"time": 13, "measurement": "b", "value": true},
{"time": 20, "measurement": "a", "value": 4},
{"time": 24, "measurement": "b", "value": true},
{"time": 30, "measurement": "a", "value": 6},
{"time": 32, "measurement": "b", "value": false}
]
},
"transform": [
{
"calculate": "datum.measurement == 'a' ? datum.value : null",
"as": "measurement_a"
},
{
"window": [
{"op": "mean", "field": "measurement_a", "as": "interpolated"}
],
"sort": [{"field": "time"}],
"frame": [1, 1]
},
{"filter": "datum.measurement == 'b'"}
],
"mark": "line",
"encoding": {
"x": {"field": "time"},
"y": {"field": "interpolated"},
"color": {"field": "value"}
}
}
This first uses a calculate transform to isolate the values to be interpolated, then a window transform that computes the mean over adjacent values (frame: [1, 1]), then a filter transform to isolate interpolated rows.
If you wanted to go the other route, you could do a similar sequence of transforms targeting the boolean value instead.

Using COPY to import a .json file into a PostgreSQL table

I want to import some weather data (temperature, wind speed, ...) that is all formatted in a JSON file into a PostgreSQL 11 table so I can then make queries on that data.
I've been able to manually insert some data into a table but that's only OK because it's a small amount of data and I'm planning on using a LOT more data afterwards. Here is what I've found using the INSERT function: https://datavirtuality.com/blog-json-in-postgresql/.
That's why I've been trying to use the COPY function but no luck so far, even after having read a lot of stuff on different sources on the Internet ...
The JSON file is downloadable there : https://queueresults.meteoblue.com/F2637B90-45BB-4E7A-B47C-C34CD56674B3 (let me know if the file doesn't exist anymore).
I've been able to import the JSON file as text into a table with:
create table temp_json (values text);
copy temp_json from '/home/cae/test.json';
But I don't think that's the best approach to be able to make efficient queries later on ...
I usually run into the following error during my tests:
ERROR: invalid input syntax for type json
DETAIL: The input string ended unexpectedly.
CONTEXT: JSON data, line 1: [
as if I'm not able to parse the JSON file and the array properly within PostgreSQL ...
Thanks for your help !
Edit: Here is the content of the JSON file:
[
{
"geometry": {
"type": "MultiPoint",
"locationNames": [
"59.4°N/24.7°E31.7m",
"59.4°N/24.8°E36.4m"
],
"coordinates": [
[
24.7,
59.4,
31.73
],
[
24.8,
59.4,
36.445
]
]
},
"domain": "NEMS12",
"codes": [
{
"unit": "°C",
"dataPerTimeInterval": [
{
"data": [
[
-0.395,
-0.195,
-0.099999994,
-0.030000001,
-0.060000002,
-0.099999994,
-0.099999994,
0.005,
-0.055,
0.19,
0.48,
0.725,
1.88,
1.88,
1.855,
1.935,
2.1950002,
2.595,
3.3049998,
4.115,
3.37,
2.97,
3.32,
3.5149999,
3.56,
3.44,
3.355,
3.3600001,
3.32,
3.32,
3.4250002,
3.42,
3.3899999,
3.445,
3.3200002,
3.0549998,
4.58,
4.01,
3.02,
2.79,
2.75,
2.76,
2.855,
2.99,
2.96,
2.775,
2.595,
2.4250002
],
[
-0.49,
-0.26,
-0.16,
-0.09,
-0.1,
-0.13,
-0.12,
0.01,
-0.07,
0.17,
0.44,
0.66,
1.84,
1.85,
1.83,
1.9,
2.15,
2.55,
3.27,
4.11,
3.46,
2.96,
3.31,
3.5,
3.55,
3.42,
3.33,
3.34,
3.29,
3.29,
3.43,
3.44,
3.42,
3.52,
3.41,
3.11,
4.53,
4,
3.01,
2.79,
2.76,
2.77,
2.87,
3,
2.93,
2.71,
2.53,
2.38
]
],
"gapFillRatio": 0
}
],
"level": "2 m above gnd",
"aggregation": "none",
"code": 11,
"variable": "Temperature"
}
],
"timeIntervals": [
[
"20180101T0000",
"20180101T0100",
"20180101T0200",
"20180101T0300",
"20180101T0400",
"20180101T0500",
"20180101T0600",
"20180101T0700",
"20180101T0800",
"20180101T0900",
"20180101T1000",
"20180101T1100",
"20180101T1200",
"20180101T1300",
"20180101T1400",
"20180101T1500",
"20180101T1600",
"20180101T1700",
"20180101T1800",
"20180101T1900",
"20180101T2000",
"20180101T2100",
"20180101T2200",
"20180101T2300",
"20180102T0000",
"20180102T0100",
"20180102T0200",
"20180102T0300",
"20180102T0400",
"20180102T0500",
"20180102T0600",
"20180102T0700",
"20180102T0800",
"20180102T0900",
"20180102T1000",
"20180102T1100",
"20180102T1200",
"20180102T1300",
"20180102T1400",
"20180102T1500",
"20180102T1600",
"20180102T1700",
"20180102T1800",
"20180102T1900",
"20180102T2000",
"20180102T2100",
"20180102T2200",
"20180102T2300"
]
],
"timeResolution": "hourly"
},
{
"geometry": {
"coordinates": [
[
24.7,
59.4,
31.73
],
[
24.8,
59.4,
36.445
]
],
"locationNames": [
"59.4°N/24.7°E31.7m",
"59.4°N/24.8°E36.4m"
],
"type": "MultiPoint"
},
"domain": "NEMS12",
"codes": [
{
"unit": "°C",
"aggregation": "none",
"code": 11,
"level": "1000 mb",
"dataPerTimeInterval": [
{
"data": [
[
-0.585,
-0.265,
-0.055,
0.04,
0.044999998,
0.08,
0.11,
0.205,
0.13499999,
0.43,
0.84000003,
1.2,
2.1,
2.33,
2.5,
2.72,
3.1750002,
3.775,
4.915,
5.37,
4.16,
3.795,
4.1949997,
4.41,
4.415,
4.275,
4.1800003,
4.16,
4.0950003,
4.08,
4.185,
4.1,
3.98,
3.575,
3.22,
2.92,
4.395,
3.7649999,
2.895,
2.66,
2.6550002,
2.72,
2.845,
2.955,
2.89,
2.685,
2.54,
2.355
],
[
-0.64,
-0.29,
-0.08,
0.01,
0.03,
0.08,
0.12,
0.24,
0.14,
0.4,
0.8,
1.13,
2.11,
2.34,
2.52,
2.74,
3.19,
3.82,
4.91,
5.45,
4.29,
3.81,
4.19,
4.42,
4.43,
4.28,
4.17,
4.15,
4.08,
4.06,
4.18,
4.12,
4.01,
3.66,
3.31,
2.97,
4.38,
3.79,
2.9,
2.68,
2.68,
2.75,
2.89,
2.99,
2.88,
2.64,
2.43,
2.27
]
],
"gapFillRatio": 0
}
],
"variable": "Temperature"
}
],
"timeIntervals": [
[
"20180101T0000",
"20180101T0100",
"20180101T0200",
"20180101T0300",
"20180101T0400",
"20180101T0500",
"20180101T0600",
"20180101T0700",
"20180101T0800",
"20180101T0900",
"20180101T1000",
"20180101T1100",
"20180101T1200",
"20180101T1300",
"20180101T1400",
"20180101T1500",
"20180101T1600",
"20180101T1700",
"20180101T1800",
"20180101T1900",
"20180101T2000",
"20180101T2100",
"20180101T2200",
"20180101T2300",
"20180102T0000",
"20180102T0100",
"20180102T0200",
"20180102T0300",
"20180102T0400",
"20180102T0500",
"20180102T0600",
"20180102T0700",
"20180102T0800",
"20180102T0900",
"20180102T1000",
"20180102T1100",
"20180102T1200",
"20180102T1300",
"20180102T1400",
"20180102T1500",
"20180102T1600",
"20180102T1700",
"20180102T1800",
"20180102T1900",
"20180102T2000",
"20180102T2100",
"20180102T2200",
"20180102T2300"
]
],
"timeResolution": "hourly"
},
{
"geometry": {
"type": "MultiPoint",
"locationNames": [
"59.4°N/24.7°E31.7m",
"59.4°N/24.8°E36.4m"
],
"coordinates": [
[
24.7,
59.4,
31.73
],
[
24.8,
59.4,
36.445
]
]
},
"domain": "NEMS12",
"codes": [
{
"unit": "°C",
"dataPerTimeInterval": [
{
"data": [
[
-7.0950003,
-6.615,
-4.815,
-3.55,
-2.6750002,
-2.1950002,
-2.695,
-2.87,
-2.1399999,
-0.995,
0.1,
1,
0.335,
0.38,
-0.030000001,
-0.8,
-0.18,
0.575,
1.11,
-0.32999998,
-1.03,
-2.31,
-3.09,
-3.7350001,
-3.93,
-3.905,
-3.92,
-3.71,
-3.625,
-3.195,
-3.7,
-3.32,
-3.72,
-3.915,
-3.93,
-3.605,
-4.315,
-3.8899999,
-3.815,
-3.38,
-3.2150002,
-3.27,
-3.435,
-3.47,
-3.43,
-3.37,
-3.44,
-3.51
],
[
-7.11,
-6.73,
-4.94,
-3.57,
-2.7,
-2.15,
-2.62,
-2.91,
-2.22,
-1.1,
0.03,
0.9,
0.36,
0.37,
0.11,
-0.74,
-0.13,
0.59,
1.19,
-0.19,
-0.95,
-2.18,
-3.08,
-3.68,
-3.97,
-3.94,
-3.93,
-3.69,
-3.63,
-3.27,
-3.7,
-3.32,
-3.68,
-3.9,
-3.97,
-3.6,
-4.29,
-3.92,
-3.8,
-3.37,
-3.24,
-3.28,
-3.42,
-3.44,
-3.39,
-3.35,
-3.37,
-3.44
]
],
"gapFillRatio": 0
}
],
"level": "850 mb",
"code": 11,
"aggregation": "none",
"variable": "Temperature"
}
],
"timeResolution": "hourly",
"timeIntervals": [
[
"20180101T0000",
"20180101T0100",
"20180101T0200",
"20180101T0300",
"20180101T0400",
"20180101T0500",
"20180101T0600",
"20180101T0700",
"20180101T0800",
"20180101T0900",
"20180101T1000",
"20180101T1100",
"20180101T1200",
"20180101T1300",
"20180101T1400",
"20180101T1500",
"20180101T1600",
"20180101T1700",
"20180101T1800",
"20180101T1900",
"20180101T2000",
"20180101T2100",
"20180101T2200",
"20180101T2300",
"20180102T0000",
"20180102T0100",
"20180102T0200",
"20180102T0300",
"20180102T0400",
"20180102T0500",
"20180102T0600",
"20180102T0700",
"20180102T0800",
"20180102T0900",
"20180102T1000",
"20180102T1100",
"20180102T1200",
"20180102T1300",
"20180102T1400",
"20180102T1500",
"20180102T1600",
"20180102T1700",
"20180102T1800",
"20180102T1900",
"20180102T2000",
"20180102T2100",
"20180102T2200",
"20180102T2300"
]
]
},
{
"geometry": {
"type": "MultiPoint",
"locationNames": [
"59.4°N/24.7°E31.7m",
"59.4°N/24.8°E36.4m"
],
"coordinates": [
[
24.7,
59.4,
31.73
],
[
24.8,
59.4,
36.445
]
]
},
"domain": "NEMS12",
"codes": [
{
"unit": "°C",
"dataPerTimeInterval": [
{
"data": [
[
-10.84,
-12,
-10.280001,
-8.865,
-8.5,
-7.7,
-7.5699997,
-7.655,
-8.434999,
-8.844999,
-8.700001,
-7.1549997,
-9.555,
-10.004999,
-7.885,
-8.32,
-8.370001,
-8.915,
-9.53,
-10.225,
-10.934999,
-11.12,
-11.434999,
-11.575,
-11.965,
-11.64,
-12.12,
-12.345,
-12.34,
-12.48,
-12.844999,
-13.174999,
-13.18,
-13.219999,
-13.434999,
-13.305,
-12.775,
-12.745,
-12.79,
-12.75,
-12.690001,
-12.77,
-12.77,
-12.76,
-12.67,
-12.605,
-12.635,
-12.695
],
[
-10.74,
-11.94,
-10.54,
-8.77,
-8.56,
-7.75,
-7.52,
-7.53,
-8.24,
-8.95,
-8.77,
-7.15,
-9.48,
-10.03,
-7.88,
-8.24,
-8.35,
-8.82,
-9.4,
-10.08,
-10.84,
-11.04,
-11.3,
-11.5,
-11.9,
-11.6,
-12.09,
-12.31,
-12.39,
-12.48,
-12.83,
-13.16,
-13.2,
-13.19,
-13.4,
-13.3,
-12.77,
-12.7,
-12.78,
-12.71,
-12.66,
-12.73,
-12.73,
-12.72,
-12.62,
-12.57,
-12.6,
-12.67
]
],
"gapFillRatio": 0
}
],
"code": 11,
"level": "700 mb",
"aggregation": "none",
"variable": "Temperature"
}
],
"timeResolution": "hourly",
"timeIntervals": [
[
"20180101T0000",
"20180101T0100",
"20180101T0200",
"20180101T0300",
"20180101T0400",
"20180101T0500",
"20180101T0600",
"20180101T0700",
"20180101T0800",
"20180101T0900",
"20180101T1000",
"20180101T1100",
"20180101T1200",
"20180101T1300",
"20180101T1400",
"20180101T1500",
"20180101T1600",
"20180101T1700",
"20180101T1800",
"20180101T1900",
"20180101T2000",
"20180101T2100",
"20180101T2200",
"20180101T2300",
"20180102T0000",
"20180102T0100",
"20180102T0200",
"20180102T0300",
"20180102T0400",
"20180102T0500",
"20180102T0600",
"20180102T0700",
"20180102T0800",
"20180102T0900",
"20180102T1000",
"20180102T1100",
"20180102T1200",
"20180102T1300",
"20180102T1400",
"20180102T1500",
"20180102T1600",
"20180102T1700",
"20180102T1800",
"20180102T1900",
"20180102T2000",
"20180102T2100",
"20180102T2200",
"20180102T2300"
]
]
},
{
"geometry": {
"type": "MultiPoint",
"locationNames": [
"59.4°N/24.7°E",
"59.4°N/24.8°E"
],
"coordinates": [
[
24.7,
59.4,
"NaN"
],
[
24.8,
59.4,
"NaN"
]
]
},
"domain": "CAMSGLOBAL",
"codes": [
{
"unit": "",
"dataPerTimeInterval": [
{
"data": [
[
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN"
],
[
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN",
"NaN"
]
],
"gapFillRatio": 0
}
],
"code": 706,
"level": "sfc",
"aggregation": "none",
"variable": "Air Quality Index"
}
],
"timeResolution": "hourly",
"timeIntervals": [
[
"20180101T0000",
"20180101T0100",
"20180101T0200",
"20180101T0300",
"20180101T0400",
"20180101T0500",
"20180101T0600",
"20180101T0700",
"20180101T0800",
"20180101T0900",
"20180101T1000",
"20180101T1100",
"20180101T1200",
"20180101T1300",
"20180101T1400",
"20180101T1500",
"20180101T1600",
"20180101T1700",
"20180101T1800",
"20180101T1900",
"20180101T2000",
"20180101T2100",
"20180101T2200",
"20180101T2300",
"20180102T0000",
"20180102T0100",
"20180102T0200",
"20180102T0300",
"20180102T0400",
"20180102T0500",
"20180102T0600",
"20180102T0700",
"20180102T0800",
"20180102T0900",
"20180102T1000",
"20180102T1100",
"20180102T1200",
"20180102T1300",
"20180102T1400",
"20180102T1500",
"20180102T1600",
"20180102T1700",
"20180102T1800",
"20180102T1900",
"20180102T2000",
"20180102T2100",
"20180102T2200",
"20180102T2300"
]
]
}
]
Given your first example, you could then process it like this to separate the json array into individual objects and stuff them into a table as separate rows:
create table real_json as select value::jsonb from temp_json join lateral json_array_elements(values::json) on true;
However, this depends on the large single JSON object always being small enough to fit comfortably into an amount of memory you are willing to use, which seems like a dubious proposition. You need a library which does incremental or streaming parsing of the JSON object, returning one 2nd-level object at a time and then clearing it from memory once returned. I don't think that PostgreSQL provides such a facility. If you let us know what your favorite programming language is, perhaps someone can propose a specific library.
Alternatively, you could whip up a quick and dirty script that divides the JSON into lines for separate records based on the assumption that the indenting of the "pretty" file is always correct, and so using "^ [{}]" as markers, and then strips out the newlines to reverse the "pretty" formatting so that each record is a single line. If you had such a script, you could then do:
\copy real_json FROM PROGRAM 'unnest_top_array_and_depretty /home/cae/test_without_new_lines.json';
The same approach as @jjanes's, but with a real, working command-line tool:
\copy json_table FROM PROGRAM 'jq --stream -nc -f myfile.json';
Removing the "pretty" formatting from the file helped in using the COPY function, but it puts the whole content of the file in one row, making it impossible to run a simple SELECT query on an existing column ...
Here is what I used :
CREATE TEMP TABLE target(data jsonb);
copy target from '/home/cae/test_without_new_lines.json';

Error using tensorflow create_coco_tf_record script: "TypeError: string indices must be integers"

I want to use the create_coco_tf_record-script provided by tensorflow, to convert the following simple label definition in coco json format to TFRecord:
"info": {
"year": 2018,
"version": null,
"description": "TransferLearningTest",
"contributor": "ralph#r4robotics.com.au",
"url": "labelbox.io",
"date_created": "2018-03-25T08:30:27.427851+00:00"
},
"images": [{
"id": "cjf6gxqjw2fho01619gre5j0y",
"width": 615,
"height": 409,
"file_name": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles1.jpg?alt=media&token=b381c976-da30-49d7-8e95-eb4ae8588354",
"license": null,
"flickr_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles1.jpg?alt=media&token=b381c976-da30-49d7-8e95-eb4ae8588354",
"coco_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles1.jpg?alt=media&token=b381c976-da30-49d7-8e95-eb4ae8588354",
"date_captured": null
}, {
"id": "cjf6gyhtl55sv01385xtqjrqi",
"width": 259,
"height": 194,
"file_name": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles2.jpg?alt=media&token=9b274e2e-c541-4e80-8f3d-b198f3ba9b4d",
"license": null,
"flickr_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles2.jpg?alt=media&token=9b274e2e-c541-4e80-8f3d-b198f3ba9b4d",
"coco_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles2.jpg?alt=media&token=9b274e2e-c541-4e80-8f3d-b198f3ba9b4d",
"date_captured": null
}, {
"id": "cjf6gzj9v2g1h0161bwh18chv",
"width": 277,
"height": 182,
"file_name": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles3.jpg?alt=media&token=3cfc13ca-432d-4501-b574-00d3874a4682",
"license": null,
"flickr_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles3.jpg?alt=media&token=3cfc13ca-432d-4501-b574-00d3874a4682",
"coco_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles3.jpg?alt=media&token=3cfc13ca-432d-4501-b574-00d3874a4682",
"date_captured": null
}, {
"id": "cjf6h0p9n55wz0178pg79lc3c",
"width": 301,
"height": 167,
"file_name": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles4.jpg?alt=media&token=d2660bc4-d576-45f0-8de6-557270fc683d",
"license": null,
"flickr_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles4.jpg?alt=media&token=d2660bc4-d576-45f0-8de6-557270fc683d",
"coco_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles4.jpg?alt=media&token=d2660bc4-d576-45f0-8de6-557270fc683d",
"date_captured": null
}],
"annotations": [{
"id": 1,
"image_id": "cjf6gxqjw2fho01619gre5j0y",
"category_id": 1,
"segmentation": [
[118.39765618513167, 313.457848898712, 179.7169976455091, 299.1734470204843, 294.6908226914901, 310.3222212573321, 337.1962867729657, 334.7101881586143, 366.4623832276035, 338.89097185223864, 372.03689297966736, 385.5765403887654, 332.31863061175864, 389.75732408238974, 282.84505592143563, 406.48051201843583, 215.9512047089942, 408.5708772844735, 192.2596180064203, 390.4541125044023, 151.8445552101198, 403.6933051688366, 105.1582353958287, 376.51813141795526, 118.39765618513167, 313.457848898712]
],
"area": 22106.876283900496,
"bbox": [105.1582353958287, 0.42912271552648545, 266.8786575838387, 109.39743026398922],
"iscrowd": 0
}, {
"id": 2,
"image_id": "cjf6gxqjw2fho01619gre5j0y",
"category_id": 1,
"segmentation": [
[160.20631983821562, 142.04523900617488, 195.04687288245222, 131.24488556110788, 308.62704390918475, 134.03209241070698, 356.01021731433246, 152.1488571907783, 381.7922053020817, 150.75522718520426, 384.57951334057844, 186.64038911511503, 349.7389071338769, 187.68559832890833, 317.6856089656722, 202.3183678373679, 159.50946624735892, 195.3503772941444, 160.20631983821562, 142.04523900617488]
],
"area": 13123.705213053147,
"bbox": [159.50946624735892, 206.6816321626321, 225.07004709321953, 71.07348227626002],
"iscrowd": 0
}, {
"id": 3,
"image_id": "cjf6gyhtl55sv01385xtqjrqi",
"category_id": 1,
"segmentation": [
[80.06035395893144, 68.18619344603749, 119.11342792196902, 74.69491256085784, 131.84812313721997, 72.14801308536389, 177.97602777539703, 78.09078572494903, 187.59778022105084, 91.67421361042045, 203.1624077063576, 93.37213939731716, 201.18146375358646, 112.04938782407424, 184.76784795099502, 111.200414135477, 169.20322046568833, 122.51994816851872, 128.16920254987957, 117.42614921753086, 114.86852951688535, 114.03029764373744, 93.07803808305403, 114.31328167650393, 70.43857992260781, 103.2767316762287, 80.06035395893144, 68.18619344603749]
],
"area": 4995.907009222967,
"bbox": [70.43857992260781, 71.48005183148128, 132.7238277837498, 54.33375472248123],
"iscrowd": 0
}, {
"id": 4,
"image_id": "cjf6gzj9v2g1h0161bwh18chv",
"category_id": 1,
"segmentation": [
[173.46162883883662, 160.28013107383993, 255.65715601241382, 148.2998138472238, 266.2728180897869, 177.11325728633884, 184.68389092165103, 182.8759506021435, 159.20627416758742, 180.1462513325615, 154.35340470313542, 170.74397441725296, 175.28142885516084, 167.7109803710303, 173.46162883883662, 160.28013107383993]
],
"area": 2509.1082874191734,
"bbox": [154.35340470313542, -0.8759506021434983, 111.91941338665146, 34.576136754919716],
"iscrowd": 0
}, {
"id": 5,
"image_id": "cjf6gzj9v2g1h0161bwh18chv",
"category_id": 1,
"segmentation": [
[37.58112185203197, 87.03332022958155, 45.16373762158412, 93.40262623857566, 94.90570169790779, 106.44448675338808, 106.73458692647054, 87.03332022958155, 46.680260775494574, 73.08155224493905, 40.31086815713212, 74.901344044691, 33.63817553604898, 74.901344044691, 27.875382923127926, 80.9673321371363, 37.58112185203197, 87.03332022958155]
],
"area": 1386.09176276128,
"bbox": [27.875382923127926, 75.55551324661192, 78.85920400334261, 33.36293450844903],
"iscrowd": 0
}, {
"id": 6,
"image_id": "cjf6gzj9v2g1h0161bwh18chv",
"category_id": 1,
"segmentation": [
[200.7590456092244, 136.92608617388885, 181.95412147624396, 120.09295996138994, 234.4258318576678, 85.2135284298296, 255.05055600697247, 103.71478748380605, 200.7590456092244, 136.92608617388885]
],
"area": 1614.301579806095,
"bbox": [181.95412147624396, 45.073913826111145, 73.09643453072852, 51.71255774405926],
"iscrowd": 0
}, {
"id": 7,
"image_id": "cjf6h0p9n55wz0178pg79lc3c",
"category_id": 1,
"segmentation": [
[17.847508506087518, 28.63952607163654, 66.60858665657888, 24.08859036914734, 77.98617155836023, 14.986669362689923, 145.27644948557162, 14.49906202292621, 147.5519565454611, 51.881911126804255, 75.0605019090974, 56.10780833710362, 64.0079859014193, 47.98110201017366, 24.3489855928197, 53.34473314609532, 17.847508506087518, 28.63952607163654]
],
"area": 4189.730491764894,
"bbox": [17.847508506087518, 110.89219166289638, 129.7044480393736, 41.60874631417741],
"iscrowd": 0
}, {
"id": 8,
"image_id": "cjf6h0p9n55wz0178pg79lc3c",
"category_id": 1,
"segmentation": [
[223.94433711573117, 23.27591973645434, 257.10186033759857, 27.82685543894354, 261.32783036444124, 48.306165303102944, 179.73427308501883, 104.86804629868364, 145.27644948557162, 113.3198159185429, 128.37261898053467, 122.42173692500033, 111.46876367433086, 108.76885541531423, 131.29826382863067, 96.09118858515549, 137.14960312715638, 77.56230808005091, 223.94433711573117, 23.27591973645434]
],
"area": 6031.236484118768,
"bbox": [111.46876367433086, 44.57826307499967, 149.85906669011038, 99.14581718854599],
"iscrowd": 0
}, {
"id": 9,
"image_id": "cjf6h0p9n55wz0178pg79lc3c",
"category_id": 1,
"segmentation": [
[26.299423758605975, 125.34733136210352, 40.60267830965016, 111.53193060632253, 117.97024076106304, 72.6862842838929, 133.57379568968702, 80.81299061082292, 132.59856420621048, 93.16559414805232, 111.46876367433086, 115.2702204768582, 64.33305479552251, 138.67514957886033, 46.128923912905776, 139.65033945764822, 23.37375410934314, 148.75226046410563, 8.095292875989244, 141.11316147693933, 26.299423758605975, 125.34733136210352]
],
"area": 3857.6591542480846,
"bbox": [8.095292875989244, 18.24773953589436, 125.47850281369777, 76.06597618021274],
"iscrowd": 0
}],
"licenses": [],
"categories": [{
"supercategory": "Bottle",
"id": 1,
"name": "Bottle"
}]
}
But when I run the script using
with open('coco_labels.json') as json_data:
label_info = json.load(json_data)
IMAGE_FOLDER = "coco_images"
with tf.python_io.TFRecordWriter("training.record") as writer:
for i,image in enumerate(label_info["images"]):
img_data = requests.get(image["file_name"]).content
image_name = "image"+str(i)+".jpg"
image_path = os.path.join(IMAGE_FOLDER,image_name)
with open(image_path, 'wb') as handler:
handler.write(img_data)
image["file_name"] = image_name
tf_example = create_coco_tf_record.create_tf_example(image,
label_info["annotations"][i],
IMAGE_FOLDER,
label_info["categories"]
)
writer.write(tf_example.SerializeToString())
I get the error
(image, annotations_list, image_dir, category_index, include_masks)
124 num_annotations_skipped = 0
125 for object_annotations in annotations_list:
--> 126 (x, y, width, height) = tuple(object_annotations['bbox'])
127 if width <= 0 or height <= 0:
128 num_annotations_skipped += 1
TypeError: string indices must be integers
What could be the problem?
Each image is supposed to receive a list of annotations, and you are providing a single one. Making it a single-element list should solve your error.
Ideally, make each item of images in your json be a list itself. As a quick fix, wrap label_info["annotations"][i] in brackets:
[label_info["annotations"][i]]

Rendering multi series line chart with time on x axis in AnyChart

I am trying the AnyChart line chart with multiple series with date-time on the x axis. I am not able to render the chart perfectly. It's drawing series 1 with the given data; then, for the second series, it's rendering the date-time values freshly on the x axis after the first series' values and then plotting series 2.
data is like:
"data": [
{"x": "10/2/2016 01:00:00 AM", "value": "128.14"},
{"x": "10/2/2016 01:10:00 AM", "value": "112.61"}
]
},{
// second series data
"data": [
{"x": "10/2/2016 01:01:00 AM", "value": "90.54"},
{"x": "10/2/2016 01:02:00 AM", "value": "104.19"},
{"x": "10/2/2016 01:11:00 AM", "value": "150.67"}
]
It has to plot on x axis like 10/2/2016 01:00:00 AM, 10/2/2016 01:01:00 AM, 10/2/2016 01:02:00 AM, 10/2/2016 01:10:00 AM, 10/2/2016 01:11:00 AM
but it is plotting like 10/2/2016 01:00:00 AM, 10/2/2016 01:10:00 AM, 10/2/2016 01:01:00 AM, 10/2/2016 01:02:00 AM, 10/2/2016 01:11:00 AM
Updating the code:
anychart.onDocumentReady(function() {
// JSON data
var json = {
// chart settings
"chart": {
// chart type
"type": "line",
// chart title
"title": "Axes settings from JSON",
// series settings
"series": [{
// first series data
"data": [
{"x": "10/2/2016 01:00:00 AM", "value": 128.14},
{"x": "10/2/2016 01:10:00 AM", "value": 112.61},
{"x": "10/3/2016 01:00:00 AM", "value": 12.14},
{"x": "10/3/2016 01:10:00 AM", "value": 152.61},
]},{
"data": [
{"x": "10/2/2016 01:09:00 AM", "value": 28.14},
{"x": "10/2/2016 01:11:00 AM", "value": 12.61},
{"x": "10/3/2016 01:01:00 AM", "value": 1.14},
{"x": "10/3/2016 01:12:00 AM", "value": 15.61},
]
}],
// x scale settings
"xScale": {
ticks:
{scale: "DateTime"}
},
xAxes: [{
title: "Basic X Axis"
}],
// chart container
"container": "container"
}
};
// get JSON data
var chart = anychart.fromJson(json);
// draw chart
chart.draw();
});
With this type of data you need to use scatter chart: http://docs.anychart.com/7.12.0/Basic_Charts_Types/Scatter_Chart
Datetime scale should be set like this in JSON:
"xScale": {
type: "datetime",
minimum: "10/02/2016 00:00:00",
maximum: "10/03/2016 12:00:00",
}
Here is a sample: https://jsfiddle.net/3ewcnp5j/102/