Reading JSON data with nested objects within arrays into SQL Server 2016

I'm stuck trying to read the below JSON data into SQL Server 2016. I can't return any values from the Values object onwards.
The select statement shows NULL values for binWidth, minVal, nBins and type.
Also, I'm unsure how to deal with the result array as the values do not have any keys assigned.
Any help much appreciated.
JSON data:
DECLARE @json NVARCHAR(MAX) =
'{
"Histograms": [
{
"Name": "20458-Z01-DWL",
"RegisterId": "0",
"Tags": [],
"UUID": "a4c5fa3f-ecb8-4635-8e94-5167e743b518",
"Values": [
{
"config": {
"binWidth": 50,
"minVal": 50,
"nBins": 18,
"type": "total wait"
},
"result": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
]
}
]
},
{
"Name": "20458-Z02-DWL",
"RegisterId": "1",
"Tags": [],
"UUID": "95d57826-30f6-44c9-ad0d-6a24684fcaed",
"Values": [
{
"config": {
"binWidth": 50,
"minVal": 50,
"nBins": 18,
"type": "total wait"
},
"result": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
]
}
]
},
{
"Name": "20458-Z03-DWL",
"RegisterId": "2",
"Tags": [],
"UUID": "90223a0e-3d1a-471f-a871-ee56da4799f5",
"Values": [
{
"config": {
"binWidth": 50,
"minVal": 50,
"nBins": 18,
"type": "total wait"
},
"result": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
]
}
]
},
{
"Name": "20458-Z04-DWL",
"RegisterId": "3",
"Tags": [],
"UUID": "6c837def-feeb-48d5-8dcf-307b56ec44e9",
"Values": [
{
"config": {
"binWidth": 50,
"minVal": 100,
"nBins": 16,
"type": "total wait"
},
"result": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
]
}
]
},
{
"Name": "20458-Z05-DWL",
"RegisterId": "4",
"Tags": [],
"UUID": "76bd5aa2-8860-4a2e-997d-3c83e940790f",
"Values": [
{
"config": {
"binWidth": 50,
"minVal": 100,
"nBins": 16,
"type": "total wait"
},
"result": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
]
}
]
}
],
"LogEntryId": 6593,
"StartTimestamp": "2020-07-20T16:05:00Z",
"Timestamp": "2020-07-20T16:06:00Z"
}'
Query to fetch items from data:
SELECT
[Name],
[RegisterId],
UUID,
binWidth,
minVal,
nBins,
[type]
FROM
OPENJSON (@json, '$.Histograms')
WITH
([Name] nvarchar(100),
[RegisterId] nvarchar(100),
UUID nvarchar(100),
[Values] nvarchar(max) AS json) AS Histograms
CROSS APPLY
OPENJSON (Histograms.[Values])
WITH
(binWidth int,
minVal int,
nBins int,
[type] nvarchar(100)) AS config
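The NULLs happen because each element of the Values array holds binWidth, minVal, nBins and type one level further down, under its config key, so the WITH clause needs explicit JSON paths (or a second CROSS APPLY, as in the edit below). A minimal corrected sketch of the query above, with the same columns but the paths pointed at $.config.*, would be:
SELECT
    [Name],
    [RegisterId],
    UUID,
    binWidth,
    minVal,
    nBins,
    [type]
FROM
    OPENJSON (@json, '$.Histograms')
    WITH
    ([Name] nvarchar(100),
     [RegisterId] nvarchar(100),
     UUID nvarchar(100),
     [Values] nvarchar(max) AS json) AS Histograms
CROSS APPLY
    OPENJSON (Histograms.[Values])
    WITH
    (binWidth int N'$.config.binWidth',
     minVal int N'$.config.minVal',
     nBins int N'$.config.nBins',
     [type] nvarchar(100) N'$.config.type') AS config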

Figured out the last question, regarding passing the result array into one field - edit below. Thanks again for your guidance, I have learnt a lot!
select
Histograms.[Name],
Histograms.[RegisterId],
Histograms.UUID,
config.binWidth,
config.minVal,
config.nBins,
config.[type],
r.[key] as 'result position',
r.[value] as 'result value'
from
openjson(@json, '$.Histograms')
with (
[Name] nvarchar(100),
LogEntryId int,
[RegisterId] nvarchar(100),
UUID nvarchar(100),
[Values] nvarchar(max) as json
) as Histograms
cross apply openjson (Histograms.[Values]) h
cross apply openjson (h.value)
with
(
binWidth int N'$.config.binWidth',
minVal int N'$.config.minVal',
nBins int N'$.config.nBins',
[type] nvarchar(100) N'$.config.type',
result nvarchar(max) as json
) as config
CROSS APPLY OPENJSON(config.result) r

Plotly Animation with slider

I want to add two moving points that represent the locations of two trains according to the day. My day data is as shown in the pic, running from 0 to 7. However, in the resulting animation, the slider does not stop on the integer days. It jumps from 1.75 to 2.25, or from 2.75 to 3.25, automatically. Can anyone help me solve that?
trainpath info
import plotly.graph_objects as go
import pandas as pd

dataset = pd.read_csv('trainpath.csv')

days = []
for k in range(len(dataset['day'])):
    if dataset['day'][k] not in days:
        days.append(dataset['day'][k])

t1 = [-1, 0, 1, 1, 1, 0, -1, -1, -1]
k1 = [-20, -20, -20, 0, 20, 20, 20, 0, -20]

# make list of trains
trains = []
for train in dataset["train"]:
    if train not in trains:
        trains.append(train)

# make figure
fig_dict = {
    "data": [go.Scatter(x=t1, y=k1,
                        mode="lines",
                        line=dict(width=2, color="blue")),
             go.Scatter(x=t1, y=k1,
                        mode="lines",
                        line=dict(width=2, color="blue"))],
    "layout": {},
    "frames": []
}

# fill in most of layout
fig_dict['layout']['title'] = {'text': 'Train Animation'}
fig_dict["layout"]["xaxis"] = {"range": [-10, 10], "title": "xlocation", 'autorange': False, 'zeroline': False}
fig_dict["layout"]["yaxis"] = {"range": [-22, 22], "title": "ylocation", 'autorange': False, 'zeroline': False}
fig_dict["layout"]["hovermode"] = "closest"
fig_dict["layout"]["updatemenus"] = [
    {
        "buttons": [
            {
                "args": [None, {"frame": {"duration": 500, "redraw": False},
                                "fromcurrent": True,
                                "transition": {"duration": 300, "easing": "quadratic-in-out"}}],
                "label": "Play",
                "method": "animate"
            },
            {
                "args": [[None], {"frame": {"duration": 0, "redraw": False},
                                  "mode": "immediate",
                                  "transition": {"duration": 0}}],
                "label": "Pause",
                "method": "animate"
            }
        ],
        "direction": "left",
        "pad": {"r": 10, "t": 87},
        "showactive": False,
        "type": "buttons",
        "x": 0.1,
        "xanchor": "right",
        "y": 0,
        "yanchor": "top"
    }
]

sliders_dict = {
    "active": 0,
    "yanchor": "top",
    "xanchor": "left",
    "currentvalue": {
        "font": {"size": 20},
        "prefix": "Day:",
        "visible": True,
        "xanchor": "right"
    },
    "transition": {"duration": 300, "easing": "cubic-in-out"},
    "pad": {"b": 10, "t": 50},
    "len": 0.9,
    "x": 0.1,
    "y": 0,
    "steps": []
}

# make data
day = 0
for train in trains:
    dataset_by_date = dataset[dataset['day'] == day]
    dataset_by_date_and_train = dataset_by_date[dataset_by_date['train'] == train]
    data_dict = {
        'x': list(dataset_by_date_and_train['x']),
        'y': list(dataset_by_date_and_train['y']),
        'mode': 'markers',
        'text': train,
        'marker': {
            'sizemode': 'area',
            'sizeref': 20,
            'size': 20,
            # 'size': list(dataset_by_date_and_train['quantity'])  # this section can be used to increase or decrease the marker size to reflect the material quantity
        },
        'name': train
    }
    fig_dict['data'].append(data_dict)

# make frames
for day in days:
    frame = {'data': [go.Scatter(x=t1, y=k1,
                                 mode="lines",
                                 line=dict(width=2, color="blue")),
                      go.Scatter(x=t1, y=k1,
                                 mode="lines",
                                 line=dict(width=2, color="blue"))],
             'name': str(day)}
    for train in trains:
        dataset_by_date = dataset[dataset['day'] == day]
        dataset_by_date_and_train = dataset_by_date[dataset_by_date['train'] == train]
        data_dict = {
            'x': list(dataset_by_date_and_train['x']),
            'y': list(dataset_by_date_and_train['y']),
            'mode': 'markers',
            'text': train,
            'marker': {
                'sizemode': 'area',
                'sizeref': 20,
                'size': 20,
                # 'size': list(dataset_by_date_and_train['quantity'])  # this section can be used to increase or decrease the marker size to reflect the material quantity
            },
            'name': train
        }
        frame['data'].append(data_dict)
    fig_dict['frames'].append(frame)

    slider_step = {'args': [
        [day],
        {'frame': {'duration': 300, 'redraw': False},
         'mode': 'immediate',
         'transition': {'duration': 3000}}
    ],
        'label': day,
        'method': 'animate'}
    sliders_dict["steps"].append(slider_step)
    if day == 7:
        print('H')

fig_dict["layout"]["sliders"] = [sliders_dict]

fig = go.Figure(fig_dict)
fig.show()

Alternative to Expect.all using elm-test?

I'm new to Elm and I have a question about elm-test. I'm trying to have multiple expects in the same test, but couldn't find out how. So here is what I've done for now, but it's not really expressive:
suite : Test
suite =
    describe "2048-elm"
        [ test "moveLeftWithZero" <|
            \_ ->
                let
                    expectedCases =
                        [ ( [ 2, 0, 0, 2 ], [ 4, 0, 0, 0 ] )
                        , ( [ 2, 2, 0, 4 ], [ 4, 4, 0, 0 ] )
                        , ( [ 0, 0, 0, 4 ], [ 4, 0, 0, 0 ] )
                        , ( [ 0, 0, 2, 4 ], [ 2, 4, 0, 0 ] )
                        , ( [ 2, 4, 2, 4 ], [ 2, 4, 2, 4 ] )
                        , ( [ 2, 2, 2, 2 ], [ 4, 4, 0, 0 ] )
                        ]

                    toTest =
                        List.map (\expected -> ( Tuple.first expected, Main.moveLeftWithZero (Tuple.first expected) )) expectedCases
                in
                Expect.equal expectedCases toTest
        ]
I tried with Expect.all, but it does not seem to do what I want.

How to write a SQL query to pull a value from a nested json object identified by a variable field name

Problem: how to write an SQLite statement to select a value from a nested JSON object when the needed name is dynamic/variable. It is also important that this can be done in a single SQL statement. Eventually, this will be executed from within a bash script.
In the object sample below, I need to list all the dot11.advertisedssid.ssid values in the SQL database. An acceptable solution is to list all values of dot11.advertisedssid.ssid that exist in the JSON object, but I would like to understand how to query a dynamic JSON name (so I can get the other nested values).
In general I am using json_extract in my SQL statement; I just can't figure out how to get to the ssid value (in this example)!
How do I know that 733545801 is the field name, and how can I then use it in the json_extract statement? And how do I do that for all such nested objects?
Examples:
In general this is how I am querying other json values.
select json_extract(devices.device,'$."dot11.device"."dot11.device.typeset"') from devices;
An object sample from the database:
"dot11.device": {
"dot11.device.typeset": 257,
"dot11.device.client_map": {
},
"dot11.device.num_client_aps": 0,
"dot11.device.advertised_ssid_map": {
"733545801": {
"dot11.advertisedssid.ssid": "SampleFES-WiFi",
"dot11.advertisedssid.ssidlen": 15,
"dot11.advertisedssid.beacon": 1,
"dot11.advertisedssid.probe_response": 1,
"dot11.advertisedssid.channel": "6",
"dot11.advertisedssid.ht_mode": "HT20",
"dot11.advertisedssid.ht_center_1": 0,
"dot11.advertisedssid.ht_center_2": 0,
"dot11.advertisedssid.first_time": 1559567379,
"dot11.advertisedssid.last_time": 1559567379,
"dot11.advertisedssid.beacon_info": "",
"dot11.advertisedssid.cloaked": 0,
"dot11.advertisedssid.crypt_set": 268436162,
"dot11.advertisedssid.maxrate": 65.000000,
"dot11.advertisedssid.beaconrate": 10,
"dot11.advertisedssid.beacons_sec": 2,
"dot11.advertisedssid.ietag_checksum": 1220416683,
"dot11.advertisedssid.wpa_mfp_required": 0,
"dot11.advertisedssid.wpa_mfp_supported": 0,
"dot11.advertisedssid.dot11d_country": "",
"dot11.advertisedssid.dot11d_list": [
],
"dot11.advertisedssid.wps_state": 0,
"dot11.advertisedssid.dot11r_mobility": 0,
"dot11.advertisedssid.dot11r_mobility_domain_id": 0,
"dot11.advertisedssid.dot11e_qbss": 0,
"dot11.advertisedssid.dot11e_qbss_stations": 0,
"dot11.advertisedssid.dot11e_channel_utilization_perc": 0.000000,
"dot11.advertisedssid.ccx_txpower": 0,
"dot11.advertisedssid.cisco_client_mfp": 0,
"dot11.advertisedssid.ie_tag_list": [
0.000000,
1.000000,
3.000000,
5.000000,
42.000000,
50.000000,
48.000000,
45.000000,
61.000000,
127.000000,
221.000000
]
}
}
Thanks for the help!
PS. This is from the new kismet database and the redesigned schema.
Here is the whole object:
{
"kismet.device.base.manuf": "Texas Instruments",
"kismet.device.base.key": "4202770D00000000_AFB4F569D2380000",
"kismet.device.base.macaddr": "38:D2:69:F5:B4:AF",
"kismet.device.base.phyname": "IEEE802.11",
"kismet.device.base.phyid": 0,
"kismet.device.base.name": "LincolnFES-WiFi",
"kismet.device.base.commonname": "LincolnFES-WiFi",
"kismet.device.base.type": "Wi-Fi AP",
"kismet.device.base.basic_type_set": 1,
"kismet.device.base.crypt": "WPA2-PSK",
"kismet.device.base.basic_crypt_set": 2,
"kismet.device.base.first_time": 1559567379,
"kismet.device.base.last_time": 1559567379,
"kismet.device.base.mod_time": 1559567380,
"kismet.device.base.packets.total": 3,
"kismet.device.base.packets.rx": 0,
"kismet.device.base.packets.tx": 0,
"kismet.device.base.packets.llc": 3,
"kismet.device.base.packets.error": 0,
"kismet.device.base.packets.data": 0,
"kismet.device.base.packets.crypt": 0,
"kismet.device.base.packets.filtered": 0,
"kismet.device.base.datasize": 0,
"kismet.device.base.packets.rrd": {
"kismet.common.rrd.last_time": 1559567383,
"kismet.common.rrd.minute_vec": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"kismet.common.rrd.blank_val": 0,
"kismet.common.rrd.aggregator": "default",
"kismet.common.rrd.hour_vec": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"kismet.common.rrd.day_vec": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
]
},
"kismet.device.base.signal": {
"kismet.common.signal.type": "dbm",
"kismet.common.signal.last_signal": -56,
"kismet.common.signal.last_noise": 0,
"kismet.common.signal.min_signal": -74,
"kismet.common.signal.min_noise": 0,
"kismet.common.signal.max_signal": -56,
"kismet.common.signal.max_noise": 0,
"kismet.common.signal.maxseenrate": 10,
"kismet.common.signal.encodingset": 1,
"kismet.common.signal.carrierset": 1,
"kismet.common.signal.signal_rrd": {
"kismet.common.rrd.last_time": 1559567383,
"kismet.common.rrd.minute_vec": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"kismet.common.rrd.blank_val": 0,
"kismet.common.rrd.aggregator": "peak_signal"
}
},
"kismet.device.base.freq_khz_map": {
"2437000.000000": 1,
"2442000.000000": 1,
"5500000.000000": 1
},
"kismet.device.base.channel": "6",
"kismet.device.base.frequency": 2442000,
"kismet.device.base.num_alerts": 0,
"kismet.device.base.tags": {
},
"kismet.device.base.seenby": {
"-1970862229": {
"kismet.common.seenby.uuid": "5FE308BD-0000-0000-0000-00C0CAA60413",
"kismet.common.seenby.first_time": 1559567379,
"kismet.common.seenby.last_time": 1559567379,
"kismet.common.seenby.num_packets": 3,
"kismet.common.seenby.freq_khz_map": {
"2437000.000000": 1,
"2442000.000000": 1,
"5500000.000000": 1
},
"kismet.common.seenby.signal": {
"kismet.common.signal.type": "dbm",
"kismet.common.signal.last_signal": -56,
"kismet.common.signal.last_noise": 0,
"kismet.common.signal.min_signal": -74,
"kismet.common.signal.min_noise": 0,
"kismet.common.signal.max_signal": -56,
"kismet.common.signal.max_noise": 0,
"kismet.common.signal.maxseenrate": 10,
"kismet.common.signal.encodingset": 1,
"kismet.common.signal.carrierset": 1,
"kismet.common.signal.signal_rrd": {
"kismet.common.rrd.last_time": 1559567383,
"kismet.common.rrd.minute_vec": [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"kismet.common.rrd.blank_val": 0,
"kismet.common.rrd.aggregator": "peak_signal"
}
}
}
},
"kismet.device.base.server_uuid": "A8F71A2C-85F8-11E9-BA41-4B49534D4554",
"dot11.device": {
"dot11.device.typeset": 257,
"dot11.device.client_map": {
},
"dot11.device.num_client_aps": 0,
"dot11.device.advertised_ssid_map": {
"733545801": {
"dot11.advertisedssid.ssid": "LincolnFES-WiFi",
"dot11.advertisedssid.ssidlen": 15,
"dot11.advertisedssid.beacon": 1,
"dot11.advertisedssid.probe_response": 1,
"dot11.advertisedssid.channel": "6",
"dot11.advertisedssid.ht_mode": "HT20",
"dot11.advertisedssid.ht_center_1": 0,
"dot11.advertisedssid.ht_center_2": 0,
"dot11.advertisedssid.first_time": 1559567379,
"dot11.advertisedssid.last_time": 1559567379,
"dot11.advertisedssid.beacon_info": "",
"dot11.advertisedssid.cloaked": 0,
"dot11.advertisedssid.crypt_set": 268436162,
"dot11.advertisedssid.maxrate": 65,
"dot11.advertisedssid.beaconrate": 10,
"dot11.advertisedssid.beacons_sec": 2,
"dot11.advertisedssid.ietag_checksum": 1220416683,
"dot11.advertisedssid.wpa_mfp_required": 0,
"dot11.advertisedssid.wpa_mfp_supported": 0,
"dot11.advertisedssid.dot11d_country": "",
"dot11.advertisedssid.dot11d_list": [
],
"dot11.advertisedssid.wps_state": 0,
"dot11.advertisedssid.dot11r_mobility": 0,
"dot11.advertisedssid.dot11r_mobility_domain_id": 0,
"dot11.advertisedssid.dot11e_qbss": 0,
"dot11.advertisedssid.dot11e_qbss_stations": 0,
"dot11.advertisedssid.dot11e_channel_utilization_perc": 0,
"dot11.advertisedssid.ccx_txpower": 0,
"dot11.advertisedssid.cisco_client_mfp": 0,
"dot11.advertisedssid.ie_tag_list": [
0,
1,
3,
5,
42,
50,
48,
45,
61,
127,
221
]
}
},
"dot11.device.num_advertised_ssids": 1,
"dot11.device.probed_ssid_map": {
},
"dot11.device.num_probed_ssids": 0,
"dot11.device.associated_client_map": {
},
"dot11.device.num_associated_clients": 0,
"dot11.device.client_disconnects": 0,
"dot11.device.last_sequence": 0,
"dot11.device.bss_timestamp": 0,
"dot11.device.num_fragments": 0,
"dot11.device.num_retries": 0,
"dot11.device.datasize": 0,
"dot11.device.datasize_retry": 0,
"dot11.device.last_probed_ssid_csum": 0,
"dot11.device.last_beaconed_ssid": "LincolnFES-WiFi",
"dot11.device.last_beaconed_ssid_checksum": 733545801,
"dot11.device.last_bssid": "38:D2:69:F5:B4:AF",
"dot11.device.last_beacon_timestamp": 1559567379,
"dot11.device.wps_m3_count": 0,
"dot11.device.wps_m3_last": 0,
"dot11.device.wpa_handshake_list": [
],
"dot11.device.wpa_nonce_list": [
],
"dot11.device.wpa_anonce_list": [
],
"dot11.device.wpa_present_handshake": 0,
"dot11.device.min_tx_power": 0,
"dot11.device.max_tx_power": 0,
"dot11.device.supported_channels": [
],
"dot11.device.link_measurement_capable": 0,
"dot11.device.neighbor_report_capable": 0,
"dot11.device.extended_capabilities": [
],
"dot11.device.beacon_fingerprint": 4212996422,
"dot11.device.probe_fingerprint": 0,
"dot11.device.response_fingerprint": 0
}
}
When you want to recursively walk through the fields of an entire object and its contents, you need json_tree():
SELECT j.value
FROM devices AS d
JOIN json_tree(d.device) AS j
WHERE j.key = 'dot11.advertisedssid.ssid';
gives
value
--------------
SampleFES-WiFi
when run on a table holding a fixed version of that sample object.
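If you also need to recover which dynamic key a match came from (733545801 in the sample), json_tree() exposes path and fullkey columns alongside key and value, so a small variant of the same query, assuming the same devices table, would be:
SELECT j.path, j.value
FROM devices AS d
JOIN json_tree(d.device) AS j
WHERE j.key = 'dot11.advertisedssid.ssid';
Here path holds the ancestry of each matched node, including the numeric map key, so the dynamic field name comes back alongside the ssid value.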
I know this is a bit old, but the OP seemed (in comments) to want a more complete solution. I know I did when I first came across this answer. The accepted solution lets you pull one field from the JSON blob, but the common use case in the OP's example is to pull multiple fields from that blob. After some searching I found that the json_extract() function works very well for this once you realize that the "dot11.device.advertised_ssid_map" object is an array. Once you provide it with an index, the OP's normal query method works.
Considerations:
OP's example is relating to the Kismet device field in the devices table, so my example will use a common query that I often need in the context of that table
With Kismet the keys used in these JSON blobs are long and contain dots, so the syntax for specifying them in SQLite3 is a bit cumbersome for some nested values
SQLite3's JSON1 extension does not seem to like some of the wildcarding syntax normally allowed in JSONPath specifications, so long explicit paths are required
So here is my solution:
SELECT devmac, strongest_signal,
json_extract(d.device, '$."dot11.device"."dot11.device.advertised_ssid_map"[0]."dot11.advertisedssid.ssid"') AS ssid,
json_extract(d.device, '$."dot11.device"."dot11.device.advertised_ssid_map"[0]."dot11.advertisedssid.cloaked"') AS cloaked,
json_extract(d.device, '$."kismet.device.base.signal"."kismet.common.signal.min_signal"') AS weakest_signal,
json_extract(d.device, '$."kismet.device.base.channel"') AS channel,
json_extract(d.device, '$."dot11.device"."dot11.device.num_associated_clients"') AS clientCnt,
json_extract(d.device, '$."kismet.device.base.crypt"') AS crypt,
json_extract(d.device, '$."kismet.device.base.manuf"') AS manuf
FROM devices AS d
WHERE type = 'Wi-Fi AP'
;
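A hedged alternative for the case where dot11.device.advertised_ssid_map is keyed by dynamic names, as in the sample object above, rather than stored as an array: json_each() with a path argument enumerates those keys directly, and json_extract() can then pull fields out of each entry's value. This sketch assumes the same devices table and the devmac/type columns used in the query above:
SELECT d.devmac,
       m.key AS ssid_key,
       json_extract(m.value, '$."dot11.advertisedssid.ssid"') AS ssid,
       json_extract(m.value, '$."dot11.advertisedssid.channel"') AS channel
FROM devices AS d,
     json_each(d.device, '$."dot11.device"."dot11.device.advertised_ssid_map"') AS m
WHERE d.type = 'Wi-Fi AP';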

Error using tensorflow create_coco_tf_record script: "TypeError: string indices must be integers"

I want to use the create_coco_tf_record script provided by TensorFlow to convert the following simple label definition in COCO JSON format to a TFRecord:
"info": {
"year": 2018,
"version": null,
"description": "TransferLearningTest",
"contributor": "ralph#r4robotics.com.au",
"url": "labelbox.io",
"date_created": "2018-03-25T08:30:27.427851+00:00"
},
"images": [{
"id": "cjf6gxqjw2fho01619gre5j0y",
"width": 615,
"height": 409,
"file_name": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles1.jpg?alt=media&token=b381c976-da30-49d7-8e95-eb4ae8588354",
"license": null,
"flickr_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles1.jpg?alt=media&token=b381c976-da30-49d7-8e95-eb4ae8588354",
"coco_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles1.jpg?alt=media&token=b381c976-da30-49d7-8e95-eb4ae8588354",
"date_captured": null
}, {
"id": "cjf6gyhtl55sv01385xtqjrqi",
"width": 259,
"height": 194,
"file_name": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles2.jpg?alt=media&token=9b274e2e-c541-4e80-8f3d-b198f3ba9b4d",
"license": null,
"flickr_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles2.jpg?alt=media&token=9b274e2e-c541-4e80-8f3d-b198f3ba9b4d",
"coco_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles2.jpg?alt=media&token=9b274e2e-c541-4e80-8f3d-b198f3ba9b4d",
"date_captured": null
}, {
"id": "cjf6gzj9v2g1h0161bwh18chv",
"width": 277,
"height": 182,
"file_name": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles3.jpg?alt=media&token=3cfc13ca-432d-4501-b574-00d3874a4682",
"license": null,
"flickr_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles3.jpg?alt=media&token=3cfc13ca-432d-4501-b574-00d3874a4682",
"coco_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles3.jpg?alt=media&token=3cfc13ca-432d-4501-b574-00d3874a4682",
"date_captured": null
}, {
"id": "cjf6h0p9n55wz0178pg79lc3c",
"width": 301,
"height": 167,
"file_name": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles4.jpg?alt=media&token=d2660bc4-d576-45f0-8de6-557270fc683d",
"license": null,
"flickr_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles4.jpg?alt=media&token=d2660bc4-d576-45f0-8de6-557270fc683d",
"coco_url": "https://firebasestorage.googleapis.com/v0/b/labelbox-193903.appspot.com/o/cjf6gtsr950sr0125idy65yiy%2Ff245e964-d756-4c01-98de-b6e5a9070588%2Fbottles4.jpg?alt=media&token=d2660bc4-d576-45f0-8de6-557270fc683d",
"date_captured": null
}],
"annotations": [{
"id": 1,
"image_id": "cjf6gxqjw2fho01619gre5j0y",
"category_id": 1,
"segmentation": [
[118.39765618513167, 313.457848898712, 179.7169976455091, 299.1734470204843, 294.6908226914901, 310.3222212573321, 337.1962867729657, 334.7101881586143, 366.4623832276035, 338.89097185223864, 372.03689297966736, 385.5765403887654, 332.31863061175864, 389.75732408238974, 282.84505592143563, 406.48051201843583, 215.9512047089942, 408.5708772844735, 192.2596180064203, 390.4541125044023, 151.8445552101198, 403.6933051688366, 105.1582353958287, 376.51813141795526, 118.39765618513167, 313.457848898712]
],
"area": 22106.876283900496,
"bbox": [105.1582353958287, 0.42912271552648545, 266.8786575838387, 109.39743026398922],
"iscrowd": 0
}, {
"id": 2,
"image_id": "cjf6gxqjw2fho01619gre5j0y",
"category_id": 1,
"segmentation": [
[160.20631983821562, 142.04523900617488, 195.04687288245222, 131.24488556110788, 308.62704390918475, 134.03209241070698, 356.01021731433246, 152.1488571907783, 381.7922053020817, 150.75522718520426, 384.57951334057844, 186.64038911511503, 349.7389071338769, 187.68559832890833, 317.6856089656722, 202.3183678373679, 159.50946624735892, 195.3503772941444, 160.20631983821562, 142.04523900617488]
],
"area": 13123.705213053147,
"bbox": [159.50946624735892, 206.6816321626321, 225.07004709321953, 71.07348227626002],
"iscrowd": 0
}, {
"id": 3,
"image_id": "cjf6gyhtl55sv01385xtqjrqi",
"category_id": 1,
"segmentation": [
[80.06035395893144, 68.18619344603749, 119.11342792196902, 74.69491256085784, 131.84812313721997, 72.14801308536389, 177.97602777539703, 78.09078572494903, 187.59778022105084, 91.67421361042045, 203.1624077063576, 93.37213939731716, 201.18146375358646, 112.04938782407424, 184.76784795099502, 111.200414135477, 169.20322046568833, 122.51994816851872, 128.16920254987957, 117.42614921753086, 114.86852951688535, 114.03029764373744, 93.07803808305403, 114.31328167650393, 70.43857992260781, 103.2767316762287, 80.06035395893144, 68.18619344603749]
],
"area": 4995.907009222967,
"bbox": [70.43857992260781, 71.48005183148128, 132.7238277837498, 54.33375472248123],
"iscrowd": 0
}, {
"id": 4,
"image_id": "cjf6gzj9v2g1h0161bwh18chv",
"category_id": 1,
"segmentation": [
[173.46162883883662, 160.28013107383993, 255.65715601241382, 148.2998138472238, 266.2728180897869, 177.11325728633884, 184.68389092165103, 182.8759506021435, 159.20627416758742, 180.1462513325615, 154.35340470313542, 170.74397441725296, 175.28142885516084, 167.7109803710303, 173.46162883883662, 160.28013107383993]
],
"area": 2509.1082874191734,
"bbox": [154.35340470313542, -0.8759506021434983, 111.91941338665146, 34.576136754919716],
"iscrowd": 0
}, {
"id": 5,
"image_id": "cjf6gzj9v2g1h0161bwh18chv",
"category_id": 1,
"segmentation": [
[37.58112185203197, 87.03332022958155, 45.16373762158412, 93.40262623857566, 94.90570169790779, 106.44448675338808, 106.73458692647054, 87.03332022958155, 46.680260775494574, 73.08155224493905, 40.31086815713212, 74.901344044691, 33.63817553604898, 74.901344044691, 27.875382923127926, 80.9673321371363, 37.58112185203197, 87.03332022958155]
],
"area": 1386.09176276128,
"bbox": [27.875382923127926, 75.55551324661192, 78.85920400334261, 33.36293450844903],
"iscrowd": 0
}, {
"id": 6,
"image_id": "cjf6gzj9v2g1h0161bwh18chv",
"category_id": 1,
"segmentation": [
[200.7590456092244, 136.92608617388885, 181.95412147624396, 120.09295996138994, 234.4258318576678, 85.2135284298296, 255.05055600697247, 103.71478748380605, 200.7590456092244, 136.92608617388885]
],
"area": 1614.301579806095,
"bbox": [181.95412147624396, 45.073913826111145, 73.09643453072852, 51.71255774405926],
"iscrowd": 0
}, {
"id": 7,
"image_id": "cjf6h0p9n55wz0178pg79lc3c",
"category_id": 1,
"segmentation": [
[17.847508506087518, 28.63952607163654, 66.60858665657888, 24.08859036914734, 77.98617155836023, 14.986669362689923, 145.27644948557162, 14.49906202292621, 147.5519565454611, 51.881911126804255, 75.0605019090974, 56.10780833710362, 64.0079859014193, 47.98110201017366, 24.3489855928197, 53.34473314609532, 17.847508506087518, 28.63952607163654]
],
"area": 4189.730491764894,
"bbox": [17.847508506087518, 110.89219166289638, 129.7044480393736, 41.60874631417741],
"iscrowd": 0
}, {
"id": 8,
"image_id": "cjf6h0p9n55wz0178pg79lc3c",
"category_id": 1,
"segmentation": [
[223.94433711573117, 23.27591973645434, 257.10186033759857, 27.82685543894354, 261.32783036444124, 48.306165303102944, 179.73427308501883, 104.86804629868364, 145.27644948557162, 113.3198159185429, 128.37261898053467, 122.42173692500033, 111.46876367433086, 108.76885541531423, 131.29826382863067, 96.09118858515549, 137.14960312715638, 77.56230808005091, 223.94433711573117, 23.27591973645434]
],
"area": 6031.236484118768,
"bbox": [111.46876367433086, 44.57826307499967, 149.85906669011038, 99.14581718854599],
"iscrowd": 0
}, {
"id": 9,
"image_id": "cjf6h0p9n55wz0178pg79lc3c",
"category_id": 1,
"segmentation": [
[26.299423758605975, 125.34733136210352, 40.60267830965016, 111.53193060632253, 117.97024076106304, 72.6862842838929, 133.57379568968702, 80.81299061082292, 132.59856420621048, 93.16559414805232, 111.46876367433086, 115.2702204768582, 64.33305479552251, 138.67514957886033, 46.128923912905776, 139.65033945764822, 23.37375410934314, 148.75226046410563, 8.095292875989244, 141.11316147693933, 26.299423758605975, 125.34733136210352]
],
"area": 3857.6591542480846,
"bbox": [8.095292875989244, 18.24773953589436, 125.47850281369777, 76.06597618021274],
"iscrowd": 0
}],
"licenses": [],
"categories": [{
"supercategory": "Bottle",
"id": 1,
"name": "Bottle"
}]
}
But when I run the script using
with open('coco_labels.json') as json_data:
    label_info = json.load(json_data)

IMAGE_FOLDER = "coco_images"

with tf.python_io.TFRecordWriter("training.record") as writer:
    for i, image in enumerate(label_info["images"]):
        img_data = requests.get(image["file_name"]).content
        image_name = "image" + str(i) + ".jpg"
        image_path = os.path.join(IMAGE_FOLDER, image_name)
        with open(image_path, 'wb') as handler:
            handler.write(img_data)
        image["file_name"] = image_name
        tf_example = create_coco_tf_record.create_tf_example(image,
                                                             label_info["annotations"][i],
                                                             IMAGE_FOLDER,
                                                             label_info["categories"])
        writer.write(tf_example.SerializeToString())
I get the error
(image, annotations_list, image_dir, category_index, include_masks)
124 num_annotations_skipped = 0
125 for object_annotations in annotations_list:
--> 126 (x, y, width, height) = tuple(object_annotations['bbox'])
127 if width <= 0 or height <= 0:
128 num_annotations_skipped += 1
TypeError: string indices must be integers
What could be the problem?
Each image is supposed to receive a list of annotations, and you are providing a single one. Making it a single-element list should solve your error.
Ideally, each image's annotations in your json would themselves be a list. As a quick fix, wrap label_info["annotations"][i] in brackets:
[label_info["annotations"][i]]

How to redirect GAP output to a text file on a local drive?

Example
m1;
[ [ -1, 0, 0, 0, 0, 0 ], [ 1, 1, -1, 0, 0, 0 ], [ 0, 0, -1, 0, 0, 0 ], [ 0, 0, 0, -1, 0, 0 ], [ 0, 0, 0, 1, 1, 0 ],
[ 0, 0, 0, 0, 0, -1 ] ]
In the Windows version of the GAP system, how do I redirect any output to a text file on a local drive?
You may use the LogTo command to save the inputs and outputs of the whole GAP session, or you may use PrintTo to print the object to a text file.
Enter ?LogTo and ?PrintTo in GAP to see the documentation.
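For example (a minimal sketch; the drive and file names are just placeholders):
LogTo("C:/gap/session.log");   # from now on, input and output are also written to this file
PrintTo("C:/gap/m1.txt", m1);  # write only the object m1 to the file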
P.S. If you prefer to ask questions about GAP in the StackExchange framework, I'd recommend asking them at the Mathematics Q&A site.