How to return nested JSON? - sql

I have 3 tables in a PostgreSQL database.
QUESTIONS table:
| id (int) | text (text) |
|----------|--------------------------------------|
| 1 | What is your favorite color? |
| 2 | What is your favorite football club? |
OPTIONS table:
| id (int) | text (text) |
|----------|-------------|
| 1 | red |
| 2 | blue |
| 3 | grey |
| 4 | green |
| 5 | brown |
QUESTIONS_OPTIONS table:
| question_id (int) | option_id (int) |
|-------------------|-----------------|
| 1 | 1 |
| 1 | 2 |
| 1 | 3 |
| 1 | 4 |
| 1 | 5 |
In my Golang application I have these models:
type Option struct {
    ID   int    `json:"option_id"`
    Text string `json:"option_text"`
}

type Question struct {
    ID      int      `json:"question_id"`
    Text    string   `json:"question_text"`
    Options []Option `json:"options"`
}
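For the sample data above, the nested JSON I expect these structs to produce for question 1 looks roughly like this:
[
    {
        "question_id": 1,
        "question_text": "What is your favorite color?",
        "options": [
            { "option_id": 1, "option_text": "red" },
            { "option_id": 2, "option_text": "blue" },
            { "option_id": 3, "option_text": "grey" },
            { "option_id": 4, "option_text": "green" },
            { "option_id": 5, "option_text": "brown" }
        ]
    }
]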
In the controller I have this code:
var GetQuestions = func(responseWriter http.ResponseWriter, request *http.Request) {
    rows, _ := database.DBSQL.Query("SELECT * FROM questions;")
    defer rows.Close()
    var questions []Question
    for rows.Next() {
        var question Question
        var options []Option
        queries, _ := database.DBSQL.Query(`select options.id as option_id, options.text as option_text from questions_options inner join questions on questions_options.question_id = ` + &question.ID + ` inner join options on questions_options.option_id = options.id`)
        queries.Close()
        for queries.Next() {
            var option Option
            if err := queries.Scan(&option.ID, &option.Text); err != nil {
                log.Println(err)
            }
            options = append(options, option)
        }
        if err := rows.Scan(&question.ID, &question.Text, options); err != nil { // the service raises an error on this line: sql: expected 2 destination arguments in Scan, not 3
            log.Println(err)
        }
        questions = append(questions, question)
    }
    utils.Response(responseWriter, http.StatusOK, questions)
}
When I make a GET request to fetch all questions with all their options, the service returns this incorrect result:
[
    {
        "question_id": 0,
        "question_text": "",
        "options": null
    },
    {
        "question_id": 0,
        "question_text": "",
        "options": null
    }
]
Where is my mistake?

You should scan the question row before running the inner query, pass question.ID as a query parameter instead of concatenating it into the SQL, and move queries.Close() to the end of the loop, like this:
var GetQuestions = func(responseWriter http.ResponseWriter, request *http.Request) {
    rows, _ := database.DBSQL.Query("SELECT * FROM questions;")
    defer rows.Close()
    var questions []Question
    for rows.Next() {
        var question Question
        if err := rows.Scan(&question.ID, &question.Text); err != nil {
            log.Println(err)
            continue
        }
        queries, _ := database.DBSQL.Query(`select options.id as option_id, options.text as option_text from questions_options inner join questions on questions_options.question_id = $1 inner join options on questions_options.option_id = options.id`, question.ID)
        for queries.Next() {
            var option Option
            if err := queries.Scan(&option.ID, &option.Text); err != nil {
                log.Println(err)
            }
            question.Options = append(question.Options, option)
        }
        queries.Close()
        questions = append(questions, question)
    }
    utils.Response(responseWriter, http.StatusOK, questions)
}
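Side note: both Query calls still ignore the returned error with _; in production code you would want to check those errors before using rows or queries.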

Related

Google Apps Script for BigQuery - different results of REGEXP_CONTAINS

I am having trouble with my own SQL and REGEXP_CONTAINS, so I decided to test the official BigQuery example from here. I used the SQL from the example, which shows how REGEXP_CONTAINS works. See the SQL here:
SELECT
  email,
  REGEXP_CONTAINS(email, r'^([\w.+-]+#foo\.com|[\w.+-]+#bar\.org)$')
    AS valid_email_address,
  REGEXP_CONTAINS(email, r'^[\w.+-]+#foo\.com|[\w.+-]+#bar\.org$')
    AS without_parentheses
FROM
  (SELECT
     ['a#foo.com', 'a#foo.computer', 'b#bar.org', '!b#bar.org', 'c#buz.net']
     AS addresses),
  UNNEST(addresses) AS email;
+----------------+---------------------+---------------------+
| email          | valid_email_address | without_parentheses |
+----------------+---------------------+---------------------+
| a#foo.com      | true                | true                |
| a#foo.computer | false               | true                |
| b#bar.org      | true                | true                |
| !b#bar.org     | false               | true                |
| c#buz.net      | false               | false               |
+----------------+---------------------+---------------------+
This SQL works as expected when I compose a new query directly in the BigQuery editor. But if I want to use the SQL with REGEXP_CONTAINS in Google Apps Script, it doesn't work. The script I use is:
/**
 * Runs a BigQuery query and logs the results in a spreadsheet.
 */
function testSQL() {
  // Replace this value with the project ID listed in the Google
  // Cloud Platform project.
  var projectId = 'bigquery-xxxx';
  var request = {
    query: 'SELECT ' +
      'email, ' +
      'REGEXP_CONTAINS(email, r"^([\w.+-]+#foo\.com|[\w.+-]+#bar\.org)$")' +
      ' AS valid_email_address, ' +
      'REGEXP_CONTAINS(email, r"^[\w.+-]+#foo\.com|[\w.+-]+#bar\.org$")' +
      ' AS without_parentheses ' +
      'FROM' +
      ' (SELECT' +
      ' ["a#foo.com", "a#foo.computer", "b#bar.org", "!b#bar.org", "c#buz.net"]' +
      ' AS addresses),' +
      ' UNNEST(addresses) AS email;',
    useLegacySql: false
  };
  var queryResults = BigQuery.Jobs.query(request, projectId);
  var jobId = queryResults.jobReference.jobId;

  // Check on status of the Query Job.
  var sleepTimeMs = 500;
  while (!queryResults.jobComplete) {
    Utilities.sleep(sleepTimeMs);
    sleepTimeMs *= 2;
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId);
  }

  // Get all the rows of results.
  var rows = queryResults.rows;
  while (queryResults.pageToken) {
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId, {
      pageToken: queryResults.pageToken
    });
    rows = rows.concat(queryResults.rows);
  }

  if (rows) {
    var spreadsheet = SpreadsheetApp.create('BiqQuery Results');
    var sheet = spreadsheet.getActiveSheet();

    // Append the headers.
    var headers = queryResults.schema.fields.map(function(field) {
      return field.name;
    });
    sheet.appendRow(headers);

    // Append the results.
    var data = new Array(rows.length);
    for (var i = 0; i < rows.length; i++) {
      var cols = rows[i].f;
      data[i] = new Array(cols.length);
      for (var j = 0; j < cols.length; j++) {
        data[i][j] = cols[j].v;
      }
    }
    sheet.getRange(2, 1, rows.length, headers.length).setValues(data);

    Logger.log('Results spreadsheet created: %s', spreadsheet.getUrl());
  } else {
    Logger.log('No rows returned.');
  }
}
The script above creates the sheet, but the REGEXP_CONTAINS results it contains differ from the ones shown above. What is wrong with REGEXP_CONTAINS?

Laravel Eloquent - Return unique values only and ignore all that appear more than once

I have 2 database tables.
|---------|----------|
| Users   | Matches  |
|---------|----------|
| user_id | match_id |
|         | user_1   |
|         | user_2   |
|---------|----------|
with user_1 and user_2 being user_ids.
I am now trying to retrieve all matches with unique values only. As soon as a user ID is used twice, I don't want to retrieve ANY matches containing the id.
Example:
MATCHES (match_id, user_1, user_2):
1, 1, 2
2, 1, 3
3, 4, 5
4, 6, 7
5, 7, 8
The query should return 3, 4, 5 ONLY, because it's the only match containing only unique user_ids.
How should I go about this? I've been trying an approach using ->distinct(), but that doesn't work: it only removes duplicate rows, while I also want to drop every other entry containing those values.
Simple and crude, and not a query-based solution, but it will get you what you want:
public function strictlyUniqueMatches()
{
    $matches = Matches::distinct()->get();
    $present = [];

    foreach ($matches as $idx => $match) {
        if (!isset($present[$match->user_1])) {
            $present[$match->user_1] = 0;
        }
        if (!isset($present[$match->user_2])) {
            $present[$match->user_2] = 0;
        }
        $present[$match->user_1]++;
        $present[$match->user_2]++;
    }

    return $matches->filter(function ($match) use ($present) {
        if ($present[$match->user_1] > 1) {
            return false;
        }
        if ($present[$match->user_2] > 1) {
            return false;
        }
        return true;
    });
}
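The first loop counts how many matches each user id appears in; the filter then keeps a match only when both of its user ids were counted exactly once, which matches the example above (only match 3, with users 4 and 5, survives).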

Node Postgres insert multiple rows based on array and static variable

I am trying to insert multiple rows into a PostgreSQL server from my Node.js server based on an array. I have a static variable, user_id, which will be the same for all entries, but I want to vary the filter_name based on an array. My goal is to avoid making multiple SQL insert calls.
arrayOfFilters = ["CM", "CBO", "SA", "EPS", "AD"]
await db.query(
"INSERT INTO filters(filter_name, requests_id)VALUES($1, $2)",
[arrayOfFiltersParams, user_id]);
I am hoping to get a row in the filters table for each of the filters found in arrayOfFilters, with the matching user_id key for each entry (i.e. 5 rows in this example).
Thanks so much!
A single parameterized INSERT binds only one set of values, so to get one row per filter in a single call you can build a multi-row VALUES list. Write a helper function expand to build the parameterized query. Here is a solution:
import { pgclient } from '../../db';

function expand(rowCount, columnCount, startAt = 1) {
  var index = startAt;
  return Array(rowCount)
    .fill(0)
    .map(
      (v) =>
        `(${Array(columnCount)
          .fill(0)
          .map((v) => `$${index++}`)
          .join(', ')})`,
    )
    .join(', ');
}

function flatten(arr) {
  var newArr: any[] = [];
  arr.forEach((v) => v.forEach((p) => newArr.push(p)));
  return newArr;
}

(async function test() {
  try {
    await pgclient.connect();
    // create table
    await pgclient.query(`
      CREATE TABLE IF NOT EXISTS filters (
        id serial PRIMARY KEY,
        filter_name VARCHAR(10),
        requests_id INTEGER
      )
    `);
    // test
    const arrayOfFilters = ['CM', 'CBO', 'SA', 'EPS', 'AD'];
    const user_id = 1;
    const arrayOfFiltersParams: any[] = arrayOfFilters.map((el) => [el, user_id]);
    await pgclient.query(
      `INSERT INTO filters(filter_name, requests_id) VALUES ${expand(arrayOfFilters.length, 2)}`,
      flatten(arrayOfFiltersParams),
    );
  } catch (error) {
    console.log(error);
  } finally {
    pgclient.end();
  }
})();
After executing the above code, check the database:
node-sequelize-examples=# select * from "filters";
 id | filter_name | requests_id
----+-------------+-------------
  1 | CM          |           1
  2 | CBO         |           1
  3 | SA          |           1
  4 | EPS         |           1
  5 | AD          |           1
(5 rows)
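For the five filters above, expand(5, 2) generates the placeholder list ($1, $2), ($3, $4), ($5, $6), ($7, $8), ($9, $10), and flatten(arrayOfFiltersParams) turns [['CM', 1], ['CBO', 1], ...] into the flat parameter array ['CM', 1, 'CBO', 1, 'SA', 1, 'EPS', 1, 'AD', 1], so the single statement sent to PostgreSQL is:
INSERT INTO filters(filter_name, requests_id) VALUES ($1, $2), ($3, $4), ($5, $6), ($7, $8), ($9, $10)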

Side effects in computed properties

I want to avoid having side effects in my code, but I don't know how to fix these. Can someone help?
computed: {
  sumarVerduras() {
    this.totalVerduras = 0;
    for (const verdura of this.verduras) {
      this.totalVerduras = this.totalVerduras + verdura.cantidad;
    }
    return this.totalVerduras;
  }
}
It works as I want, but the side effect is there:
Module Warning (from ./node_modules/eslint-loader/index.js):
error: Unexpected side effect in "sumarVerduras" computed property (vue/no-side-effects-in-computed-properties) at src\App.vue:53:7:
51 | computed: {
52 | sumarVerduras(){
53 | this.totalVerduras = 0;
| ^
54 | for( const verdura of this.verduras){
55 | this.totalVerduras = this.totalVerduras + verdura.cantidad
56 | } return this.totalVerduras;
error: Unexpected side effect in "sumarVerduras" computed property (vue/no-side-effects-in-computed-properties) at src\App.vue:55:11:
53 | this.totalVerduras = 0;
54 | for( const verdura of this.verduras){
55 | this.totalVerduras = this.totalVerduras + verdura.cantidad
| ^
56 | } return this.totalVerduras;
57 | }
58 | }
You should not modify a Vue component's data inside a computed property. Here you modify this.totalVerduras, which is part of the component's data.
You can change it to:
computed: {
  sumarVerduras() {
    let totalVerduras = 0;
    for (const verdura of this.verduras) {
      totalVerduras = totalVerduras + verdura.cantidad;
    }
    return totalVerduras;
  }
}
You can do this as well:
computed: {
  sumarVerduras() {
    return this.verduras.reduce((a, x) => a + x.cantidad, 0);
  }
}
This version gets rid of the totalVerduras variable and the for loop.

Azure Stream Analytics Pivoting Rows To Columns

I have data like this:
[
    {"deviceid": "d1", "parameter": "p1", "value": "1",  "timestamp": "2018-03-22T12:33:00"},
    {"deviceid": "d1", "parameter": "p2", "value": "2",  "timestamp": "2018-03-22T12:34:00"},
    {"deviceid": "d1", "parameter": "p2", "value": "3",  "timestamp": "2018-03-22T12:35:00"},
    {"deviceid": "d2", "parameter": "p1", "value": "4",  "timestamp": "2018-03-22T12:36:00"},
    {"deviceid": "d2", "parameter": "p2", "value": "5",  "timestamp": "2018-03-22T12:37:00"},
    {"deviceid": "d2", "parameter": "p2", "value": "6",  "timestamp": "2018-03-22T12:38:00"},
    {"deviceid": "d2", "parameter": "p1", "value": "7",  "timestamp": "2018-03-22T12:43:00"},
    {"deviceid": "d2", "parameter": "p1", "value": "8",  "timestamp": "2018-03-22T12:44:00"},
    {"deviceid": "d2", "parameter": "p2", "value": "9",  "timestamp": "2018-03-22T12:45:00"},
    {"deviceid": "d1", "parameter": "p1", "value": "10", "timestamp": "2018-03-22T12:46:00"},
    {"deviceid": "d1", "parameter": "p1", "value": "11", "timestamp": "2018-03-22T12:47:00"},
    {"deviceid": "d1", "parameter": "p2", "value": "12", "timestamp": "2018-03-22T12:49:00"}
]
I want to pivot the parameter values (for every 10-minute window) and display the last received value of each parameter for each deviceid, like this. Edit: the parameters will be dynamic, not just p1 and p2.
-----------------------------------------
| deviceid | windowtime       | p1 | p2 |
-----------------------------------------
| d1       | 2018-03-22 12:40 |  1 |  3 |
-----------------------------------------
| d2       | 2018-03-22 12:40 |  4 |  6 |
-----------------------------------------
| d2       | 2018-03-22 12:50 |  8 |  9 |
-----------------------------------------
| d1       | 2018-03-22 12:50 | 11 | 12 |
-----------------------------------------
Thank You.
Another, clearer way is to use a UDA:
SELECT
    deviceid, system.Timestamp as windowtime, uda.P1UDA(test) AS P1, uda.P2UDA(test) AS P2
FROM
    test TIMESTAMP BY timestamp
GROUP BY
    deviceid,
    TumblingWindow(minute, 10)
JavaScript UDA:
function P1UDA() {
    this.init = function () {
        this.state = 0;
    }

    this.accumulate = function (value, timestamp) {
        if (value.parameter == 'p1') {
            if (value.value > this.state) {
                this.state = value.value;
            }
        }
    }

    this.computeResult = function () {
        return this.state;
    }
}

function P2UDA() {
    this.init = function () {
        this.state = 0;
    }

    this.accumulate = function (value, timestamp) {
        if (value.parameter == 'p2') {
            if (value.value > this.state) {
                this.state = value.value;
            }
        }
    }

    this.computeResult = function () {
        return this.state;
    }
}
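(For this query to work, P1UDA and P2UDA have to be added to the Stream Analytics job as JavaScript UDA functions, which is why the query calls them with the uda. prefix inside the windowed GROUP BY.)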
You can also try a script like the one below:
with tempone as (
    SELECT
        deviceid, system.Timestamp as windowtime, max(value) AS P1
    FROM
        test TIMESTAMP BY timestamp
    where parameter = 'p1'
    GROUP BY
        deviceid,
        TumblingWindow(minute, 10)
),
temptwo AS (
    SELECT
        deviceid, system.Timestamp as windowtime, max(value) AS P2
    FROM
        test TIMESTAMP BY timestamp
    where parameter = 'p2'
    GROUP BY
        deviceid,
        TumblingWindow(minute, 10)
)
select tempone.deviceid, tempone.windowtime, tempone.P1, temptwo.P2
from tempone
join temptwo on tempone.deviceid = temptwo.deviceid
    and DATEDIFF(minute, tempone, temptwo) BETWEEN 0 AND 1