Executing SQL query with variable number of named parameters in Golang - sql

So I have this PostgreSQL function, which takes a variable number of named arguments and returns a list of matching rows:
CREATE OR REPLACE FUNCTION read_user(
_id BIGINT DEFAULT NULL,
_phone VARCHAR(30) DEFAULT NULL,
_type VARCHAR(15) DEFAULT NULL,
_last VARCHAR(50) DEFAULT NULL,
_first VARCHAR(50) DEFAULT NULL
)
RETURNS setof T_USERS
AS $$
BEGIN
RETURN QUERY
SELECT * FROM T_USERS
WHERE ( id = _id OR _id IS NULL )
AND ( phone = _phone OR _phone IS NULL )
AND ( type = _type OR _type IS NULL )
AND ( last = _last OR _last IS NULL )
AND ( first = _first OR _first IS NULL );
EXCEPTION WHEN others THEN
RAISE WARNING 'Transaction failed and was rolled back';
RAISE NOTICE '% %', SQLERRM, SQLSTATE;
END
$$ LANGUAGE plpgsql;
So I can run polymorphic queries like these:
SELECT read_user(_id := 2);
SELECT read_user(_first := 'John', _last := 'Doe');
In Go I can do something like:
stmt, err := db.Prepare("SELECT read_user(_id = ?)")
But how can I do the same with a variable number of read_user arguments? I'm using the pq driver: https://github.com/lib/pq.

You can construct a single statement that enumerates all the parameters with their placeholders, and then pass nil explicitly for the parameters you don't have values for.
stmt, err := db.Prepare("SELECT read_user(_id := $1, _phone := $2, _type := $3, _last := $4, _first := $5)")
if err != nil {
// ...
}
stmt.Query(2, nil, nil, nil, nil) // result should be equivalent to `SELECT read_user(_id := 2)`
stmt.Query(nil, nil, nil, "Doe", "John") // result should be equivalent to `SELECT read_user(_first := 'John', _last := 'Doe')`
And if you want to have named parameters in Go as well, you can create a struct type to represent the parameters and a wrapper func that maps that parameter type's fields onto the query arguments:
type readUserParams struct {
    Id    interface{}
    Phone interface{}
    Type  interface{}
    Last  interface{}
    First interface{}
}

func readUser(p *readUserParams) {
    // Unset fields remain nil and are sent as SQL NULL,
    // so read_user falls back to its DEFAULT NULL arguments.
    stmt.Query(p.Id, p.Phone, p.Type, p.Last, p.First)
    // ...
}
readUser(&readUserParams{Id: 2})
readUser(&readUserParams{First: "John", Last:"Doe"})
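For completeness, here is a rough, self-contained sketch of the same wrapper approach wired up with lib/pq; the connection string and the row handling are placeholders, not part of the original answer.
package main

import (
    "database/sql"
    "log"

    _ "github.com/lib/pq"
)

type readUserParams struct {
    Id, Phone, Type, Last, First interface{}
}

func readUser(db *sql.DB, p *readUserParams) (*sql.Rows, error) {
    // Unset fields stay nil, which the driver sends as SQL NULL,
    // so read_user falls back to its DEFAULT NULL arguments.
    return db.Query(
        "SELECT read_user(_id := $1, _phone := $2, _type := $3, _last := $4, _first := $5)",
        p.Id, p.Phone, p.Type, p.Last, p.First,
    )
}

func main() {
    // placeholder DSN; replace with your own connection string
    db, err := sql.Open("postgres", "postgres://user:pass@localhost/mydb?sslmode=disable")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    rows, err := readUser(db, &readUserParams{First: "John", Last: "Doe"})
    if err != nil {
        log.Fatal(err)
    }
    defer rows.Close()
    // iterate over rows here
}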

Related

ORACLE json_object_t - error when call get_string() method on object got by chain of methods .get_array() .get()

I am trying to get a value from an array contained in a JSON object via a chain of JSON_OBJECT_T methods: .get_array(...).get(0).get_string(...).
But I am getting the exception component 'GET_STRING' must be declared.
Here is my code; could someone explain why the chain does not work?
declare
l_data clob := '{
"data": {
"foo": "bar",
"persons": [{"code":100, "name":"Elon"}, {"code":200, "name":"Musk"}]
}
}';
--
function bool_to_char(val in boolean) return varchar2 is
begin
return case val when true then 'TRUE' else 'FALSE' end;
end;
begin
dbms_output.put_line(
bool_to_char(
json_object_t
.parse(l_data)
.get_object('data')
.get_array('persons')
.get(0)
.is_object
)
); -- TRUE - got object
-- this code works - got "Elon"
dbms_output.put_line(
json_object_t
.parse(
json_object_t
.parse(l_data)
.get_object('data')
.get_array('persons')
.get(0)
.stringify
)
.get_string('name')
);
-- this code does not work - got exception "PLS-00302: component 'GET_STRING' must be declared"
/*
dbms_output.put_line(
json_object_t
.parse(l_data)
.get_object('data')
.get_array('persons')
.get(0)
.get_string('name')
);
*/
end;
The problem is that calling .get(0) returns the type JSON_ELEMENT_T, not JSON_OBJECT_T. JSON_ELEMENT_T does not have a GET_STRING method, which is why you are getting the error.
After calling get(0), you can TREAT the returned value as a JSON_OBJECT_T so that you can call the GET_STRING method.
See the code below:
DECLARE
l_data CLOB := '{
"data": {
"foo": "bar",
"persons": [{"code":100, "name":"Elon"}, {"code":200, "name":"Musk"}]
}
}';
--
FUNCTION bool_to_char (val IN BOOLEAN)
RETURN VARCHAR2
IS
BEGIN
RETURN CASE val WHEN TRUE THEN 'TRUE' ELSE 'FALSE' END;
END;
BEGIN
DBMS_OUTPUT.put_line (
bool_to_char (
json_object_t.parse (l_data).get_object ('data').get_array ('persons').get (0).is_object)); -- TRUE - got object
-- this code works - got "Elon"
DBMS_OUTPUT.put_line (
json_object_t.parse (
json_object_t.parse (l_data).get_object ('data').get_array ('persons').get (0).stringify).get_string (
'name'));
-- this code does not work - got exception "PLS-00302: component 'GET_STRING' must be declared"
--Fixed using TREAT(... as JSON_OBJECT_T)
DBMS_OUTPUT.put_line (
TREAT (
json_object_t.parse (l_data).get_object ('data').get_array ('persons').get (0)
AS json_object_t).get_string ('name'));
END;
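If you need to read every element of the persons array, the same TREAT cast works inside a loop. A small sketch reusing the JSON from the question (get_size and the zero-based get are standard JSON_ARRAY_T members):
DECLARE
    l_data    CLOB := '{"data": {"persons": [{"code":100, "name":"Elon"}, {"code":200, "name":"Musk"}]}}';
    l_persons json_array_t;
BEGIN
    l_persons := json_object_t.parse(l_data).get_object('data').get_array('persons');
    -- get_size returns the element count; get(i) is zero-based
    FOR i IN 0 .. l_persons.get_size - 1 LOOP
        DBMS_OUTPUT.put_line(
            TREAT(l_persons.get(i) AS json_object_t).get_string('name'));
    END LOOP;
END;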

Make a query to both "id" or "slug" with a single variable

I have a table "articles" where there're "id" and "slug" among other things. On an html page I have a list of links to articles. A link can contain either "id" or "slug" in it.
But if in a URL there's only a number, it doesn't still mean that it's an id -- therefore, casting to int to determine whether or not it's slug or id, won't work.
/articles/my_article
/articles/35
/articles/666 --> still may be slug
I have this sql query:
import (
"github.com/jackc/pgx/v4"
//.........
)
// [..........]
vars := mux.Vars(req)
q1 := `
SELECT
ar.id,
[.........]
FROM
articles AS ar
WHERE ar.slug = $1 OR ar.id = $1`
ar := Article{}
row := db.QueryRow(context.Background(), q1, vars["id_or_slug"])
switch err := row.Scan(&ar.Id, /*[.......]*/); err {
case pgx.ErrNoRows:
wrt.WriteHeader(http.StatusNotFound)
wrt.Write([]byte("article not found"))
case nil:
// good, article found
I get:
ERROR: operator does not exist: bigint = text (SQLSTATE 42883)
You can "attempt" to convert the value to an integer and if the conversion fails just ignore the error and provide an id value known to not be present in the db.
Doing the conversion with Go:
slug := mux.Vars(req)["id_or_slug"]
// option 1:
id, err := strconv.ParseInt(slug, 10, 64)
if err != nil {
id = -1 // provide a value that you're certain will not be present in the db
}
// option 2:
// if id 0 is good enough, you can skip error checking
// and use the following instead of the above.
id, _ := strconv.ParseInt(slug, 10, 64)
query := `SELECT ... FROM articles AS a
WHERE a.slug = $1
OR a.id = $2`
row := db.QueryRow(context.Background(), query, slug, id)
Doing the conversion with postgres (the following postgres snippet was taken from here):
-- first create a postgres function that will do the conversion / cast
create or replace function cast_to_int(text, integer) returns integer as $$
begin
return cast($1 as integer);
exception
when invalid_text_representation then
return $2;
end;
$$ language plpgsql immutable;
... and then utilizing that in Go:
slug := mux.Vars(req)["id_or_slug"]
query := `SELECT ... FROM articles AS a
WHERE a.slug = $1
OR a.id = cast_to_int($1::text, -1)` // use the postgres function in the go query string
row := db.QueryRow(context.Background(), query, slug)
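Putting the Go-side conversion into the question's handler, a rough self-contained sketch could look like this; the Article fields, the selected columns, and how the pgx connection is opened are assumptions based on the question, not spelled out in the original answer.
package handlers

import (
    "context"
    "net/http"
    "strconv"

    "github.com/gorilla/mux"
    "github.com/jackc/pgx/v4"
)

type Article struct {
    Id int64
    // ... other fields elided, as in the question
}

// articleHandler assumes db was opened elsewhere (e.g. with pgx.Connect).
func articleHandler(db *pgx.Conn, wrt http.ResponseWriter, req *http.Request) {
    slug := mux.Vars(req)["id_or_slug"]

    // Attempt the conversion; on failure fall back to an id that cannot exist.
    id, err := strconv.ParseInt(slug, 10, 64)
    if err != nil {
        id = -1
    }

    q1 := `SELECT ar.id -- , [other columns]
           FROM articles AS ar
           WHERE ar.slug = $1 OR ar.id = $2`

    ar := Article{}
    row := db.QueryRow(context.Background(), q1, slug, id)
    switch err := row.Scan(&ar.Id); err {
    case pgx.ErrNoRows:
        wrt.WriteHeader(http.StatusNotFound)
        wrt.Write([]byte("article not found"))
    case nil:
        // article found; encode and write it out
    default:
        wrt.WriteHeader(http.StatusInternalServerError)
    }
}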

golang null.String decoding not working correctly

I'm trying to fix a problem I'm having with the API I'm building.
db:
DROP TABLE IF EXISTS contacts CASCADE;
CREATE TABLE IF NOT EXISTS contacts (
uuid UUID UNIQUE PRIMARY KEY,
first_name varchar(150)
);
DROP TABLE IF EXISTS workorders CASCADE;
CREATE TABLE IF NOT EXISTS workorders (
uuid UUID UNIQUE PRIMARY KEY,
work_date timestamp WITH time zone,
requested_by UUID REFERENCES contacts (uuid) ON UPDATE CASCADE ON DELETE CASCADE
);
struct:
https://gopkg.in/guregu/null.v3
type WorkorderNew struct {
UUID string `json:"uuid"`
WorkDate null.Time `json:"work_date"`
RequestedBy null.String `json:"requested_by"`
}
api code:
workorder := &models.WorkorderNew{}
if err := json.NewDecoder(r.Body).Decode(workorder); err != nil {
log.Println("decoding fail", err)
}
// fmt.Println(NewUUID())
u2, err := uuid.NewV4()
if err != nil {
log.Fatalf("failed to generate UUID: %v", err)
}
q := `
INSERT
INTO workorders
(uuid,
work_date,
requested_by
)
VALUES
($1,$2,$3)
RETURNING uuid;`
statement, err := global.DB.Prepare(q)
global.CheckDbErr(err)
fmt.Println("requested by", workorder.RequestedBy)
lastInsertID := ""
err = statement.QueryRow(
u2,
workorder.WorkDate,
workorder.RequestedBy,
).Scan(&lastInsertID)
global.CheckDbErr(err)
json.NewEncoder(w).Encode(lastInsertID)
When I send an API request with null as the value, it works as expected,
but when I try to send "" as the value for the null.String or the null.Time, it fails.
works:
{
"work_date":"2016-12-16T19:00:00Z",
"requested_by":null
}
not working:
{
"work_date":"2016-12-16T19:00:00Z",
"requested_by":""
}
Basically, when I call QueryRow and save to the database, the workorder.RequestedBy value should be NULL and not the "" value I'm getting.
Thanks
If you want to treat empty strings as nulls, you have at least two options.
"Extend" null.String:
type MyNullString struct {
    null.String
}

func (ns *MyNullString) UnmarshalJSON(data []byte) error {
    if string(data) == `""` {
        ns.Valid = false
        return nil
    }
    return ns.String.UnmarshalJSON(data)
}
Or use NULLIF in the query:
INSERT INTO workorders (
uuid
, work_date
, requested_by
) VALUES (
$1
, $2
, NULLIF($3, '')
)
RETURNING uuid
Update:
To extend null.Time you have to understand that the type of null.Time.Time is a struct. The builtin len function works on slices, arrays, pointers to arrays, maps, channels, and strings, not on structs. So in this case you can check the data argument, which is a byte slice, by converting it to a string and comparing it against a string that contains only two double quotes, i.e. an empty JSON string.
type MyNullTime struct {
    null.Time
}

func (ns *MyNullTime) UnmarshalJSON(data []byte) error {
    if string(data) == `""` {
        ns.Valid = false
        return nil
    }
    return ns.Time.UnmarshalJSON(data)
}
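With both wrapper types in place, the struct from the question would presumably switch over to them (a sketch; the field names are taken from the original struct, and the rest of the insert code stays the same; an incoming "" now decodes as invalid and is stored as NULL):
type WorkorderNew struct {
    UUID        string       `json:"uuid"`
    WorkDate    MyNullTime   `json:"work_date"`
    RequestedBy MyNullString `json:"requested_by"`
}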

IBQuery insert - Column unknown / Unsupported feature

I'm trying to insert a row into a Firebird database (embedded), but I get an exception when calling:
datamodule1.IBQuery1.prepare
Project xyz.exe raised exception class EIBInterBaseError with message
'Dynamic SQL Error SQL error code = -206 Column unknown INDEX_ At
line, column 25'.
with datamodule1.IBQuery1 do
begin
close;
With SQL do
begin
clear;
Add( 'INSERT INTO MST_EVENTS (eventindex, state_, event_, param_, date_, time_, devID_, gateway_)' );
Add( 'VALUES (:eventindex, :state_, :event_, :param_, :date_, :time_, :devid_, :gateway_)' );
end;
//
GeneratorField.Field := 'Nr_';
GeneratorField.Generator := 'GEN_MST_EVENTS_ID';
//
Params[0].AsInteger := FMst.EventRecordIndex;
Params[1].AsSmallInt := FMst.EventRecordState;
Params[2].AsString := eventToStr(FMst.EventRecordEvent);
Params[3].AsSmallInt := 0;
Params[4].AsDate := FMst.EventRecordDate;
Params[5].AsTime := FMst.EventRecordTime;
Params[6].AsLongWord := FMst.EventRecordDevID;
Params[7].AsString := FMst.EventRecordIP;
//
if ( prepared = false ) then
prepare; //Throws an exception here (SOLVED)
execSQL; //Now getting exception here
end;
I have the following components tied together:
IBDatabase
IBTransaction
DataSource
IBQuery
Above problem solved - Edit >>
OK, I have changed
Add( 'INSERT INTO MST_EVENTS (eventindex, state_, event_, param_, date_, time_, devID_, gateway_)' );
to
Add( 'INSERT INTO MST_EVENTS ("eventindex", "state_", "event_", "param_", "date_", "time_", "devID_", "gateway_")' );
... (so I'm using quotation marks) and now it finds the fields, but I get another exception at the line:
IBQuery1.execSQL:
Exception class EIBClientError with message 'Unsupported feature'
My fields are:
Nr_ : INTEGER
eventindex : INTEGER
state_ : SMALLINT
event_ : VARCHAR(50)
param_ : SMALLINT
date_ : DATE
time_ : TIME
devID_ : BIGINT
gateway_ : VARCHAR(50)
Firebird version is 2.5 embedded 32bit
I took out all the string and date/time parameters, yet I still get the exception.
Using IBExpert and the same client/server .dll I can insert the row flawlessly (using all the values).
The solution was changing the line
Params[6].AsLongWord := FMst.EventRecordDevID;
to
Params[6].AsLargeInt := FMst.EventRecordDevID;
But how do I auto-increment the field 'Nr_'?
with datamodule1.IBQuery1 do
begin
close;
With SQL do
begin
clear;
Add( 'INSERT INTO MST_EVENTS (eventindex, state_, event_, param_, date_, time_, devID_, gateway_)' );
Add( 'VALUES (:eventindex, :state_, :event_, :param_, :date_, :time_, :devid_, :gateway_)' );
end;
//
GeneratorField.Field := 'Nr_';
GeneratorField.Generator := 'GEN_MST_EVENTS_ID';
//
Params[0].AsInteger := FMst.EventRecordIndex;
Params[1].AsSmallInt := FMst.EventRecordState;
Params[2].AsString := eventToStr(FMst.EventRecordEvent);
Params[3].AsSmallInt := 0;
Params[4].AsDate := FMst.EventRecordDate;
Params[5].AsTime := FMst.EventRecordTime;
Params[6].AsLargeInt := FMst.EventRecordDevID;
Params[7].AsString := FMst.EventRecordIP;
//
if ( prepared = false ) then
prepare; //Throws an exception here (SOLVED)
execSQL; //Now getting exception here
end;
I made the generator in FlameRobin, but I am still getting an exception when calling 'execSQL'.
EDIT >>
I set up a generator and a BEFORE INSERT trigger in IBExpert, and now it works.
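For reference, a minimal sketch of that generator-plus-trigger setup in Firebird 2.5 SQL; the generator and trigger names are assumptions based on the code above, and the quoting of "Nr_" should be adjusted to match how the table was actually created:
-- generator (sequence); the name matches the one used with GeneratorField above
CREATE GENERATOR GEN_MST_EVENTS_ID;

SET TERM ^ ;
-- BEFORE INSERT trigger: fill "Nr_" whenever the caller does not supply it
CREATE TRIGGER MST_EVENTS_BI FOR MST_EVENTS
ACTIVE BEFORE INSERT POSITION 0
AS
BEGIN
  IF (NEW."Nr_" IS NULL) THEN
    NEW."Nr_" = GEN_ID(GEN_MST_EVENTS_ID, 1);
END^
SET TERM ; ^
With the trigger in place, the INSERT statement can keep omitting Nr_ and the trigger fills it in.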

Oracle aggregate functions and how to concatenate all values in column

All,
My Oracle Database is version 10g Enterprise Edition Release 10.2.0.5.0 - 64bit
I have the following statement, which usefully gets me the max (or min or count etc.) value in each case, as expected. However, what I would like is to get and concatenate all of the values rather than the max, min or count. Is there an elegant way to do this, please?
SELECT lla.id,
max(decode(lla.attrid, 2, lla.valstr, null)) "Attribute Name 2",
min(decode(lla.attrid, 3, lla.valstr, null)) "Attribute Name 3",
count(decode(lla2.attrid, 5, lla2.valstr, null)) "Attribute Name 5"
FROM llattrdata lla, llattrdata lla2
WHERE lla.id = lla2.id
AND lla.defid = 111111 --category id 1
AND lla2.defid = 222222 --category id 2
AND lla.id = 48212327 and lla2.id = 48212327
GROUP BY lla.id
Hoping for a row that looks something like this:
12121212 | fred, jack, gill | 56 | 29,10
To be clearer, it is all of the values that 'Attribute Name 3' (for example) contains that I want to see, not just the max or the min. For that attribute I can get the max or the min value, or even the count, but I cannot see a way to get all of the values. For example, I can get 10 as the min and 29 as the max, and even 2 as the count, but not 29 and 10 in the same column!
Many thanks in advance,
SELECT e.department_id,
listagg(e.first_name) within group (order by e.department_id) "Attribute Name 2"
FROM employees e join
departments d
on e.department_id = d.department_id
GROUP BY e.department_id;
You can use the above example and adapt it to your query. Note, though, that LISTAGG is only available from Oracle 11gR2 onwards, so it will not work on 10g.
Try this:
SELECT lla.id || ' | ' ||
max(decode(lla.attrid, 2, lla.valstr, null)) || ' | ' ||
min(decode(lla.attrid, 3, lla.valstr, null)) || ' | ' ||
count(decode(lla2.attrid, 5, lla2.valstr, null))
FROM llattrdata lla, llattrdata lla2
WHERE lla.id = lla2.id
AND lla.defid = 111111 --category id 1
AND lla2.defid = 222222 --category id 2
AND lla.id = 48212327 and lla2.id = 48212327
GROUP BY lla.id
Use the wmsys.wm_concat function; learn more about it here. It is a non-documented function in Oracle 10.
It returns a comma-separated list; you can use the replace function to swap the comma for whatever separator you need.
Unfortunately this function does not have an order clause, so you cannot specify the order of the items in the list.
EDIT:
If this function is not available to you, you can simply create it yourself:
CREATE OR REPLACE TYPE wm_concat_impl
AUTHID CURRENT_USER
AS OBJECT (
curr_str VARCHAR2 (32767),
STATIC FUNCTION odciaggregateinitialize (sctx IN OUT wm_concat_impl)
RETURN NUMBER,
MEMBER FUNCTION odciaggregateiterate (
SELF IN OUT wm_concat_impl,
p1 IN VARCHAR2
)
RETURN NUMBER,
MEMBER FUNCTION odciaggregateterminate (
SELF IN wm_concat_impl,
returnvalue OUT VARCHAR2,
flags IN NUMBER
)
RETURN NUMBER,
MEMBER FUNCTION odciaggregatemerge (
SELF IN OUT wm_concat_impl,
sctx2 IN wm_concat_impl
)
RETURN NUMBER
);
/
CREATE OR REPLACE TYPE BODY wm_concat_impl
IS
STATIC FUNCTION odciaggregateinitialize (sctx IN OUT wm_concat_impl)
RETURN NUMBER
IS
BEGIN
sctx := wm_concat_impl (NULL);
RETURN odciconst.success;
END;
MEMBER FUNCTION odciaggregateiterate (
SELF IN OUT wm_concat_impl,
p1 IN VARCHAR2
)
RETURN NUMBER
IS
BEGIN
IF (curr_str IS NOT NULL)
THEN
curr_str := curr_str || ',' || p1;
ELSE
curr_str := p1;
END IF;
RETURN odciconst.success;
END;
MEMBER FUNCTION odciaggregateterminate (
SELF IN wm_concat_impl,
returnvalue OUT VARCHAR2,
flags IN NUMBER
)
RETURN NUMBER
IS
BEGIN
returnvalue := curr_str;
RETURN odciconst.success;
END;
MEMBER FUNCTION odciaggregatemerge (
SELF IN OUT wm_concat_impl,
sctx2 IN wm_concat_impl
)
RETURN NUMBER
IS
BEGIN
IF (sctx2.curr_str IS NOT NULL)
THEN
SELF.curr_str := SELF.curr_str || ',' || sctx2.curr_str;
END IF;
RETURN odciconst.success;
END;
END;
/
CREATE OR REPLACE FUNCTION wm_concat (p1 VARCHAR2)
RETURN VARCHAR2
AGGREGATE USING wm_concat_impl;
/
The code is taken from this website; it is, unfortunately, in Russian, but just use this custom aggregate function for your purposes.
I had the same problem and used the STRAGG (as in STRing AGGregate) function created by Tom Kyte.
https://asktom.oracle.com/pls/asktom/f?p=100:11:::::P11_QUESTION_ID:15637744429336
create or replace type stragg_type as object
(
string varchar2(4000),
static function ODCIAggregateInitialize
( sctx in out stragg_type )
return number ,
member function ODCIAggregateIterate
( self in out stragg_type ,
value in varchar2
) return number ,
member function ODCIAggregateTerminate
( self in stragg_type,
returnvalue out varchar2,
flags in number
) return number ,
member function ODCIAggregateMerge
( self in out stragg_type,
ctx2 in stragg_type
) return number
);
/
create or replace type body stragg_type
is
static function ODCIAggregateInitialize
( sctx in out stragg_type )
return number
is
begin
sctx := stragg_type( null ) ;
return ODCIConst.Success ;
end;
member function ODCIAggregateIterate
( self in out stragg_type ,
value in varchar2
) return number
is
begin
self.string := self.string || ',' || value ;
return ODCIConst.Success;
end;
member function ODCIAggregateTerminate
( self in stragg_type ,
returnvalue out varchar2 ,
flags in number
) return number
is
begin
returnValue := ltrim( self.string, ',' );
return ODCIConst.Success;
end;
member function ODCIAggregateMerge
( self in out stragg_type ,
ctx2 in stragg_type
) return number
is
begin
self.string := self.string || ctx2.string;
return ODCIConst.Success;
end;
end;
/
create or replace function stragg
( input varchar2 )
return varchar2
deterministic
parallel_enable
aggregate using stragg_type
;
/
Run the three create statements one after the other in SQL*Plus or SQL Developer. Now the stragg() function is created in your user schema. Then you can do:
SELECT lla.id,
max(decode(lla.attrid, 2, lla.valstr, null)) "Attribute Name 2",
STRAGG(decode(lla.attrid, 3, lla.valstr, null)) "Attribute Name 3 List",
count(decode(lla2.attrid, 5, lla2.valstr, null)) "Attribute Name 5"
FROM llattrdata lla, llattrdata lla2
WHERE lla.id = lla2.id
AND lla.defid = 111111 --category id 1
AND lla2.defid = 222222 --category id 2
AND lla.id = 48212327 and lla2.id = 48212327
GROUP BY lla.id