Are there any solutions for SELECT * FROM in golang SQL drivers? [duplicate] - sql

Basically after doing a query I'd like to take the resulting rows and produce a []map[string]interface{}, but I do not see how to do this with the API since the Rows.Scan() function needs a specific number of parameters matching the requested number of columns (and possibly the types as well) to correctly obtain the data.
Again, I'd like to generalize this call and take any query and turn it into a []map[string]interface{}, where the map contains column names mapped to the values for that row.
This is likely very inefficient, and I plan on changing the structure later so that interface{} is a struct for a single data point.
How would I do this using just the database/sql package, or if necessary the database/sql/driver package?
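A minimal sketch of the generic pattern with only the database/sql package looks roughly like this (the helper name rowsToMaps is made up for illustration; many drivers hand back []byte for text columns, so further conversion may be needed):
package main

import "database/sql"

// rowsToMaps scans every row of rows into a map keyed by column name.
// One *interface{} per column is built from Rows.Columns and passed to Scan.
func rowsToMaps(rows *sql.Rows) ([]map[string]interface{}, error) {
	cols, err := rows.Columns()
	if err != nil {
		return nil, err
	}
	var out []map[string]interface{}
	for rows.Next() {
		vals := make([]interface{}, len(cols))
		ptrs := make([]interface{}, len(cols))
		for i := range vals {
			ptrs[i] = &vals[i]
		}
		if err := rows.Scan(ptrs...); err != nil {
			return nil, err
		}
		m := make(map[string]interface{}, len(cols))
		for i, c := range cols {
			m[c] = vals[i] // often []byte, depending on the driver
		}
		out = append(out, m)
	}
	return out, rows.Err()
}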

Look at using sqlx, which can do this a little more easily than the standard database/sql library:
places := []Place{}
err := db.Select(&places, "SELECT * FROM place ORDER BY telcode ASC")
if err != nil {
fmt.Println(err)
return
}
You could obviously replace []Place{} with a []map[string]interface{}, but where possible it is better to use a struct if you know the structure of your database. You won't need to undertake any type assertions as you might on an interface{}.
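If you do want a map per row with sqlx, here is a short sketch, assuming the Queryx/MapScan API exposed by current sqlx versions:
rows, err := db.Queryx("SELECT * FROM place")
if err != nil {
	fmt.Println(err)
	return
}
defer rows.Close()
for rows.Next() {
	m := map[string]interface{}{}
	if err := rows.MapScan(m); err != nil {
		fmt.Println(err)
		return
	}
	// m maps column names to this row's values (often []byte, depending on the driver)
}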

I haven't used it (yet), but I believe the "common" way to do what you are asking (more or less) is to use gorp.

You can create a struct that maintains the map key to the position of the []interface{} slice. By doing this, you do not need to create a predefined struct. For example:
IDOrder: 0
IsClose: 1
IsConfirm: 2
IDUser: 3
Then, you can use it like this:
// create a fieldbinding object.
var fArr []string
fb := fieldbinding.NewFieldBinding()
if fArr, err = rs.Columns(); err != nil {
return nil, err
}
fb.PutFields(fArr)
//
outArr := []interface{}{}
for rs.Next() {
if err := rs.Scan(fb.GetFieldPtrArr()...); err != nil {
return nil, err
}
fmt.Printf("Row: %v, %v, %v, %s\n", fb.Get("IDOrder"), fb.Get("IsConfirm"), fb.Get("IDUser"), fb.Get("Created"))
outArr = append(outArr, fb.GetFieldArr())
}
Sample output:
Row: 1, 1, 1, 2016-07-15 10:39:37 +0000 UTC
Row: 2, 1, 11, 2016-07-15 10:42:04 +0000 UTC
Row: 3, 1, 10, 2016-07-15 10:46:20 +0000 UTC
SampleQuery: [{"Created":"2016-07-15T10:39:37Z","IDOrder":1,"IDUser":1,"IsClose":0,"IsConfirm":1},{"Created":"2016-07-15T10:42:04Z","IDOrder":2,"IDUser":11,"IsClose":0,"IsConfirm":1},{"Created":"2016-07-15T10:46:20Z","IDOrder":3,"IDUser":10,"IsClose":0,"IsConfirm":1}]
Please see the full example below or at fieldbinding:
main.go
package main
import (
"bytes"
"database/sql"
"encoding/json"
"fmt"
)
import (
_ "github.com/go-sql-driver/mysql"
"github.com/junhsieh/goexamples/fieldbinding/fieldbinding"
)
var (
db *sql.DB
)
// Table definition
// CREATE TABLE `salorder` (
// `IDOrder` int(10) unsigned NOT NULL AUTO_INCREMENT,
// `IsClose` tinyint(4) NOT NULL,
// `IsConfirm` tinyint(4) NOT NULL,
// `IDUser` int(11) NOT NULL,
// `Created` datetime NOT NULL,
// `Changed` datetime NOT NULL,
// PRIMARY KEY (`IDOrder`),
// KEY `IsClose` (`IsClose`)
// ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
func main() {
var err error
// starting database server
db, err = sql.Open("mysql", "Username:Password@tcp(Host:Port)/DBName?parseTime=true")
if err != nil {
panic(err.Error()) // Just for example purpose. You should use proper error handling instead of panic
}
defer db.Close()
// SampleQuery
if v, err := SampleQuery(); err != nil {
fmt.Printf("%s\n", err.Error())
} else {
var b bytes.Buffer
if err := json.NewEncoder(&b).Encode(v); err != nil {
fmt.Printf("SampleQuery: %v\n", err.Error())
}
fmt.Printf("SampleQuery: %v\n", b.String())
}
}
func SampleQuery() ([]interface{}, error) {
param := []interface{}{}
param = append(param, 1)
sql := "SELECT "
sql += " SalOrder.IDOrder "
sql += ", SalOrder.IsClose "
sql += ", SalOrder.IsConfirm "
sql += ", SalOrder.IDUser "
sql += ", SalOrder.Created "
sql += "FROM SalOrder "
sql += "WHERE "
sql += "IsConfirm = ? "
sql += "ORDER BY SalOrder.IDOrder ASC "
rs, err := db.Query(sql, param...)
if err != nil {
return nil, err
}
defer rs.Close()
// create a fieldbinding object.
var fArr []string
fb := fieldbinding.NewFieldBinding()
if fArr, err = rs.Columns(); err != nil {
return nil, err
}
fb.PutFields(fArr)
//
outArr := []interface{}{}
for rs.Next() {
if err := rs.Scan(fb.GetFieldPtrArr()...); err != nil {
return nil, err
}
fmt.Printf("Row: %v, %v, %v, %s\n", fb.Get("IDOrder"), fb.Get("IsConfirm"), fb.Get("IDUser"), fb.Get("Created"))
outArr = append(outArr, fb.GetFieldArr())
}
if err := rs.Err(); err != nil {
return nil, err
}
return outArr, nil
}
fieldbinding package:
package fieldbinding
import (
"sync"
)
// NewFieldBinding ...
func NewFieldBinding() *FieldBinding {
return &FieldBinding{}
}
// FieldBinding is designed for use with SQL rows.Scan().
type FieldBinding struct {
sync.RWMutex // embedded. see http://golang.org/ref/spec#Struct_types
FieldArr []interface{}
FieldPtrArr []interface{}
FieldCount int64
MapFieldToID map[string]int64
}
func (fb *FieldBinding) put(k string, v int64) {
fb.Lock()
defer fb.Unlock()
fb.MapFieldToID[k] = v
}
// Get ...
func (fb *FieldBinding) Get(k string) interface{} {
fb.RLock()
defer fb.RUnlock()
// TODO: check map key exist and fb.FieldArr boundary.
return fb.FieldArr[fb.MapFieldToID[k]]
}
// PutFields ...
func (fb *FieldBinding) PutFields(fArr []string) {
fCount := len(fArr)
fb.FieldCount = int64(fCount)
fb.FieldArr = make([]interface{}, fCount)
fb.FieldPtrArr = make([]interface{}, fCount)
fb.MapFieldToID = make(map[string]int64, fCount)
for k, v := range fArr {
fb.FieldPtrArr[k] = &fb.FieldArr[k]
fb.put(v, int64(k))
}
}
// GetFieldPtrArr ...
func (fb *FieldBinding) GetFieldPtrArr() []interface{} {
return fb.FieldPtrArr
}
// GetFieldArr ...
func (fb *FieldBinding) GetFieldArr() map[string]interface{} {
m := make(map[string]interface{}, fb.FieldCount)
for k, v := range fb.MapFieldToID {
m[k] = fb.FieldArr[v]
}
return m
}

If you really want a map, which is needed in some cases, have a look at dbr, but you need to use the fork (since the PR got rejected in the original repo). The fork seems more up to date anyway:
https://github.com/mailru/dbr
For info on how to use it:
https://github.com/gocraft/dbr/issues/83

package main
import (
"fmt"
"github.com/bobby96333/goSqlHelper"
)
func main(){
fmt.Println("hello")
conn, err := goSqlHelper.MysqlOpen("user:password@tcp(127.0.0.1:3306)/dbname")
checkErr(err)
row,err := conn.QueryRow("select * from table where col1 = ? and col2 = ?","123","abc")
checkErr(err)
if *row==nil {
fmt.Println("no found row")
}else{
fmt.Printf("%+v",row)
}
}
func checkErr(err error){
if err!=nil {
panic(err)
}
}
output:
&map[col1:abc col2:123]

Related

Storing and Retrieving Lat Long Values Stored as Geography Point Type in Database GoLang

I am trying to save latitude and longitude values to my database as the geography point datatype, and I want to be able to retrieve the values accordingly. I have implemented the following.
My model device.go looks like this:
device.go
package models
import (
"bytes"
"database/sql/driver"
"encoding/binary"
"encoding/hex"
"fmt"
"time"
"gorm.io/gorm"
)
type GeoPoint struct {
Lat float64 `json:"lat"`
Lng float64 `json:"lng"`
}
func (p *GeoPoint) String() string {
return fmt.Sprintf("SRID=4326;POINT(%v %v)", p.Lng, p.Lat)
}
// Scan implements the sql.Scanner interface.
func (p *GeoPoint) Scan(val interface{}) error {
b, err := hex.DecodeString(string(val.(string)))
if err != nil {
return err
}
r := bytes.NewReader(b)
var wkbByteOrder uint8
if err := binary.Read(r, binary.LittleEndian, &wkbByteOrder); err != nil {
return err
}
var byteOrder binary.ByteOrder
switch wkbByteOrder {
case 0:
byteOrder = binary.BigEndian
case 1:
byteOrder = binary.LittleEndian
default:
return fmt.Errorf("invalid byte order %d", wkbByteOrder)
}
var wkbGeometryType uint32
if err := binary.Read(r, byteOrder, &wkbGeometryType); err != nil {
return err
}
if err := binary.Read(r, byteOrder, p); err != nil {
return err
}
return nil
}
// Value impl.
func (p GeoPoint) Value() (driver.Value, error) {
return p.String(), nil
}
type Device struct {
gorm.Model
Id int `json:"id" gorm:"primaryKey"`
UserId int `json:"user_id" gorm:"uniqueIndex"`
LatestLocation GeoPoint `json:"latest_location" gorm:"type:geography(POINT, 4326)"`
CreatedAt time.Time
UpdatedAt time.Time
}
I am able to save data to the database and this is how it looks in the database.
But when I want to retrieve the record with the latitude and longitude, I get wrong data back and I am not sure why.
This is my code:
location.go
package apisLocation
import (
"fmt"
db "atm/pkg/configs/database"
models "atm/pkg/models"
"strconv"
"github.com/gofiber/fiber/v2"
)
func GetLocation(c *fiber.Ctx) error {
userId, err := strconv.Atoi(c.Params("userId"))
if err != nil {
return c.Status(400).JSON(err.Error())
}
if checkIfUserExists(userId) {
return c.Status(400).JSON(fiber.Map{"error": "User does not exist"})
}
var device models.Device
db.DB.Db.Find(&device, models.Device{UserId: userId})
return c.Status(200).JSON(fiber.Map{"location": device.LatestLocation})
}
func checkIfUserExists(userId int) bool {
var device models.Device
db.DB.Db.Find(&device, models.Device{UserId: userId})
return device.Id == 0
}
When I run the GetLocation method, the response I get is not accurate; I get a value like this:
"location": {
"lat": 1.7689674224598998e+71,
"lng": -3.639753837714837e+173
},
which isn't the lat and long that are saved in the database.
I think something changes somewhere when it is being decoded, but I am not sure how to fix this issue.
Any help is appreciated.
I found a solution to your problem here: https://github.com/go-pg/pg/issues/829#issuecomment-505882885
The problem in your code is that wkbGeometryType is declared as uint32 when it should be uint64.
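Applied to the Scan method above, the change is just that one declaration (a sketch: reading a uint64 consumes the 4-byte geometry type plus the 4-byte SRID that PostGIS EWKB places after it, so the reader ends up positioned at the coordinates):
// was: var wkbGeometryType uint32
var wkbGeometryType uint64
if err := binary.Read(r, byteOrder, &wkbGeometryType); err != nil {
	return err
}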

How to handle nullable Postgres JSONB data and parse it as JSON

DB Records
------------------------------------------------------
| id | test_json                                     |
------------------------------------------------------
| 1  | NULL                                          |
------------------------------------------------------
| 2  | { "firstName": "Hello", "lastName": "World" } |
------------------------------------------------------
I have a JSONB column in Postgres which can be NULL. I want to read these records from the DB in Go and send them to the client.
I'm getting the error below on SQL scan:
sql: Scan error on column index 2, name "test_json": unsupported Scan, storing driver.Value type []uint8 into type *models.TestJSONNullable
exit status 1
I'm using the Echo web server.
package models
import (
"fmt"
"github.com/lib/pq"
"encoding/json"
)
type TestJson struct {
First_name *string `json:"firstName"`
Last_name *string `json:"lastName"`
}
type TestJSONNullable struct {
Valid bool
}
func (i *TestJSONNullable) UnmarshalJSON(data []byte) error {
if string(data) == "null" {
i.Valid = false
return nil
}
// The key isn't set to null
var temp *TestJson
if err := json.Unmarshal(data, &temp); err != nil {
return err
}
i.Valid = true
return nil
}
type Test01 struct {
Id string `json:"id"`
Test_json *TestJSONNullable `json:"testJson"`
}
func (db *DB) TestRecords () ([]*Test01, error) {
rows, err := db.Query("SELECT id, test_json FROM table_1 where success = true")
if err != nil {
log.Fatal(err)
return nil, err
}
defer rows.Close()
recs := []*Test01{}
for rows.Next() {
r := new(Test01)
err := rows.Scan(&r.Id, &r.Test_json)
if err != nil {
log.Fatal(err)
return nil, err
}
recs = append(recs, r)
}
if err = rows.Err(); err != nil {
log.Fatal(err)
return nil, err
}
return recs, nil
}
Here's another solution: You can implement a nullable type for raw JSON data, similar to sql.NullString, and use it as a scan destination. In this scenario, you will first check whether the value is null, and then unmarshal it only if it's not null. For instance, the NullRawMessage type from github.com/soroushj/sqlt is a nullable json.RawMessage which can be used for this purpose.
Here's an example:
package main
import (
"database/sql"
"log"
_ "github.com/lib/pq"
"github.com/soroushj/sqlt"
)
func main() {
db, err := sql.Open("postgres", "dbname=dbname user=user password=password sslmode=disable")
if err != nil {
log.Fatal(err)
}
row := db.QueryRow(`SELECT test_json FROM my_table WHERE id = $1`, 1)
testJSON := sqlt.NullRawMessage{}
err = row.Scan(&testJSON)
if err != nil {
log.Fatal(err)
}
if testJSON.Valid {
// test_json is not null
// Unmarshal testJSON.RawMessage
} else {
// test_json is null
}
}
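As a follow-up sketch for the non-null branch (the field names Valid and RawMessage are taken from the comments above; adjust if the sqlt API differs), unmarshalling into the TestJson struct from the question:
if testJSON.Valid {
	var t TestJson // needs "encoding/json" in the import block
	if err := json.Unmarshal(testJSON.RawMessage, &t); err != nil {
		log.Fatal(err)
	}
	// use t.First_name and t.Last_name here
}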
After doing some research I found the solution.
type TestJSONMap map[string]interface{}
func (t TestJSONMap) Value() (driver.Value, error) {
j, err := json.Marshal(t)
return j, err
}
func (p *TestJSONMap) Scan(val interface{}) error {
value, ok := val.([]byte)
if !ok {
return errors.New("Type assertion .([]byte) failed.")
}
var i interface{}
err := json.Unmarshal(value, &i)
if err != nil {
return err
}
*p, ok = i.(map[string]interface{})
if !ok {
return errors.New("Type assertion .(map[string]interface{}) failed.")
}
return nil
}
type Test01 struct {
Id string `json:"id"`
Test_json *TestJSONMap `json:"testJson"`
}
Got help from https://coussej.github.io/2016/02/16/Handling-JSONB-in-Go-Structs/
Instead of using a struct to store values, you can use a map.
type TestJson struct {
First_name *string `json:"firstName"`
Last_name *string `json:"lastName"`
}
You can use an interface{} like this:
var TestJson interface{}
err := json.Unmarshal(b, &TestJson)
Alternatively, you can also create the structure dynamically. Something like:
m := map[string]interface{}{
"key": "value",
}
And rather than having TestJSONNullable, it would be better to use a type switch when saving the data:
switch v := TestJson.(type) {
case int:
case float64:
case string:
default:
// v isn't one of the types above
}
Look into this for more details: https://godoc.org/encoding/json#Unmarshal

Bulk insert copy sql table with golang

For context, I'm new to Go and I'm creating a program that can copy tables from Oracle to MySQL.
I use the Go database/sql package, so I assume it can be used for migrating any kind of database.
To simplify my question, I'm copying, on the same MySQL database, the table world.city to world.city_copy2.
With the following code, I end up with the last row's values repeated in all the rows of the table :-(
Do I somehow need to read through all the values inside the loop? What is the efficient way to do that?
package main
import (
"database/sql"
"fmt"
"strings"
_ "github.com/go-sql-driver/mysql"
)
const (
user = "user"
pass = "testPass"
server = "localhost"
)
func main() {
fmt.Print("test")
conStr := fmt.Sprintf("%s:%s@tcp(%s)/world", user, pass, server)
db, err := sql.Open("mysql", conStr)
if err != nil {
panic(err.Error())
}
defer db.Close()
err = db.Ping()
if err != nil {
panic(err.Error())
}
rows, err := db.Query("SELECT * FROM city")
if err != nil {
panic(err.Error()) // proper error handling instead of panic in your app
}
columns, err := rows.Columns()
if err != nil {
panic(err.Error()) // proper error handling instead of panic in your app
}
// Make a slice for the values
values := make([]sql.RawBytes, len(columns))
// rows.Scan wants '[]interface{}' as an argument, so we must copy the
// references into such a slice
scanArgs := make([]interface{}, len(values))
for i := range values {
scanArgs[i] = &values[i]
}
// that string will be generated according to len of columns
placeHolders := "( ?, ?, ?, ?, ? )"
// slice will contain all the values at the end
bulkValues := []interface{}{}
valueStrings := make([]string, 0)
for rows.Next() {
// get RawBytes from data
err = rows.Scan(scanArgs...)
if err != nil {
panic(err.Error()) // proper error handling instead of panic in your app
}
valueStrings = append(valueStrings, placeHolders)
bulkValues = append(bulkValues, scanArgs...)
//
}
stmStr := fmt.Sprintf("INSERT INTO city_copy2 VALUES %s", strings.Join(valueStrings, ","))
_, err = db.Exec(stmStr, bulkValues...)
if err != nil {
panic(err.Error())
}
}
I have checked the docs of the library, and the problem here is that bulkValues keeps the pointers from scanArgs, so when the next Scan overwrites the scanned values, every earlier entry in bulkValues ends up pointing at the latest row's data.
You need to copy each row's data out of the values variable, like below:
func main() {
fmt.Print("test")
conStr := fmt.Sprintf("%s:%s@tcp(%s)/soverflow", user, pass, server)
db, err := sql.Open("mysql", conStr)
if err != nil {
panic(err.Error())
}
defer db.Close()
err = db.Ping()
if err != nil {
panic(err.Error())
}
rows, err := db.Query("SELECT * FROM city")
if err != nil {
panic(err.Error()) // proper error handling instead of panic in your app
}
columns, err := rows.Columns()
if err != nil {
panic(err.Error()) // proper error handling instead of panic in your app
}
// Make a slice for the values
values := make([]sql.RawBytes, len(columns))
// rows.Scan wants '[]interface{}' as an argument, so we must copy the
// references into such a slice
scanArgs := make([]interface{}, len(values))
for i := range values {
scanArgs[i] = &values[i]
}
// that string will be generated according to len of columns
placeHolders := "( ?, ?, ?, ?, ? )"
// slice will contain all the values at the end
bulkValues := []interface{}{}
valueStrings := make([]string, 0)
// make an interface to keep the record's value
record := make([]interface{}, len(columns))
for rows.Next() {
// get RawBytes from data
err = rows.Scan(scanArgs...)
if err != nil {
panic(err.Error()) // proper error handling instead of panic in your app
}
valueStrings = append(valueStrings, placeHolders)
for i, col := range values {
// you need to be careful with the datatypes here;
// check out the docs for details
record[i] = string(col)
}
bulkValues = append(bulkValues, record...)
}
stmStr := fmt.Sprintf("INSERT INTO city_copy2 VALUES %s", strings.Join(valueStrings, ","))
_, err = db.Exec(stmStr, bulkValues...)
if err != nil {
panic(err.Error())
}
}
You can also find this example in the documentation here.
Note: there might be more efficient ways to copy a database from psql to mysql, but this answer only gives a quick solution for the particular issue you are having.

sql: scan row(s) with unknown number of columns (select * from ...)

I have a table t containing a lot of columns, and my SQL is like this: select * from t. Now I only want to scan one or two columns from the wide returned row set. However, Rows.Scan accepts dest ...interface{} as arguments. Does that mean I have to scan everything and use only the columns I need?
I know I could change the SQL from select * to select my_favorite_columns, however, in this case, I have no way to change the SQL.
You can make use of Rows.Columns, e.g.
package main
import (
"database/sql"
"fmt"
"github.com/lib/pq"
)
type Vehicle struct {
Id int
Name string
Wheels int
}
// VehicleCol returns a reference for a column of a Vehicle
func VehicleCol(colname string, vh *Vehicle) interface{} {
switch colname {
case "id":
return &vh.Id
case "name":
return &vh.Name
case "wheels":
return &vh.Wheels
default:
panic("unknown column " + colname)
}
}
func panicOnErr(err error) {
if err != nil {
panic(err.Error())
}
}
func main() {
conn, err := pq.ParseURL(`postgres://docker:docker@172.17.0.2:5432/pgsqltest?schema=public`)
panicOnErr(err)
var db *sql.DB
db, err = sql.Open("postgres", conn)
panicOnErr(err)
var rows *sql.Rows
rows, err = db.Query("select * from vehicle")
panicOnErr(err)
// get the column names from the query
var columns []string
columns, err = rows.Columns()
panicOnErr(err)
colNum := len(columns)
all := []Vehicle{}
for rows.Next() {
vh := Vehicle{}
// make references for the cols with the aid of VehicleCol
cols := make([]interface{}, colNum)
for i := 0; i < colNum; i++ {
cols[i] = VehicleCol(columns[i], &vh)
}
err = rows.Scan(cols...)
panicOnErr(err)
all = append(all, vh)
}
fmt.Printf("%#v\n", all)
}
For an unknown number of columns, if you're sure about their type:
cols, err := rows.Columns()
if err != nil {
log.Fatal(err.Error())
}
colLen := len(cols)
vals := make([]interface{}, colLen)
for rows.Next() {
for i := 0; i < colLen; i++ {
vals[i] = new(string)
}
err := rows.Scan(vals...)
if err != nil {
log.Fatal(err.Error()) // if wrong type
}
fmt.Printf("Column 1: %s\n", *(vals[0].(*string))) // will panic if wrong type
}
PS: Not recommended for prod
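If some of the columns can be NULL, a variation of the same idea (standard library only, still just a sketch) scans into sql.NullString values, which avoids both the Scan error on NULL and the panicking type assertion:
cols, err := rows.Columns()
if err != nil {
	log.Fatal(err.Error())
}
vals := make([]interface{}, len(cols))
for rows.Next() {
	for i := range vals {
		vals[i] = new(sql.NullString)
	}
	if err := rows.Scan(vals...); err != nil {
		log.Fatal(err.Error())
	}
	if ns := vals[0].(*sql.NullString); ns.Valid {
		fmt.Printf("Column 1: %s\n", ns.String)
	}
}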

Query WMI from Go

I would like to run WMI queries from Go. There are ways to call DLL functions from Go. My understanding is that there must be some DLL somewhere which, with the correct call, will return some data I can parse and use. I'd prefer to avoid calling into C or C++, especially since I would guess those are wrappers over the Windows API itself.
I've examined the output of dumpbin.exe /exports c:\windows\system32\wmi.dll, and the following entry looks promising:
WmiQueryAllDataA (forwarded to wmiclnt.WmiQueryAllDataA)
However I'm not sure what to do from here. What arguments does this function take? What does it return? Searching for WmiQueryAllDataA is not helpful. And that name only appears in a comment of c:\program files (x86)\windows kits\8.1\include\shared\wmistr.h, but with no function signature.
Are there better methods? Is there another DLL? Am I missing something? Should I just use a C wrapper?
Running a WMI query in Linqpad with .NET Reflector shows the use of WmiNetUtilsHelper:ExecQueryWmi (and a _f version), but neither have a viewable implementation.
Update: use the github.com/StackExchange/wmi package which uses the solution in the accepted answer.
Welcome to the wonderful world of COM, Object Oriented Programming in C from when C++ was "a young upstart".
On github mattn has thrown together a little wrapper in Go, which I used to throw together a quick example program. "This repository was created for experimentation and should be considered unstable." instills all sorts of confidence.
I'm leaving out a lot of error checking. Trust me when I say, you'll want to add it back.
package main
import (
"github.com/mattn/go-ole"
"github.com/mattn/go-ole/oleutil"
)
func main() {
// init COM, oh yeah
ole.CoInitialize(0)
defer ole.CoUninitialize()
unknown, _ := oleutil.CreateObject("WbemScripting.SWbemLocator")
defer unknown.Release()
wmi, _ := unknown.QueryInterface(ole.IID_IDispatch)
defer wmi.Release()
// service is a SWbemServices
serviceRaw, _ := oleutil.CallMethod(wmi, "ConnectServer")
service := serviceRaw.ToIDispatch()
defer service.Release()
// result is a SWBemObjectSet
resultRaw, _ := oleutil.CallMethod(service, "ExecQuery", "SELECT * FROM Win32_Process")
result := resultRaw.ToIDispatch()
defer result.Release()
countVar, _ := oleutil.GetProperty(result, "Count")
count := int(countVar.Val)
for i :=0; i < count; i++ {
// item is a SWbemObject, but really a Win32_Process
itemRaw, _ := oleutil.CallMethod(result, "ItemIndex", i)
item := itemRaw.ToIDispatch()
defer item.Release()
asString, _ := oleutil.GetProperty(item, "Name")
println(asString.ToString())
}
}
The real meat is the call to ExecQuery, I happen to grab Win32_Process from the available classes because it's easy to understand and print.
On my machine, this prints:
System Idle Process
System
smss.exe
csrss.exe
wininit.exe
services.exe
lsass.exe
svchost.exe
svchost.exe
atiesrxx.exe
svchost.exe
svchost.exe
svchost.exe
svchost.exe
svchost.exe
spoolsv.exe
svchost.exe
AppleOSSMgr.exe
AppleTimeSrv.exe
... and so on
go.exe
main.exe
I'm not running it elevated or with UAC disabled, but some WMI providers are gonna require a privileged user.
I'm also not 100% sure that this won't leak a little; you'll want to dig into that. COM objects are reference counted, so defer should be a pretty good fit there (provided the method isn't crazy long running), but go-ole may have some magic inside I didn't notice.
I'm commenting over a year later, but there is a solution here on github (and posted below for posterity).
// +build windows
/*
Package wmi provides a WQL interface for WMI on Windows.
Example code to print names of running processes:
type Win32_Process struct {
Name string
}
func main() {
var dst []Win32_Process
q := wmi.CreateQuery(&dst, "")
err := wmi.Query(q, &dst)
if err != nil {
log.Fatal(err)
}
for i, v := range dst {
println(i, v.Name)
}
}
*/
package wmi
import (
"bytes"
"errors"
"fmt"
"log"
"os"
"reflect"
"runtime"
"strconv"
"strings"
"sync"
"time"
"github.com/mattn/go-ole"
"github.com/mattn/go-ole/oleutil"
)
var l = log.New(os.Stdout, "", log.LstdFlags)
var (
ErrInvalidEntityType = errors.New("wmi: invalid entity type")
lock sync.Mutex
)
// QueryNamespace invokes Query with the given namespace on the local machine.
func QueryNamespace(query string, dst interface{}, namespace string) error {
return Query(query, dst, nil, namespace)
}
// Query runs the WQL query and appends the values to dst.
//
// dst must have type *[]S or *[]*S, for some struct type S. Fields selected in
// the query must have the same name in dst. Supported types are all signed and
// unsigned integers, time.Time, string, bool, or a pointer to one of those.
// Array types are not supported.
//
// By default, the local machine and default namespace are used. These can be
// changed using connectServerArgs. See
// http://msdn.microsoft.com/en-us/library/aa393720.aspx for details.
func Query(query string, dst interface{}, connectServerArgs ...interface{}) error {
dv := reflect.ValueOf(dst)
if dv.Kind() != reflect.Ptr || dv.IsNil() {
return ErrInvalidEntityType
}
dv = dv.Elem()
mat, elemType := checkMultiArg(dv)
if mat == multiArgTypeInvalid {
return ErrInvalidEntityType
}
lock.Lock()
defer lock.Unlock()
runtime.LockOSThread()
defer runtime.UnlockOSThread()
err := ole.CoInitializeEx(0, ole.COINIT_MULTITHREADED)
if err != nil {
oleerr := err.(*ole.OleError)
// S_FALSE = 0x00000001 // CoInitializeEx was already called on this thread
if oleerr.Code() != ole.S_OK && oleerr.Code() != 0x00000001 {
return err
}
} else {
// Only invoke CoUninitialize if the thread was not initialized before.
// This will allow other go packages based on go-ole play along
// with this library.
defer ole.CoUninitialize()
}
unknown, err := oleutil.CreateObject("WbemScripting.SWbemLocator")
if err != nil {
return err
}
defer unknown.Release()
wmi, err := unknown.QueryInterface(ole.IID_IDispatch)
if err != nil {
return err
}
defer wmi.Release()
// service is a SWbemServices
serviceRaw, err := oleutil.CallMethod(wmi, "ConnectServer", connectServerArgs...)
if err != nil {
return err
}
service := serviceRaw.ToIDispatch()
defer serviceRaw.Clear()
// result is a SWBemObjectSet
resultRaw, err := oleutil.CallMethod(service, "ExecQuery", query)
if err != nil {
return err
}
result := resultRaw.ToIDispatch()
defer resultRaw.Clear()
count, err := oleInt64(result, "Count")
if err != nil {
return err
}
// Initialize a slice with Count capacity
dv.Set(reflect.MakeSlice(dv.Type(), 0, int(count)))
var errFieldMismatch error
for i := int64(0); i < count; i++ {
err := func() error {
// item is a SWbemObject, but really a Win32_Process
itemRaw, err := oleutil.CallMethod(result, "ItemIndex", i)
if err != nil {
return err
}
item := itemRaw.ToIDispatch()
defer itemRaw.Clear()
ev := reflect.New(elemType)
if err = loadEntity(ev.Interface(), item); err != nil {
if _, ok := err.(*ErrFieldMismatch); ok {
// We continue loading entities even in the face of field mismatch errors.
// If we encounter any other error, that other error is returned. Otherwise,
// an ErrFieldMismatch is returned.
errFieldMismatch = err
} else {
return err
}
}
if mat != multiArgTypeStructPtr {
ev = ev.Elem()
}
dv.Set(reflect.Append(dv, ev))
return nil
}()
if err != nil {
return err
}
}
return errFieldMismatch
}
// ErrFieldMismatch is returned when a field is to be loaded into a different
// type than the one it was stored from, or when a field is missing or
// unexported in the destination struct.
// StructType is the type of the struct pointed to by the destination argument.
type ErrFieldMismatch struct {
StructType reflect.Type
FieldName string
Reason string
}
func (e *ErrFieldMismatch) Error() string {
return fmt.Sprintf("wmi: cannot load field %q into a %q: %s",
e.FieldName, e.StructType, e.Reason)
}
var timeType = reflect.TypeOf(time.Time{})
// loadEntity loads a SWbemObject into a struct pointer.
func loadEntity(dst interface{}, src *ole.IDispatch) (errFieldMismatch error) {
v := reflect.ValueOf(dst).Elem()
for i := 0; i < v.NumField(); i++ {
f := v.Field(i)
isPtr := f.Kind() == reflect.Ptr
if isPtr {
ptr := reflect.New(f.Type().Elem())
f.Set(ptr)
f = f.Elem()
}
n := v.Type().Field(i).Name
if !f.CanSet() {
return &ErrFieldMismatch{
StructType: f.Type(),
FieldName: n,
Reason: "CanSet() is false",
}
}
prop, err := oleutil.GetProperty(src, n)
if err != nil {
errFieldMismatch = &ErrFieldMismatch{
StructType: f.Type(),
FieldName: n,
Reason: "no such struct field",
}
continue
}
defer prop.Clear()
switch val := prop.Value().(type) {
case int, int64:
var v int64
switch val := val.(type) {
case int:
v = int64(val)
case int64:
v = val
default:
panic("unexpected type")
}
switch f.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
f.SetInt(v)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
f.SetUint(uint64(v))
default:
return &ErrFieldMismatch{
StructType: f.Type(),
FieldName: n,
Reason: "not an integer class",
}
}
case string:
iv, err := strconv.ParseInt(val, 10, 64)
switch f.Kind() {
case reflect.String:
f.SetString(val)
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
if err != nil {
return err
}
f.SetInt(iv)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
if err != nil {
return err
}
f.SetUint(uint64(iv))
case reflect.Struct:
switch f.Type() {
case timeType:
if len(val) == 25 {
mins, err := strconv.Atoi(val[22:])
if err != nil {
return err
}
val = val[:22] + fmt.Sprintf("%02d%02d", mins/60, mins%60)
}
t, err := time.Parse("20060102150405.000000-0700", val)
if err != nil {
return err
}
f.Set(reflect.ValueOf(t))
}
}
case bool:
switch f.Kind() {
case reflect.Bool:
f.SetBool(val)
default:
return &ErrFieldMismatch{
StructType: f.Type(),
FieldName: n,
Reason: "not a bool",
}
}
default:
typeof := reflect.TypeOf(val)
if isPtr && typeof == nil {
break
}
return &ErrFieldMismatch{
StructType: f.Type(),
FieldName: n,
Reason: fmt.Sprintf("unsupported type (%T)", val),
}
}
}
return errFieldMismatch
}
type multiArgType int
const (
multiArgTypeInvalid multiArgType = iota
multiArgTypeStruct
multiArgTypeStructPtr
)
// checkMultiArg checks that v has type []S, []*S for some struct type S.
//
// It returns what category the slice's elements are, and the reflect.Type
// that represents S.
func checkMultiArg(v reflect.Value) (m multiArgType, elemType reflect.Type) {
if v.Kind() != reflect.Slice {
return multiArgTypeInvalid, nil
}
elemType = v.Type().Elem()
switch elemType.Kind() {
case reflect.Struct:
return multiArgTypeStruct, elemType
case reflect.Ptr:
elemType = elemType.Elem()
if elemType.Kind() == reflect.Struct {
return multiArgTypeStructPtr, elemType
}
}
return multiArgTypeInvalid, nil
}
func oleInt64(item *ole.IDispatch, prop string) (int64, error) {
v, err := oleutil.GetProperty(item, prop)
if err != nil {
return 0, err
}
defer v.Clear()
i := int64(v.Val)
return i, nil
}
// CreateQuery returns a WQL query string that queries all columns of src. where
// is an optional string that is appended to the query, to be used with WHERE
// clauses. In such a case, the "WHERE" string should appear at the beginning.
func CreateQuery(src interface{}, where string) string {
var b bytes.Buffer
b.WriteString("SELECT ")
s := reflect.Indirect(reflect.ValueOf(src))
t := s.Type()
if s.Kind() == reflect.Slice {
t = t.Elem()
}
if t.Kind() != reflect.Struct {
return ""
}
var fields []string
for i := 0; i < t.NumField(); i++ {
fields = append(fields, t.Field(i).Name)
}
b.WriteString(strings.Join(fields, ", "))
b.WriteString(" FROM ")
b.WriteString(t.Name())
b.WriteString(" " + where)
return b.String()
}
To access the winmgmts object or a namespace (which is the same thing), you can use the code below. Basically, you need to pass the namespace as a parameter, which is not documented properly in go-ole.
In the code below, you can also see how to access a class within this namespace and execute a method.
package main
import (
"log"
"github.com/go-ole/go-ole"
"github.com/go-ole/go-ole/oleutil"
)
func main() {
ole.CoInitializeEx(0, ole.COINIT_MULTITHREADED)
defer ole.CoUninitialize()
unknown, err := oleutil.CreateObject("WbemScripting.SWbemLocator")
if err != nil {
log.Panic(err)
}
defer unknown.Release()
wmi, err := unknown.QueryInterface(ole.IID_IDispatch)
if err != nil {
log.Panic(err)
}
defer wmi.Release()
// Connect to namespace
// root/PanasonicPC = winmgmts:\\.\root\PanasonicPC
serviceRaw, err := oleutil.CallMethod(wmi, "ConnectServer", nil, "root/PanasonicPC")
if err != nil {
log.Panic(err)
}
service := serviceRaw.ToIDispatch()
defer serviceRaw.Clear()
// Get class
setBiosRaw, err := oleutil.CallMethod(service, "Get", "SetBIOS4Conf")
if err != nil {
log.Panic(err)
}
setBios := setBiosRaw.ToIDispatch()
defer setBiosRaw.Clear()
// Run method
resultRaw, err := oleutil.CallMethod(setBios, "AccessAuthorization", "letmein")
resultVal := resultRaw.Value().(int32)
log.Println("Return Code:", resultVal)
}
import (
"fmt"
"os/exec"
)

func (lcu *LCU) GrabToken() {
cmd := exec.Command("powershell", "$cmdline = Get-WmiObject -Class Win32_Process")
out, err := cmd.CombinedOutput()
if err != nil {
fmt.Println(err)
}
outstr := string(out)
fmt.Println(outstr)
}