{
    "relays": [{
        "name": "relay_1",
        "totalmem": "3G",
        "relaymem": "6G"
    }, {
        "name": "relay_2",
        "totalmem": "30G",
        "relaymem": "4G"
    }]
}
I tried doing the parsing using the code below:
Run: func(cmd *cobra.Command, args []string) {
    fmt.Println("relay called")
    conf, _ = rootCmd.Flags().GetString("conf")
    if conf != "" {
        fmt.Println("From Create Command : ", conf)
    }
    data, err := ioutil.ReadFile("file.txt") // data has type []byte
    if err != nil {
        log.Fatal(err)
    }
    var result []map[string]interface{}
    json.Unmarshal(data, &result)
    relays := result["relays"].(map[string]interface{})
    for key, relay := range relays {
        fmt.Println("name :", relay["name"],
            "totalmem:", relay["totalmem"],
            "relaymem:", relay["relaymem"])
    }
},
But I am getting the error below, which indicates that the type assertion is invalid:
cmd/create_relay.go:54:29: invalid type assertion: result["relays"].(map[string]) (non-interface type map[string]interface {} on left)
Can someone let me know how to parse this JSON using interfaces?
It worked with the code below.
type Relays struct {
    Relays []Relay `json:"relays"`
}

type Relay struct {
    Name     string `json:"name"`
    Totalmem string `json:"totalmem"`
    Relaymem string `json:"relaymem"`
}
// relayCmd represents the relay command
var createrelayCmd = &cobra.Command{
    Use:   "relay",
    Short: "A brief description of your command",
    Long: `A longer description that spans multiple lines and likely contains examples
and usage of using your command. For example:
Cobra is a CLI library for Go that empowers applications.
This application is a tool to generate the needed files
to quickly create a Cobra application.`,
    Run: func(cmd *cobra.Command, args []string) {
        fmt.Println("relay called")
        conf, _ = rootCmd.Flags().GetString("conf")
        if conf != "" {
            fmt.Println("From Create Command : ", conf)
            jsonFile, err := os.Open(conf)
            // if os.Open returns an error, handle it
            if err != nil {
                fmt.Println(err)
            }
            fmt.Println("Successfully opened", conf)
            // defer closing jsonFile so that it can be parsed below
            defer jsonFile.Close()
            data, err := ioutil.ReadAll(jsonFile) // data has type []byte
            if err != nil {
                log.Fatal(err)
            }
            var relays Relays
            json.Unmarshal(data, &relays)
            for i := 0; i < len(relays.Relays); i++ {
                fmt.Println("Relay Name: " + relays.Relays[i].Name)
                fmt.Println("Relay Totalmem: " + relays.Relays[i].Totalmem)
                fmt.Println("Relay Relaymem: " + relays.Relays[i].Relaymem)
            }
        }
    },
}
result in your code is a slice of maps, and you are indexing it with result["relays"], which is invalid.
Your code should be something like this:
Run: func(cmd *cobra.Command, args []string) {
    fmt.Println("relay called")
    conf, _ = rootCmd.Flags().GetString("conf")
    if conf != "" {
        fmt.Println("From Create Command : ", conf)
    }
    data, err := ioutil.ReadFile("file.txt") // data has type []byte
    if err != nil {
        log.Fatal(err)
    }
    var result map[string]interface{}
    if err := json.Unmarshal(data, &result); err != nil {
        log.Fatal(err)
    }
    relays := result["relays"].([]interface{})
    for _, relay := range relays {
        relayM := relay.(map[string]interface{})
        fmt.Println("name :", relayM["name"].(string),
            "totalmem:", relayM["totalmem"].(string),
            "relaymem:", relayM["relaymem"].(string))
    }
},
As you can see, I am calling the coll.CountDocuments function multiple times. What I want is to write the code without calling coll.CountDocuments multiple times, by aggregating all the filters into a single query.
func NoOfDocumentsInfo(DB string, col string, filters ...bson.D) ([]int64, error) {
    if nil == dbInstance {
        if nil == GetDBInstance() {
            logger.Error("Not connecting to DB")
            err := errors.New("DB connection error")
            return nil, err
        }
    }
    logger.Debugf("%s %s", DB, col)

    coll := dbInstance.Database(DB).Collection(col)

    counts := make([]int64, len(filters))
    for i, filter := range filters {
        count, err := coll.CountDocuments(context.TODO(), filter)
        if err != nil {
            logger.Fatal(err)
            return nil, err
        }
        counts[i] = count
    }

    return counts, nil
}
I have tried to use an aggregation pipeline, but "cur" and "result" give a null output.
func NoOfDocumentsInfo(DB string, col string, filters ...bson.D) ([]int64, error) {
    if dbInstance == nil {
        if GetDBInstance() == nil {
            logger.Error("Not connecting to DB")
            err := errors.New("DB connection error")
            return nil, err
        }
    }
    logger.Debugf("%s %s", DB, col)

    coll := dbInstance.Database(DB).Collection(col)

    pipeline := make([]bson.M, 0, len(filters)+2)
    pipeline = append(pipeline, bson.M{"$match": bson.M{"$or": filters}})
    pipeline = append(pipeline, bson.M{"$group": bson.M{"_id": nil, "count": bson.M{"$sum": 1}}})
    pipeline = append(pipeline, bson.M{"$group": bson.M{"_id": nil, "count": bson.M{"$first": "$count"}}})

    var result struct {
        Count int64 `bson:"count"`
    }

    cur, err := coll.Aggregate(context.TODO(), pipeline)
    if err != nil {
        logger.Fatal(err)
        return nil, err
    }
    logger.Debugf("cur: %+v", cur)

    err = cur.Decode(&result)
    logger.Debugf("result: %+v, err: %v", result, err)
    if err != nil {
        logger.Fatal(err)
        return nil, err
    }

    return []int64{result.Count}, nil
}
A much simpler approach would be the one that I'm going to share here. Let's start with the code:
package main

import (
    "context"
    "fmt"
    "time"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
    "go.mongodb.org/mongo-driver/mongo/options"
)

var (
    dbInstance *mongo.Client
    ctx        context.Context
    cancel     context.CancelFunc
)

func NoOfDocumentsInfo(client *mongo.Client, DB string, col string, filters bson.A) (int64, error) {
    coll := client.Database(DB).Collection(col)

    myFilters := bson.D{
        bson.E{
            Key:   "$and",
            Value: filters,
        },
    }

    counts, err := coll.CountDocuments(ctx, myFilters)
    if err != nil {
        panic(err)
    }
    return counts, nil
}

func main() {
    ctx, cancel = context.WithTimeout(context.Background(), 20*time.Second)
    defer cancel()

    // set MongoDB connection
    clientOptions := options.Client().ApplyURI("mongodb://root:root@localhost:27017")
    mongoClient, err := mongo.Connect(ctx, clientOptions)
    if err != nil {
        panic(err)
    }
    defer mongoClient.Disconnect(ctx)

    // query with filters
    numDocs, err := NoOfDocumentsInfo(mongoClient, "demodb", "myCollection", bson.A{
        bson.D{bson.E{Key: "Name", Value: bson.D{bson.E{Key: "$eq", Value: "John Doe"}}}},
        bson.D{bson.E{Key: "Song", Value: bson.D{bson.E{Key: "$eq", Value: "White Roses"}}}},
    })
    if err != nil {
        panic(err)
    }
    fmt.Println("num docs:", numDocs)
}
Let's see the relevant changes applied to the code:
Expect a parameter called filters of type bson.A, which is the type for an array in the MongoDB environment.
Build the myFilters variable, which is of type bson.D (a slice), with a single bson.E item where:
The Key is the logical operator ($and)
The Value is the array passed into the function
Build the array to pass to the function with all of the needed filters (e.g. two equality conditions: one on the Name key and the other on the Song key). The sketch below shows the shape of the filter document this produces.
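To make the shape of the resulting query concrete, here is a small standalone sketch that builds the same myFilters document as the function above and prints it as extended JSON (the printed form is approximate and shown purely for illustration):
package main

import (
    "fmt"

    "go.mongodb.org/mongo-driver/bson"
)

func main() {
    // The same two example filters passed to NoOfDocumentsInfo above.
    filters := bson.A{
        bson.D{bson.E{Key: "Name", Value: bson.D{bson.E{Key: "$eq", Value: "John Doe"}}}},
        bson.D{bson.E{Key: "Song", Value: bson.D{bson.E{Key: "$eq", Value: "White Roses"}}}},
    }

    // The document built inside the function: { $and: [ <filter1>, <filter2> ] }.
    myFilters := bson.D{bson.E{Key: "$and", Value: filters}}

    out, err := bson.MarshalExtJSON(myFilters, false, false)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out))
    // Roughly: {"$and":[{"Name":{"$eq":"John Doe"}},{"Song":{"$eq":"White Roses"}}]}
}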
Finally, I also made some improvements to how you open the MongoDB connection and how you release the allocated resources.
Let me know if this solves your issue, thanks!
Moved from Python to Go:
jsonBlob := []byte(`{ "test" : {"thing":["team1", "team2"]}}`)

type other map[string]MySet
type stuff map[string]other

type MySet struct {
    set mapset.Set
}

// Custom unmarshaller
func (s *MySet) UnmarshalJSON(p []byte) error {
    var a []interface{}
    if err := json.Unmarshal(p, &a); err != nil {
        return err
    }
    s.set = mapset.NewSet(a)
    return nil
}

// Unmarshal it
var s stuff
err := json.Unmarshal(jsonBlob, &s)
if err != nil {
    return err
}
but it throws: runtime error: hash of unhashable type []interface {}
Given that the data type you wish to use is an interface, and does not satisfy the json.Unmarshaler interface, you have two options:
1. Unmarshal to an array, then convert to your preferred type (a sketch of this option follows the example below).
2. Create a custom type that wraps your preferred type and provides an UnmarshalJSON method. This is functionally the same as #1, but may be easier to use. Example:
type MySet struct {
    set mapset.Set
}

func (s *MySet) UnmarshalJSON(p []byte) error {
    var a []interface{}
    if err := json.Unmarshal(p, &a); err != nil {
        return err
    }
    // Spread the decoded elements; passing the slice itself as a single
    // element is what triggers "hash of unhashable type []interface {}".
    s.set = mapset.NewSet(a...)
    return nil
}
(Note, this code is untested; it is not meant to be a complete solution, but a guide in the right direction.)
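For completeness, option #1 (no wrapper type) might look roughly like this; it is only a sketch, and it assumes the deckarep/golang-set package and a payload shaped like the one in the question:
package main

import (
    "encoding/json"
    "fmt"

    mapset "github.com/deckarep/golang-set"
)

func main() {
    jsonBlob := []byte(`{ "test" : {"thing":["team1", "team2"]}}`)

    // 1. Unmarshal into plain maps and slices first.
    var raw map[string]map[string][]interface{}
    if err := json.Unmarshal(jsonBlob, &raw); err != nil {
        panic(err)
    }

    // 2. Convert each decoded slice into a set, element by element.
    sets := make(map[string]map[string]mapset.Set)
    for outer, inner := range raw {
        sets[outer] = make(map[string]mapset.Set)
        for key, vals := range inner {
            sets[outer][key] = mapset.NewSet(vals...)
        }
    }

    fmt.Println(sets["test"]["thing"].Contains("team1")) // true
}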
How can I insert an array of documents into MongoDB with mgo library using only a single DB call as in db.collection.insert()?
I have the following Transaction structure:
type Transaction struct {
    Brand string `json:"brand"`
    Name  string `json:"name"`
    Plu   string `json:"plu"`
    Price string `json:"price"`
}
From a POST request I will receive an array of these structures. I want to insert them into MongoDB as individual documents, but using a single DB call as explained in db.collection.insert().
I tried using c.Insert of mgo.
The following is the code snippet:
func insertTransaction(c *gin.Context) {
    var transactions []Transaction
    err := c.BindJSON(&transactions)
    if err != nil {
        c.AbortWithStatusJSON(http.StatusBadRequest, map[string]string{"error": "invalid JSON"})
        return
    }
    err = InsertTransactons(transactions)
    if err != nil {
        c.AbortWithStatusJSON(http.StatusInternalServerError, &map[string](interface{}){
            "status":  "error",
            "code":    "500",
            "message": "Internal server error",
            "error":   err,
        })
        return
    }
    c.JSON(http.StatusCreated, &map[string](interface{}){
        "status":  "success",
        "code":    "0",
        "message": "created",
    })
}

func InsertTransactons(u []Transaction) error {
    s := GetSession()
    defer s.Close()
    c := s.DB(DB).C(TransactionColl)

    err := c.Insert(u...)
    if err != nil {
        return err
    }
    return nil
}
But when I compile the code, I get the following error:
go/database.go:34:17: cannot use u (type *[]Transaction) as type
[]interface {} in argument to c.Insert
You cannot pass a []Transaction as []interface{}. You need to convert each Transaction to interface{} to change its memory layout:
var ui []interface{}
for _, t := range u {
    ui = append(ui, t)
}
Pass ui to c.Insert instead:
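Putting it together, the insert helper from the question would then look roughly like this (a sketch that reuses the GetSession, DB, and TransactionColl identifiers from the question):
func InsertTransactons(u []Transaction) error {
    s := GetSession()
    defer s.Close()
    c := s.DB(DB).C(TransactionColl)

    // Convert to []interface{} so the variadic Insert accepts it.
    ui := make([]interface{}, len(u))
    for i, t := range u {
        ui[i] = t
    }
    return c.Insert(ui...)
}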
Create a slice of interface{} values for the document structs by appending to it, then insert the data using Bulk insert, which takes variadic arguments.
type Bulk struct {
// contains filtered or unexported fields
}
func (b *Bulk) Insert(docs ...interface{})
For inserting documents in Bulk
const INSERT_COUNT int = 10000

type User struct {
    Id    bson.ObjectId `bson:"_id,omitempty" json:"_id"`
    Email string        `bson:"email" json:"email"`
}

func (self *User) Init() {
    self.Id = bson.NewObjectId()
}
Call the Bulk() function on the collection returned from the DB connection; Bulk() returns a *Bulk.
bulk := dbs.Clone().DB("").C("users").Bulk()
bulk.Insert(users...)
Assign it to a variable, which is then used to call the Insert() method via the *Bulk pointer receiver.
func main() {
    // Database
    dbs, err := mgo.Dial("mongodb://localhost/")
    if err != nil {
        panic(err)
    }
    defer dbs.Close()

    // Collections
    uc := dbs.DB("").C("users")

    count := INSERT_COUNT
    users := make([]interface{}, count)
    for i := 0; i < count; i++ {
        loop_user := User{}
        loop_user.Init()
        loop_user.Email = fmt.Sprintf("report-%d@example.com", i)
        users[i] = loop_user
    }

    bulk := uc.Bulk()
    bulk.Unordered()
    bulk.Insert(users...)
    _, bulkErr := bulk.Run()
    if bulkErr != nil {
        panic(bulkErr)
    }
}
I found some posts on how to decode nested JSON objects in Go. I tried to apply the answers to my problem, but I only managed to find a partial solution.
My JSON file looks like this:
{
    "user":{
        "gender":"male",
        "age":"21-30",
        "id":"80b1ea88-19d7-24e8-52cc-65cf6fb9b380"
    },
    "trials":{
        "0":{"index":0,"word":"WORD 1","Time":3000,"keyboard":true,"train":true,"type":"A"},
        "1":{"index":1,"word":"WORD 2","Time":3000,"keyboard":true,"train":true,"type":"A"}
    },
    "answers":{
        "training":[
            {"ans":0,"RT":null,"gtAns":"WORD 1","correct":0},
            {"ans":0,"RT":null,"gtAns":"WORD 2","correct":0}
        ],
        "test":[
            {"ans":0,"RT":null,"gtAns":true,"correct":0},
            {"ans":0,"RT":null,"gtAns":true,"correct":0}
        ]
    }
}
Basically, I need to parse the information inside it and save it into a Go structure. With the code below I managed to extract the user information, but it looks too complicated to me, and it won't be easy to apply the same approach to the "answers" field, which contains two arrays with more than 100 entries each. Here is the code I'm using now:
type userDetails struct {
    Id     string `json:"id"`
    Age    string `json:"age"`
    Gender string `json:"gender"`
}

type jsonRawData map[string]interface{}

func getJsonContent(r *http.Request) userDetails {
    defer r.Body.Close()
    jsonBody, err := ioutil.ReadAll(r.Body)
    var userDataCurr userDetails
    if err != nil {
        log.Printf("Couldn't read request body: %s", err)
    } else {
        var f jsonRawData
        err := json.Unmarshal(jsonBody, &f)
        if err != nil {
            log.Printf("Error unmarshalling: %s", err)
        } else {
            user := f["user"].(map[string]interface{})
            userDataCurr.Id = user["id"].(string)
            userDataCurr.Gender = user["gender"].(string)
            userDataCurr.Age = user["age"].(string)
        }
    }
    return userDataCurr
}
Any suggestions? Thanks a lot!
You're doing it the hard way by using interface{} and not taking advantage of what encoding/json gives you.
I'd do it something like this (note: I assumed there was an error with the type of the "gtAns" field and made it a boolean; you don't give enough information to know what to do with the "RT" field):
package main

import (
    "encoding/json"
    "fmt"
    "io"
    "log"
    "strconv"
    "strings"
)

const input = `{
"user":{
"gender":"male",
"age":"21-30",
"id":"80b1ea88-19d7-24e8-52cc-65cf6fb9b380"
},
"trials":{
"0":{"index":0,"word":"WORD 1","Time":3000,"keyboard":true,"train":true,"type":"A"},
"1":{"index":1,"word":"WORD 2","Time":3000,"keyboard":true,"train":true,"type":"A"}
},
"answers":{
"training":[
{"ans":0,"RT":null,"gtAns":true,"correct":0},
{"ans":0,"RT":null,"gtAns":true,"correct":0}
],
"test":[
{"ans":0,"RT":null,"gtAns":true,"correct":0},
{"ans":0,"RT":null,"gtAns":true,"correct":0}
]
}
}`

type Whatever struct {
    User struct {
        Gender Gender   `json:"gender"`
        Age    Range    `json:"age"`
        ID     IDString `json:"id"`
    } `json:"user"`
    Trials map[string]struct {
        Index int    `json:"index"`
        Word  string `json:"word"`
        Time  int    // should this be a time.Duration?
        Train bool   `json:"train"`
        Type  string `json:"type"`
    } `json:"trials"`
    Answers map[string][]struct {
        Answer    int             `json:"ans"`
        RT        json.RawMessage // ??? what type is this
        GotAnswer bool            `json:"gtAns"`
        Correct   int             `json:"correct"`
    } `json:"answers"`
}

// Using some custom types to show custom marshalling:

type IDString string // TODO custom unmarshal and format/error checking

type Gender int

const (
    Male Gender = iota
    Female
)

func (g *Gender) UnmarshalJSON(b []byte) error {
    var s string
    err := json.Unmarshal(b, &s)
    if err != nil {
        return err
    }
    switch strings.ToLower(s) {
    case "male":
        *g = Male
    case "female":
        *g = Female
    default:
        return fmt.Errorf("invalid gender %q", s)
    }
    return nil
}

func (g Gender) MarshalJSON() ([]byte, error) {
    switch g {
    case Male:
        return []byte(`"male"`), nil
    case Female:
        return []byte(`"female"`), nil
    default:
        return nil, fmt.Errorf("invalid gender %v", g)
    }
}

type Range struct{ Min, Max int }

func (r *Range) UnmarshalJSON(b []byte) error {
    // XXX could be improved
    _, err := fmt.Sscanf(string(b), `"%d-%d"`, &r.Min, &r.Max)
    return err
}

func (r Range) MarshalJSON() ([]byte, error) {
    return []byte(fmt.Sprintf(`"%d-%d"`, r.Min, r.Max)), nil
    // Or:
    b := make([]byte, 0, 8)
    b = append(b, '"')
    b = strconv.AppendInt(b, int64(r.Min), 10)
    b = append(b, '-')
    b = strconv.AppendInt(b, int64(r.Max), 10)
    b = append(b, '"')
    return b, nil
}

func fromJSON(r io.Reader) (Whatever, error) {
    var x Whatever
    dec := json.NewDecoder(r)
    err := dec.Decode(&x)
    return x, err
}

func main() {
    // Use http.Get or whatever to get an io.Reader
    // (e.g. response.Body).
    // For the playground, substitute a fixed string.
    r := strings.NewReader(input)

    // If you actually had a string or []byte:
    // var x Whatever
    // err := json.Unmarshal([]byte(input), &x)

    x, err := fromJSON(r)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(x)
    fmt.Printf("%+v\n", x)

    b, err := json.MarshalIndent(x, "", " ")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("Re-marshalled: %s\n", b)
}
Of course if you want to reuse those sub-types you could pull them out of the "Whatever" type into their own named types.
Also, note the use of a json.Decoder rather than reading in all the data ahead of time. Usually try to avoid any use of ioutil.ReadAll unless you really need all the data at once.
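For instance, when the JSON arrives over HTTP, the decoder can read straight from the response body instead of buffering it with ioutil.ReadAll. A minimal sketch, assuming net/http is imported, Whatever is the struct defined above, and the URL is a placeholder:
resp, err := http.Get("https://example.com/data.json") // placeholder URL
if err != nil {
    log.Fatal(err)
}
defer resp.Body.Close()

var x Whatever
// Decode streams directly from the response body.
if err := json.NewDecoder(resp.Body).Decode(&x); err != nil {
    log.Fatal(err)
}
fmt.Printf("%+v\n", x)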
Basically, after running a query I'd like to take the resulting rows and produce a []map[string]interface{}, but I do not see how to do this with the API: the Rows.Scan() function needs a specific number of parameters, matching the requested number of columns (and possibly the types as well), to correctly obtain the data.
Again, I'd like to generalize this call and take any query and turn it into a []map[string]interface{}, where the map contains column names mapped to the values for that row.
This is likely very inefficient, and I plan on changing the structure later so that interface{} is a struct for a single data point.
How would I do this using just the database/sql package, or if necessary the database/sql/driver package?
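For reference, the generic pattern with plain database/sql is to size the Scan destinations from rows.Columns() and scan through pointers to interface{} values. A rough sketch (the rowsToMaps name is purely illustrative, and note that many drivers return []byte for text columns, which you may want to convert):
func rowsToMaps(rows *sql.Rows) ([]map[string]interface{}, error) {
    cols, err := rows.Columns()
    if err != nil {
        return nil, err
    }

    var out []map[string]interface{}
    for rows.Next() {
        // One interface{} per column, scanned through pointers.
        vals := make([]interface{}, len(cols))
        ptrs := make([]interface{}, len(cols))
        for i := range vals {
            ptrs[i] = &vals[i]
        }
        if err := rows.Scan(ptrs...); err != nil {
            return nil, err
        }

        m := make(map[string]interface{}, len(cols))
        for i, c := range cols {
            m[c] = vals[i]
        }
        out = append(out, m)
    }
    return out, rows.Err()
}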
Look at using sqlx, which can do this a little more easily than the standard database/sql library:
places := []Place{}
err := db.Select(&places, "SELECT * FROM place ORDER BY telcode ASC")
if err != nil {
    fmt.Println(err)
    return
}
You could obviously produce a []map[string]interface{} instead, but where possible it is better to use a struct if you know the structure of your database: you won't need to undertake any type assertions as you might on an interface{}.
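If you do want the map form with sqlx, one way is to iterate the rows yourself; a sketch (Queryx and MapScan are, I believe, the relevant sqlx helpers, but treat the exact calls as an assumption to verify against the sqlx docs):
rows, err := db.Queryx("SELECT * FROM place ORDER BY telcode ASC")
if err != nil {
    fmt.Println(err)
    return
}
defer rows.Close()

var results []map[string]interface{}
for rows.Next() {
    row := map[string]interface{}{}
    // MapScan scans the current row into a map keyed by column name.
    if err := rows.MapScan(row); err != nil {
        fmt.Println(err)
        return
    }
    results = append(results, row)
}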
I haven't used it (yet), but I believe the "common" way to do what you are asking (more or less) is to use gorp.
You can create a struct that maintains a mapping from each column name to its position in a []interface{} slice. By doing this, you do not need to create a predefined struct. For example:
IDOrder: 0
IsClose: 1
IsConfirm: 2
IDUser: 3
Then, you can use it like this:
// create a fieldbinding object.
var fArr []string
fb := fieldbinding.NewFieldBinding()

if fArr, err = rs.Columns(); err != nil {
    return nil, err
}
fb.PutFields(fArr)

outArr := []interface{}{}

for rs.Next() {
    if err := rs.Scan(fb.GetFieldPtrArr()...); err != nil {
        return nil, err
    }
    fmt.Printf("Row: %v, %v, %v, %s\n", fb.Get("IDOrder"), fb.Get("IsConfirm"), fb.Get("IDUser"), fb.Get("Created"))
    outArr = append(outArr, fb.GetFieldArr())
}
Sample output:
Row: 1, 1, 1, 2016-07-15 10:39:37 +0000 UTC
Row: 2, 1, 11, 2016-07-15 10:42:04 +0000 UTC
Row: 3, 1, 10, 2016-07-15 10:46:20 +0000 UTC
SampleQuery: [{"Created":"2016-07-15T10:39:37Z","IDOrder":1,"IDUser":1,"IsClose":0,"IsConfirm":1},{"Created":"2016-07-15T10:42:04Z","IDOrder":2,"IDUser":11,"IsClose":0,"IsConfirm":1},{"Created":"2016-07-15T10:46:20Z","IDOrder":3,"IDUser":10,"IsClose":0,"IsConfirm":1}]
Please see the full example below or at fieldbinding:
main.go
package main

import (
    "bytes"
    "database/sql"
    "encoding/json"
    "fmt"
)

import (
    _ "github.com/go-sql-driver/mysql"
    "github.com/junhsieh/goexamples/fieldbinding/fieldbinding"
)

var (
    db *sql.DB
)

// Table definition
// CREATE TABLE `salorder` (
//   `IDOrder` int(10) unsigned NOT NULL AUTO_INCREMENT,
//   `IsClose` tinyint(4) NOT NULL,
//   `IsConfirm` tinyint(4) NOT NULL,
//   `IDUser` int(11) NOT NULL,
//   `Created` datetime NOT NULL,
//   `Changed` datetime NOT NULL,
//   PRIMARY KEY (`IDOrder`),
//   KEY `IsClose` (`IsClose`)
// ) ENGINE=InnoDB DEFAULT CHARSET=utf8;

func main() {
    var err error

    // starting database server
    db, err = sql.Open("mysql", "Username:Password@tcp(Host:Port)/DBName?parseTime=true")
    if err != nil {
        panic(err.Error()) // Just for example purposes. Use proper error handling instead of panic.
    }
    defer db.Close()

    // SampleQuery
    if v, err := SampleQuery(); err != nil {
        fmt.Printf("%s\n", err.Error())
    } else {
        var b bytes.Buffer

        if err := json.NewEncoder(&b).Encode(v); err != nil {
            fmt.Printf("SampleQuery: %v\n", err.Error())
        }

        fmt.Printf("SampleQuery: %v\n", b.String())
    }
}

func SampleQuery() ([]interface{}, error) {
    param := []interface{}{}
    param = append(param, 1)

    sql := "SELECT "
    sql += " SalOrder.IDOrder "
    sql += ", SalOrder.IsClose "
    sql += ", SalOrder.IsConfirm "
    sql += ", SalOrder.IDUser "
    sql += ", SalOrder.Created "
    sql += "FROM SalOrder "
    sql += "WHERE "
    sql += "IsConfirm = ? "
    sql += "ORDER BY SalOrder.IDOrder ASC "

    rs, err := db.Query(sql, param...)
    if err != nil {
        return nil, err
    }
    defer rs.Close()

    // create a fieldbinding object.
    var fArr []string
    fb := fieldbinding.NewFieldBinding()

    if fArr, err = rs.Columns(); err != nil {
        return nil, err
    }
    fb.PutFields(fArr)

    outArr := []interface{}{}

    for rs.Next() {
        if err := rs.Scan(fb.GetFieldPtrArr()...); err != nil {
            return nil, err
        }
        fmt.Printf("Row: %v, %v, %v, %s\n", fb.Get("IDOrder"), fb.Get("IsConfirm"), fb.Get("IDUser"), fb.Get("Created"))
        outArr = append(outArr, fb.GetFieldArr())
    }
    if err := rs.Err(); err != nil {
        return nil, err
    }

    return outArr, nil
}
fieldbinding package:
package fieldbinding

import (
    "sync"
)

// NewFieldBinding ...
func NewFieldBinding() *FieldBinding {
    return &FieldBinding{}
}

// FieldBinding is designed for SQL rows.Scan() queries.
type FieldBinding struct {
    sync.RWMutex // embedded. see http://golang.org/ref/spec#Struct_types
    FieldArr     []interface{}
    FieldPtrArr  []interface{}
    FieldCount   int64
    MapFieldToID map[string]int64
}

func (fb *FieldBinding) put(k string, v int64) {
    fb.Lock()
    defer fb.Unlock()
    fb.MapFieldToID[k] = v
}

// Get ...
func (fb *FieldBinding) Get(k string) interface{} {
    fb.RLock()
    defer fb.RUnlock()
    // TODO: check map key exist and fb.FieldArr boundary.
    return fb.FieldArr[fb.MapFieldToID[k]]
}

// PutFields ...
func (fb *FieldBinding) PutFields(fArr []string) {
    fCount := len(fArr)
    fb.FieldArr = make([]interface{}, fCount)
    fb.FieldPtrArr = make([]interface{}, fCount)
    fb.MapFieldToID = make(map[string]int64, fCount)

    for k, v := range fArr {
        fb.FieldPtrArr[k] = &fb.FieldArr[k]
        fb.put(v, int64(k))
    }
}

// GetFieldPtrArr ...
func (fb *FieldBinding) GetFieldPtrArr() []interface{} {
    return fb.FieldPtrArr
}

// GetFieldArr ...
func (fb *FieldBinding) GetFieldArr() map[string]interface{} {
    m := make(map[string]interface{}, fb.FieldCount)

    for k, v := range fb.MapFieldToID {
        m[k] = fb.FieldArr[v]
    }

    return m
}
If you really want a map, which is needed in some cases, have a look at dbr, but you need to use the fork (since the PR got rejected in the original repo). The fork seems more up to date anyway:
https://github.com/mailru/dbr
For info on how to use it:
https://github.com/gocraft/dbr/issues/83
package main

import (
    "fmt"

    "github.com/bobby96333/goSqlHelper"
)

func main() {
    fmt.Println("hello")
    conn, err := goSqlHelper.MysqlOpen("user:password@tcp(127.0.0.1:3306)/dbname")
    checkErr(err)
    row, err := conn.QueryRow("select * from table where col1 = ? and col2 = ?", "123", "abc")
    checkErr(err)
    if *row == nil {
        fmt.Println("no row found")
    } else {
        fmt.Printf("%+v", row)
    }
}

func checkErr(err error) {
    if err != nil {
        panic(err)
    }
}
output:
&map[col1:abc col2:123]