I've moved from Python to Go, and I'm trying to unmarshal the following JSON into a set-backed structure:
jsonBlob := []byte(`{ "test" : {"thing":["team1", "team2"]}}`)

type other map[string]MySet
type stuff map[string]other

type MySet struct {
    set mapset.Set
}

// Custom unmarshaller
func (s *MySet) UnmarshalJSON(p []byte) error {
    var a []interface{}
    if err := json.Unmarshal(p, &a); err != nil {
        return err
    }
    s.set = mapset.NewSet(a)
    return nil
}

// Unmarshal it
var s stuff
err := json.Unmarshal(jsonBlob, &s)
if err != nil {
    return err
}
but it throws: runtime error: hash of unhashable type []interface {}
Given that the type you wish to use (mapset.Set) is an interface and does not satisfy the json.Unmarshaler interface, you have two options:
1. Unmarshal to a slice, then convert to your preferred type.
2. Create a custom type that wraps your preferred type and provides an UnmarshalJSON method. This is functionally the same as #1, but may be easier to use. Example:
type MySet struct {
    set mapset.Set
}

func (s *MySet) UnmarshalJSON(p []byte) error {
    var a []interface{}
    if err := json.Unmarshal(p, &a); err != nil {
        return err
    }
    // Spread the slice into individual elements. Passing the slice itself as a
    // single element is what triggers "hash of unhashable type []interface {}".
    s.set = mapset.NewSet(a...)
    return nil
}
(Note, this code is untested; it is not meant to be a complete solution, but a guide in the right direction.)
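For illustration, here is a rough, untested sketch of how the wrapper could be wired into the nested map types from the question. It assumes the github.com/deckarep/golang-set package (v1, the non-generic API), imported as mapset:

package main

import (
    "encoding/json"
    "fmt"

    mapset "github.com/deckarep/golang-set"
)

type MySet struct {
    set mapset.Set
}

func (s *MySet) UnmarshalJSON(p []byte) error {
    var a []interface{}
    if err := json.Unmarshal(p, &a); err != nil {
        return err
    }
    s.set = mapset.NewSet(a...) // spread the elements into the set
    return nil
}

type other map[string]MySet
type stuff map[string]other

func main() {
    jsonBlob := []byte(`{ "test" : {"thing":["team1", "team2"]}}`)
    var s stuff
    if err := json.Unmarshal(jsonBlob, &s); err != nil {
        panic(err)
    }
    fmt.Println(s["test"]["thing"].set.Contains("team1")) // true
}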
I have a function that is designed to insert a large number of elements into an MSSQL database using a table-valued parameter and a procedure.
func (requester *Requester) doQuery(ctx context.Context, dtos interface{}) (err error) {
    conn, err := requester.conn.Conn(ctx)
    if err != nil {
        return err
    }
    defer func() {
        if clErr := conn.Close(); clErr != nil {
            err = clErr
        }
    }()

    tx, err := conn.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelRepeatableRead, ReadOnly: false})
    if err != nil {
        return err
    }
    defer func() {
        if p := recover(); p != nil {
            tx.Rollback()
            panic(p)
        } else if err != nil {
            tx.Rollback()
        } else {
            tx.Commit()
        }
    }()

    param := sql.Named("TVP", mssql.TVP{
        TypeName: "MyTypeName",
        Value:    dtos,
    })
    _, err = tx.ExecContext(ctx, "EXEC [dbo].[usp_InsertConsumption] @TVP", param)
    return err
}
The test I wrote for this function is included below (note that it depends on ginkgo and gomega):
Describe("SQL Tests", func() {
It("AddConsumption - No failures - Added", func() {
db, mock, _ := sqlmock.New()
requester := Requester{conn: db}
defer db.Close()
mock.ExpectBegin()
mock.ExpectExec(regexp.QuoteMeta("EXEC [dbo].[usp_InsertConsumption] #TVP")).
WithArgs("").WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectExec(regexp.QuoteMeta("EXEC [dbo].[usp_InsertTags] #TVP")).
WithArgs("").WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()
err := requester.doQuery(context.TODO(), generateData())
Expect(err).ShouldNot(HaveOccurred())
Expect(mock.ExpectationsWereMet()).ShouldNot(HaveOccurred())
})
})
Now, this code was originally written for MySQL, and since I've ported it over to MSSQL I've been getting a peculiar error:
sql: converting argument with name "TVP" type: unsupported type mssql.TVP, a struct
It appears that sqlmock is attempting to call ConvertValue on the TVP object, which is invalid. So, how do I make sqlmock handle this value correctly so I can unit test around the query?
What I discovered is that sqlmock has a function called ValueConverterOption which, when provided with a custom driver.ValueConverter, will be used in place of the standard conversion logic for every invocation of ConvertValue. So, if you want to test around the ExecContext function when it receives a non-standard argument, a TVP in this case, you can use this option to inject custom conversion logic into sqlmock.
type mockTvpConverter struct{}

func (converter *mockTvpConverter) ConvertValue(raw interface{}) (driver.Value, error) {
    // Since this converter takes the place of every ConvertValue call, it will
    // inevitably also receive the fake string we return below, so we need to
    // check whether we've received that string or a TVP. More extensive logic
    // may be required in a real test suite.
    switch inner := raw.(type) {
    case string:
        return inner, nil
    case mssql.TVP:
        // First, verify the type name
        Expect(inner.TypeName).Should(Equal("MyTypeName"))

        // VERIFICATION LOGIC HERE

        // Finally, return a fake value that we can use when verifying the arguments
        return "PASSED", nil
    }

    // We had an invalid type; return an error
    return nil, fmt.Errorf("invalid type")
}
which means, the test then becomes:
Describe("SQL Tests", func() {
It("AddConsumption - No failures - Added", func() {
db, mock, _ := sqlmock.New(sqlmock.ValueConverterOption(&mockTvpConverter{}))
requester := Requester{conn: db}
defer db.Close()
mock.ExpectBegin()
mock.ExpectExec(regexp.QuoteMeta("EXEC [dbo].[usp_InsertConsumption] #TVP")).
WithArgs("PASSED").WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectExec(regexp.QuoteMeta("EXEC [dbo].[usp_InsertTags] #TVP")).
WithArgs("PASSED").WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()
err := requester.doQuery(context.TODO(), generateData())
Expect(err).ShouldNot(HaveOccurred())
Expect(mock.ExpectationsWereMet()).ShouldNot(HaveOccurred())
})
})
{
    "relays": [{
        "name": "relay_1",
        "totalmem": "3G",
        "relaymem": "6G"
    }, {
        "name": "relay_2",
        "totalmem": "30G",
        "relaymem": "4G"
    }]
}
I tried doing the parsing using the code below:
Run: func(cmd *cobra.Command, args []string) {
    fmt.Println("relay called")
    conf, _ = rootCmd.Flags().GetString("conf")
    if conf != "" {
        fmt.Println("From Create Command : ", conf)
    }
    data, err := ioutil.ReadFile("file.txt") // data has type []byte
    if err != nil {
        log.Fatal(err)
    }
    var result []map[string]interface{}
    json.Unmarshal(data, &result)
    relays := result["relays"].(map[string]interface{})
    for key, relay := range relays {
        fmt.Println("name :", relay["name"],
            "totalmem:", relay["totalmem"],
            "relaymem:", relay["relaymem"])
    }
},
But I am getting the error below, which indicates that the type assertion is invalid:
cmd/create_relay.go:54:29: invalid type assertion: result["relays"].(map[string]) (non-interface type map[string]interface {} on left)
Can someone let me know how to parse the JSON above using interfaces?
It worked with the below code.
type Relays struct {
    Relays []Relay `json:"relays"`
}

type Relay struct {
    Name     string `json:"name"`
    Totalmem string `json:"totalmem"`
    Relaymem string `json:"relaymem"`
}

// relayCmd represents the relay command
var createrelayCmd = &cobra.Command{
    Use:   "relay",
    Short: "A brief description of your command",
    Long: `A longer description that spans multiple lines and likely contains examples
and usage of using your command. For example:
Cobra is a CLI library for Go that empowers applications.
This application is a tool to generate the needed files
to quickly create a Cobra application.`,
    Run: func(cmd *cobra.Command, args []string) {
        fmt.Println("relay called")
        conf, _ = rootCmd.Flags().GetString("conf")
        if conf != "" {
            fmt.Println("From Create Command : ", conf)

            jsonFile, err := os.Open(conf)
            // if os.Open returns an error then handle it
            if err != nil {
                fmt.Println(err)
            }
            fmt.Println("Successfully Opened users.json")
            // defer the closing of our jsonFile so that we can parse it later on
            defer jsonFile.Close()

            data, err := ioutil.ReadAll(jsonFile) // data has type []byte
            if err != nil {
                log.Fatal(err)
            }

            var relays Relays
            json.Unmarshal(data, &relays)
            for i := 0; i < len(relays.Relays); i++ {
                fmt.Println("User Name: " + relays.Relays[i].Name)
                fmt.Println("User Totalmem: " + relays.Relays[i].Totalmem)
                fmt.Println("User Relaymem: " + relays.Relays[i].Relaymem)
            }
        }
    },
}
result in your code is a slice of maps, and you are using result["relays"], which is invalid.
Your code should be something like this:
Run: func(cmd *cobra.Command, args []string) {
    fmt.Println("relay called")
    conf, _ = rootCmd.Flags().GetString("conf")
    if conf != "" {
        fmt.Println("From Create Command : ", conf)
    }
    data, err := ioutil.ReadFile("file.txt") // data has type []byte
    if err != nil {
        log.Fatal(err)
    }
    var result map[string]interface{}
    json.Unmarshal(data, &result)
    relays := result["relays"].([]interface{})
    for _, relay := range relays {
        relayM := relay.(map[string]interface{})
        fmt.Println("name :", relayM["name"].(string),
            "totalmem:", relayM["totalmem"].(string),
            "relaymem:", relayM["relaymem"].(string))
    }
},
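Note that each of those type assertions will panic if the JSON does not have the expected shape. If you want to fail gracefully instead, a rough sketch using the two-value ("comma ok") form of the assertion could look like this:

relaysRaw, ok := result["relays"].([]interface{})
if !ok {
    log.Fatal(`expected "relays" to be a JSON array`)
}
for _, r := range relaysRaw {
    relayM, ok := r.(map[string]interface{})
    if !ok {
        continue // skip entries that are not JSON objects
    }
    name, _ := relayM["name"].(string) // empty string if missing or not a string
    fmt.Println("name:", name)
}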
How can I insert an array of documents into MongoDB with the mgo library using only a single DB call, as in db.collection.insert()?
I have the following Transaction structure:
type Transaction struct {
    Brand string `json:"brand"`
    Name  string `json:"name"`
    Plu   string `json:"plu"`
    Price string `json:"price"`
}
From a POST request I will receive an array of these structures. I want to insert them into MongoDB as individual documents, but using a single DB call, as explained in db.collection.insert().
I tried using mgo's c.Insert.
The following is the code snippet:
func insertTransaction(c *gin.Context) {
    var transactions []Transaction
    err := c.BindJSON(&transactions)
    if err != nil {
        c.AbortWithStatusJSON(http.StatusBadRequest, map[string]string{"error": "invalid JSON"})
        return
    }
    err = InsertTransactons(transactions)
    if err != nil {
        c.AbortWithStatusJSON(http.StatusInternalServerError, &map[string]interface{}{
            "status":  "error",
            "code":    "500",
            "message": "Internal server error",
            "error":   err,
        })
        return
    }
    c.JSON(http.StatusCreated, &map[string]interface{}{
        "status":  "success",
        "code":    "0",
        "message": "created",
    })
}

func InsertTransactons(u []Transaction) error {
    s := GetSession()
    defer s.Close()
    c := s.DB(DB).C(TransactionColl)

    err := c.Insert(u...)
    if err != nil {
        return err
    }
    return nil
}
But as I compile and run the code, I get the following error:
go/database.go:34:17: cannot use u (type *[]Transaction) as type
[]interface {} in argument to c.Insert
You cannot pass a []Transaction as []interface{}. You need to convert each Transaction to an interface{} value yourself, because the two slice types have different memory layouts.
var ui []interface{}
for _, t := range u {
    ui = append(ui, t)
}
Pass ui to c.Insert instead.
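For example, here is a rough, untested sketch of how the InsertTransactons function from your question might look with that conversion applied:

func InsertTransactons(u []Transaction) error {
    s := GetSession()
    defer s.Close()
    c := s.DB(DB).C(TransactionColl)

    // mgo's Insert is variadic (docs ...interface{}), so convert the
    // typed slice into a []interface{} before spreading it.
    docs := make([]interface{}, 0, len(u))
    for _, t := range u {
        docs = append(docs, t)
    }
    return c.Insert(docs...)
}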
Create a slice of interface{} values for the document structs by appending to it, then insert the data using mgo's bulk insert, whose Insert method takes variadic arguments.
type Bulk struct {
    // contains filtered or unexported fields
}

func (b *Bulk) Insert(docs ...interface{})
For inserting documents in bulk:
const INSERT_COUNT int = 10000

type User struct {
    Id    bson.ObjectId `bson:"_id,omitempty" json:"_id"`
    Email string        `bson:"email" json:"email"`
}

func (self *User) Init() {
    self.Id = bson.NewObjectId()
}
Call the Bulk() function on the collection returned from the DB connection; it returns a *Bulk. Assign that to a variable, then use it to call the Insert() method with the documents:
bulk := dbs.Clone().DB("").C("users").Bulk()
bulk.Insert(users...)
func main() {
    // Database
    dbs, err := mgo.Dial("mongodb://localhost/")
    if err != nil {
        panic(err)
    }

    // Collections
    session := dbs.Clone()
    defer session.Close()
    uc := session.DB("").C("users")

    count := INSERT_COUNT
    users := make([]interface{}, count)
    for i := 0; i < count; i++ {
        loop_user := User{}
        loop_user.Init()
        loop_user.Email = fmt.Sprintf("report-%d@example.com", i)
        users[i] = loop_user
    }

    bulk := uc.Bulk()
    bulk.Unordered()
    bulk.Insert(users...)
    _, bulkErr := bulk.Run()
    if bulkErr != nil {
        panic(bulkErr)
    }
}
I'm trying to unmarshal a JSON array of the following type:
[
{"abc's": "n;05881364"},
{"abcoulomb": "n;13658345"},
{"abcs": "n;05881364"}
]
into a map[string]string. The question Golang parse JSON array into data structure almost answered my problem, but mine is truly a map, not an array of maps. Unmarshaling into a []map[string]string worked, but then I have a slice of map[string]string values, not the single map[string]string it should be.
There is no way to do it directly with the json package; you have to do the conversion yourself. This is simple:
package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    data := []byte(`
[
    {"abc's": "n;05881364"},
    {"abcoulomb": "n;13658345"},
    {"abcs": "n;05881364"}
]
`)

    var mapSlice []map[string]string
    if err := json.Unmarshal(data, &mapSlice); err != nil {
        panic(err)
    }

    resultingMap := map[string]string{}
    for _, m := range mapSlice {
        for k, v := range m {
            resultingMap[k] = v
        }
    }

    fmt.Println(resultingMap)
}
Output
map[abc's:n;05881364 abcoulomb:n;13658345 abcs:n;05881364]
An alternative (though very similar) to Alex's answer is to define your own type along with an UnmarshalJSON function.
package main

import (
    "encoding/json"
    "fmt"
)

type myMapping map[string]string

func (mm myMapping) UnmarshalJSON(b []byte) error {
    var temp []map[string]string
    if err := json.Unmarshal(b, &temp); err != nil {
        return err
    }
    for _, m := range temp {
        for k, v := range m {
            mm[k] = v
        }
    }
    return nil
}

func main() {
    data := []byte(`
[
    {"abc's": "n;05881364"},
    {"abcoulomb": "n;13658345"},
    {"abcs": "n;05881364"}
]`)

    resultingMap := myMapping{}
    if err := json.Unmarshal(data, &resultingMap); err != nil {
        panic(err)
    }
    fmt.Println(resultingMap)
}
Playground
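One caveat: because UnmarshalJSON above uses a value receiver and writes into the existing map, the map must already be initialized (myMapping{}), as in main; unmarshaling into a nil myMapping declared with var would panic. If you'd rather not rely on the caller initializing it, here is a small sketch of a pointer-receiver variant that allocates the map itself:

func (mm *myMapping) UnmarshalJSON(b []byte) error {
    var temp []map[string]string
    if err := json.Unmarshal(b, &temp); err != nil {
        return err
    }
    if *mm == nil {
        // Allocate the map if the caller left it nil.
        *mm = make(myMapping, len(temp))
    }
    for _, m := range temp {
        for k, v := range m {
            (*mm)[k] = v
        }
    }
    return nil
}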
I found some posts on how to decode nested JSON objects in Go. I tried to apply the answers to my problem, but I only managed to find a partial solution.
My json file look like this:
{
"user":{
"gender":"male",
"age":"21-30",
"id":"80b1ea88-19d7-24e8-52cc-65cf6fb9b380"
},
"trials":{
"0":{"index":0,"word":"WORD 1","Time":3000,"keyboard":true,"train":true,"type":"A"},
"1":{"index":1,"word":"WORD 2","Time":3000,"keyboard":true,"train":true,"type":"A"},
},
"answers":{
"training":[
{"ans":0,"RT":null,"gtAns":"WORD 1","correct":0},
{"ans":0,"RT":null,"gtAns":"WORD 2","correct":0}
],
"test":[
{"ans":0,"RT":null,"gtAns":true,"correct":0},
{"ans":0,"RT":null,"gtAns":true,"correct":0}
]
}
}
Basically I need to parse the information inside it and save it into Go structures. With the code below I managed to extract the user information, but it looks too complicated to me, and it won't be easy to apply the same approach to the "answers" field, which contains two arrays with more than 100 entries each. Here is the code I'm using now:
type userDetails struct {
    Id     string `json:"id"`
    Age    string `json:"age"`
    Gender string `json:"gender"`
}

type jsonRawData map[string]interface{}

func getJsonContent(r *http.Request) userDetails {
    defer r.Body.Close()
    jsonBody, err := ioutil.ReadAll(r.Body)
    var userDataCurr userDetails
    if err != nil {
        log.Printf("Couldn't read request body: %s", err)
    } else {
        var f jsonRawData
        err := json.Unmarshal(jsonBody, &f)
        if err != nil {
            log.Printf("Error unmarshalling: %s", err)
        } else {
            user := f["user"].(map[string]interface{})
            userDataCurr.Id = user["id"].(string)
            userDataCurr.Gender = user["gender"].(string)
            userDataCurr.Age = user["age"].(string)
        }
    }
    return userDataCurr
}
Any suggestions? Thanks a lot!
You're doing it the hard way by using interface{} and not taking advantage of what encoding/json gives you.
I'd do it something like this (note: I assumed there was an error with the type of the "gtAns" field and made it a boolean; you don't give enough information to know what to do with the "RT" field):
package main

import (
    "encoding/json"
    "fmt"
    "io"
    "log"
    "strconv"
    "strings"
)

const input = `{
    "user":{
        "gender":"male",
        "age":"21-30",
        "id":"80b1ea88-19d7-24e8-52cc-65cf6fb9b380"
    },
    "trials":{
        "0":{"index":0,"word":"WORD 1","Time":3000,"keyboard":true,"train":true,"type":"A"},
        "1":{"index":1,"word":"WORD 2","Time":3000,"keyboard":true,"train":true,"type":"A"}
    },
    "answers":{
        "training":[
            {"ans":0,"RT":null,"gtAns":true,"correct":0},
            {"ans":0,"RT":null,"gtAns":true,"correct":0}
        ],
        "test":[
            {"ans":0,"RT":null,"gtAns":true,"correct":0},
            {"ans":0,"RT":null,"gtAns":true,"correct":0}
        ]
    }
}`

type Whatever struct {
    User struct {
        Gender Gender   `json:"gender"`
        Age    Range    `json:"age"`
        ID     IDString `json:"id"`
    } `json:"user"`
    Trials map[string]struct {
        Index int    `json:"index"`
        Word  string `json:"word"`
        Time  int    // should this be a time.Duration?
        Train bool   `json:"train"`
        Type  string `json:"type"`
    } `json:"trials"`
    Answers map[string][]struct {
        Answer    int             `json:"ans"`
        RT        json.RawMessage // ??? what type is this
        GotAnswer bool            `json:"gtAns"`
        Correct   int             `json:"correct"`
    } `json:"answers"`
}

// Using some custom types to show custom marshalling:

type IDString string // TODO custom unmarshal and format/error checking

type Gender int

const (
    Male Gender = iota
    Female
)

func (g *Gender) UnmarshalJSON(b []byte) error {
    var s string
    err := json.Unmarshal(b, &s)
    if err != nil {
        return err
    }
    switch strings.ToLower(s) {
    case "male":
        *g = Male
    case "female":
        *g = Female
    default:
        return fmt.Errorf("invalid gender %q", s)
    }
    return nil
}

func (g Gender) MarshalJSON() ([]byte, error) {
    switch g {
    case Male:
        return []byte(`"male"`), nil
    case Female:
        return []byte(`"female"`), nil
    default:
        return nil, fmt.Errorf("invalid gender %v", g)
    }
}

type Range struct{ Min, Max int }

func (r *Range) UnmarshalJSON(b []byte) error {
    // XXX could be improved
    _, err := fmt.Sscanf(string(b), `"%d-%d"`, &r.Min, &r.Max)
    return err
}

func (r Range) MarshalJSON() ([]byte, error) {
    return []byte(fmt.Sprintf(`"%d-%d"`, r.Min, r.Max)), nil
    // Or:
    b := make([]byte, 0, 8)
    b = append(b, '"')
    b = strconv.AppendInt(b, int64(r.Min), 10)
    b = append(b, '-')
    b = strconv.AppendInt(b, int64(r.Max), 10)
    b = append(b, '"')
    return b, nil
}

func fromJSON(r io.Reader) (Whatever, error) {
    var x Whatever
    dec := json.NewDecoder(r)
    err := dec.Decode(&x)
    return x, err
}

func main() {
    // Use http.Get or whatever to get an io.Reader
    // (e.g. response.Body).
    // For playground, substitute a fixed string
    r := strings.NewReader(input)

    // If you actually had a string or []byte:
    //     var x Whatever
    //     err := json.Unmarshal([]byte(input), &x)

    x, err := fromJSON(r)
    if err != nil {
        log.Fatal(err)
    }

    fmt.Println(x)
    fmt.Printf("%+v\n", x)

    b, err := json.MarshalIndent(x, "", " ")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("Re-marshalled: %s\n", b)
}
Playground
Of course if you want to reuse those sub-types you could pull them out of the "Whatever" type into their own named types.
Also, note the use of a json.Decoder rather than reading in all the data ahead of time. Usually, try to avoid any use of ioutil.ReadAll unless you really need all the data at once.
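For instance, here is a minimal sketch of how the getJsonContent handler from the question could decode straight from the request body (reusing the Whatever type above; error handling kept deliberately simple):

func getJsonContent(r *http.Request) (Whatever, error) {
    defer r.Body.Close()

    var x Whatever
    // Decode directly from the stream; no ioutil.ReadAll needed.
    if err := json.NewDecoder(r.Body).Decode(&x); err != nil {
        return Whatever{}, fmt.Errorf("decoding request body: %w", err)
    }
    return x, nil
}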