I am now coding with Martini and AppEngine/Go.
I want to use memcache to serve JSON data.
But the response is not valid JSON. Why is "null" in the response?
null{"results":[{"Title":"Nikkei225","PriceTime":"2014-04-25 06:28:00 UTC","Price":"14,429.26","Diff":"+24.27(0.1%)"},{"Title":"USD/JPY","PriceTime":"2014-04-25 20:49:00 UTC","Price":"102.12-102.16","Diff":"-0.15(-0.1%)"},{"Title":"EURO/JPY","PriceTime":"2014-04-25 20:49:00 UTC","Price":"141.28-141.32","Diff":"-0.19(-0.1%)"},{"Title":"EURO/USD","PriceTime":"2014-04-25 20:48:00 UTC","Price":"1.3833-1.3836","Diff":"+0.0002(0.0%)"},{"Title":"USD/CNY","PriceTime":"2014-04-25 20:48:00 UTC","Price":"6.2536-6.2546","Diff":"+0.0063(0.1%)"},{"Title":"DJIA","PriceTime":"2014-04-25 07:29:00 UTC","Price":"16,361.46","Diff":"-140.19(-0.8%)"},{"Title":"Nasdaq","PriceTime":"2014-04-25 07:00:00 UTC","Price":"4,075.561","Diff":"-72.777(-1.7%)"},{"Title":"FTSE100","PriceTime":"2014-04-25 07:35:00 UTC","Price":"6,685.69","Diff":"-17.31(-0.2%)"}]}
And the console says:
http: multiple response.WriteHeader calls
My code is below.
Retrieve from cache:
c := appengine.NewContext(req)
memcacheKey := "markets"
results := []Result{}

cachedItem, getCacheErr := memcache.JSON.Get(c, memcacheKey, &map[string]interface{}{"results": results})
if getCacheErr != nil && getCacheErr != memcache.ErrCacheMiss {
    c.Infof("get cache error")
}
if getCacheErr == nil {
    c.Infof("cached data found")
    c.Infof("cached data: %v", cachedItem.Value)
    r.JSON(200, cachedItem.Object)
} else {
    c.Infof("cached data not found")
}
Set to memcache:
client := urlfetch.Client(c)
resp, err := client.Get("http://www.nikkei.com/markets/kaigai/worldidx.aspx")
if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
}
indexes := Indexes()
doc, _ := goquery.NewDocumentFromResponse(resp)
doc.Find("div.mk-world_market div table tr").Each(func(_ int, s *goquery.Selection) {
    title := s.Find("th").Text()
    title = strings.Trim(strings.Replace(title, "※", "", -1), " ")
    if val, ok := indexes[title]; ok {
        price := s.Find("th").Next().Text()
        diff := s.Find("td:nth-child(3)").Text()
        pricetime := s.Find("td:nth-child(4)").Text()
        t := time.Now()
        pricetime = StringToTime(pricetime, t)
        result := Result{val, pricetime, price, diff}
        results = append(results, result)
    }
})
item := &memcache.Item{
    Key:    memcacheKey,
    Object: &map[string]interface{}{"results": results},
}
setErr := memcache.JSON.Set(c, item)
if setErr != nil {
    c.Infof("set error: %v", setErr)
}
What is wrong with my code?
Self-resolution:
It seems the handler was writing the response twice: on a cache hit, r.JSON(200, cachedItem.Object) rendered a nil Object (hence the leading "null"), and the handler then rendered the freshly fetched results as well, which also explains the multiple WriteHeader calls. What I want to do is below: decode the cached value into a slice and render the response exactly once.
m.Get("/api/Markets", func(w http.ResponseWriter, r render.Render, req *http.Request) {
    c := appengine.NewContext(req)
    memcache_key := "markets"
    var item_list []Result
    results := []Result{}

    _, get_cache_err := memcache.JSON.Get(c, memcache_key, &item_list)
    if get_cache_err != nil && get_cache_err != memcache.ErrCacheMiss {
        c.Infof("get cache error")
    }
    if get_cache_err == nil {
        c.Infof("cached data found")
        c.Infof("cached data: %v", item_list)
        results = item_list
    } else {
        c.Infof("cached data not found")
        client := urlfetch.Client(c)
        resp, err := client.Get("http://www.nikkei.com/markets/kaigai/worldidx.aspx")
        if err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
        }
        indexes := Indexes()
        doc, _ := goquery.NewDocumentFromResponse(resp)
        doc.Find("div.mk-world_market div table tr").Each(func(_ int, s *goquery.Selection) {
            title := s.Find("th").Text()
            title = strings.Trim(strings.Replace(title, "※", "", -1), " ")
            if val, ok := indexes[title]; ok {
                price := s.Find("th").Next().Text()
                diff := s.Find("td:nth-child(3)").Text()
                pricetime := s.Find("td:nth-child(4)").Text()
                t := time.Now()
                pricetime = StringToTime(pricetime, t)
                result := Result{val, pricetime, price, diff}
                results = append(results, result)
            }
        })
        item_list = results
        item := &memcache.Item{
            Key:    memcache_key,
            Object: &item_list,
        }
        setErr := memcache.JSON.Set(c, item)
        if setErr != nil {
            c.Infof("set error: %v", setErr)
        }
    }
    r.JSON(200, map[string]interface{}{"results": item_list})
})
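As a possible follow-up (not part of the fix above, and the 5-minute duration is just an arbitrary value for illustration): memcache.Item also has an Expiration field, so the cached market data could expire and be re-fetched periodically.

item := &memcache.Item{
    Key:        memcache_key,
    Object:     &item_list,
    Expiration: 5 * time.Minute, // assumed refresh interval
}
if setErr := memcache.JSON.Set(c, item); setErr != nil {
    c.Infof("set error: %v", setErr)
}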
Related
As you can see, I am calling the coll.CountDocuments function multiple times. What I want is to write the code without calling coll.CountDocuments multiple times, by aggregating all the filters into a single query.
func NoOfDocumentsInfo(DB string, col string, filters ...bson.D) ([]int64, error) {
    if nil == dbInstance {
        if nil == GetDBInstance() {
            logger.Error("Not connecting to DB")
            err := errors.New("DB connection error")
            return nil, err
        }
    }
    logger.Debugf("%s %s", DB, col)
    coll := dbInstance.Database(DB).Collection(col)

    counts := make([]int64, len(filters))
    for i, filter := range filters {
        count, err := coll.CountDocuments(context.TODO(), filter)
        if err != nil {
            logger.Fatal(err)
            return nil, err
        }
        counts[i] = count
    }
    return counts, nil
}
I have tried to use an aggregation pipeline, but "cur" and "result" are giving null output.
func NoOfDocumentsInfo(DB string, col string, filters ...bson.D) ([]int64, error) {
    if dbInstance == nil {
        if GetDBInstance() == nil {
            logger.Error("Not connecting to DB")
            err := errors.New("DB connection error")
            return nil, err
        }
    }
    logger.Debugf("%s %s", DB, col)
    coll := dbInstance.Database(DB).Collection(col)

    pipeline := make([]bson.M, 0, len(filters)+2)
    pipeline = append(pipeline, bson.M{"$match": bson.M{"$or": filters}})
    pipeline = append(pipeline, bson.M{"$group": bson.M{"_id": nil, "count": bson.M{"$sum": 1}}})
    pipeline = append(pipeline, bson.M{"$group": bson.M{"_id": nil, "count": bson.M{"$first": "$count"}}})

    var result struct {
        Count int64 `bson:"count"`
    }

    cur, err := coll.Aggregate(context.TODO(), pipeline)
    if err != nil {
        logger.Fatal(err)
        return nil, err
    }
    logger.Debugf("cur: %+v", cur)

    err = cur.Decode(&result)
    logger.Debugf("result: %+v, err: %v", result, err)
    if err != nil {
        logger.Fatal(err)
        return nil, err
    }

    return []int64{result.Count}, nil
}
A much simpler approach would be the one that I'm going to share here. Let's start with the code:
package main

import (
    "context"
    "fmt"
    "time"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
    "go.mongodb.org/mongo-driver/mongo/options"
)

var (
    dbInstance *mongo.Client
    ctx        context.Context
    cancel     context.CancelFunc
)

func NoOfDocumentsInfo(client *mongo.Client, DB string, col string, filters bson.A) (int64, error) {
    coll := client.Database(DB).Collection(col)

    myFilters := bson.D{
        bson.E{
            Key:   "$and",
            Value: filters,
        },
    }

    counts, err := coll.CountDocuments(ctx, myFilters)
    if err != nil {
        panic(err)
    }
    return counts, nil
}

func main() {
    ctx, cancel = context.WithTimeout(context.Background(), 20*time.Second)
    defer cancel()

    // set MongoDB connection
    clientOptions := options.Client().ApplyURI("mongodb://root:root@localhost:27017")
    mongoClient, err := mongo.Connect(ctx, clientOptions)
    if err != nil {
        panic(err)
    }
    defer mongoClient.Disconnect(ctx)

    // query with filters
    numDocs, err := NoOfDocumentsInfo(mongoClient, "demodb", "myCollection", bson.A{
        bson.D{bson.E{Key: "Name", Value: bson.D{bson.E{Key: "$eq", Value: "John Doe"}}}},
        bson.D{bson.E{Key: "Song", Value: bson.D{bson.E{Key: "$eq", Value: "White Roses"}}}},
    })
    if err != nil {
        panic(err)
    }
    fmt.Println("num docs:", numDocs)
}
Let's see the relevant changes applied to the code:
Expect a parameter called filters of type bson.A, which is the type for arrays in the MongoDB environment.
Build the myFilters variable, which is of type bson.D (a slice), with a single item (a bson.E) in this way:
The Key is the logical operator
The Value is the array passed into the function
Build the array to pass to the function with all of the needed filters (e.g. two equality conditions: one on the Name key and the other on the Song key).
Finally, I also made some improvements to how you open the MongoDB connection and how you release the allocated resources.
Let me know if this solves your issue, thanks!
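For completeness: if separate counts per filter (the []int64 from the original function) are still wanted from a single round trip, a $facet stage can run each match as its own sub-pipeline. The sketch below is untested; the function name twoCounts and the facet keys c0/c1 are placeholders for a two-filter case, and it assumes the same driver imports as the example above. Note also that a cursor's documents are read with Next or All before Decode, which may be why the original aggregation attempt printed empty results.

func twoCounts(ctx context.Context, coll *mongo.Collection, f0, f1 bson.D) (int64, int64, error) {
    // One aggregation with two independent $match + $count sub-pipelines.
    pipeline := bson.A{bson.M{"$facet": bson.M{
        "c0": bson.A{bson.M{"$match": f0}, bson.M{"$count": "n"}},
        "c1": bson.A{bson.M{"$match": f1}, bson.M{"$count": "n"}},
    }}}
    cur, err := coll.Aggregate(ctx, pipeline)
    if err != nil {
        return 0, 0, err
    }
    // $facet returns exactly one document; each facet is an empty array when
    // nothing matched, so guard the index accesses below.
    var out []struct {
        C0 []struct {
            N int64 `bson:"n"`
        } `bson:"c0"`
        C1 []struct {
            N int64 `bson:"n"`
        } `bson:"c1"`
    }
    if err := cur.All(ctx, &out); err != nil {
        return 0, 0, err
    }
    var n0, n1 int64
    if len(out) > 0 {
        if len(out[0].C0) > 0 {
            n0 = out[0].C0[0].N
        }
        if len(out[0].C1) > 0 {
            n1 = out[0].C1[0].N
        }
    }
    return n0, n1, nil
}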
I have two columns in a CSV file. I am accessing only the first column using the SearchData() function.
The problem is that I want to access the data as an array, but when I return a string array from the AccessData() function and use products[0] in SearchData(), it gives me all the data with only the brackets [] removed, and when I use products[1], it gives me: runtime error: index out of range [1] with length 1.
Required result
products[0] = First Item
products[1] = Second Item
...
so on
Code
func AccessData(number int) string {
    content, err := ioutil.ReadFile("products/data1.csv")
    if err != nil {
        log.Fatal(err)
    }
    Data := string(content)
    sliceData := strings.Split(Data, ",")
    return sliceData[number]
}

func SearchData() {
    for i := 0; i <= 34; i = i + 2 {
        products := AccessData(i)
        fmt.Println(products)
    }
}
This should do the trick:
func firstColumns(filename string) []string {
    f, err := os.Open(filename)
    if err != nil {
        log.Fatal(err)
    }
    defer f.Close()

    r := csv.NewReader(f)
    var result []string
    for {
        row, err := r.Read()
        if err != nil {
            if err == io.EOF {
                break
            }
            log.Fatal(err)
        }
        if len(row) > 0 {
            result = append(result, row[0])
        }
    }
    return result
}

func main() {
    data := firstColumns("products/data1.csv")
    fmt.Println(data)
    fmt.Println(data[1])
}
This turns the first column of every row into a []string, which can be accessed by index.
The output is:
[First item Second item]
Second item
I am trying to learn some new stuff with Go and got a little bit stuck; probably the reason is just that I am not very good at using arrays.
So here's what I want to do:
Make a variable.
Download with that variable.
Add 1 to that variable.
Download with the incremented value.
And loop it, let's say, 10 times.
I am all good with points 1 and 2, but a little stuck with 3 and 4. :)
All the files come as .pdf; that's why I use strconv there.
I probably should make some kind of loop in main and call the DownloadFile function with some array parameter there?
package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "strconv"
)

func main() {
    url_id := strconv.Itoa(23430815 + 2)
    filename := url_id + ".pdf"
    fileUrl := "https://someurLid=" + url_id
    if err := DownloadFile(filename, fileUrl); err != nil {
        panic(err)
    }
    fmt.Println(fileUrl)
}

func DownloadFile(filepath string, url string) error {
    resp, err := http.Get(url)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    out, err := os.Create(filepath)
    if err != nil {
        return err
    }
    defer out.Close()

    _, err = io.Copy(out, resp.Body)
    return err
}
Try this:
package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "strconv"
)

func main() {
    url_id_num := 23430815 + 2
    for i := 0; i < 10; i++ {
        url_id := strconv.Itoa(url_id_num + i)
        filename := url_id + ".pdf"
        fileUrl := "https://someurLid=" + url_id
        if err := DownloadFile(filename, fileUrl); err != nil {
            panic(err)
        }
        fmt.Println(fileUrl)
    }
}

func DownloadFile(filepath string, url string) error {
    resp, err := http.Get(url)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    out, err := os.Create(filepath)
    if err != nil {
        return err
    }
    defer out.Close()

    _, err = io.Copy(out, resp.Body)
    return err
}
Cerise Limón gave the answer, and that's how it worked out.
arr := make([]uint8, 3) // how many times it loops
url_id := 23430815      // starting id / filename

for range arr {
    filename := strconv.Itoa(url_id) + ".pdf"
    fileUrl := "https://someurl?id=" + strconv.Itoa(url_id)
    if err := DownloadFile(filename, fileUrl); err != nil {
        panic(err)
    }
    fmt.Println(fileUrl)
    url_id++
}
Thank you for pointing out where I should start! :)
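A plain counted loop does the same thing without the throwaway slice; this is only a stylistic sketch of the snippet above:

// Same download loop, written with a counter instead of ranging over a dummy slice.
url_id := 23430815
for i := 0; i < 3; i++ {
    id := strconv.Itoa(url_id + i)
    filename := id + ".pdf"
    fileUrl := "https://someurl?id=" + id
    if err := DownloadFile(filename, fileUrl); err != nil {
        panic(err)
    }
    fmt.Println(fileUrl)
}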
I have code in Go like below:
package main

import (
    "database/sql"
    "log"

    _ "github.com/lib/pq"
)

const (
    insertLoginSQL = `insert into Logins(id, name, password) values($1, $2, $3)`
)

func main() {
    db, err := sql.Open("postgres", "user=postgres password=admin dbname=Quality sslmode=disable")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    if err := Insert(db); err != nil {
        log.Println("error with double insert", err)
    }
}

func Insert(db *sql.DB) error {
    tx, err := db.Begin()
    if err != nil {
        return err
    }
    stmt, err := tx.Prepare(insertLoginSQL)
    if err != nil {
        return err
    }
    defer stmt.Close()

    if _, err := stmt.Exec(10, "user", "pwd"); err != nil {
        tx.Rollback()
        return err
    }
    return tx.Commit()
}
When I run the above code, records are inserted twice into the database. Can someone let me know why duplicate records are inserted? Is there any issue with this code?
Probably the commit is done twice: the first time by one of the previous operations, such as stmt.Exec, and the second time when tx.Commit() is executed.
I am trying to learn Go with GAE.
I have created two handlers: one for saving an object to the datastore and the other for retrieving it and printing it to the screen. The problem is that when I retrieve the UserAccount objects from the datastore, all the values inside the objects are gone.
Any help would be appreciated.
Output:
a/c count: 2
val: core.UserAccount{idString:"", deviceId:""}
val: core.UserAccount{idString:"", deviceId:""}
type UserAccount struct {
    idString string
    deviceId string
}

func create_account(w http.ResponseWriter, r *http.Request) {
    c := appengine.NewContext(r)
    idstr := "ABCDEFGH"
    devId := r.FormValue("deviceId")
    newAccount := UserAccount{idString: idstr, deviceId: devId}
    key := datastore.NewIncompleteKey(c, "UserAccount", nil)
    _, err := datastore.Put(c, key, &newAccount)
    if err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
        return
    }
    fmt.Fprintf(w, "val: %#v \n", newAccount)
}

func get_info(w http.ResponseWriter, r *http.Request) {
    c := appengine.NewContext(r)
    q := datastore.NewQuery("UserAccount")
    accounts := make([]UserAccount, 0, 10)
    if _, err := q.GetAll(c, &accounts); err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
        return
    }
    fmt.Fprintf(w, "a/c count: %v \n", len(accounts))
    for i := 0; i < len(accounts); i++ {
        fmt.Fprintf(w, "val: %#v \n", accounts[i])
    }
}
If the datastore API uses reflection, which I presume it does, it cannot access struct fields that aren't exported, i.e. field names that do not begin with a capital letter.
Export them and it should work.
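A minimal sketch of that change (the exported names below are assumptions based on the original lowercase fields):

type UserAccount struct {
    IDString string // was idString; exported, so the datastore package can reflect on it
    DeviceID string // was deviceId
}

// create_account then builds the value with the exported names, e.g.:
// newAccount := UserAccount{IDString: idstr, DeviceID: devId}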