Many years ago I wrote an app on Google App Engine. I saved over 100,000 entries.
Here's the code I used to store it in the blob. Now I want to export all the data. Ideally, I would have preferred to download all these entries as a CSV, but there is no option in the backend. How can I easily download all the data I have saved over the years?
import webapp2
from google.appengine.ext import ndb
class Database(ndb.Model):
    """Models an individual user entry with email, app, and date."""
    # Free-form strings supplied by the client in the POST body.
    email = ndb.StringProperty()
    app = ndb.StringProperty()
    platform = ndb.StringProperty()
    # Set automatically, once, when the entity is first written.
    date = ndb.DateTimeProperty(auto_now_add=True)
class add(webapp2.RequestHandler):
    """Handler that stores one user entry (email, app, platform)."""

    def post(self):
        # Pull the form fields from the POST body; absent fields come back
        # as '' from webapp2's request.get().
        email = self.request.get("email")
        app_name = self.request.get("app")
        platform = self.request.get("platform")

        # Persist a new entity; `date` is filled in automatically by the
        # model's auto_now_add. (The original code also built an unused
        # comma-joined string of the three fields — removed.)
        entry = Database()
        entry.email = email
        entry.app = app_name
        entry.platform = platform
        entry.put()
Is there a way to get all of my data? I cannot output all the data on a webpage. It crashes.
Related
So I've deployed my Flask app with a React front end to Heroku, but there seems to be some problem where Flask requests are going to my localhost instead of to the Heroku server.
I've read tons of stackoverflow posts on this but to no resolution. Here is my flask code:
from flask import Flask, request
import flask
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
from flask_cors import CORS
# Serve the React production build: static files live in ./build and are
# mapped to the site root.
app = Flask(__name__,static_folder="./build",static_url_path="/")
# NOTE(review): placeholder credentials — the real database URL and secret
# key should come from environment variables, not source control.
app.config['SQLALCHEMY_DATABASE_URI'] = 'my database url'
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.secret_key = 'secret string'
# Allow cross-origin requests (e.g. from the React dev server).
CORS(app)
db = SQLAlchemy(app)
class Feature_votes(db.Model):
    """A poll entry: one feature string and its current vote count."""
    # The feature text doubles as the primary key, so duplicates are rejected
    # at the database level.
    feature = db.Column(db.String(500), primary_key=True)
    votes = db.Column(db.Integer, nullable=False)
    date = db.Column(db.DateTime, nullable=False)

    def __init__(self, feature, votes, date):
        # Explicit __init__ so callers can pass positional arguments
        # (SQLAlchemy's generated constructor is keyword-only).
        self.feature = feature
        self.votes = votes
        self.date = date
# Serve the react app
@app.route("/")
def index():
    """Serve the React build's index.html for the site root.

    NOTE: the decorator was pasted as `#app.route(...)` — a Markdown
    artifact; without the `@` the route is never registered.
    """
    return app.send_static_file("index.html")
# Retrieve currently polled features from Feature_votes
@app.route("/getVotes", methods=['GET'])
def getVotes():
    """Return all features with their vote counts, ordered by creation date.

    Restored the mangled `@app.route` decorator. A bare `.filter()` with no
    criteria was a no-op and has been dropped.
    """
    rows = Feature_votes.query.order_by(Feature_votes.date).all()
    response = [{"feature": row.feature, "votes": row.votes} for row in rows]
    return flask.jsonify(response)
# Add a new feature to the db with votes set to 0
@app.route("/featureAdd", methods=['POST'])
def featureAdd():
    """Create a new feature row with zero votes and echo it back as JSON.

    Restored the mangled `@app.route` decorator. Expects a JSON body with a
    "feature" key.
    """
    feature = request.get_json()["feature"]
    featureEntry = Feature_votes(feature, 0, datetime.utcnow())
    db.session.add(featureEntry)
    db.session.commit()
    response = {"feature": featureEntry.feature,
                "votes": 0,
                "date": featureEntry.date
                }
    return response
@app.route("/featureModifyVotes", methods=['POST'])
def featureUnvote():
    """Increment or decrement a feature's vote count.

    Expects JSON {"feature": <name>, "direction": "increase" | other}.
    Restored the mangled `@app.route` decorator; parse the JSON body once
    instead of twice; return 404 instead of crashing with AttributeError
    when the feature does not exist.
    """
    payload = request.get_json()
    feature = payload["feature"]
    direction = payload["direction"]
    featureEntry = Feature_votes.query.filter_by(feature=feature).first()
    if featureEntry is None:
        return {"error": "feature not found"}, 404
    if direction == "increase":
        featureEntry.votes += 1
    else:
        featureEntry.votes -= 1
    db.session.commit()
    response = {featureEntry.feature: featureEntry.votes}
    return response
if __name__ == '__main__':
    # Local development entry point; on Heroku, gunicorn (see the Procfile)
    # imports `app` directly and this block never runs.
    app.run()
and here is my Procfile
web: gunicorn --bind 0.0.0.0:$PORT server:app
Also here is a snip I took from inspect element to show that this request is being served locally.
I am relatively new to web development so it is possible I made a lot of mistakes. Please let me know if you can help or need any more info from me. Thanks.
So apparently that screenshot I posted in the question didn't mean that my server was running on localhost, but rather that my request was being made to the localhost. Turns out I had fetch("http://localhost...) in my build files. After using a relative path, rebuilding and pushing to heroku, everything is working.
I am developing a flask web application currently. However, I do not know how I can get the uploaded images from the user into the SQLite database and retrieve it for later use (such as display it on the homepage and other pages).
I am very new to web development, so I am not familiar with many programming languages yet. I have seen other pages talking about the use of PHP — may I know if that is really needed? Are there other ways of doing so in Flask?
Would appreciate it if someone is able to guide me.
Thank you!
You can base64 encode an image and use a data: url
<img src="data:image/png;base64, iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==" alt="Red dot" />
If you simply paste
data:image/png;base64, iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==
in your address bar, you will see a small red dot
In an app this looks like this:
# FLASK_APP=mini_app flask run
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
# Unbound SQLAlchemy handle; attached to the app later via db.init_app().
db = SQLAlchemy()
class User(db.Model):
    """A user whose profile picture is stored inline as base64 text."""
    __tablename__ = "Users"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)
    # Base64 payload only — no "data:image/png;base64," prefix; the view
    # adds that when rendering the <img> tag.
    image = db.Column(db.String)
def create_app(config_filename=None, host="localhost"):
    """Build the demo app backed by an in-memory SQLite DB seeded with one user.

    NOTE(review): neither `config_filename` nor `host` is used in this body —
    presumably kept for a fuller configuration story; confirm before removing,
    since the module-level caller passes `host=`.
    """
    app = Flask("demo_app")
    # "sqlite://" with no path = in-memory database, recreated on each start.
    app.config.update(SQLALCHEMY_DATABASE_URI="sqlite://")
    db.init_app(app)
    with app.app_context():
        db.create_all()
        # Seed one demo row; the string is a base64-encoded tiny red-dot PNG.
        user = User(name="test", image="iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==")
        db.session.add(user)
        db.session.commit()
    return app
# Bind on all interfaces so the demo is reachable from other machines.
host = "0.0.0.0"
app = create_app(host=host)
@app.route("/image/<string:user_name>")
def show_profile(user_name):
    """Render the stored base64 PNG for *user_name*, or 404 if unknown.

    Restored the mangled `@app.route` decorator (pasted as `#app.route`);
    without it the route is never registered.
    """
    user = User.query.filter(User.name==user_name).one_or_none()
    if user:
        return f'User: {user.name}<br/>Pic:<img src="data:image/png;base64, {user.image}"/>'
    return "Not Found", 404
if __name__ == "__main__":
    # Direct-run entry point; `flask run` uses the module-level `app` instead.
    app.run(host=host)
then go to
http://localhost:5000/image/test
I have a CSV file of this form:
Username, Password_Hash
noam , ************
paz , ************
I want to import this CSV into my datastore so the data could be accessed from python by using this model:
class Company(ndb.Model):
    """One imported CSV row: a username and its (pre-hashed) password."""
    Username = ndb.StringProperty()
    # Not indexed: never queried on, so skip the index write cost.
    Password_Hash= ndb.StringProperty(indexed=False)
Of course, importing the entries manually one by one is not an option because the real file is pretty large.
I have no idea which file structure the `gcloud preview datastore upload` command expects.
Google has a lack of good documentation on this issue.
How about something like:
from google.appengine.api import urlfetch
from models import Company
def do_it(request):
    """Fetch a CSV of (Username, Password_Hash) rows over HTTP and store each
    row as a Company entity.

    Fixes: the original header check `row.lower().startswith('Username,')`
    could never match (a lowercased string cannot start with a capital 'U'),
    so the header row was imported as data. Also uses the csv module instead
    of a naive split, and strips the padding spaces visible in the sample
    file ("noam , ****").
    """
    import csv

    csv_url = 'http://mysite-or-localhost/table.csv'
    csv_response = urlfetch.fetch(csv_url, allow_truncated=True)
    if csv_response.status_code == 200:
        for row_values in csv.reader(csv_response.content.splitlines()):
            if len(row_values) < 2:
                continue  # blank or malformed line
            username = row_values[0].strip()
            if username.lower() == 'username':
                continue  # header row
            new_record = Company(
                Username=username,
                Password_Hash=row_values[1].strip()
            )
            new_record.put()
    return Response("Did it", mimetype='text/plain')
There is no magic way of migrating. You need to write a program that reads the file and saves the entries to the datastore one by one. It's not particularly difficult to write such a program. Give it as long as it takes; it won't run forever...
I have a simple model:
class News(models.Model):
    """A news item shown on a per-panel listing."""
    title = models.CharField(max_length=255, verbose_name=_("title"))
    content = models.TextField(default='', verbose_name=_("content"))
    # Which site panel/section the item belongs to; queries filter on this.
    panel = models.CharField(max_length=50, default='', verbose_name=_("panel"))
    # Timezone-aware timestamps: set on insert / on every save respectively.
    created = TzDateTimeProperty(auto_now_add=True, verbose_name=_("date created"))
    lastmodified = TzDateTimeProperty(auto_now=True, verbose_name=_("date modified"))
I want to get the 5 recent news records, and I know that with Google App Engine DB queryset I can get 5 recent records in the following easy way:
results = News.all().filter(panel = panel).order('created').fetch(5)
With Django running in Google App Engine I would need to do:
results = News.objects.filter(panel = panel).order_by('created')[:5]
But it will throw exception if there are no news records. I could wrap it within catch exception, but what is the proper and optimized way to limit query results within Django?
You can do something like this
results = News.objects.filter(panel = panel).order_by('created')
if results is not None:
new_results = results[:5]
I have a datastore with a kind named MyUsers(db.Model) that currently contains about 30 entities.
I have written a script that prints all the entities' "name" attribute to the screen (separated by the '#' char), using the following code:
def get(self):
    """Write every MyUsers entity's name to the response, '#'-separated."""
    # Iterating the query object streams entities from the datastore in
    # batches rather than loading them all at once.
    q_1 = MyUsers.all().order('name')
    for user in q_1:
        self.response.out.write(user.name)
        self.response.out.write("#")
The script works just fine, but the problem is that I always get critical message in the app engine log:
12-12 12:45AM 22.691
Exceeded soft memory limit with
220.043 MB after servicing 1 requests total
I 12-12 12:45AM 22.691
This request caused a new process to
be started for your application, and
thus caused your application code to
be loaded for the first time. This
request may thus take longer and use
more CPU than a typical request for
your application.
W 12-12 12:45AM 22.691
After handling this request, the
process that handled this request was
found to be using too much memory and
was terminated. This is likely to
cause a new process to be used for the
next request to your application. If
you see this message frequently, you
may have a memory leak in your
application.
It seems like this is a very straightforward basic operation, that shouldn't exceed any memory limits, so what can I do to improve it?
Thanks,
Joel
EDIT:
As for the imports, the imports I use are:
from models.model import *
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
import profiler.appengine.request
import profiler.appengine.datastore
I used a profiler to try and understand what is wrong, maybe you can help
Thanks!
Joel
EDIT 2
This is the full version of the code (the problem occurred also before I imported the profiler, I used it after it happened to try and debug):
from models.model import MyUsers
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
import profiler.appengine.request
import profiler.appengine.datastore
class PrintAll(webapp.RequestHandler):
    """Handler for /print: emits every MyUsers name, '#'-separated."""

    def get(self):
        # Hoist the output stream lookup out of the loop; iterating the
        # query streams entities ordered by name.
        out = self.response.out
        users_by_name = MyUsers.all().order('name')
        for entity in users_by_name:
            out.write(entity.name)
            out.write("#")
# Route table: only /print is served, handled by PrintAll.
application = webapp.WSGIApplication(
    [('/print', PrintAll)
     ],
    debug=True)
def main():
    """Run the WSGI app with request and datastore profiling enabled."""
    profiler.appengine.request.activate()
    profiler.appengine.datastore.activate()
    run_wsgi_app(application)
    # Emit the profiling reports after the request has been handled.
    profiler.appengine.request.show_summary()
    profiler.appengine.datastore.show_summary()
    profiler.appengine.datastore.dump_requests() # optional

if __name__ == "__main__":
    main()
As for the MyUsers() model class:
class MyUsers(db.Model):
    """A user profile entity with scoring, history, and debug fields.

    NOTE(review): naming mixes camelCase and snake_case, but renaming the
    properties would change the stored datastore schema, so they are left
    as-is and only documented.
    """
    # Identity / account fields.
    user = db.UserProperty()
    points = db.FloatProperty()
    bonus = db.FloatProperty(default=0.0)
    # Both default to the creation time (auto_now_add, not auto_now).
    joindate = db.DateTimeProperty(auto_now_add=True)
    lastEntry=db.DateTimeProperty(auto_now_add=True)
    name=db.StringProperty()
    last_name = db.StringProperty()
    homepage = db.StringProperty()
    hobbies = db.ListProperty(str)
    other = db.StringProperty()
    # Derived score fields — semantics not evident from this file; confirm
    # with the code that writes them.
    calculate1 = db.FloatProperty()
    calculate2 = db.FloatProperty()
    calculate3= db.IntegerProperty(default=0)
    # Unbounded string lists; large histories grow the entity size.
    history = db.ListProperty(str)
    history2 = db.ListProperty(str)
    title = db.IntegerProperty(default=0)
    title_string = db.StringProperty()
    updateDate = db.DateTimeProperty(auto_now_add=True)
    level=db.IntegerProperty(default=0)
    debug_helper=db.IntegerProperty(default=0)
    debug_list=db.ListProperty(str)
As it stands, there's not really any way that this could cause the error you're seeing. Can you provide a complete reproduction case? It's likely that something other than the code snippet you've included is the cause of this issue.