Unable to make Flask-SQLAlchemy connect to my Google App Engine DB

I am trying to set up a web application based on Flask using Google App Engine (I'm new to both).
The web application receives data from the client, which should be processed and saved in a database.
I've tried to use Flask-SQLAlchemy, but I'm unable to set it up with Google Cloud SQL. I used this guide to create a MySQL DB in the same project, and then I tried to use it in my main Python code:
app.config('SQLALCHEMY_DATABASE_URI') = 'mysql+mysqldb://root@/Results?unix_socket=/cloudsql/crafty-circlet-164415:psy01'
app.config['SECRET_KEY'] = 'NglfxE8FOP9pgV8fxpyj'
db = SQLAlchemy(app)
class Result(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text)
    profession = db.Column(db.Text)
    year = db.Column(db.Text)
    pressure_level = db.Column(db.Integer)
    reported_suc_count = db.Column(db.Integer)
    marked_suc_count = db.Column(db.Integer)
    real_suc_count = db.Column(db.Integer)
    insertion_time = db.Column(db.DateTime)

    def __init__(self, name, profession, year, pressure_level, reported_suc_count, marked_suc_count, real_suc_count):
        self.name = name
        self.profession = profession
        self.year = year
        self.pressure_level = pressure_level
        self.reported_suc_count = reported_suc_count
        self.marked_suc_count = marked_suc_count
        self.real_suc_count = real_suc_count
        self.insertion_time = datetime.utcnow()
@app.route('/resultform', methods=['POST', 'GET'])
def resultform():
    if request.method == 'POST':
        if not request.form['successmatrices']:
            flash('please fill all the fields', 'error')
        else:
            if 'name' in request.form:
                name = request.form['name']
            else:
                name = None
            if 'profession' in request.form:
                profession = request.form['profession']
            else:
                profession = None
            if 'year' in request.form:
                year = request.form['year']
            else:
                year = None
            if 'pressure_level' in request.form:
                pressure_level = int(request.form['pressure_level'])
            else:
                pressure_level = None
            if 'successmatrices' in request.form:
                successmatrices = int(request.form['successmatrices'])
            else:
                successmatrices = 0
            new_result = Result(name=name, profession=profession, year=year, pressure_level=pressure_level, reported_suc_count=successmatrices, marked_suc_count=len(session['marked']), real_suc_count=len(session['correct']))
            db.session.add(new_result)
            db.session.commit()
            return redirect(url_for('showresults'))
    return render_template("resultform.html")
@app.route('/showresults')
def showresults():
    return render_template("showresults.html", results=Results.query.all())

if __name__ == '__main__':
    db.create_all()
    app.run(debug=True)
When I try to run it in my local development environment (I'm using PyCharm), I get the following error in the background:
ERROR 2017-04-16 09:20:19,802 wsgi.py:263]
Traceback (most recent call last):
  File "C:\Users\<>\AppData\Local\Google\Cloud SDK\google-cloud-sdk\platform\google_appengine\google\appengine\runtime\wsgi.py", line 240, in Handle
    handler = _config_handle.add_wsgi_middleware(self._LoadHandler())
  File "C:\Users\<>\AppData\Local\Google\Cloud SDK\google-cloud-sdk\platform\google_appengine\google\appengine\runtime\wsgi.py", line 299, in _LoadHandler
    handler, path, err = LoadObject(self._handler)
  File "C:\Users\<>\AppData\Local\Google\Cloud SDK\google-cloud-sdk\platform\google_appengine\google\appengine\runtime\wsgi.py", line 85, in LoadObject
    obj = __import__(path[0])
  File "C:\Users\<>\PycharmProjects\crafty-circlet-164415\main.py", line 7
    app.config('SQLALCHEMY_DATABASE_URI') = 'mysql+mysqldb://root@/Results?unix_socket=/cloudsql/crafty-circlet-164415:psy01'
SyntaxError: can't assign to function call
And after deployment to GAE the following error appears:
Error: Server Error
The server encountered an error and could not complete your request.
Please try again in 30 seconds.
Any idea how to solve this?

app.config is a dictionary, so to set a config value you use [ ] instead of ( ), just as you already do for app.config['SECRET_KEY'].
So it should be:
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
A few other pointers for a successful connection. You'll need to format your connection details properly:
USER = 'root'
PASSWORD = 'your-cloudsql-password'
DATABASE = 'your-cloudsql-database-name'
# connection_name is of the format `project:region:your-cloudsql-instance`
CONNECTION_NAME = 'your-cloudsql-connection-name'

SQLALCHEMY_DATABASE_URI = (
    'mysql+pymysql://{user}:{password}@localhost/{database}'
    '?unix_socket=/cloudsql/{connection_name}').format(
        user=USER, password=PASSWORD,
        database=DATABASE, connection_name=CONNECTION_NAME)

app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
I'd probably separate the secrets and other sensitive info into a config file that isn't checked into source control, or use environment variables.
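For example, a minimal sketch of the environment-variable approach (the variable names here are placeholders, not anything App Engine defines for you):

import os

# Hypothetical variable names; set these in your deployment configuration.
USER = os.environ['CLOUDSQL_USER']
PASSWORD = os.environ['CLOUDSQL_PASSWORD']
DATABASE = os.environ['CLOUDSQL_DATABASE']
CONNECTION_NAME = os.environ['CLOUDSQL_CONNECTION_NAME']

app.config['SQLALCHEMY_DATABASE_URI'] = (
    'mysql+pymysql://{user}:{password}@localhost/{database}'
    '?unix_socket=/cloudsql/{connection_name}').format(
        user=USER, password=PASSWORD,
        database=DATABASE, connection_name=CONNECTION_NAME)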
If you want to test your application locally against your Cloud SQL instance, you'll need to install the Cloud SQL Proxy, add the connection name as an environment variable, and add the MySQLdb library to your app.yaml:
> cloud_sql_proxy -instances=your-connection-name=tcp:3306
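For reference, a sketch of the corresponding app.yaml entry for the built-in MySQLdb library (this assumes the Python 2.7 standard runtime, which the question appears to be using):

libraries:
- name: MySQLdb
  version: "latest"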
Alternatively, you can use a local MySQL instance for testing and switch to Cloud SQL when running on App Engine, as sketched below.
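A sketch of that switch (the SERVER_SOFTWARE check is the usual way to detect the Python 2.7 standard runtime; the credentials and connection name below are placeholders):

import os

USER = 'root'
PASSWORD = 'your-password'
DATABASE = 'Results'
CONNECTION_NAME = 'project:region:instance'  # placeholder

if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
    # On App Engine: connect through the Cloud SQL unix socket.
    uri = ('mysql+mysqldb://{0}:{1}@localhost/{2}?unix_socket=/cloudsql/{3}'
           .format(USER, PASSWORD, DATABASE, CONNECTION_NAME))
else:
    # Local development: a local MySQL instance, or the Cloud SQL Proxy on 127.0.0.1:3306.
    uri = 'mysql+mysqldb://{0}:{1}@127.0.0.1:3306/{2}'.format(USER, PASSWORD, DATABASE)

app.config['SQLALCHEMY_DATABASE_URI'] = uri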
More information on setting up Cloud SQL with App Engine can be found here

Related

Message insert - adding external label

I'm using the Gmail API to insert an email into the inbox. When the From domain is outside the organization, I expect the external label to be added, but it's not.
I'm using Python and everything else is working: I'm using a service account, I'm able to impersonate the user's email address, and I'm inserting with labelIds of ['INBOX', 'UNREAD']. I need the new external label as well, but I couldn't figure out a way to add it through the API.
This feature is turned ON for the workspace.
Update - adding my code:
from googleapiclient import discovery, errors
from google.oauth2 import service_account
from email.mime.text import MIMEText
import base64

SERVICE_ACCOUNT_FILE = 'insert-messages-91e77b62878f.json'
SCOPES = ['https://www.googleapis.com/auth/gmail.insert']

def validationService():
    # Set the credentials
    credentials = service_account.Credentials.from_service_account_file(
        SERVICE_ACCOUNT_FILE, scopes=SCOPES)
    # Delegate the credentials to the user you want to impersonate
    delegated_credentials = credentials.with_subject('<some_user>')
    service = discovery.build('gmail', 'v1', credentials=delegated_credentials)
    return service

def SendMessage(service, message):
    message = service.users().messages().insert(userId='me', body=message).execute()  # 'me' will use <some_user> from above
    return message

def CreateMessage(sender, to, subject, message_text):
    message = MIMEText(message_text)
    message['To'] = to
    message['From'] = sender
    message['Subject'] = subject
    return {'raw': base64.urlsafe_b64encode(message.as_string().encode()).decode(),
            'labelIds': ['INBOX', 'UNREAD']}

def main():
    try:
        service = validationService()
        email = CreateMessage('some@external.com', "<some_user>", "Test", "This is a test")
        email_sent = SendMessage(service, email)
        print('Message Id:', email_sent['id'])
    except errors.HttpError as err:
        print('\n---------------You have the following error-------------')
        print(err)
        print('---------------You have the following error-------------\n')

if __name__ == '__main__':
    main()

Flask app is being run locally instead of on Heroku

So I've deployed my Flask app with a React front end to Heroku, but there seems to be some problem where Flask is running on my localhost instead of on the Heroku server.
I've read tons of Stack Overflow posts on this, but to no resolution. Here is my Flask code:
from flask import Flask, request
import flask
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
from flask_cors import CORS

app = Flask(__name__, static_folder="./build", static_url_path="/")
app.config['SQLALCHEMY_DATABASE_URI'] = 'my database url'
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.secret_key = 'secret string'
CORS(app)
db = SQLAlchemy(app)

class Feature_votes(db.Model):
    feature = db.Column(db.String(500), primary_key=True)
    votes = db.Column(db.Integer, nullable=False)
    date = db.Column(db.DateTime, nullable=False)

    def __init__(self, feature, votes, date):
        self.feature = feature
        self.votes = votes
        self.date = date

# Serve the react app
@app.route("/")
def index():
    return app.send_static_file("index.html")

# Retrieve currently polled features from Feature_votes
@app.route("/getVotes", methods=['GET'])
def getVotes():
    rows = Feature_votes.query.filter().order_by(Feature_votes.date)
    response = []
    for row in rows:
        response.append(
            {"feature": row.feature,
             "votes": row.votes
             })
    return flask.jsonify(response)

# Add a new feature to the db with votes set to 0
@app.route("/featureAdd", methods=['POST'])
def featureAdd():
    feature = request.get_json()["feature"]
    featureEntry = Feature_votes(feature, 0, datetime.utcnow())
    db.session.add(featureEntry)
    db.session.commit()
    response = {"feature": featureEntry.feature,
                "votes": 0,
                "date": featureEntry.date
                }
    return response

@app.route("/featureModifyVotes", methods=['POST'])
def featureUnvote():
    feature = request.get_json()["feature"]
    direction = request.get_json()["direction"]
    featureEntry = Feature_votes.query.filter_by(feature=feature).first()
    if (direction == "increase"):
        featureEntry.votes += 1
    else:
        featureEntry.votes -= 1
    db.session.commit()
    response = {featureEntry.feature: featureEntry.votes}
    return response

if __name__ == '__main__':
    app.run()
and here is my Procfile
web: gunicorn --bind 0.0.0.0:$PORT server:app
Also here is a snip I took from inspect element to show that this request is being served locally.
I am relatively new to web development so it is possible I made a lot of mistakes. Please let me know if you can help or need any more info from me. Thanks.
So apparently that screenshot I posted in the question didn't mean that my server was running on localhost, but rather that my request was being made to localhost. It turns out I had fetch("http://localhost...) calls in my build files. After switching to a relative path (e.g. fetch("/getVotes")), rebuilding, and pushing to Heroku, everything is working.

Sagemaker model deployment failing due to custom endpoint name

AWS SageMaker model deployment fails when the endpoint_name argument is specified. Any thoughts?
Without the endpoint_name argument in deploy(), model deployment works successfully.
Model training and saving to the S3 location succeed either way.
import boto3
import os
import numpy as np  # needed for np.split below
import sagemaker
from sagemaker import get_execution_role
from sagemaker.predictor import csv_serializer
from sagemaker.amazon.amazon_estimator import get_image_uri

bucket = 'Y'
prefix = 'Z'
role = get_execution_role()

# df (the source DataFrame) and suffix are defined elsewhere in the notebook
train_data, validation_data, test_data = np.split(df.sample(frac=1, random_state=100), [int(0.5 * len(df)), int(0.8 * len(df))])
train_data.to_csv('train.csv', index=False, header=False)
validation_data.to_csv('validation.csv', index=False, header=False)
test_data.to_csv('test.csv', index=False)

boto3.Session().resource('s3').Bucket(bucket).Object(os.path.join(prefix, 'train/X/train.csv')).upload_file('train.csv')
boto3.Session().resource('s3').Bucket(bucket).Object(os.path.join(prefix, 'validation/X/validation.csv')).upload_file('validation.csv')

container = get_image_uri(boto3.Session().region_name, 'xgboost')
#print(container)

s3_input_train = sagemaker.s3_input(s3_data='s3://{}/{}/train/{}'.format(bucket, prefix, suffix), content_type='csv')
s3_input_validation = sagemaker.s3_input(s3_data='s3://{}/{}/validation/{}/'.format(bucket, prefix, suffix), content_type='csv')

sess = sagemaker.Session()
output_loc = 's3://{}/{}/output'.format(bucket, prefix)

xgb = sagemaker.estimator.Estimator(container,
                                    role,
                                    train_instance_count=1,
                                    train_instance_type='ml.m4.xlarge',
                                    output_path=output_loc,
                                    sagemaker_session=sess,
                                    base_job_name='X')
#print('Model output to: {}'.format(output_location))

xgb.set_hyperparameters(eta=0.5,
                        objective='reg:linear',
                        eval_metric='rmse',
                        max_depth=3,
                        min_child_weight=1,
                        gamma=0,
                        early_stopping_rounds=10,
                        subsample=0.8,
                        colsample_bytree=0.8,
                        num_round=1000)

# Model fitting
xgb.fit({'train': s3_input_train, 'validation': s3_input_validation})

# Deploy model with a custom endpoint name
xgb_predictor_X = xgb.deploy(initial_instance_count=1, instance_type='ml.m4.xlarge', endpoint_name='X')
xgb_predictor_X.content_type = 'text/csv'
xgb_predictor_X.serializer = csv_serializer
xgb_predictor_X.deserializer = None
INFO:sagemaker:Creating endpoint with name delaymins
ClientError: An error occurred (ValidationException) when calling the CreateEndpoint operation: Could not find model "arn:aws:sagemaker:us-west-2::model/X-2019-01-08-18-17-42-158".
Figured it out! If a custom-named endpoint is not deleted before redeploying, the endpoint name gets blacklisted (not sure if this is temporary). Therefore a different endpoint name must be used if this mistake is made. Moral of the story: always delete an endpoint before redeploying.
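For reference, a minimal sketch of deleting the endpoint before redeploying ('X' is the custom endpoint name from the question):

# Via the predictor object returned by deploy():
xgb_predictor_X.delete_endpoint()

# Or directly through boto3 if the predictor object is gone:
import boto3
boto3.client('sagemaker').delete_endpoint(EndpointName='X')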

WinError 10054 An existing connection was forcibly closed by the remote host when sending an email with attachment through Gmail API with Python

I'm trying to send an email with an attachment through the Gmail API using Python.
When sending an email without an attachment everything works fine, but once I add a PDF file, Google returns "[WinError 10054] An existing connection was forcibly closed by the remote host".
Does anyone have an idea why this is happening?
My code:
import base64
from email.mime.multipart import MIMEMultipart
from oauth2client import file, client, tools
from googleapiclient.discovery import build
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from httplib2 import Http

def auth(scopes):
    store = file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('credentials.json', scopes)
        creds = tools.run_flow(flow, store)
    service = build('gmail', 'v1', http=creds.authorize(Http()))
    return service

def create_mess():
    message = MIMEText('Message text')
    message['to'] = 'to@gmail.com'
    message['from'] = 'from@domain.com'
    message['subject'] = 'Subject'
    return {'raw': base64.urlsafe_b64encode(message.as_string().encode()).decode()}

def create_mess_att():
    message = MIMEMultipart()
    message['to'] = 'to@gmail.com'
    message['from'] = 'from@domain.com'
    message['subject'] = 'Subject'
    msg = MIMEText('Message text')
    message.attach(msg)
    fp = open('TestFile.pdf', 'rb')
    msg = MIMEBase('application', 'octet-stream')
    msg.set_payload(fp.read())
    fp.close()
    filename = 'TestFile.pdf'
    msg.add_header('Content-Disposition', 'attachment', filename=filename)
    message.attach(msg)
    return {'raw': base64.urlsafe_b64encode(message.as_string().encode()).decode()}

def send_mess(message):
    service.users().messages().send(userId='me', body=message).execute()
This part works:
service = auth('https://www.googleapis.com/auth/gmail.modify')
message = create_mess()
send_mess(message)
This part does not:
service = auth('https://www.googleapis.com/auth/gmail.modify')
message = create_mess_att()
send_mess(message)
ConnectionResetError: [WinError 10054] An existing connection was forcibly closed by the remote host
Thanks in advance!

Google Cloud Storage - com.google.appengine.api.appidentity.AppIdentityServiceFailureException

I am trying to store a file to Google Cloud Storage from a JAX-RS service running on Google App Engine. While trying to store the file I get the error below.
com.google.appengine.tools.cloudstorage.NonRetriableException: com.google.appengine.api.appidentity.AppIdentityServiceFailureException: The AppIdentity service threw an unexpected error. Details:
I am trying to save the file to a new bucket, and I gave the below IDs (compute, App Engine, and my custom service account) permissions on the bucket. I also created a separate service account and gave it the Writer permission on the bucket (and the Editor role on the project).
myaccount@myproject.iam.gserviceaccount.com
xxxx-compute@developer.gserviceaccount.com
xxxx@cloudservices.gserviceaccount.com
I understand the separate service account is not required, because from App Engine the default service account should be able to store the file. But just to try, I also created the above service account, stored its credentials file at WEB-INF/resources/service_account_credentials.json, and set the below property in appengine-web.xml:
<property name="GOOGLE_APPLICATION_CREDENTIALS" value="WEB-INF/resources/service_account_credentials.json"/>
I tried the two ways below to store the file, but both give the error.
First way...
Getting the service:
GcsService gcsService = GcsServiceFactory.createGcsService(new RetryParams.Builder()
    .initialRetryDelayMillis(10)
    .retryMaxAttempts(10)
    .totalRetryPeriodMillis(15000)
    .build());

Storing the file:

GcsFileOptions gcsFileOptions = null;
GcsFileOptions.Builder builder = new GcsFileOptions.Builder();
if (aclEntityName != null)
    builder = builder.acl(aclEntityName);
if (fileMetaData != null && !fileMetaData.isEmpty())
    for (Map.Entry<String, String> entry : fileMetaData.entrySet()) {
        builder = builder.addUserMetadata(entry.getKey(), entry.getValue());
    }
gcsFileOptions = builder.build();
GcsFilename fileName = new GcsFilename(bucketName, name);
GcsService gcsService = StorageFactory.getGcsService();
GcsOutputChannel outputChannel = gcsService.createOrReplace(fileName, gcsFileOptions);
copy(contentStream, Channels.newOutputStream(outputChannel));
The second way, given below, also gives the same error...
Getting the service:
HttpTransport transport = GoogleNetHttpTransport.newTrustedTransport();
JsonFactory jsonFactory = new JacksonFactory();
GoogleCredential credential =
    GoogleCredential.getApplicationDefault(transport, jsonFactory);
if (credential.createScopedRequired()) {
    Collection<String> scopes = StorageScopes.all();
    credential = credential.createScoped(scopes);
}
return new Storage.Builder(transport, jsonFactory, credential)
    .setApplicationName("GCS Samples")
    .build();
Storing the file the second way:
StorageObject objectMetaData = new StorageObject();
objectMetaData.setName(name);
if (fileMetaData != null && !fileMetaData.isEmpty())
    objectMetaData.setMetadata(fileMetaData);

// Set the access control list to publicly read-only
if (aclEntityName != null && !aclEntityName.trim().equals("")
        && aclRole != null && !aclRole.trim().equals("")) {
    objectMetaData.setAcl(Arrays.asList(
        new ObjectAccessControl().setEntity(aclEntityName).setRole(aclRole)));
}

InputStreamContent mediaContent = new InputStreamContent("application/octet-stream", contentStream);

// Do the insert
Storage client = StorageFactory.getService();
Storage.Objects.Insert insertRequest = client.objects().insert(
    bucketName, objectMetaData, mediaContent);
if (mediaContent.getLength() > 0 && mediaContent.getLength() <= 2 * 1000 * 1000 /* 2MB */) {
    insertRequest.getMediaHttpUploader().setDirectUploadEnabled(true);
}
insertRequest.execute();
What am I doing wrong? Is there a setting I need to change to fix this error? Please help!
From my understanding of Google Application Default Credentials, the GOOGLE_APPLICATION_CREDENTIALS environment variable should point to a local file, and it is generally used with the gcloud command line tool to authenticate when testing code locally.
That said, according to https://stackoverflow.com/a/36408645/374638, perhaps this should go in <env-variables> instead.
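For illustration, a sketch of that appengine-web.xml entry (reusing the file path from the question):

<env-variables>
    <env-var name="GOOGLE_APPLICATION_CREDENTIALS" value="WEB-INF/resources/service_account_credentials.json" />
</env-variables>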
