I am using the Flask web framework on GAE/Python. After uploading a file to Cloud Storage I want to get a reference to the file so that it can be served, but I can't get parse_file_info to work. I've searched long and hard and spent over two days trying to make this work, and I'm at my wit's end. My handlers are below:
@app.route('/upload_form', methods=['GET'])
def upload_form():
    blobupload_url = blobstore.create_upload_url('/upload', gs_bucket_name='mystorage')
    return render_template('upload_form.html', blobupload_url=blobupload_url)
@app.route('/upload', methods=['POST'])
def blobupload():
    file_info = blobstore.parse_file_info(cgi.FieldStorage()['file'])
    return file_info.gs_object_name
The data is encoded in the payload of the uploaded_file you retrieve after uploading a blob. Here is example code showing how to extract the object name:
import email
from google.appengine.api.blobstore import blobstore

def extract_cloud_storage_meta_data(file_storage):
    """ Extract the cloud storage meta data from a file. """
    uploaded_headers = _format_email_headers(file_storage.read())
    storage_object_url = uploaded_headers.get(blobstore.CLOUD_STORAGE_OBJECT_HEADER, None)
    return tuple(_split_storage_url(storage_object_url))

def _format_email_headers(raw_headers):
    """ Returns an email message containing the headers from the raw headers. """
    message = email.message.Message()
    message.set_payload(raw_headers)
    payload = message.get_payload(decode=True)
    return email.message_from_string(payload)

def _split_storage_url(storage_object_url):
    """ Returns a list containing the bucket id and the object id. """
    return storage_object_url.split("/")[2:4]
@app.route('/upload', methods=['POST'])
def blobupload():
    uploaded_file = request.files['file']
    storage_meta_data = extract_cloud_storage_meta_data(uploaded_file)
    bucket_name, object_name = storage_meta_data
    return object_name
Related
I'm using Django to feed a front end web page built with React.
I have an API that gets the necessary data with some formatting, but it's pretty slow. Any suggestions on how to build a faster API? It's currently returning 8 records, which takes more than 3 seconds.
def deployed_data(request):
    deployments = deployment.objects.filter(LAUNCH_DATE__isnull=False).filter(HISTORICAL=False)
    res = []
    for dep in deployments:
        crt_dep = {
            "FLOAT_SERIAL_NO": dep.FLOAT_SERIAL_NO,
            "PLATFORM_NUMBER": dep.PLATFORM_NUMBER,
            "PLATFORM_TYPE": dep.PLATFORM_TYPE.VALUE,
            "DEPLOYMENT_CRUISE_ID": dep.DEPLOYMENT_CRUISE_ID,
            "DEPLOYMENT_PLATFORM": dep.DEPLOYMENT_PLATFORM.VALUE,
            "LAUNCH_DATE": dep.LAUNCH_DATE.strftime("%Y-%m-%d"),
            "status": dep.status,
            "last_report": dep.last_report.strftime("%Y-%m-%d %H:%M"),
            "next_report": dep.next_report.strftime("%Y-%m-%d %H:%M"),
            "days_since_last": dep.days_since_last,
            "last_cycle": dep.last_cycle,
            "age": dep.age.days,
        }
        res.append(crt_dep)
    return JsonResponse(res, status=200, safe=False)
Of course it's slower: you realize that on every loop, every related value you pull into the dict is a separate hit to the database?
Just use a DRF serializer, or even Django's serializer, to convert the data all at once,
or use .values() after the filter (see the sketch after the serializer code below).
views.py
class GetDeploymentData(generics.ListAPIView):
    serializer_class = DeployedDataSerializer
    queryset = deployment.objects.filter(LAUNCH_DATE__isnull=False).filter(HISTORICAL=False)
serializers.py
class DeployedDataSerializer(serializers.Serializer):
    FLOAT_SERIAL_NO = serializers.IntegerField()
    PLATFORM_NUMBER = serializers.IntegerField()
    PLATFORM_TYPE = serializers.CharField()
    status = serializers.CharField()
    DEPLOYMENT_CRUISE_ID = serializers.CharField()
    DEPLOYMENT_PLATFORM = serializers.CharField()
    LAUNCH_DATE = serializers.DateTimeField(format="%Y-%m-%d %H:%M")
    last_report = serializers.DateTimeField(format="%Y-%m-%d %H:%M")
    next_report = serializers.DateTimeField(format="%Y-%m-%d %H:%M")
    days_since_last = serializers.IntegerField()
    last_cycle = serializers.IntegerField()
    age = serializers.IntegerField(source="age.days")
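The .values() option mentioned above would look roughly like this. This is only a sketch: the double-underscore lookups assume PLATFORM_TYPE and DEPLOYMENT_PLATFORM are foreign keys with a VALUE column, and any computed attributes (status, days_since_last, etc., if they are Python properties rather than database fields) would still have to be added per row.

def deployed_data(request):
    # One query, no per-row attribute access; related columns are pulled in
    # via the double-underscore join syntax. Note the dict keys come back as
    # "PLATFORM_TYPE__VALUE" and "DEPLOYMENT_PLATFORM__VALUE".
    deployments = (deployment.objects
                   .filter(LAUNCH_DATE__isnull=False, HISTORICAL=False)
                   .values("FLOAT_SERIAL_NO", "PLATFORM_NUMBER",
                           "PLATFORM_TYPE__VALUE", "DEPLOYMENT_CRUISE_ID",
                           "DEPLOYMENT_PLATFORM__VALUE", "LAUNCH_DATE"))
    return JsonResponse(list(deployments), status=200, safe=False)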
As Mohamed pointed out, serializers are much faster, but this is still not as fast as the same page rendered with Django's templates.
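If the remaining slowness comes from the per-row foreign-key lookups (the serializer's CharField on PLATFORM_TYPE and DEPLOYMENT_PLATFORM still touches the related objects), adding select_related to the queryset should fold them into the main query. A sketch, assuming both are ForeignKey fields on the deployment model:

class GetDeploymentData(generics.ListAPIView):
    serializer_class = DeployedDataSerializer
    # select_related() joins the related tables up front instead of issuing
    # one extra query per row when the serializer reads them.
    queryset = (deployment.objects
                .filter(LAUNCH_DATE__isnull=False, HISTORICAL=False)
                .select_related("PLATFORM_TYPE", "DEPLOYMENT_PLATFORM"))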
I am using Angular and Bootstrap to serve my forms. If a user uploads an image, Angular sends it in the "data:" (base64) format, but Django is expecting a file. I have fixed this issue by overriding both perform_authentication (to convert the image to a file) and perform_create (to inject my user_id). Is there a better way to do this?
I'd rather not override my view; I'd rather override the way Django validates ImageFields. What I want to do is check whether the passed value is a base64 string and, if it is, convert it to a file before the ImageField is validated. The code below works as is, I just don't feel it is optimal.
Here is my view:
class UserCredentialList(generics.ListCreateAPIView):
    permission_classes = (IsCredentialOwnerOrAdmin,)
    serializer_class = CredentialSerializer
    """
    This view should return a list of all the purchases
    for the currently authenticated user.
    """

    def get_queryset(self):
        """
        This view should return a list of all models by
        the maker passed in the URL
        """
        user = self.request.user
        return Credential.objects.filter(member=user)

    def perform_create(self, serializer):
        serializer.save(member_id=self.request.user.id)

    def perform_authentication(self, request):
        if request.method == 'POST':
            data = request.data.pop('document_image', None)
            from django.core.files.base import ContentFile
            import base64
            import six
            import uuid

            # Check if this is a base64 string
            if isinstance(data, six.string_types):
                # Check if the base64 string is in the "data:" format
                if 'data:' in data and ';base64,' in data:
                    # Break out the header from the base64 content
                    header, data = data.split(';base64,')
                    # Try to decode the file. Return validation error if it fails.
                    try:
                        decoded_file = base64.b64decode(data)
                    except TypeError:
                        self.fail('invalid_image')
                    # Generate file name:
                    file_name = str(uuid.uuid4())[:12]  # 12 characters are more than enough.
                    # Get the file name extension:
                    import imghdr
                    file_extension = imghdr.what(file_name, decoded_file)
                    file_extension = "jpg" if file_extension == "jpeg" else file_extension
                    complete_file_name = "%s.%s" % (file_name, file_extension,)
                    data = ContentFile(decoded_file, name=complete_file_name)
                    request.data['document_image'] = data
        request.user
And here is my serializer:
class CredentialSerializer(serializers.ModelSerializer):
    class Meta:
        model = Credential
        fields = (
            'id',
            'credential_type',
            'credential_number',
            'date_received',
            'is_verified',
            'date_verified',
            'document_image',
        )
And here is my model:
class Credential(models.Model):
    """Used to store various credentials for member validation."""

    document_image = models.ImageField(
        upload_to=get_upload_path(instance="instance",
                                  filename="filename.ext",
                                  path='images/credentials/'))

    PASSENGER = 'P'
    OWNER = 'O'
    CAPTAIN = 'C'
    CREDENTIAL_CHOICES = (
        (PASSENGER, 'Passenger'),
        (OWNER, 'Owner'),
        (CAPTAIN, 'Captain'),
    )
    credential_type = models.CharField(max_length=1,
                                       choices=CREDENTIAL_CHOICES,
                                       default=PASSENGER)
    credential_number = models.CharField(max_length=255)
    date_received = models.DateTimeField(auto_now_add=True)
    is_verified = models.BooleanField(default=False)
    date_verified = models.DateTimeField(blank=True, null=True)
    member = models.ForeignKey(settings.AUTH_USER_MODEL,
                               related_name='credentials')
I used the link below to help me; now I just want to figure out how to override the proper method:
Django REST Framework upload image: "The submitted data was not a file"
Well, I've made one change since posting this: I have moved the function into my serializer and now override is_valid instead, and that works as well. At least it's not in my view anymore.
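A cleaner option still might be to override the field rather than the serializer or the view: subclass serializers.ImageField and do the base64-to-file conversion in to_internal_value, so the normal ImageField validation runs afterwards. A rough sketch reusing the conversion logic from the question (Base64ImageField is an illustrative name, not an existing DRF field):

import base64
import imghdr
import uuid

import six
from django.core.files.base import ContentFile
from rest_framework import serializers


class Base64ImageField(serializers.ImageField):
    """Accepts a "data:...;base64,..." string and turns it into a file
    before the regular ImageField validation runs."""

    def to_internal_value(self, data):
        if isinstance(data, six.string_types) and 'data:' in data and ';base64,' in data:
            # Strip the "data:image/...;base64," header and decode the payload.
            header, data = data.split(';base64,')
            try:
                decoded_file = base64.b64decode(data)
            except TypeError:
                self.fail('invalid_image')
            file_name = str(uuid.uuid4())[:12]
            extension = imghdr.what(file_name, decoded_file)
            extension = 'jpg' if extension == 'jpeg' else extension
            data = ContentFile(decoded_file, name='%s.%s' % (file_name, extension))
        return super(Base64ImageField, self).to_internal_value(data)


class CredentialSerializer(serializers.ModelSerializer):
    document_image = Base64ImageField()

    class Meta:
        model = Credential
        fields = ('id', 'credential_type', 'credential_number', 'date_received',
                  'is_verified', 'date_verified', 'document_image')

With this, perform_authentication can be left alone, and the perform_create override that injects member_id can stay as it is.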
I want to use gdata.photos.service.PhotosService to push and pull photos from Picasa. I got a service key file XXXXXXXX-privatekey.p12 from the Google console and am now trying to authenticate to Google with that key.
The documentation for OAuth2 on App Engine has led me to believe that the following would work:
f = file(settings.SITE_ROOT + '/aurora/' + settings.PRIVATE_KEY, 'rb')
key = f.read()
f.close()
credentials = SignedJwtAssertionCredentials(settings.SERVICE_ACCOUNT_NAME, key, scope='http://picasaweb.google.com/data https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile')
http = httplib2.Http()
http = credentials.authorize(http)
service = build("oauth2", "v2", http=http)
user_info = None
try:
    user_info = service.userinfo().get().execute()
    # neither of these two methods work
    #gd_client.SetOAuthInputParameters(signature_method=gdata.auth.OAuthSignatureMethod.RSA_SHA1, consumer_key="asdfasdfasdf.apps.googleusercontent.com", rsa_key=key, two_legged_oauth=True, requestor_id=user_info.get('email'))
    #gd_client.auth_token = gdata.gauth.TwoLeggedOAuthRsaToken(consumer_key=user_info.get('email'), rsa_private_key=key, requestor_id=user_info.get('email'))
except errors.HttpError, e:
    logging.error('An error occurred: %s', e)

# The user_info that comes back looks like this:
user_info = {u'verified_email': True, u'id': u'1234', u'name': u'asdfasdfasdf@developer.gserviceaccount.com', u'email': u'asdfasdfasdf@developer.gserviceaccount.com'}
The issue is that method 1, using SetOAuthInputParameters, returns an invalid token, and method 2 returns a 403 Restricted.
I am at my wits' end after reading through mountains of code that all do regular 3-legged OAuth, when I really and truly do not want to do it that way. Any ideas or articles I haven't seen yet?
Use gdata.gauth.OAuth2TokenFromCredentials.
auth2token = gdata.gauth.OAuth2TokenFromCredentials(credentials)
gd_client = auth2token.authorize(gd_client)
OAuth2TokenFromCredentials is designed to help you use apiclient and gdata at the same time. Under the covers, it uses the credentials for making sure it has the auth information it needs to perform gdata calls.
Note: if you still get a 403, it may be something else entirely. I was using a service account to access a user's data and was getting a 403 because I hadn't specified the user properly in the SignedJwtAssertionCredentials call.
UPDATE: Here's the basic pattern I used:
from oauth2client.client import SignedJwtAssertionCredentials
credentials = SignedJwtAssertionCredentials(
    "XXXXXXXXXXX@developer.gserviceaccount.com",
    open("keyfile").read(),
    scope=(
        "https://www.googleapis.com/auth/drive",
        "https://spreadsheets.google.com/feeds",
        "https://docs.google.com/feeds"
    ),  # For example.
    sub="user@gmail.com"
)
http = httplib2.Http()
http = credentials.authorize(http) # Not needed? See comment below.
auth2token = gdata.gauth.OAuth2TokenFromCredentials(credentials)
gd_client = gdata.photos.service.PhotosService() # For example.
gd_client = auth2token.authorize(gd_client)
If you are using MFA on your Google account, you need to use the consent-screen authentication method. With the Picasa API this does not work as is, because the request API is slightly different.
import gdata.gauth
import os
import pickle
import gdata.photos.service

clientid = 'xxx'  # https://console.developers.google.com/apis/credentials
clientsecret = 'xxx'
Scope = 'https://picasaweb.google.com/data/'
User_agent = 'myself'

def GetAuthToken():
    # Reuse a cached token if we have one; otherwise run the consent flow once.
    if os.path.exists(".token"):
        with open(".token") as f:
            token = pickle.load(f)
    else:
        token = gdata.gauth.OAuth2Token(client_id=clientid, client_secret=clientsecret, scope=Scope, user_agent=User_agent)
        print token.generate_authorize_url(redirect_uri='urn:ietf:wg:oauth:2.0:oob')
        code = raw_input('What is the verification code? ').strip()
        token.get_access_token(code)
        with open(".token", 'w') as f:
            pickle.dump(token, f)
    return token

token = GetAuthToken()
gd_client = gdata.photos.service.PhotosService()
old_request = gd_client.request

def request(operation, url, data=None, headers=None):
    # Inject the OAuth2 bearer token into every request the gdata client makes.
    headers = headers or {}
    headers['Authorization'] = 'Bearer ' + token.access_token
    return old_request(operation, url, data=data, headers=headers)

gd_client.request = request

photos = gd_client.GetUserFeed(kind='photo', limit='10')
for photo in photos.entry:
    print 'Recently added photo title:', photo.title.text
I wrote the code below:
from google.appengine.ext import ndb

__metaclass__ = type

class UserSession(ndb.Model):
    session = ndb.BlobProperty()

class KV:
    @staticmethod
    def get(id):
        r = ndb.Key(UserSession, int(id)).get()
        if r:
            return r.session

    @staticmethod
    def set(id, value):
        return UserSession.get_or_insert(int(id), session=value)

    @staticmethod
    def delete(id):
        ndb.Key(UserSession, int(id)).delete()
When I write
id = 1
key = ndb.Key(UserSession, int(id))
UserSession.get_or_insert(key, session=1)
the SDK raises
TypeError: name must be a string; received Key('UserSession', 1)
When I call KV.get(), the SDK raises
File "/home/bitcoin/42btc/zapp/_plugin/auth/model/gae/user.py", line 14, in get
r = ndb.Key(UserSession,int(id)).get()
...
BadRequestError: missing key id/name
So, how do I use NDB correctly?
The get_or_insert() method takes a string key name (just the ID part of the key), not a Key object, and it cannot use numeric IDs.
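So pass a string to get_or_insert() and build your Keys with that same string name. A sketch of the KV class adjusted accordingly (same methods as above, only the ID handling changes):

class KV:
    @staticmethod
    def get(id):
        # The Key must be built with the same string name used in get_or_insert().
        r = ndb.Key(UserSession, str(id)).get()
        if r:
            return r.session

    @staticmethod
    def set(id, value):
        # get_or_insert() takes the key *name* (a string), not a Key or an int.
        return UserSession.get_or_insert(str(id), session=value)

    @staticmethod
    def delete(id):
        ndb.Key(UserSession, str(id)).delete()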
I am using SOAP UI 3.0.1 for testing my web service, which returns a byte array. I want to save the byte array as a Word file. How do I accomplish this using a Groovy script, or any other way?
The web service response is:
0M8R4KGxGuEAAAAAAAAAAAAAAAAAAAAAPgADAP7/CQAGAAAAAAA...............................
Bingo!!
import org.apache.commons.codec.binary.Base64

def groovyUtils = new com.eviware.soapui.support.GroovyUtils( context )
// Build an output file name inside the project directory.
filename = groovyUtils.projectPath + "\\" + System.currentTimeMillis() + ".doc"
def objFile = new java.io.File(filename)

// Pull the base64-encoded document out of the GetDocument response.
def holder = groovyUtils.getXmlHolder('GetDocument#Response')
holder.declareNamespace('ns1', 'Utlities.Service.Documents')
def byteArray = holder.getNodeValue("//ns1:GetDocumentResponse[1]/ns1:GetDocumentResult")

// Decode the base64 string and write the raw bytes to the .doc file.
def b64 = new Base64()
def textBytes = b64.decode(byteArray.getBytes())
FileOutputStream fos = new java.io.FileOutputStream(objFile)
fos.write(textBytes)
fos.flush()
fos.close()
log.info("Output file: " + filename)