How to use python-socketio with Django and ReactJS?

I am getting this error when using ReactJS with Django:
Not Found: /socket.io/
HTTP GET /socket.io/?EIO=4&transport=polling&t=NSwqecY 404 [0.02, 127.0.0.1:51874]
wsgi.py (file in the Django project):
import os
from django.core.wsgi import get_wsgi_application
import socketio
from myapp.server import sio
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
django_app = get_wsgi_application()
application = socketio.WSGIApp(sio, django_app)
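For /socket.io/ traffic to reach this wrapper, the process has to serve this application object itself; as a sanity check it can be run under a standalone WSGI server instead of manage.py runserver. A minimal sketch, assuming eventlet is installed (run_server.py is a made-up file name; mysite is the project name from the settings line above):

# run_server.py -- illustrative helper, assuming eventlet is installed
import eventlet
import eventlet.wsgi

# "application" is the socketio.WSGIApp built in mysite/wsgi.py above
from mysite.wsgi import application

# Serve on port 8000 so the client's io.connect("http://localhost:8000") still matches
eventlet.wsgi.server(eventlet.listen(('', 8000)), application)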
server.py (file in an app of the Django project):
async_mode = None

import os

import socketio
from django.http import HttpResponse

basedir = os.path.dirname(os.path.realpath(__file__))
sio = socketio.Server(async_mode=async_mode)
thread = None


def background_thread():
    """Example of how to send server generated events to clients."""
    count = 0
    while True:
        sio.sleep(10)
        count += 1
        sio.emit('my_response', {'data': 'Server generated event'},
                 namespace='/test')


# Start the background task once, after background_thread has been defined.
if thread is None:
    thread = sio.start_background_task(background_thread)


@sio.event
def close_room(sid, message):
    sio.emit('my_response',
             {'data': 'Room ' + message['room'] + ' is closing.'},
             room=message['room'])
    sio.close_room(message['room'])


@sio.event
def connect(sid, environ):
    sio.emit('my_response', {'data': 'Connected', 'count': 0}, room=sid)


@sio.event
def disconnect(sid):
    print('Client disconnected')
    print('disconnect ', sid)
client.js (the connection is started by calling the connect function):
connect() {
    socket = io.connect("http://localhost:8000");
    socket.on('connect', function () {
        console.log("connected to server");
    });
}
What should I do? Or is there another library or approach you would recommend?
Thanks in advance.

Related

Using AWS Lambda functions in Amazon Neptune

I created an AWS Lambda function using this example code from the AWS Neptune documentation. I followed the documentation properly and set all the necessary Lambda environment variables as well. But when I tested the function it raised an error. Could you please help me solve this issue? I wonder why the code in the AWS Neptune documentation does not work.
code:
import os, sys, backoff, math
from random import randint
from gremlin_python import statics
from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection
from gremlin_python.driver.protocol import GremlinServerError
from gremlin_python.driver import serializer
from gremlin_python.process.anonymous_traversal import traversal
from gremlin_python.process.graph_traversal import __
from gremlin_python.process.strategies import *
from gremlin_python.process.traversal import T
from tornado.websocket import WebSocketClosedError
from tornado import httpclient
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
from botocore.credentials import ReadOnlyCredentials
from types import SimpleNamespace
reconnectable_err_msgs = [
    'ReadOnlyViolationException',
    'Server disconnected',
    'Connection refused'
]

retriable_err_msgs = ['ConcurrentModificationException'] + reconnectable_err_msgs

network_errors = [WebSocketClosedError, OSError]

retriable_errors = [GremlinServerError] + network_errors


def prepare_iamdb_request(database_url):
    service = 'neptune-db'
    method = 'GET'

    access_key = os.environ['AWS_ACCESS_KEY_ID']
    secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
    region = os.environ['AWS_REGION']
    session_token = os.environ['AWS_SESSION_TOKEN']

    creds = SimpleNamespace(
        access_key=access_key, secret_key=secret_key, token=session_token, region=region,
    )

    request = AWSRequest(method=method, url=database_url, data=None)
    SigV4Auth(creds, service, region).add_auth(request)

    return httpclient.HTTPRequest(database_url, headers=request.headers.items())


def is_retriable_error(e):
    is_retriable = False
    err_msg = str(e)

    if isinstance(e, tuple(network_errors)):
        is_retriable = True
    else:
        is_retriable = any(retriable_err_msg in err_msg for retriable_err_msg in retriable_err_msgs)

    print('error: [{}] {}'.format(type(e), err_msg))
    print('is_retriable: {}'.format(is_retriable))

    return is_retriable


def is_non_retriable_error(e):
    return not is_retriable_error(e)


def reset_connection_if_connection_issue(params):
    is_reconnectable = False

    e = sys.exc_info()[1]
    err_msg = str(e)

    if isinstance(e, tuple(network_errors)):
        is_reconnectable = True
    else:
        is_reconnectable = any(reconnectable_err_msg in err_msg for reconnectable_err_msg in reconnectable_err_msgs)

    print('is_reconnectable: {}'.format(is_reconnectable))

    if is_reconnectable:
        global conn
        global g
        conn.close()
        conn = create_remote_connection()
        g = create_graph_traversal_source(conn)


@backoff.on_exception(backoff.constant,
                      tuple(retriable_errors),
                      max_tries=5,
                      jitter=None,
                      giveup=is_non_retriable_error,
                      on_backoff=reset_connection_if_connection_issue,
                      interval=1)
def query(**kwargs):
    id = kwargs['id']

    return (g.V(id)
            .fold()
            .coalesce(
                __.unfold(),
                __.addV('User').property(T.id, id)
            )
            .id().next())


def doQuery(event):
    return query(id=str(randint(0, 10000)))


def lambda_handler(event, context):
    return doQuery(event)


def create_graph_traversal_source(conn):
    return traversal().withRemote(conn)


def create_remote_connection():
    print('Creating remote connection')

    return DriverRemoteConnection(
        connection_string(),
        'g',
        pool_size=1,
        message_serializer=serializer.GraphSONSerializersV2d0())


def connection_string():
    database_url = 'wss://{}:{}/gremlin'.format(os.environ['neptuneEndpoint'], os.environ['neptunePort'])

    if 'USE_IAM' in os.environ and os.environ['USE_IAM'] == 'true':
        return prepare_iamdb_request(database_url)
    else:
        return database_url


conn = create_remote_connection()
g = create_graph_traversal_source(conn)
error message:
{
  "errorMessage": "'GraphTraversal' object is not callable",
  "errorType": "TypeError",
  "requestId": "69e6ecd3-1291-4d21-a8fa-1fc910525fc1",
  "stackTrace": [
    " File \"/var/task/lambda_function.py\", line 111, in lambda_handler\n return doQuery(event)\n",
    " File \"/var/task/lambda_function.py\", line 108, in doQuery\n return query(id=str(randint(0, 10000)))\n",
    " File \"/var/task/backoff/_sync.py\", line 105, in retry\n ret = target(*args, **kwargs)\n",
    " File \"/var/task/lambda_function.py\", line 99, in query\n return (g.V(id)\n"
  ]
}
log output:
LOGS Name: cloudwatch_lambda_agent State: Subscribed Types: [Platform]
Creating remote connection
EXTENSION Name: cloudwatch_lambda_agent State: Ready Events: [INVOKE,SHUTDOWN]
START RequestId: 69e6ecd3-1291-4d21-a8fa-1fc910525fc1 Version: $LATEST
[ERROR] TypeError: 'GraphTraversal' object is not callable
Traceback (most recent call last):
  File "/var/task/lambda_function.py", line 111, in lambda_handler
    return doQuery(event)
  File "/var/task/lambda_function.py", line 108, in doQuery
    return query(id=str(randint(0, 10000)))
  File "/var/task/backoff/_sync.py", line 105, in retry
    ret = target(*args, **kwargs)
  File "/var/task/lambda_function.py", line 99, in query
    return (g.V(id)
END RequestId: 69e6ecd3-1291-4d21-a8fa-1fc910525fc1
REPORT RequestId: 69e6ecd3-1291-4d21-a8fa-1fc910525fc1 Duration: 108.64 ms Billed Duration: 109 ms Memory Size: 128 MB Max Memory Used: 88 MB Init Duration: 1025.72 ms

How to get the output of a shell command to a React frontend through a Flask RESTful API?

I am trying to get the output of a shell command to a React frontend through a Flask RESTful API.
class SerVer(Resource):
    def put(self):
        args = cred_put_args.parse_args()
        cred = args
        return cred, 201
        for it in cred:
            client = SSHClient()
            client.set_missing_host_key_policy(AutoAddPolicy())
            client.load_system_host_keys()
            client.connect(it.ip, it.uname, it.pwd)
            stdin, stdout, stderr = client.exec_command('ps aux')
            psaux = (f'STDOUT: {stdout.read().decode("utf8")}')

    def get(self):
        return {'data': psaux}, 203


api.add_resource(SerVer, '/request', '/psaux')
The GET request is sent from react, like so:
const [psaux, psauxSet] = useState();

useEffect(() => {
  axios.get("http://127.0.0.1:5000/request").then((res) => {
    const i = res.data;
    console.log(i);
    psauxSet(i);
  });
}, []);
I understand the problem is variable scoping. I had it working at some point, and that is what I'm trying to reproduce :)
Because the indentation of the code is wrong, it should probably be:
import json

from flask import Response


class SerVer(Resource):
    def put(self):
        args = cred_put_args.parse_args()
        cred = args
        for it in cred:
            client = SSHClient()
            client.set_missing_host_key_policy(AutoAddPolicy())
            client.load_system_host_keys()
            client.connect(it.ip, it.uname, it.pwd)
            stdin, stdout, stderr = client.exec_command('ps aux')
            psaux = (f'STDOUT: {stdout.read().decode("utf8")}')
        return cred, 201

    def get(self):
        res = {
            'data': 'psaux'
        }
        return Response(mimetype="application/json", response=json.dumps(res), status=203)
Also pay attention to how you add your Resource object (SerVer) to the Api object; take a look here.
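If the goal is for the GET handler to see the output collected in put(), one minimal sketch is to keep the output on the class (or in a module-level variable) instead of a local variable. This assumes paramiko's SSHClient and the cred_put_args parser from the question, and it only works while both requests hit the same single-process server:

import json

from flask import Response
from flask_restful import Resource
from paramiko import SSHClient, AutoAddPolicy


class SerVer(Resource):
    psaux = ""  # shared by all requests handled by this process

    def put(self):
        args = cred_put_args.parse_args()  # parser defined elsewhere in the question
        for it in args:
            client = SSHClient()
            client.set_missing_host_key_policy(AutoAddPolicy())
            client.load_system_host_keys()
            # paramiko's connect() takes username/password as keyword arguments
            client.connect(it.ip, username=it.uname, password=it.pwd)
            stdin, stdout, stderr = client.exec_command('ps aux')
            SerVer.psaux = f'STDOUT: {stdout.read().decode("utf8")}'
        return args, 201

    def get(self):
        return Response(mimetype="application/json",
                        response=json.dumps({'data': SerVer.psaux}),
                        status=203)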

GET API showing KeyError in browser, but working in Postman

I have encountered a strange problem where the GET API that I created was working fine in Postman, but not working at that specific URL when entered in the browser.
Here is what the input and output should look like as shown successfully in Postman: Trying API in Postman
Here is what the error is showing on my browser: Error in browser
I am also getting this error in React about CORS headers even though I have added in code to try to handle that issue: Error in React about CORS
Here is the code in settings.py in Django:
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'Prediction',
    'rest_framework',
    'corsheaders',
]

MIDDLEWARE = [
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
Here is the code in local_settings.py in Django:
#########################################
##        IMPORT LOCAL SETTINGS        ##
#########################################
try:
    from .local_settings import *
except ImportError:
    pass

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'postgres',
        'USER': 'postgres',
        'PASSWORD': 'postgres',
        'HOST': '127.0.0.1',
        'PORT': '5432',
    }
}

#################################################################
##    (CORS) Cross-Origin Resource Sharing Settings            ##
#################################################################
CORS_ORIGIN_ALLOW_ALL = True
I also tried adding this code in index.js in React to handle the CORS headers problem but it didn't work:
const express = require('express');
const request = require('request');

const app = express();

app.use((req, res, next) => {
  res.header('Access-Control-Allow-Origin', '*');
  next();
});

app.get('/jokes/random', (req, res) => {
  request({
      url: 'https://joke-api-strict-cors.appspot.com/jokes/random'
    },
    (error, response, body) => {
      if (error || response.statusCode !== 200) {
        return res.status(500).json({
          type: 'error',
          message: error.message // the callback parameter is "error", not "err"
        });
      }
      res.json(JSON.parse(body));
    }
  );
});

const PORT = process.env.PORT || 3000;
app.listen(PORT, () => console.log(`listening on ${PORT}`));
Here is the code in App.js in React:
import logo from './logo.svg';
import './App.css';
import { useState } from 'react';
import axios from 'axios';

function App() {
  const [json_response1, set_json_response1] = useState("1st algorithm - List of similar events");

  function request_json_response() {
    axios.get('http://127.0.0.1:8000/api/get_events_1st_alg', {
      data: {
        "ID": "User_ID1"
      }
    }).then(function (response) {
      // handle success
      set_json_response1(response);
    });
  }

  return (
    <div className="App">
      <header className="App-header">
        <img src={logo} className="App-logo" alt="logo" />
        <p>
          {json_response1}
        </p>
        <button onClick={request_json_response}>
          Generate events for user
        </button>
        <a
          className="App-link"
          href="https://reactjs.org"
          target="_blank"
          rel="noopener noreferrer"
        >
          Learn React
        </a>
      </header>
    </div>
  );
}

export default App;
Here is the code in views.py in Django:
class get_events_1st_alg(APIView):
    def get(self, request, format=None):
        """
        data = request.data
        banana_dictionary = {'banana':17}
        return Response(banana_dictionary, status=status.HTTP_201_CREATED)
        """
        import pandas as pd
        import numpy as np
        import psycopg2
        import sqlalchemy
        from sklearn.metrics.pairwise import cosine_similarity
        from sklearn.metrics import pairwise_distances
        import requests
        from sqlalchemy import create_engine

        engine = create_engine('postgresql://postgres:postgres@localhost/postgres')
        # pd.read_sql_query('''SELECT * FROM arts_user_interaction LIMIT 5;''', engine)
        events = pd.read_sql_query('''SELECT * FROM arts_event;''', engine)
        Ratings = pd.read_sql_query('''SELECT * FROM arts_user_interaction;''', engine)

        Mean = Ratings.groupby(by="User_ID", as_index=False)['User Rating'].mean()
        Rating_avg = pd.merge(Ratings, Mean, on="User_ID")
        Rating_avg['adg_rating'] = Rating_avg['User Rating_x'] - Rating_avg['User Rating_y']

        check = pd.pivot_table(Rating_avg, values='User Rating_x', index='User_ID', columns='Event_ID')
        final = pd.pivot_table(Rating_avg, values='adg_rating', index='User_ID', columns='Event_ID')

        final_event = final.fillna(final.mean(axis=0))
        final_user = final.apply(lambda row: row.fillna(row.mean()), axis=1)

        cosine = cosine_similarity(final_event)
        np.fill_diagonal(cosine, 0)
        similarity_with_event = pd.DataFrame(cosine, index=final_event.index)
        similarity_with_event.columns = final_user.index

        def find_n_neighbours(df, n):
            order = np.argsort(df.values, axis=1)[:, :n]
            df = df.apply(lambda x: pd.Series(x.sort_values(ascending=False)
                                              .iloc[:n].index,
                                              index=['top{}'.format(i) for i in range(1, n + 1)]), axis=1)
            return df

        sim_user_30_e = find_n_neighbours(similarity_with_event, 30)

        def get_user_similar_events(user1, user2):
            common_events = Rating_avg[Rating_avg.User_ID == user1].merge(
                Rating_avg[Rating_avg.User_ID == user2],
                on="Event_ID",
                how="inner")
            return common_events.merge(events, on='Event_ID')

        a = get_user_similar_events('User_ID10', 'User_ID220')
        a = a.reindex(columns=['User Rating_x_x', 'User Rating_x_y', 'Name'])

        Rating_avg = Rating_avg.astype({"Event_ID": str})
        Movie_user = Rating_avg.groupby(by='User_ID')['Event_ID'].apply(lambda x: ','.join(x))

        def User_item_score1(user):
            Movie_seen_by_user = check.columns[check[check.index == user].notna().any()].tolist()
            a = sim_user_30_e[sim_user_30_e.index == user].values
            b = a.squeeze().tolist()
            d = Movie_user[Movie_user.index.isin(b)]
            l = ','.join(d.values)
            Movie_seen_by_similar_users = l.split(',')
            Movies_under_consideration = list(set(Movie_seen_by_similar_users) - set(list(map(str, Movie_seen_by_user))))
            Movies_under_consideration = list(map(str, Movies_under_consideration))
            score = []
            for item in Movies_under_consideration:
                c = final_event.loc[:, item]
                d = c[c.index.isin(b)]
                f = d[d.notnull()]
                avg_user = Mean.loc[Mean['User_ID'] == user, 'User Rating'].values[0]
                index = f.index.values.squeeze().tolist()
                corr = similarity_with_event.loc[user, index]
                fin = pd.concat([f, corr], axis=1)
                fin.columns = ['adg_score', 'correlation']
                fin['score'] = fin.apply(lambda x: x['adg_score'] * x['correlation'], axis=1)
                nume = fin['score'].sum()
                deno = fin['correlation'].sum()
                final_score = avg_user + (nume / deno)
                score.append(final_score)
            data = pd.DataFrame({'Event_ID': Movies_under_consideration, 'score': score})
            top_5_recommendation = data.sort_values(by='score', ascending=False).head(5)
            Movie_Name = top_5_recommendation.merge(events, how='inner', on='Event_ID')
            Movie_Names = Movie_Name.Name.values.tolist()
            return Movie_Names

        # user = input("Enter the user id to whom you want to recommend : ")
        data = request.data
        user = ""
        for i, v in data.items():
            user = str(v)

        predicted_movies = User_item_score1(user)
        return Response(predicted_movies, status=status.HTTP_201_CREATED)
I really don't know what I am doing, as I am just following a bunch of tutorials online, so I would love it if anyone could help resolve the API issue in the browser and the React CORS issue as well. Thank you so much!
The problem is that a GET request shouldn't have a body (it is not supported by axios). See this issue in the axios repository: link.
Change data to params:
axios.get('http://127.0.0.1:8000/api/get_events_1st_alg', { params: { "ID":"User_ID1"}})
And access it in DRF like:
user = request.query_params.get("ID")
Your CORS config is OK.
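Applied to the view in the question, the body-reading loop can then be replaced by the query-parameter lookup; a minimal sketch of the tail end of get_events_1st_alg.get(), using only names that already appear above:

# inside get_events_1st_alg.get(), replacing the request.data loop
user = request.query_params.get("ID", "")   # e.g. "User_ID1" from ?ID=User_ID1
predicted_movies = User_item_score1(user)
return Response(predicted_movies, status=status.HTTP_201_CREATED)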

akka-http How to use MergeHub to throttle requests from client side

I use Source.queue to queue up HttpRequests and throttle them on the client side to download files from a remote server. I understand that Source.queue is not thread-safe and that we need to use MergeHub to make it thread-safe. Following is the piece of code that uses Source.queue with cachedHostConnectionPool.
import java.io.File

import akka.actor.Actor
import akka.event.Logging
import akka.http.scaladsl.Http
import akka.http.scaladsl.client.RequestBuilding
import akka.http.scaladsl.model.{HttpResponse, HttpRequest, Uri}
import akka.stream._
import akka.stream.scaladsl._
import akka.util.ByteString
import com.typesafe.config.ConfigFactory

import scala.concurrent.{Promise, Future}
import scala.concurrent.duration._
import scala.util.{Failure, Success}

class HttpClient extends Actor with RequestBuilding {
  implicit val system = context.system
  val logger = Logging(system, this)
  implicit lazy val materializer = ActorMaterializer()

  val config = ConfigFactory.load()
  val remoteHost = config.getString("pool.connection.host")
  val remoteHostPort = config.getInt("pool.connection.port")
  val queueSize = config.getInt("pool.queueSize")
  val throttleSize = config.getInt("pool.throttle.numberOfRequests")
  val throttleDuration = config.getInt("pool.throttle.duration")

  import scala.concurrent.ExecutionContext.Implicits.global

  val connectionPool = Http().cachedHostConnectionPool[Promise[HttpResponse]](host = remoteHost, port = remoteHostPort)

  // Construct a Queue
  val requestQueue =
    Source.queue[(HttpRequest, Promise[HttpResponse])](queueSize, OverflowStrategy.backpressure)
      .throttle(throttleSize, throttleDuration.seconds, 1, ThrottleMode.shaping)
      .via(connectionPool)
      .toMat(Sink.foreach({
        case ((Success(resp), p)) => p.success(resp)
        case ((Failure(error), p)) => p.failure(error)
      }))(Keep.left)
      .run()

  // Convert Promise[HttpResponse] to Future[HttpResponse]
  def queueRequest(request: HttpRequest): Future[HttpResponse] = {
    val responsePromise = Promise[HttpResponse]()
    requestQueue.offer(request -> responsePromise).flatMap {
      case QueueOfferResult.Enqueued => responsePromise.future
      case QueueOfferResult.Dropped => Future.failed(new RuntimeException("Queue overflowed. Try again later."))
      case QueueOfferResult.Failure(ex) => Future.failed(ex)
      case QueueOfferResult.QueueClosed => Future.failed(new RuntimeException("Queue was closed (pool shut down) while running the request. Try again later."))
    }
  }

  def receive = {
    case "download" =>
      val uri = Uri(s"http://localhost:8080/file_csv.csv")
      downloadFile(uri, new File("/tmp/compass_audience.csv"))
  }

  def downloadFile(uri: Uri, destinationFilePath: File) = {
    def fileSink: Sink[ByteString, Future[IOResult]] =
      Flow[ByteString].buffer(512, OverflowStrategy.backpressure)
        .toMat(FileIO.toPath(destinationFilePath.toPath))(Keep.right)

    // Submit to queue and execute HttpRequest and write HttpResponse to file
    Source.fromFuture(queueRequest(Get(uri)))
      .flatMapConcat(_.entity.dataBytes)
      .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 10000, allowTruncation = true))
      .map(_.utf8String)
      .map(d => s"$d\n")
      .map(ByteString(_))
      .runWith(fileSink)
  }
}
However, when I use MergeHub, it returns Sink[(HttpRequest, Promise[HttpResponse]), NotUsed]. I need to extract response.entity.dataBytes and write the response to a file using a file sink. I am not able to figure out how to achieve this with MergeHub. Any help will be appreciated.
val hub: Sink[(HttpRequest, Promise[HttpResponse]), NotUsed] =
  MergeHub.source[(HttpRequest, Promise[HttpResponse])](perProducerBufferSize = queueSize)
    .throttle(throttleSize, throttleDuration.seconds, 1, ThrottleMode.shaping)
    .via(connectionPool)
    .toMat(Sink.foreach({
      case ((Success(resp), p)) => p.success(resp)
      case ((Failure(error), p)) => p.failure(error)
    }))(Keep.left)
    .run()
Source.queue is actually thread-safe now. If you want to use MergeHub:
private lazy val poolFlow: Flow[(HttpRequest, Promise[HttpResponse]), (Try[HttpResponse], Promise[HttpResponse]), Http.HostConnectionPool] =
  Http().cachedHostConnectionPool[Promise[HttpResponse]](host, port, connectionPoolSettings)

val ServerSink =
  poolFlow.toMat(Sink.foreach({
    case ((Success(resp), p)) => p.success(resp)
    case ((Failure(e), p)) => p.failure(e)
  }))(Keep.left)

// Attach a MergeHub Source to the consumer. This will materialize to a
// corresponding Sink.
val runnableGraph: RunnableGraph[Sink[(HttpRequest, Promise[HttpResponse]), NotUsed]] =
  MergeHub.source[(HttpRequest, Promise[HttpResponse])](perProducerBufferSize = 16).to(ServerSink)

val toConsumer: Sink[(HttpRequest, Promise[HttpResponse]), NotUsed] = runnableGraph.run()

protected[akkahttp] def executeRequest[T](httpRequest: HttpRequest, unmarshal: HttpResponse => Future[T]): Future[T] = {
  val responsePromise = Promise[HttpResponse]()
  Source.single(httpRequest -> responsePromise).runWith(toConsumer)
  responsePromise.future.flatMap(handleHttpResponse(_, unmarshal))
}
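To then get at response.entity.dataBytes and write it to a file, the download side of the question can stay essentially unchanged and just submit through the hub; the sketch below only reuses names defined above (toConsumer from this answer, Get, FileIO and the implicit materializer from the question's HttpClient) and should be treated as an outline rather than tested code:

def queueRequest(request: HttpRequest): Future[HttpResponse] = {
  val responsePromise = Promise[HttpResponse]()
  // Each caller materializes its own small Source into the shared MergeHub sink.
  Source.single(request -> responsePromise).runWith(toConsumer)
  responsePromise.future
}

def downloadFile(uri: Uri, destinationFilePath: File): Future[IOResult] =
  Source.fromFuture(queueRequest(Get(uri)))
    .flatMapConcat(_.entity.dataBytes)
    .runWith(FileIO.toPath(destinationFilePath.toPath))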

Adding methods to GAE database class

I am messing around with GAE. I want to place my database object in one file and call it from another. Here is the DB object:
import webapp2
import os
import jinja2
import json
import logging
import main
from google.appengine.ext import db


class User(db.Model):
    user_name = db.StringProperty(required=True)
    hashed_password = db.StringProperty(required=True)
    email = db.EmailProperty(required=True)
    created_dttm = db.DateTimeProperty(auto_now_add=True)
    last_modified = db.DateTimeProperty(auto_now=True)
    coords = db.GeoPtProperty(required=False)

    # def as_dict(self):
    #     time_fmt = '%c'
    #     d = {
    #         'subject': self.subject,
    #         'content': self.content,
    #         'created': self.created_dttm.strftime(time_fmt),
    #         'last_modified': self.last_modified.strftime(time_fmt)
    #     }
    #     return d

    def isValueUnique(self, column, value):
        result = None
        q = User.all()
        q.filter(column, value)
        result = q.get()
        return result
I cannot instantiate the DB because it thinks I'm trying to store data.
I want to call the isValueUnique method from another file like so:
import webapp2
import os
import jinja2
import json
import logging
import main
import database
import validation
from google.appengine.ext import db


class SignUp(main.Handler):
    def post(self):
        user_username = self.request.get("username")
        user_email = self.request.get("email")
        user_pass = self.request.get("password")
        user_verify = self.request.get("verify")

        valid = validation.Valid()

        error1 = ""
        error2 = ""
        error3 = ""
        error4 = ""

        q = database.User.all()
        q.filter("username =", user_username)
        result = q.get()
        if result:
            error1 = "Username already taken"

        if (not valid.valid_user(user_username)) and (not error1):
            error1 = "Enter a valid username"

        if not valid.valid_password(user_pass):
            error2 = "Enter a valid password"

        if not valid.valid_pass_match(user_pass, user_verify):
            error3 = "Passwords must match"

        # Email Validation
        email = valid.valid_email(user_email)
        if not email:
            error4 = "Invalid email"
            email = ""
        elif not database.User.isValueUnique("email", email):
            error4 = "Email already in use, please sign in"
            email = ""
I get this error:
elif not database.User.isValueUnique("email",email):
TypeError: unbound method isValueUnique() must be called with User instance as first argument (got str instance instead)
I can't instantiate User like I already said. What is the work around here?
database.User.isValueUnique("email",email)
This is attempting to call a method on the database.User class, but isValueUnique is an instance method.
If you decorate isValueUnique with @staticmethod you'll get further.
Where are you trying to instantiate a User?
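A sketch of what the answer suggests: with @staticmethod the query helper no longer needs a User instance. Note also that GAE db filters normally include the operator in the property string, as in the "username =" filter already used in SignUp:

from google.appengine.ext import db


class User(db.Model):
    email = db.EmailProperty(required=True)
    # ... remaining properties as in the question ...

    @staticmethod
    def isValueUnique(column, value):
        """Return the first entity matching the filter, or None if the value is unused."""
        q = User.all()
        q.filter(column, value)  # e.g. column = "email ="
        return q.get()


# Called from the other module without instantiating User, e.g.:
# existing = database.User.isValueUnique("email =", email)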
