When I used Dasha's old SDK, another file sent the call number to the main Dasha.js file via "dashaApp.enqueueJobs()" and I received the number there. In the new Dasha SDK, how can I get the info sent from enqueueJobs?
https://docs.dasha.ai/en-us/default/sdk/node-js/classes/conversationqueue
when you want to add entry
// Enqueue a job under "key"; Dasha may start the conversation at any
// point between now ("after") and one hour from now ("before").
const oneHourMs = 60 * 60 * 1000;
application.queue.push("key", {
  after: new Date(),
  before: new Date(Date.now() + oneHourMs)
});
in main handler
// "ready" fires when Dasha is about to run a queued job. `key` is the
// identifier you pushed (e.g. a phone number); use it to look up the
// conversation's input data on your side.
const onJobReady = async (key, conversation) => {
  conversation.input = getInput(key);
  await conversation.execute();
};
application.queue.on("ready", onJobReady);
Or you can use queue-less execution https://docs.dasha.ai/en-us/default/sdk/node-js/classes/application#createconversation
// Queue-less execution: deploy the app once, then run a single
// conversation with explicit input.
// BUG FIX: the npm package is "@dasha.ai/sdk" -- the leading "#" was a
// formatting mangle and would make require() fail. Also replaced the bare
// "...." line (invalid JS) with a comment.
const dasha = require("@dasha.ai/sdk");
const app = await dasha.deploy("path/to/app");
// ... your other setup here ...
const conv = app.createConversation({
  foo: 123,
});
const result = await conv.execute();
Related
I wanted to access the Ethereum Transaction Trie using LevelDB, but it works only for Block 0; for any other block it gives me no data. I'm just wondering how web3.eth.getTransaction() works internally:
Here is the code:
// BUG FIX (per the accepted answer below): the transactions trie is NOT a
// SecureTrie -- unlike the state/storage tries, its keys are not
// keccak-hashed, so the hashing SecureTrie finds nothing for blocks > 0.
// Use the plain (non-hashing) trie instead.
// NOTE(review): in merkle-patricia-tree v4 the plain trie is exported as
// `BaseTrie` -- confirm the export name for the installed version.
const Trie1 = require('merkle-patricia-tree').BaseTrie
const { Account, BN, bufferToHex, rlp } = require('ethereumjs-util');
const levelup = require('levelup');
const leveldown = require('leveldown');
const gethDbPath = '/Users/AK/gethDataDir/geth/chaindata'
// levelup is a factory function, not a class -- call it directly.
const db = levelup(leveldown(gethDbPath))
// Walks the trie rooted at the block's transactionsRoot and dumps every
// key/value pair it contains (values are RLP-decoded before printing).
async function test() {
  const txnRoot = '0x51736d42e0e8fe95c3e609e5e5fb70da476613d24b5cd0d710d68190649c39f4'
  const rootBuf = Buffer.from(txnRoot.slice(2), 'hex')
  new Trie1(db, rootBuf)
    .createReadStream()
    .on('data', ({ key, value }) => {
      console.log(`key: ${bufferToHex(key)}`)
      console.log(`Value: ${bufferToHex(rlp.decode(value))}`)
    })
    .on('end', () => {
      console.log('1111--->>> Finished')
    })
}
test();
Below is the transaction I am trying to access in LevelDB directly. I tried almost all combinations — TxnRoot, TxnHash, with or without Keccak256, etc.
{ difficulty: 149904, extraData:
"0xd983010a10846765746888676f312e31372e368664617277696e", gasLimit:
2770332, gasUsed: 21000, hash:
"0xa98255eed7b9a16b30013b8cabf8adf07def8cb772ba9d3110f72a97fd351ad7",
logsBloom:
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
miner: "0x3f457dd6c71534bc7955c29b6a097cf39b96bfd7", mixHash:
"0x366ff733fc9e2768595a25299edec785688deb4c8fe2d53543b860e8ee0724fc",
nonce: "0x24bc70e55ec61f1d", number: 284, parentHash:
"0x17d0dee586ef5064cfcd835e4c40d34f6a5683449cd3a1a1f0f705a2bab6a938",
receiptsRoot:
"0x056b23fbba480696b65fe5a59b8f2148a1299103c4f57df839233af2cf4ca2d2",
sha3Uncles:
"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
size: 647, stateRoot:
"0xb2ae3c859e0873667bdc088217be87cab1fed2a068e9e6baa124922915c83419",
timestamp: 1654920279, totalDifficulty: 39858713, transactions:
["0x50a949ef9cab9d8edad24d8b6b33b3023ef499a6f43aabd8db1a7db85daf68f7"],
transactionsRoot:
"0x51736d42e0e8fe95c3e609e5e5fb70da476613d24b5cd0d710d68190649c39f4",
uncles: [] }
I believe it's your use of SecureTrie for the transaction trie instead of Trie. The Transaction and Receipts tries aren't hashed like the State and Storage tries are. I had the same issue; go look at my code https://ethereum.stackexchange.com/questions/130716/prove-transactionhash-of-block-using-ethereumjs-tx-and-merkle-patricia-tree
Question: Why does navigating appear to change the readyState of the previous EventSources?
===============================================
Explanation: I'm working on a frontend (React) in which the user can enter a sequence of search queries (i.e. strings) and for each search query, my backend (Flask) will return a sequence of URLs. For each search query, I've decided to receive the server's response via an EventSource. Specifically, I first create a React state array of backendEventSources:
// State array holding every EventSource opened so far (newest first; the
// effect below prepends each new one "for persistence").
const [backendEventSources, setBackendEventSources] = useState([]);
Then I update the backendEventSources when a new prompt comes in:
// Opens a new EventSource for the newest prompt whenever `prompt` changes,
// wires up its listeners, and stores it in backendEventSources.
// NOTE(review): the cleanup returned at the bottom runs every time `prompt`
// changes (and on unmount), closing the EventSource created by the PREVIOUS
// effect run -- that is why the previous source's readyState becomes
// 2 (CLOSED) after navigating.
useEffect(() => {
console.log('Inside useEffect')
// Take 0 for the newest prompt.
const newBackendEventSource = new EventSource(
`https://localhost:8080/generate?counter=${promptsResultsArray[0].counter}&prompt=${promptsResultsArray[0].prompt}`,
{withCredentials: false})
newBackendEventSource.addEventListener('open', () => {
console.log('SSE opened!');
});
newBackendEventSource.addEventListener('error', (e) => {
console.log('SSE error!');
console.error('Error: ', e);
});
// NOTE(review): 'close' is a CUSTOM server-sent event type -- browsers
// never emit it natively. This handler only fires if the Flask backend
// sends `event: close`, which would explain why it is never triggered.
newBackendEventSource.addEventListener('close', (e) => {
console.log('SSE closed!');
const data = JSON.parse(e.data);
console.log("close data: ", data);
newBackendEventSource.close();
});
// NOTE(review): this callback reads `promptsResultsArray` from the closure,
// but the dependency array below is only [prompt] -- the array captured
// here may be stale by the time messages arrive; confirm this is intended.
newBackendEventSource.addEventListener('message', (e) => {
const data = JSON.parse(e.data);
console.log("message data: ", data);
// https://stackoverflow.com/a/47580775/4570472
const newPromptsResultsArray = [...promptsResultsArray];
// Since we preprend new results, we need to compute the right index from
// the counter with the equation: length - counter - 1.
// e.g., For counter 2 of a length 3 array, we want index 0.
// e.g., For counter 2 of a length 4 array, we want index 1.
// Recall, the counter uses 0-based indexing.
const index = newPromptsResultsArray.length - data.counter - 1
newPromptsResultsArray[index].URIs = [data.uri];
newPromptsResultsArray[index].isLoading = false;
setPromptsResultsArray(newPromptsResultsArray);
// Instantiating the element and setting the src property starts preloading the image.
// for (const newImgURI of newImgURIs) {
// const imageElement = new Image();
// imageElement.src = newImgURI;
// }
// setTimeout(() => {setImgURIs(newImgURIs)}, 8000);
});
// Add new backend event source to state for persistence.
setBackendEventSources(backendEventSources => [
newBackendEventSource,
...backendEventSources])
// Cleanup: closes THIS effect run's EventSource when `prompt` changes
// again or the component unmounts (readyState becomes 2 = CLOSED).
return () => {
newBackendEventSource.close();
};
}, [prompt]);
I use URL params for React navigation:
// Records a new pending prompt result, updates the prompt state, and
// navigates to the results route for it.
const navigateToGenerateResults = (promptString) => {
  console.log('Adding new prompt results to promptsResultsArray');
  // State doesn't update immediately (or even synchronously). To ensure we can immediately
  // access the new values, we create a newPromptsResults.
  // https://stackoverflow.com/a/62900445/4570472
  const newPromptsResults = {
    isLoading: true,
    prompt: promptString,
    counter: promptsResultsArray.length,
    URIs: ["https://simtooreal-public.s3.amazonaws.com/white_background.png"]
  }
  // Prepend the new prompt to our promptsResultsArray
  // https://stackoverflow.com/a/60792858/4570472
  setPromptsResultsArray(promptsResultsArray => [
    newPromptsResults, ...promptsResultsArray])
  console.log('Setting prompt to: ' + newPromptsResults.prompt)
  setPrompt(newPromptsResults.prompt)
  console.log('Navigating from /generate to /generate with prompt: ' + newPromptsResults.prompt)
  // BUG FIX: the prompt is user-supplied free text; interpolating it raw
  // breaks the URL on '&', '#', '?', spaces, etc. Encode it as a
  // query-string value.
  navigate(`/generate?counter=${newPromptsResults.counter}&prompt=${encodeURIComponent(newPromptsResults.prompt)}`)
}
However, I've discovered that as soon as I navigate from one URL to another, the previous EventSource's readyState switches from 0/1 to 2. Additionally, my newBackendEventSource 'close' event listener is never triggered.
Why does navigating appear to change the readyState of the previous EventSources?
const config = require(`${process.cwd()}/botconfig/config.json`)
var ee = require(`${process.cwd()}/botconfig/embed.json`)
const fetch = require("node-fetch");
const { MessageEmbed } = require(`discord.js`);
module.exports = {
//definition
name: "glifestats", //the name of the command
category: "⌨️ Programming", //the category this will be listed at, for the help cmd
aliases: [""], //every parameter can be an alias
cooldown: 4, //this will set it to a 4 second cooldown
usage: "glifestats <id>", //this is for the help command for EACH cmd
description: "check stats", //the description of the command
};
run: async (client, message, args, cmduser, text, prefix) => {
await interaction.deferReply();
const term = interaction.options.getString('term');
const query = new URLSearchParams({ term });
const { list } = await fetch(`https://api.gtaliferp.fr:8443/v1/extinction/profiles/main/${query}`)
.then(response => response.json());
}
When I try to run this command, it does nothing.
It needs to get the query from the user message and post it to the url, example:
.command 4443
the bot returns the data from https://api.gtaliferp.fr:8443/v1/extinction/profiles/main/4443 and posts it.
Also, I wanted to put the different fields in an embed like this: data 1: data, data 2: data, ... but I can't do it (check the URL provided for the data titles if you want to help with that).
So you separated your module from the rest of the command with the line marked below
module.exports = {
//definition
name: "glifestats", //the name of the command
category: "⌨️ Programming", //the category this will be listed at, for the help cmd
aliases: [""], //every parameter can be an alias
cooldown: 4, //this will set it to a 4 second cooldown
usage: "glifestats <id>", //this is for the help command for EACH cmd
description: "check stats", //the description of the command
} //here
Also, `term` is requesting an option that is not defined
const term = interaction.options.getString('term');
Interaction is not defined
run: async (client, message, args, cmduser, text, prefix) => {
await interaction.deferReply();
const term = interaction.options.getString('term');
Try this
// Corrected slash-command module: `run` now lives INSIDE module.exports
// (the original closed the export early), receives the interaction as a
// parameter, and declares the "term" option it reads.
const config = require(`${process.cwd()}/botconfig/config.json`) // is this needed if not, delete
var ee = require(`${process.cwd()}/botconfig/embed.json`) // is this needed if not, delete
const fetch = require("node-fetch");
const {
MessageEmbed
} = require(`discord.js`); // is this needed if not, delete
module.exports = {
name: "glifestats", //the name of the command
category: "⌨️ Programming", //the category this will be listed at, for the help cmd
aliases: [""], //every parameter can be an alias
cooldown: 4, //this will set it to a 4 second cooldown
usage: "glifestats <id>", //this is for the help command for EACH cmd
description: "check stats", //the description of the command
// Slash-command option definition -- required so getString('term') below
// returns a value instead of null.
// NOTE(review): a string `type` works in discord.js v13; v14 expects
// ApplicationCommandOptionType numbers -- confirm the library version.
options: [{
name: "term",
description: "Code to search",
required: true,
type: "STRING",
}],
run: async (client, interaction) => {
await interaction.deferReply();
const term = interaction.options.getString('term');
const url = `https://api.gtaliferp.fr:8443/v1/extinction/profiles/main/${term}`
const list = await fetch(url).then(response => response.json())
// Do something with list
console.log(list)
return interaction.followUp({
content: `List has been logged but can't be sent yet because it is an object and need to be further worked to get specific elements from it`
})
// Above seemed cleaner to me
/*
const query = new URLSearchParams({
term
});
const {
list
} = await fetch(`https://api.gtaliferp.fr:8443/v1/extinction/profiles/main/${query}`)
.then(response => response.json());
*/
}
}
I want to programmatically query the balance of an address or a list of addresses. What's the best way to do it?
To get the balance of 0xad23b02673214973e354d41e19999d9e01f3be58 (the address used in the curl command below) at the latest block, do:
curl -X POST -H 'Content-Type: application/json' -s --data '{"jsonrpc":"2.0","method":"eth_getBalance","params":["0xad23b02673214973e354d41e19999d9e01f3be58", "latest"], "id":1}' https://mainnet-rpc.thundercore.com/
Output: {"jsonrpc":"2.0","id":1,"result":"0xde0b6b3a7640000"}
Fetching the balance of a single account with web3.js:
const Eth = require('web3-eth');
const Web3 = require('web3');
// Returns the provider injected by the environment (e.g. MetaMask) if
// present, otherwise falls back to the public ThunderCore RPC endpoint.
// BUG FIX: the web3.js property is `givenProvider`; `giveProvider` is
// always undefined, so the fallback URL was used unconditionally.
const web3Provider = () => {
  return Eth.givenProvider || 'https://mainnet-rpc.thundercore.com';
}
// Fetches one account's balance and converts it from wei to ether units.
const balance = async (address) => {
  const eth = new Eth(web3Provider());
  const wei = await eth.getBalance(address);
  return Web3.utils.fromWei(wei);
}
Sample Session
const address = '0xc466c8ff5dAce08A09cFC63760f7Cc63734501C1';
await balance(address) // -> '1'
Units
0xde0b6b3a7640000 equals 10**18
Using Ethereum terminology, fromWei converts 10^18 Wei to 1 Ether.
Using ThunderCore terminology, fromWei converts 10^18 Ella to 1 TT.
fromWei(0xde0b6b3a7640000) equals fromWei(10**18) equals 1
Batch Request to Query Balance
For an array of addresses, you can use JSON-RPC 2.0's Batch Requests to save network round trips
When querying https://mainnet-rpc.thundercore.com, limit the batch size to around 30
The following class wraps web3.js-1.2.6's BatchRequest and make it return a Javascript Promise:
/**
 * Promise adapter around web3.js's callback-based BatchRequest.
 * Collect requests with add(), then await execute() to receive an array
 * of results in the same order the requests were added.
 */
class BatchRequest {
  constructor(web3) {
    this.b = new web3.BatchRequest();
    this.results = [];
    this.resultsFilled = 0;
    this.resolve = null;
    this.reject = null;
  }
  /* Shared per-request callback: stores the result in its slot, resolving
     once every slot is filled, or rejecting on the first error. */
  web3BatchRequestCallBack(index, err, result) {
    if (err) {
      this.reject(new Error(`request ${index} failed: ${err}`))
      return;
    }
    this.results[index] = result;
    this.resultsFilled += 1;
    if (this.resultsFilled === this.results.length) {
      this.resolve(this.results);
    }
  }
  /* Captures resolve/reject so the shared callback above can settle the
     promise returned by execute(). */
  resultPromiseExecutor(resolve, reject) {
    this.resolve = resolve;
    this.reject = reject;
  }
  add(method /* web3-core-method.Method */) {
    const slot = this.results.length;
    this.results.push(undefined);
    method.callback = (err, result) => this.web3BatchRequestCallBack(slot, err, result);
    this.b.add(method);
  }
  execute() /*: Promise */ {
    /* The executor runs synchronously inside the Promise constructor, so
       resolve/reject are captured before b.execute() fires any callback. */
    const p = new Promise((resolve, reject) => this.resultPromiseExecutor(resolve, reject));
    this.b.execute();
    return p;
  }
}
/**
 * Fetches balances for an array of addresses in a single JSON-RPC batch.
 * Returns the balances converted from Ella (wei) to TT (ether) as strings,
 * in the same order as `addresses`.
 * BUG FIX: the loop variables were undeclared (`for (a of ...)`,
 * `for (e of ...)`), which creates implicit globals in sloppy mode and
 * throws a ReferenceError in strict mode / ES modules. They are now
 * `const`-declared.
 */
const balanceBatch = async (addresses) => {
  const eth = new Eth(web3Provider());
  const b = new BatchRequest(eth);
  for (const a of addresses) {
    b.add(eth.getBalance.request(a));
  }
  const ellaS = await b.execute();
  const ttS = [];
  for (const e of ellaS) {
    ttS.push(Web3.utils.fromWei(e));
  }
  return ttS;
}
batch-balance-test.js
const Web3 = require('web3');
(async() => {
  const web3 = new Web3('https://mainnet-rpc.thundercore.com');
  const results = await balanceBatch([
    '0xc466c8ff5dAce08A09cFC63760f7Cc63734501C1',
    '0x4f3c8e20942461e2c3bdd8311ac57b0c222f2b82',
  ]);
  console.log('results:', results);
})().catch((err) => {
  // BUG FIX: the async IIFE's promise was floating -- any rejection became
  // an unhandled promise rejection. Log it and fail the process instead.
  console.error(err);
  process.exitCode = 1;
});
Sample Session
$ node batch-balance-test.js
results: [ '1', '84.309961496' ]
See the complete project setup here in the balance branch of the field-support repo.
Got a component in AngularJS (porting soon to Angular 7) for updating a user profile that invokes an AngularJS service method to perform a PUT to /api/user/:id.
Want to add a small photo (<100K) to send in the same PUT with the other fields then handle the request like this in the controller...
// route: PUT /api/user/:id
import db from '../../utils/db';
// Updates a user row with the fields from the request body and echoes the
// updated row back to the client.
export async function upsert(req, res) {
  const { id, name, phone, email, photo } = req.body;
  // users.photo in PostgreSQL has datatype of bytea
  // BUG FIXES: the SQL had a stray ')' after "photo = $5"; the db.query(...)
  // call was missing its closing parenthesis; and the query was not awaited,
  // so `rows` was destructured from a pending Promise (undefined).
  const sql = `UPDATE users SET name = $2, phone = $3, email = $4, photo = $5 WHERE id = $1 RETURNING id, name, phone, email, photo;`;
  const { rows } = await db.query(sql, [id, name, phone, email, photo]);
  return res.status(200).send(rows);
}
Is there a clean way to encode the image client-side so it can be included in the JSON the AngularJS service PUTs? Other solutions I found seem like overkill for this use-case - requiring the image upload to be handled very differently from the other fields.
Ended up biting the bullet - creating a separate table for files along with its own API using formidable and node-postgres. In case this helps anyone else, here's how it turned out.
PostgreSQL data definition...
-- Sequence backing files._id, kept explicit so it can be dropped/reset
-- independently of the table.
-- DROP SEQUENCE public.files_seq;
CREATE SEQUENCE IF NOT EXISTS public.files_seq;
-- Uploaded files; the binary contents live in `data` (bytea).
-- DROP TABLE public.files;
CREATE TABLE IF NOT EXISTS public.files (
_id integer PRIMARY KEY DEFAULT nextval('files_seq'::regclass),
name character varying(512) NOT NULL,
type character varying(20) NOT NULL,
data bytea
);
-- DROP INDEX public.files_name;  (previous comment mislabelled this as users_first_name)
CREATE INDEX files_name ON public.files USING btree (name);
Controller for Express...
import stream from 'stream';
import fs from 'fs';
import { IncomingForm } from 'formidable';
import db from '../../utils/db';
// Returns list of images
export async function index(req, res) {
  // Metadata only -- the bytea `data` column is deliberately excluded.
  const listSql = 'SELECT _id, name, type FROM files ORDER BY name;';
  const { rows } = await db.query(listSql, []);
  return res.send(rows);
}
// Uploads a single file
export async function upload(req, res) {
  // BUG FIX: the original `throw`s inside Node-style callbacks, which either
  // crashes the process or becomes an unhandled rejection and never answers
  // the client. Respond with an error status instead. The pointless outer
  // `_id` variable (written after the response was already sent) is removed.
  new IncomingForm().parse(req, (err, fields, files) => {
    if(err) return res.status(500).send({ error: String(err) });
    if(Array.isArray(files)) return res.status(400).send({ error: 'Only one file can be uploaded at a time' });
    const { name, type, path } = files.file;
    // Read the temp file as hex and prefix with \x so node-postgres stores
    // it in the bytea column.
    fs.readFile(path, 'hex', async(readErr, hexData) => {
      if(readErr) return res.status(500).send({ error: String(readErr) });
      try {
        const sql = 'INSERT INTO files (name, type, data) VALUES($1, $2, $3) RETURNING _id;';
        const { rows } = await db.query(sql, [name, type, `\\x${hexData}`]);
        res.send({ id: rows[0]._id });
      } catch(dbErr) {
        res.status(500).send({ error: String(dbErr) });
      }
      // No need to delete the file uploaded as Heroku has an ephemeral file system
    });
  });
}
// Downloads a file by its _id
export async function download(req, res) {
  const _id = req.params.id;
  const sql = 'SELECT _id, name, type, data FROM files WHERE _id = $1;';
  const { rows } = await db.query(sql, [_id]);
  // BUG FIX: rows[0] was dereferenced unconditionally, crashing with a
  // TypeError for an unknown id. Return 404 instead.
  if (rows.length === 0) {
    return res.status(404).send({ message: `File ${_id} not found.` });
  }
  const file = rows[0];
  // NOTE(review): node-postgres normally returns bytea as a Buffer, in which
  // case the 'base64' argument is ignored -- confirm `data` is not actually
  // a base64 string before relying on this.
  const fileContents = Buffer.from(file.data, 'base64');
  const readStream = new stream.PassThrough();
  readStream.end(fileContents);
  res.set('Content-disposition', `attachment; filename=${file.name}`);
  res.set('Content-Type', file.type);
  readStream.pipe(res);
  return rows[0];
}
// Deletes a file from the database (admin-only)
export async function destroy(req, res) {
  // Remove the row, then acknowledge with 204 (no content expected).
  const { id: _id } = req.params;
  await db.query('DELETE FROM files WHERE _id = $1;', [_id]);
  res.status(204).send({ message: `File ${_id} deleted.`});
}
On the client side, I'm using ng-file-upload with AngularJS.
Here's the relevant part of the view (in pug for brevity)...
.form-group.col-md-6
label(for='photo') Teacher photo
input.form-control(ngf-select='$ctrl.uploadPhoto($file)', type='file', id='photo', name='photo', ng-model='$ctrl.user.photo', ngf-pattern="'image/*'", ngf-accept="'image/*'", ngf-max-size='100KB', ngf-min-height='276', ngf-max-height='276', ngf-min-width='236', ngf-max-width='236', ngf-resize='{width: 236, height: 276}', ngf-model-invalid='errorFile')
ng-messages.help-block.has-error(for='form.photo.$error', ng-show='form.photo.$dirty || form.$submitted', role='alert')
ng-message(when='maxSize') Please select a photo that is less than 100K.
ng-message(when='minHeight,maxHeight,minWidth,maxWidth') The image must be 236 x 276 pixels.
span(ng-if='$ctrl.user.imageId')
img(ng-src='/api/file/{{ $ctrl.user.imageId }}' alt="Photo of Teacher")
and method in its controller...
uploadPhoto(file) {
if(file) {
this.uploadService.upload({
url: '/api/file/upload',
data: { file }
})
.then(response => {
this.user.imageId = response.data.id;
}, response => {
if(response.status > 0) console.log(`${response.status}: ${response.data}`);
}, evt => {
// Math.min is to fix IE which reports 200% sometimes
this.uploadProgress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total, 10));
});
}
}