How to access GoEthereum Transaction Trie using LevelDB - database

I wanted to access the Ethereum transaction trie using LevelDB, but it only works for block 0; for any other block it doesn't give me any data. I'm also wondering how web3.eth.getTransaction() works internally.
Here is the code:
const Trie1 = require('merkle-patricia-tree').SecureTrie
const { Account, BN, bufferToHex, rlp } = require('ethereumjs-util');
var levelup = require('levelup');
var leveldown = require('leveldown');
const gethDbPath = '/Users/AK/gethDataDir/geth/chaindata'
const db = new levelup(leveldown(gethDbPath))

async function test() {
  const txnRoot = '0x51736d42e0e8fe95c3e609e5e5fb70da476613d24b5cd0d710d68190649c39f4'
  const txnRootBuffer = Buffer.from(txnRoot.slice(2), 'hex')
  const trie1 = new Trie1(db, txnRootBuffer)
  const stream = trie1.createReadStream()
  stream.on('data', (data) => {
    console.log(`key: ${bufferToHex(data.key)}`)
    console.log(`Value: ${bufferToHex(rlp.decode(data.value))}`)
  })
  .on('end', () => {
    console.log('1111--->>> Finished')
  })
}
test();
Below is the block whose transaction trie I am trying to access directly in LevelDB. I have tried almost every combination: the transactions root, the transaction hash, with and without Keccak-256, etc.
{
  difficulty: 149904,
  extraData: "0xd983010a10846765746888676f312e31372e368664617277696e",
  gasLimit: 2770332,
  gasUsed: 21000,
  hash: "0xa98255eed7b9a16b30013b8cabf8adf07def8cb772ba9d3110f72a97fd351ad7",
  logsBloom:
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
miner: "0x3f457dd6c71534bc7955c29b6a097cf39b96bfd7", mixHash:
"0x366ff733fc9e2768595a25299edec785688deb4c8fe2d53543b860e8ee0724fc",
nonce: "0x24bc70e55ec61f1d", number: 284, parentHash:
"0x17d0dee586ef5064cfcd835e4c40d34f6a5683449cd3a1a1f0f705a2bab6a938",
receiptsRoot:
"0x056b23fbba480696b65fe5a59b8f2148a1299103c4f57df839233af2cf4ca2d2",
sha3Uncles:
"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
size: 647, stateRoot:
"0xb2ae3c859e0873667bdc088217be87cab1fed2a068e9e6baa124922915c83419",
timestamp: 1654920279, totalDifficulty: 39858713, transactions:
["0x50a949ef9cab9d8edad24d8b6b33b3023ef499a6f43aabd8db1a7db85daf68f7"],
transactionsRoot:
"0x51736d42e0e8fe95c3e609e5e5fb70da476613d24b5cd0d710d68190649c39f4",
uncles: [] }

I believe the problem is your use of SecureTrie for the transaction trie instead of Trie. The transaction and receipts tries don't use hashed keys the way the state and storage tries do. I had the same issue; have a look at my code: https://ethereum.stackexchange.com/questions/130716/prove-transactionhash-of-block-using-ethereumjs-tx-and-merkle-patricia-tree
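As a rough illustration of that suggestion, here is a minimal sketch applied to the code from the question, assuming merkle-patricia-tree v4 (which exports BaseTrie) and reusing the chaindata path and transactionsRoot shown above:

// Minimal sketch, not a drop-in answer: the transaction trie is keyed by
// rlp(transactionIndex) rather than keccak256 hashes, so the plain trie
// (BaseTrie) is used instead of SecureTrie.
const { BaseTrie } = require('merkle-patricia-tree');
const { bufferToHex } = require('ethereumjs-util');
const levelup = require('levelup');
const leveldown = require('leveldown');

const gethDbPath = '/Users/AK/gethDataDir/geth/chaindata';
const db = levelup(leveldown(gethDbPath));

async function dumpTxTrie() {
  const txnRoot = '0x51736d42e0e8fe95c3e609e5e5fb70da476613d24b5cd0d710d68190649c39f4';
  const trie = new BaseTrie(db, Buffer.from(txnRoot.slice(2), 'hex'));

  trie.createReadStream()
    .on('data', (data) => {
      // key: rlp-encoded transaction index, value: rlp-encoded transaction
      console.log(`key: ${bufferToHex(data.key)}`);
      console.log(`value: ${bufferToHex(data.value)}`);
    })
    .on('end', () => console.log('Finished'));
}

dumpTxTrie();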

Related

NextJS Global Variable with Assignment

I'm new to NextJS and trying to figure out how to create a global variable that I could assign a different value to at any time. Could someone give a simple example? (I know a global might not be the best approach, but I would still like to know how to set one up.)
Let's say:
_app.js
NAME = "Ana" // GLOBAL VARIABLE
page_A.js
console.log(NAME) // "Ana"
NAME = "Ben"
page_B.js
console.log(NAME) // "Ben"
Try using environment variables:
/next.config.js
module.exports = {
  env: {
    customKey: 'my-value',
  },
}
/pages/page_A.js
function Page() {
  return <h1>The value of customKey is: {process.env.customKey}</h1>
}

export default Page
But you cannot change its value at runtime, except by editing next.config.js directly.
Next.js has no special mechanism for the kind of global variable you want. You can achieve it with:
a state management tool, such as Redux (react-redux)
React Context (see the sketch below)
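As an illustration of the Context option, here is a minimal sketch; the NameContext, NameProvider, and useName names are made up for this example. You wrap the app in a provider in _app.js and read or update the value from any page:

// _nameContext.js - all names here are illustrative.
import { createContext, useContext, useState } from 'react';

const NameContext = createContext(null);

export function NameProvider({ children }) {
  const [name, setName] = useState('Ana'); // initial "global" value
  return (
    <NameContext.Provider value={{ name, setName }}>
      {children}
    </NameContext.Provider>
  );
}

export const useName = () => useContext(NameContext);

// _app.js
// import { NameProvider } from './_nameContext';
// export default function MyApp({ Component, pageProps }) {
//   return (
//     <NameProvider>
//       <Component {...pageProps} />
//     </NameProvider>
//   );
// }

// page_A.js (inside a component)
// const { name, setName } = useName(); // "Ana"
// setName('Ben');                      // page_B will now read "Ben"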
It's not like it's impossible,
I created a file called _customGlobals.jsx and put this as content
String.prototype.title = function () {
  const sliced = this.slice(1);
  return (
    this.charAt(0).toUpperCase() +
    (sliced.toUpperCase() === sliced ? sliced.toLowerCase() : sliced)
  );
};
and imported it in _app.jsx like this:
import "./_customGlobals";
So now I can call this function on any string anywhere in my project like this:
"this is a title".title()
A database is designed for this purpose, but it's not wise to install a whole database for one variable.
Instead, you can do it with a JSON file: add the variable to a JSON file and use a function to update it.
Here is a simple function for that:
const fs = require('fs');

function updateJSONFile(filePath, updates) {
  fs.readFile(filePath, 'utf8', function (err, data) {
    if (err) {
      console.error(err);
      return;
    }
    let json = JSON.parse(data);
    for (let key in updates) {
      json[key] = updates[key];
    }
    fs.writeFile(filePath, JSON.stringify(json, null, 2), 'utf8', function (err) {
      if (err) {
        console.error(err);
      }
    });
  });
}
So, use it like this:
updateJSONFile('file.json', { name: 'John Doe', age: 30 });
You can create another function to read it dynamically:
function readJSONFile(filePath, callback) {
  fs.readFile(filePath, 'utf8', function (err, data) {
    if (err) {
      return callback(err);
    }
    let json;
    try {
      json = JSON.parse(data);
    } catch (error) {
      return callback(error);
    }
    return callback(null, json);
  });
}
and you can use it like this:
readJSONFile('file.json', function (err, json) {
  if (err) {
    console.error(err);
    return;
  }
  console.log(json);
});
Note that readJSONFile is asynchronous, so the value arrives in the callback rather than as a return value; the callback is also the place to log error messages.

Pass an Array as a query String Parameter node.js

How can I pass an array as a query string parameter?
I've tried numerous ways, including adding it to the path, but I'm not able to pull the array out on the back end.
If I hard-code the array it works fine, but when I try to pass the array from my front end to the back end it does not work properly.
Can anyone point me in the right direction?
FrontEnd
function loadJob() {
  return API.get("realtorPilot", "/myTable/ListJobs", {
    'queryStringParameters': {
      radius,
      availableServices,
    }
  });
}
BackEnd
import * as dynamoDbLib from "./libs/dynamodb-lib";
import { success, failure } from "./libs/response-lib";

export async function main(event, context) {
  const data = {
    radius: event.queryStringParameters.radius,
    availableServices: event.queryStringParameters.availableServices,
  };

  // These hold ExpressionAttributeValues
  const zipcodes = {};
  const services = {};

  data.radius.forEach((zipcode, i) => {
    zipcodes[`:zipcode${i}`] = zipcode;
  });

  data.availableServices.forEach((service, i) => {
    services[`:services${i}`] = service;
  });

  // These hold FilterExpression attribute aliases
  const zipcodex = Object.keys(zipcodes).toString();
  const servicex = Object.keys(services).toString();

  const params = {
    TableName: "myTable",
    IndexName: "zipCode-packageSelected-index",
    FilterExpression: `zipCode IN (${zipcodex}) AND packageSelected IN (${servicex})`,
    ExpressionAttributeValues: { ...zipcodes, ...services },
  };

  try {
    const result = await dynamoDbLib.call("scan", params);
    // Return the matching list of items in response body
    return success(result.Items);
  } catch (e) {
    return failure(e.message);
  }
}
Pass a comma-separated string and split it on the back end.
Example: https://example.com/apis/sample?radius=a,b,c,d&availableServices=x,y,z
Then, in the API definition, split the fields on the comma:
const data = {
  radius: event.queryStringParameters.radius.split(','),
  availableServices: event.queryStringParameters.availableServices.split(',')
};
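On the front-end side, a minimal sketch of the same idea, assuming radius and availableServices are plain arrays as in the question, would join them before sending:

// Join the arrays into comma-separated strings so they survive the query string;
// the API name and path below are the ones used in the question.
function loadJob() {
  return API.get("realtorPilot", "/myTable/ListJobs", {
    queryStringParameters: {
      radius: radius.join(','),
      availableServices: availableServices.join(','),
    }
  });
}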

relay prisma graphql update store

I am trying to get prisma and relay working. Here's my repo:
https://github.com/jamesmbowler/prisma-relay-todo
It's a simple todo list. I am able to add the todos, but the UI does not update. When I refresh, the todo is there.
All of the examples of updating the store that I can find use a "parent" of the object that is being updated / created.
See https://facebook.github.io/relay/docs/en/mutations.html#using-updater-and-optimisticupdater
Also, the "updater configs" also requires a "parentID". https://facebook.github.io/relay/docs/en/mutations.html#updater-configs
From relay-runtime's RelayConnectionHandler.js comment here:
https://github.com/facebook/relay/blob/master/packages/relay-runtime/handlers/connection/RelayConnectionHandler.js#L232
* store => {
* const user = store.get('<id>');
* const friends = RelayConnectionHandler.getConnection(user, 'FriendsFragment_friends');
* const edge = store.create('<edge-id>', 'FriendsEdge');
* RelayConnectionHandler.insertEdgeAfter(friends, edge);
* }
Is it possible to update the store without having a "parent"? I just have todos, with no parent.
Again, creating the record works, and gives this response:
{ "data" : {
"createTodo" : {
"id" : "cjpdbivhc00050903ud6bkl3x",
"name" : "testing",
"complete" : false
} } }
Here's my updater function
updater: store => {
  const payload = store.getRootField('createTodo');
  const conn = ConnectionHandler.getConnection(store.get(payload.getDataID()), 'Todos_todoesConnection');
  ConnectionHandler.insertEdgeAfter(conn, payload, cursor);
},
I have done a console.log(conn), and it is undefined.
Please help.
----Edit----
Thanks to Denis, I think one problem is solved - that of the ConnectionHandler.
But, I still can't get the ui to update. Here's what I've tried in the updater function:
const payload = store.getRootField('createTodo');
const clientRoot = store.get('client:root');
const conn = ConnectionHandler.getConnection(clientRoot, 'Todos_todoesConnection');
ConnectionHandler.createEdge(store, conn, payload, 'TodoEdge');
I've also tried this:
const payload = store.getRootField('createTodo');
const clientRoot = store.get('client:root');
const conn = ConnectionHandler.getConnection(clientRoot, 'Todos_todoesConnection');
ConnectionHandler.insertEdgeAfter(conn, payload);
My data shape is different from their example, as I don't have the 'todoEdge' and 'node' inside my returned data (see above):
todoEdge {
  cursor
  node {
    complete
    id
    text
  }
}
How do I getLinkedRecord, like this?
const newEdge = payload.getLinkedRecord('todoEdge');
If query is the parent, the parentID will be client:root.
Take a look at this: https://github.com/facebook/relay/blob/1d72862fa620a9db69d6219d5aa562054d9b93c7/packages/react-relay/classic/store/RelayStoreConstants.js#L18
Also at this issue: https://github.com/facebook/relay/issues/2157#issuecomment-385009482
Create a const ROOT_ID = 'client:root'; and pass ROOT_ID as your parentID. Also, check the name of your connection in the updater; it has to match exactly the name you used where you declared the connection in your query.
UPDATE:
Actually, you can import ROOT_ID of relay-runtime
import { ROOT_ID } from 'relay-runtime';
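Putting those pieces together, here is a minimal sketch of an updater for the createTodo mutation from the question (the connection name is taken from the question; since the mutation returns the todo node directly, with no edge wrapper, an edge is built for it first):

import { ConnectionHandler, ROOT_ID } from 'relay-runtime';

const updater = store => {
  const payload = store.getRootField('createTodo'); // the new Todo record
  const root = store.get(ROOT_ID);                  // ROOT_ID === 'client:root'
  const conn = ConnectionHandler.getConnection(root, 'Todos_todoesConnection');
  if (!conn) return; // connection not in the store yet

  // The mutation returns the node itself, so create an edge for it before inserting.
  const edge = ConnectionHandler.createEdge(store, conn, payload, 'TodoEdge');
  ConnectionHandler.insertEdgeAfter(conn, edge);
};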
UPDATE 2:
Your edit was not very clear to me, but I'll provide an example of how it should work, OK? After your mutation runs, you first access its data using getRootField, just like you are doing. So, if I have a mutation like:
mutation UserAddMutation($input: UserAddInput!) {
  UserAdd(input: $input) {
    userEdge {
      node {
        name
        id
        age
      }
    }
    error
  }
}
You will do:
const newEdge = store.getRootField('UserAdd').getLinkedRecord('userEdge');
connectionUpdater({
  store,
  parentId: ROOT_ID,
  connectionName: 'UserAdd_users',
  edge: newEdge,
  before: true,
});
This connectionUpdater is a helper function that looks like this:
export function connectionUpdater({ store, parentId, connectionName, edge, before, filters }) {
  if (edge) {
    if (!parentId) {
      // eslint-disable-next-line
      console.log('maybe you forgot to pass a parentId: ');
      return;
    }
    const parentProxy = store.get(parentId);
    const connection = ConnectionHandler.getConnection(parentProxy, connectionName, filters);
    if (!connection) {
      // eslint-disable-next-line
      console.log('maybe this connection is not in relay store yet:', connectionName);
      return;
    }
    const newEndCursorOffset = connection.getValue('endCursorOffset');
    connection.setValue(newEndCursorOffset + 1, 'endCursorOffset');
    const newCount = connection.getValue('count');
    connection.setValue(newCount + 1, 'count');
    if (before) {
      ConnectionHandler.insertEdgeBefore(connection, edge);
    } else {
      ConnectionHandler.insertEdgeAfter(connection, edge);
    }
  }
}
Hope it helps :)

How to for loop all documents in a collection - Azure CosmosDB - Nodejs

I have looked around at a few answers/questions regarding this issue but have yet to find a solution.
I have a collection with documents (simplified) as such:
{
  "id": 123,
  "stuff": "abc",
  "array": [
    {
      "id2": 456,
      "properties": [
        {
          "id3": 789,
          "important": true
        }
      ]
    }
  ]
}
For each document in my collection, I want to check each object within array and each entry in its properties for, say, important: true, and then return:
"id": 123
"id2": 456
"id3": 789
I have tried using:
client.queryDocuments(self.collection._self, querySpec).toArray(function (err, results) {
  if (err) {
    callback(err);
  } else {
    callback(null, results[0]);
  }
});
But the issue is that an array has a maximum size limit. If my collection has millions of documents, this would presumably be exceeded. (Javascript Increase max array size)
Or am I misunderstanding that question? Is it talking about the number of objects in an array (each of which can have unlimited character length)?
Thus I am looking for a for-loop-esque solution, where each document is returned, I do my analysis, then move on to the next (or do them in parallel).
Any insight would be greatly appreciated.
But the issue is that an array has a maximum size limit. If my collection has millions of documents, this would presumably be exceeded. (Javascript Increase max array size)
Based on my research, the longest possible array in JavaScript can have 2^32 - 1 = 4,294,967,295 (about 4.29 billion) elements, which is more than enough for your millions of documents. However, you certainly can't pull such a huge volume of data back in a single query.
Whether because of throughput constraints (RU settings) or query efficiency, you should process large volumes of data in batches anyway.
Thus I am looking for a for-loop-esque solution, where each document is returned, I do my analysis, then move on to the next (or do them in parallel).
Maybe you could use the v2 JS SDK for the Cosmos DB SQL API. Please refer to the sample code:
const cosmos = require('@azure/cosmos');
const CosmosClient = cosmos.CosmosClient;

const endpoint = "https://***.documents.azure.com:443/"; // Add your endpoint
const masterKey = "***"; // Add the masterkey of the endpoint
const client = new CosmosClient({ endpoint, auth: { masterKey } });

const databaseId = "db";
const containerId = "coll";

async function run() {
  const { container, database } = await init();
  const querySpec = {
    query: "SELECT r.id,r._ts FROM root r"
  };
  const queryOptions = {
    maxItemCount: -1
  }
  const queryIterator = await container.items.query(querySpec, queryOptions);
  while (queryIterator.hasMoreResults()) {
    const { result: results, headers } = await queryIterator.executeNext();
    console.log(results)
    console.log(headers)
    // do what you want to do
    if (results === undefined) {
      // no more results
      break;
    }
  }
}

async function init() {
  const { database } = await client.databases.createIfNotExists({ id: databaseId });
  const { container } = await database.containers.createIfNotExists({ id: containerId });
  return { database, container };
}

run().catch(err => {
  console.error(err);
});
For more details about the continuation token, please refer to my previous case. If you have any concerns, please let me know.
I am using the Cosmos DB SQL API Node.js library. I am unable to find the continuation token with this library so that I can return it to the client. The idea is to get it back from the client for the next pagination request.
I have working code that iterates multiple times to get all the documents. What changes would be required here to get the continuation token?
function queryCollectionPaging() {
  return new Promise((resolve, reject) => {
    function executeNextWithRetry(iterator, callback) {
      iterator.executeNext(function (err, results, responseHeaders) {
        if (err) {
          return callback(err, null);
        }
        else {
          documents = documents.concat(results);
          if (iterator.hasMoreResults()) {
            executeNextWithRetry(iterator, callback);
          }
          else {
            callback();
          }
        }
      });
    }

    let options = {
      maxItemCount: 1,
      enableCrossPartitionQuery: true
    };

    let documents = []
    let iterator = client.queryDocuments(collectionUrl, 'SELECT r.partitionkey, r.documentid, r._ts FROM root r WHERE r.partitionkey in ("user1", "user2") ORDER BY r._ts', options);

    executeNextWithRetry(iterator, function (err, result) {
      if (err) {
        reject(err)
      }
      else {
        console.log(documents);
        resolve(documents)
      }
    });
  });
};
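For reference, a minimal sketch of one way to surface the continuation token with the documentdb-style SDK used above: the token for the next page is exposed in the x-ms-continuation response header of executeNext, and it can be fed back in through the continuation feed option. The helper name below is made up; client, collectionUrl, and the query are assumed to be the same as in the code above.

// Sketch: fetch a single page and hand the continuation token back to the caller.
function queryCollectionPage(continuationToken) {
  return new Promise((resolve, reject) => {
    const options = {
      maxItemCount: 1,
      enableCrossPartitionQuery: true,
      continuation: continuationToken // token from the previous page, if any
    };
    const iterator = client.queryDocuments(collectionUrl,
      'SELECT r.partitionkey, r.documentid, r._ts FROM root r', options);

    iterator.executeNext(function (err, results, responseHeaders) {
      if (err) {
        return reject(err);
      }
      // The token for the next page lives in this response header.
      resolve({
        documents: results,
        continuationToken: responseHeaders['x-ms-continuation']
      });
    });
  });
}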

Parsing dynamic CSV through Node and writing schema in Mongo [duplicate]

Currently I need to push a large CSV file into a mongo DB and the order of the values needs to determine the key for the DB entry:
Example CSV file:
9,1557,358,286,Mutantville,4368,2358026,,M,0,0,0,1,0
9,1557,359,147,Wroogny,4853,2356061,,D,0,0,0,1,0
Code to parse it into arrays:
var fs = require("fs");
var csv = require("fast-csv");
fs.createReadStream("rank.txt")
.pipe(csv())
.on("data", function(data){
console.log(data);
})
.on("end", function(data){
console.log("Read Finished");
});
Code Output:
[ '9',
'1557',
'358',
'286',
'Mutantville',
'4368',
'2358026',
'',
'M',
'0',
'0',
'0',
'1',
'0' ]
[ '9',
'1557',
'359',
'147',
'Wroogny',
'4853',
'2356061',
'',
'D',
'0',
'0',
'0',
'1',
'0' ]
How do I insert the arrays into my mongoose schema to go into mongo db?
Schema:
var mongoose = require("mongoose");

var rankSchema = new mongoose.Schema({
  serverid: Number,
  resetid: Number,
  rank: Number,
  number: Number,
  name: String,
  land: Number,
  networth: Number,
  tag: String,
  gov: String,
  gdi: Number,
  protection: Number,
  vacation: Number,
  alive: Number,
  deleted: Number
});

module.exports = mongoose.model("Rank", rankSchema);
The order of the array needs to match the order of the schema; for instance, the first number in the array, 9, always needs to be saved under the key "serverid", and so forth. I'm using Node.js.
You can do it with fast-csv by getting the headers from the schema definition which will return the parsed lines as "objects". You actually have some mismatches, so I've marked them with corrections:
const fs = require('mz/fs');
const csv = require('fast-csv');
const { Schema } = mongoose = require('mongoose');

const uri = 'mongodb://localhost/test';

mongoose.Promise = global.Promise;
mongoose.set('debug', true);

const rankSchema = new Schema({
  serverid: Number,
  resetid: Number,
  rank: Number,
  name: String,
  land: String, // <-- You have this as Number but it's a string
  networth: Number,
  tag: String,
  stuff: String, // the empty field in the csv
  gov: String,
  gdi: Number,
  protection: Number,
  vacation: Number,
  alive: Number,
  deleted: Number
});

const Rank = mongoose.model('Rank', rankSchema);

const log = data => console.log(JSON.stringify(data, undefined, 2));

(async function() {

  try {
    const conn = await mongoose.connect(uri);

    await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));

    let headers = Object.keys(Rank.schema.paths)
      .filter(k => ['_id','__v'].indexOf(k) === -1);

    console.log(headers);

    await new Promise((resolve,reject) => {

      let buffer = [],
          counter = 0;

      let stream = fs.createReadStream('input.csv')
        .pipe(csv({ headers }))
        .on("error", reject)
        .on("data", async doc => {
          stream.pause();
          buffer.push(doc);
          counter++;
          log(doc);
          try {
            if ( counter > 10000 ) {
              await Rank.insertMany(buffer);
              buffer = [];
              counter = 0;
            }
          } catch(e) {
            stream.destroy(e);
          }
          stream.resume();
        })
        .on("end", async () => {
          try {
            if ( counter > 0 ) {
              await Rank.insertMany(buffer);
              buffer = [];
              counter = 0;
            }
            resolve(); // resolve even when the buffer was already empty
          } catch(e) {
            stream.destroy(e);
          }
        });

    });

  } catch(e) {
    console.error(e)
  } finally {
    process.exit()
  }

})()
As long as the schema actually lines up with the provided CSV, it's okay. These are the corrections I can see, but if you need the actual field names aligned differently you will need to adjust. Basically there was a Number in a position where there is a String, and essentially an extra field, which I presume is the blank one in the CSV.
The general idea is to get the array of field names from the schema and pass that into the options when creating the CSV parser instance:
let headers = Object.keys(Rank.schema.paths)
  .filter(k => ['_id','__v'].indexOf(k) === -1);

let stream = fs.createReadStream('input.csv')
  .pipe(csv({ headers }))
Once you do that, you get an "Object" back instead of an array:
{
  "serverid": "9",
  "resetid": "1557",
  "rank": "358",
  "name": "286",
  "land": "Mutantville",
  "networth": "4368",
  "tag": "2358026",
  "stuff": "",
  "gov": "M",
  "gdi": "0",
  "protection": "0",
  "vacation": "0",
  "alive": "1",
  "deleted": "0"
}
Don't worry about the "types" because Mongoose will cast the values according to schema.
The rest happens within the handler for the data event. For maximum efficiency we are using insertMany() to only write to the database once every 10,000 lines. How that actually goes to the server and processes depends on the MongoDB version, but 10,000 should be pretty reasonable based on the average number of fields you would import for a single collection in terms of the "trade-off" for memory usage and writing a reasonable network request. Make the number smaller if necessary.
The important parts are to mark these calls as async functions and await the result of the insertMany() before continuing. Also we need to pause() the stream and resume() on each item otherwise we run the risk of overwriting the buffer of documents to insert before they are actually sent. The pause() and resume() are necessary to put "back-pressure" on the pipe, otherwise items just keep "coming out" and firing the data event.
Naturally the control for the 10,000 entries requires we check that both on each iteration and on stream completion in order to empty the buffer and send any remaining documents to the server.
That's really what you want to do, as you certainly don't want to fire off an async request to the server both on "every" iteration through the data event or essentially without waiting for each request to complete. You'll get away with not checking that for "very small files", but for any real world load you're certain to exceed the call stack due to "in flight" async calls which have not yet completed.
FYI, here is the package.json used. The mz module is optional, as it's just a modernized, Promise-enabled library of standard Node "built-in" libraries that I'm simply used to using. The code is, of course, completely interchangeable with the fs module.
{
  "description": "",
  "main": "index.js",
  "dependencies": {
    "fast-csv": "^2.4.1",
    "mongoose": "^5.1.1",
    "mz": "^2.7.0"
  },
  "keywords": [],
  "author": "",
  "license": "ISC"
}
Actually, with Node v8.9.x and above we can make this much simpler with an implementation of AsyncIterator through the stream-to-iterator module. It's still in Iterator<Promise<T>> mode, but it should do until Node v10.x becomes a stable LTS:
const fs = require('mz/fs');
const csv = require('fast-csv');
const streamToIterator = require('stream-to-iterator');
const { Schema } = mongoose = require('mongoose');

const uri = 'mongodb://localhost/test';

mongoose.Promise = global.Promise;
mongoose.set('debug', true);

const rankSchema = new Schema({
  serverid: Number,
  resetid: Number,
  rank: Number,
  name: String,
  land: String,
  networth: Number,
  tag: String,
  stuff: String, // the empty field
  gov: String,
  gdi: Number,
  protection: Number,
  vacation: Number,
  alive: Number,
  deleted: Number
});

const Rank = mongoose.model('Rank', rankSchema);

const log = data => console.log(JSON.stringify(data, undefined, 2));

(async function() {

  try {
    const conn = await mongoose.connect(uri);

    await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));

    let headers = Object.keys(Rank.schema.paths)
      .filter(k => ['_id','__v'].indexOf(k) === -1);

    //console.log(headers);

    let stream = fs.createReadStream('input.csv')
      .pipe(csv({ headers }));

    const iterator = await streamToIterator(stream).init();

    let buffer = [],
        counter = 0;

    for ( let docPromise of iterator ) {
      let doc = await docPromise;
      buffer.push(doc);
      counter++;

      if ( counter > 10000 ) {
        await Rank.insertMany(buffer);
        buffer = [];
        counter = 0;
      }
    }

    if ( counter > 0 ) {
      await Rank.insertMany(buffer);
      buffer = [];
      counter = 0;
    }

  } catch(e) {
    console.error(e)
  } finally {
    process.exit()
  }

})()
Basically, all of the stream "event" handling and pausing and resuming gets replaced by a simple for loop:
const iterator = await streamToIterator(stream).init();

for ( let docPromise of iterator ) {
  let doc = await docPromise;
  // ... The things in the loop
}
Easy! This gets cleaned up in later Node implementations with for await...of once it becomes more stable, but the above runs fine from the specified version and up.
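For completeness, here is a minimal sketch of that cleanup, assuming Node 10+ where the piped stream is async-iterable (stable from Node 12); it reuses the Rank model and the fs/csv/headers setup from the listing above:

(async function () {
  const stream = fs.createReadStream('input.csv').pipe(csv({ headers }));

  let buffer = [];
  let counter = 0;

  // Each iteration yields one parsed row; batch them just like before.
  for await (const doc of stream) {
    buffer.push(doc);
    if (++counter >= 10000) {
      await Rank.insertMany(buffer);
      buffer = [];
      counter = 0;
    }
  }

  // Flush whatever is left after the stream ends.
  if (buffer.length > 0) {
    await Rank.insertMany(buffer);
  }
})();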
Unlike @Neil Lunn's approach, this one needs a header line within the CSV itself.
Example using the csvtojson module:
const csv = require('csvtojson');

const csvArray = [];
csv()
  .fromFile(filePath)
  .on('json', (jsonObj) => {
    csvArray.push({ name: jsonObj.name, id: jsonObj.id });
  })
  .on('done', (error) => {
    if (error) {
      return res.status(500).json({ error });
    }
    Model.create(csvArray)
      .then((result) => {
        return res.status(200).json({ result });
      }).catch((err) => {
        return res.status(500).json({ error: err });
      });
  });