How to display NFTs of Torus wallet? - reactjs

I've successfully integrated the Torus wallet and now I am trying to display the NFTs that a person has in the wallet. I am doing it successfully with MetaMask but I am having troubles making it work with Torus. I am guessing I have a problem with the provider? I get the following errors on the browser:
index.ts:225 Uncaught (in promise) Error: unsupported provider (argument="provider", value="[object Object]", code=INVALID_ARGUMENT, version=providers/5.5.2)
at Logger.makeError (index.ts:225)
at Logger.throwError (index.ts:237)
at Logger.throwArgumentError (index.ts:241)
at new Web3Provider (web3-provider.ts:156)
at loadNFTs (UsersNFTs.js:112)
and
VM9765:2 Uncaught ReferenceError: process is not defined
at Object.4043 (<anonymous>:2:13168)
at r (<anonymous>:2:306599)
at Object.8048 (<anonymous>:2:9496)
at r (<anonymous>:2:306599)
at Object.8641 (<anonymous>:2:1379)
at r (<anonymous>:2:306599)
at <anonymous>:2:315627
at <anonymous>:2:324225
at <anonymous>:2:324229
at HTMLIFrameElement.e.onload (index.js:1)
And this is my code:
*** UsersNFTs.js ***
let [torusSdkInstance,setTorusSdkInstance] = useState()
// Import dynamically torus wallet object
useEffect(()=>{
const initialize = async () => {
const torus = (await import("#toruslabs/torus-embed")).default;
setTorusSdkInstance(new torus({}));
}
initialize();
}, [])
const providerOptions = {
"custom-Torus": { //Torus wallet
display: {
logo: 'https://miime.io/images/wallet-login-torus-logo.png',
name: "Torus",
description: "Connect to Torus Wallet"
},
package: torusSdkInstance,
options: {
// apiKey: "EXAMPLE_PROVIDER_API_KEY"
},
connector: async (_, options) => {
await torusSdkInstance.init({
enableLogging: false,
});
await torusSdkInstance.login();
const web3 = new Web3(torusSdkInstance.provider);
return web3;
}
}
}
const web3Modal = new Web3Modal({
network: "mainnet",
cacheProvider: true,
providerOptions
});
const connection = await web3Modal.connect()
const provider = new ethers.providers.Web3Provider(connection)
const signer = provider.getSigner()
const marketContract = new ethers.Contract(nftmarketaddress, Market.abi, signer)
const tokenContract = new ethers.Contract(nftaddress, NFT.abi, provider)
const data = await marketContract.fetchMyNFTs()
*** fetchMyNFTs function in the smart contract ***
function fetchMyNFTs() public view returns (MarketItem[] memory) {
uint totalItemCount = _itemIds.current();
uint itemCount = 0;
uint currentIndex = 0;
for (uint i = 0; i < totalItemCount; i++) {
if (idToMarketItem[i + 1].owner == msg.sender) {
itemCount += 1;
}
}
MarketItem[] memory items = new MarketItem[](itemCount);
for (uint i = 0; i < totalItemCount; i++) {
if (idToMarketItem[i + 1].owner == msg.sender) {
uint currentId = idToMarketItem[i + 1].itemId;
MarketItem storage currentItem = idToMarketItem[currentId];
items[currentIndex] = currentItem;
currentIndex += 1;
}
}
return items;
}

I found the way to display the minted NFTs (not the purchased ones) of a wallet, so I want to share my solution!
I have a fetchNFTs function in my smart contract that gets the tokens made available in the market place but not sold yet (address(0)):
function fetchMarketItems() public view returns (MarketItem[] memory) {
uint itemCount = _itemIds.current();
uint unsoldItemCount = _itemIds.current() - _itemsSold.current();
uint currentIndex = 0;
MarketItem[] memory items = new MarketItem[](unsoldItemCount);
for (uint i = 0; i < itemCount; i++) {
if (idToMarketItem[i + 1].owner == address(0)) {
uint currentId = idToMarketItem[i + 1].itemId;
MarketItem storage currentItem = idToMarketItem[currentId];
items[currentIndex] = currentItem;
currentIndex += 1;
}
}
return items;
}
And in the frontend after I call this function, I just had to map through the items and select the ones which the seller address equals the address of the connected wallet, so I get the NFTs that the address has minted but not sold yet.

Related

call revert exception when calling a view function with Panic code 50

I'm getting an error when calling a view function of my solidity contract from my frontend.
The error is listed in the docs as error 32:
0x32: If you access an array, bytesN or an array slice at an out-of-bounds or negative index (i.e. x[i] where i >= x.length or i < 0).
My contract:
address public owner;
struct FoodItem {
address owner;
string url;
string name;
string originCountry;
}
FoodItem[] public foodItems;
function addFoodItem(
string memory url,
string memory name,
string memory originCountry
) public {
foodItems.push(FoodItem(msg.sender, name, url, originCountry));
}
function getFoodItemsByOwner() public view returns (FoodItem[] memory) {
uint256 itemCount = 0;
for (uint256 i = 0; i < foodItems.length; i++) {
if (foodItems[i].owner == msg.sender) {
itemCount += 1;
}
}
FoodItem[] memory myfoods = new FoodItem[](itemCount);
for (uint256 i = 0; i < foodItems.length; i++) {
if (foodItems[i].owner == msg.sender) {
myfoods[i] = foodItems[i];
}
}
return myfoods;
}
And my function from react:
const getDishesByUser = async () => {
const { ethereum } = window;
if(ethereum) {
const provider = new ethers.providers.Web3Provider(ethereum);
const signer = provider.getSigner();
const contract = new ethers.Contract(abiFoodAddress, Food.abi, signer);
const data = await contract.getFoodItemsByOwner();
console.log(data);
setDishesByuser(data)
//router.push('/');
}
};
useEffect(() => {
getDishesByUser();
}, []);
Complete error output in the console:
index.js?dd68:224 Uncaught (in promise) Error: call revert exception; VM Exception while processing transaction: reverted with panic code 50 [ See: https://links.ethers.org/v5-errors-CALL_EXCEPTION ] (method="getFoodItemsByOwner()", data="0x4e487b710000000000000000000000000000000000000000000000000000000000000032", errorArgs=[{"type":"BigNumber","hex":"0x32"}], errorName="Panic", errorSignature="Panic(uint256)", reason=null, code=CALL_EXCEPTION, version=abi/5.7.0)
You can console log foodItems in addFoodItem function to be sure that you actually pushed the items. If not, you may try:
Mapping(address => FoodItem[]) public foodItems;
So you can do:
foodItems[owner].push(FoodItem(msg.sender, name, url, originCountry));
But you have to specify who the owner is

Why flutter contacts don't work on Xiaomi device?

I'm new to Flutter and I'm trying to store contact list as name and number in local database. The code I wrote works on the Samsung A21S device that I use as an emulator, but it does not work on devices like Xiaomi, where did I go wrong?
void getContacts() async {
List<Contact>? _contacts;
List<String> contactList = [];
final bool isConnected = await InternetConnectionChecker().hasConnection;
if (isConnected) {
_contacts = await FlutterContacts.getContacts(
withThumbnail: false, withPhoto: false, withProperties: true);
for (var i = 0; i < _contacts.length; i++) {
//Error this line
var num = _contacts[i].phones.first.normalizedNumber;
var name = _contacts[i].displayName;
var nameNum = "$name,$num";
contactList.insert(i, nameNum);
}
print(contactList);
print(contactList.length);
String json = jsonEncode(contactList);
}
}

How to display NFTs using Nextjs and Solidity

I recently started studying how to develop web3 dapps and I am building a NFT martketplace.
I have been following some tutorials using solidity and web3/ethers and I managed to display the NFTs of the currently connected wallet.
My next step is to display the NFTs of any given address (not of the connected wallet) as in a gallery. I am trying to build this gallery from the code I have that displays the NFTs of the connected wallet, but I don't fully understand the code, and hence don't know how/what to change.
This is the function to load the NFTs on the page of the connected wallet:
const web3Modal = new Web3Modal({
network: "mainnet",
cacheProvider: true,
});
const connection = await web3Modal.connect()
const provider = new ethers.providers.Web3Provider(connection)
const signer = provider.getSigner()
const marketContract = new ethers.Contract(nftmarketaddress, Market.abi, signer)
const tokenContract = new ethers.Contract(nftaddress, NFT.abi, provider)
const data = await marketContract.fetchMyNFTs()
const items = await Promise.all(data.map(async i => {
const tokenUri = await tokenContract.tokenURI(i.tokenId)
const meta = await axios.get(tokenUri)
let price = Web3.utils.fromWei(i.price.toString(), 'ether');
let item = {
price,
tokenId: i.tokenId.toNumber(),
seller: i.seller,
owner: i.owner,
image: meta.data.image,
}
return item
}))
setNfts(items)
}
nfts will have all the metadata of the NFTs.
The function fetchMyNFTs, defined in the smart contract, is as follows:
uint totalItemCount = _itemIds.current();
uint itemCount = 0;
uint currentIndex = 0;
for (uint i = 0; i < totalItemCount; i++) {
if (idToMarketItem[i + 1].owner == msg.sender) {
itemCount += 1;
}
}
MarketItem[] memory items = new MarketItem[](itemCount);
for (uint i = 0; i < totalItemCount; i++) {
if (idToMarketItem[i + 1].owner == msg.sender) {
uint currentId = idToMarketItem[i + 1].itemId;
MarketItem storage currentItem = idToMarketItem[currentId];
items[currentIndex] = currentItem;
currentIndex += 1;
}
}
return items;
}
So I guess I have two questions:
Do we always need to use the smart contract when fetching NFTs metadata?
How can we display the NFTs of any given account, like zapper.fi or context.app?
I understand that this can be a very broad question but any help or direction to tutorials would be great!
Thanks!
well, most of nft contracts have structures or functions that you can use to get the information you need, like ownerOf that is usually a function that receive the token id and returns the owner, usually the contracts have mappings of the token id to the owner or the address of the owner to an array of the tokens, in this case the contract you are using have a function to return the tokens of the owner but most contracts don't have one, so you will need to understand how all this data is stored and the relation with each other

How to extract results of a Solidity payable transaction in React front-end using ethers.js?

I have a Solidity smart contract function that returns 3 variables upon transaction completion, matchednumbers, roll, winnings.
function createTicket() external payable returns (uint _matchednumbers, uint[10] memory _generatednumbers, uint _winnings) {
generateNumbers();
uint[10] memory roll = getGenerateNumbers();
uint matchednumbers;
uint winnings;
for(uint i = 0; i < 10; i++) {
for (uint j = 0; j < 10; j++) {
if (arrays[msg.sender][i] == roll[j]) {
matchednumbers +=1;
}
}
if (matchednumbers == 5) {
winnings = msg.value * 5; // msg.value is amount of wei (ether / 1e18), winnings is returning wei, winnings is being saved to next roll after win, need to fix!
}
if (matchednumbers == 6) {
winnings = msg.value * 24;
}
if (matchednumbers == 7) {
winnings = msg.value * 142;
}
if (matchednumbers == 8) {
winnings = msg.value * 1000;
}
if (matchednumbers == 9) {
winnings = msg.value * 4500;
}
if (matchednumbers == 10) {
winnings = msg.value * 10000;
}
}
return (matchednumbers, roll, winnings);
}
I'm trying to render the results of the transaction, the matchednumbers (just an int), the roll (an array of numbers) and winnings (another int) in my front-end in React after the transaction has been completed. Just need sure how to do this? Or if it's possible with a external, payable Solidity function. In my App.js I have this:
async function roll() {
if (typeof window.ethereum !== "undefined") {
await requestAccount();
const provider = new ethers.providers.Web3Provider(window.ethereum);
const signer = provider.getSigner();
const contract = new ethers.Contract(keno3Address, Keno3.abi, signer);
// const transaction = await contract.deposit({ value: ethers.utils.parseEther("6") })
try {
const data = await contract.createTicket(); // createTicket()
await data.wait();
console.log("data: ", data);
const result = ethers.iface.decodeFunctionData("transferFrom", data);
return data
} catch (err) {
console.log("Error: ", err);
}
}
}
It's returning data of the transaction in the console.log but I cannot see the outcome of those variables, which I do see in Remix if I run it there. Or do I need to decode the transaction? I also tried that but it didn't work as you see.
Uints returned from contract are in BigNumber type (read more about BigNumber here)
so data should log in console something like this:
(3) [BigNumber, Array(6), BigNumber]
Also data[0] = BigNumber{_hex: "0xfe", _isBigNumber: true} and maybe data[1] = [1,2,3,...,9,10] and data[3] = BigNumber{_hex: "0xfe", _isBigNumber: true}. Note that your result is completely different from results I wrote and they are just examples.
to change BigNumber to Number you can use function toNumber() simply adding to BigNumber.
let matchedNumbers = data[0].toNumber();
let generatedNumbers = data[1]; //this is array type, not BigNumber!
let winnings = data[2]

wrong output of classifier

I'm new to machine learning and i used an mnist demo model to train a cat and dog classifier.But it doesn't seem to work very well.Here are some diagrams of the model:
It seems that this model always predicts any input as a cat.
This is my code. Please help me.
index.js:
import {IMAGE_H, IMAGE_W, MnistData} from './data.js';
import * as ui from './ui.js';
let classNum = 0;
function createConvModel() {
const model = tf.sequential();
model.add(tf.layers.conv2d({
inputShape: [IMAGE_H, IMAGE_W, 3],
kernelSize: 5,
filters: 32,
activation: 'relu'
}));
model.add(tf.layers.maxPooling2d({poolSize: 2, strides: 2}));
model.add(tf.layers.conv2d({kernelSize: 5, filters: 32, activation: 'relu'}));
model.add(tf.layers.maxPooling2d({poolSize: 2, strides: 2}));
model.add(tf.layers.conv2d({kernelSize: 5, filters: 64, activation: 'relu'}));
model.add(tf.layers.flatten({}));
model.add(tf.layers.dense({units: 64, activation: 'relu'}));
model.add(tf.layers.dense({units: classNum, activation: 'softmax'}));
return model;
}
function createDenseModel() {
const model = tf.sequential();
model.add(tf.layers.flatten({inputShape: [IMAGE_H, IMAGE_W, 3]}));
model.add(tf.layers.dense({units: 42, activation: 'relu'}));
model.add(tf.layers.dense({units: classNum, activation: 'softmax'}));
return model;
}
async function train(model, fitCallbacks) {
ui.logStatus('Training model...');
const optimizer = 'rmsprop';
model.compile({
optimizer,
loss: 'categoricalCrossentropy',
metrics: ['accuracy'],
});
const batchSize = 64;
const trainEpochs = ui.getTrainEpochs();
let trainBatchCount = 0;
const trainData = data.getTrainData();
const valData = data.getValData();
const testData = data.getTestData();
await model.fit(trainData.xs, trainData.labels, {
batchSize:batchSize,
validationData:[valData.xs,valData.labels],
shuffle:true,
epochs: trainEpochs,
callbacks: fitCallbacks
});
console.log("complete");
const classNames = ['cat','dog'];
const [preds, labels] = doPrediction(model,testData);
const classAccuracy = await tfvis.metrics.perClassAccuracy(labels, preds);
const container = { name: 'Accuracy', tab: 'Evaluation' };
tfvis.show.perClassAccuracy(container, classAccuracy, classNames);
}
function doPrediction(model,testData) {
const testxs = testData.xs;
const labels = testData.labels.argMax([-1]);
const preds = model.predict(testxs).argMax([-1]);
testxs.dispose();
return [preds, labels];
}
function createModel() {
let model;
const modelType = ui.getModelTypeId();
if (modelType === 'ConvNet') {
model = createConvModel();
} else if (modelType === 'DenseNet') {
model = createDenseModel();
} else {
throw new Error(`Invalid model type: ${modelType}`);
}
return model;
}
async function watchTraining(model) {
const metrics = ['loss', 'val_loss', 'acc', 'val_acc'];
const container = {
name: 'charts', tab: 'Training', styles: { height: '1000px' }
};
const callbacks = tfvis.show.fitCallbacks(container, metrics);
return train(model, callbacks);
}
let data;
async function load() {
tf.disableDeprecationWarnings();
classNum = await localforage.getItem('classNum');
tfvis.visor();
data = new MnistData();
await data.load();
}
ui.setTrainButtonCallback(async () => {
ui.logStatus('Loading data...');
await load();
ui.logStatus('Creating model...');
const model = createModel();
model.summary();
ui.logStatus('Starting model training...');
await watchTraining(model);
});
data.js:
export const IMAGE_H = 64;
export const IMAGE_W = 64;
const IMAGE_SIZE = IMAGE_H * IMAGE_W;
let NUM_CLASSES = 0;
let trainImagesLabels;
let testLabels;
let trainImages ;
let testImages ;
let validateImages;
let validateLabels;
let validateSplit = 0.2;
let modelId;
let classNum;
/**
* A class that fetches the sprited MNIST dataset and provide data as
* tf.Tensors.
*/
export class MnistData {
constructor() {}
//shuffle
static shuffleSwap(arr1,arr2) {
if(arr1.length == 1) return {arr1,arr2};
let i = arr1.length;
while(--i > 1) {
let j = Math.floor(Math.random() * (i+1));
[arr1[i], arr1[j]] = [arr1[j], arr1[i]];
[arr2[i], arr2[j]] = [arr2[j], arr2[i]];
}
return {arr1,arr2};
}
async load() {
//get data from localforage
this.trainImages = await localforage.getItem('dataset');
this.trainImagesLabels = await localforage.getItem('datasetLabel');
this.modelId = await localforage.getItem('modelId');
this.classNum = await localforage.getItem('classNum');
this.trainImages.shift();
this.trainImagesLabels.shift();
//construct the validateData
let status = false;
let maxVal = Math.floor(this.trainImages.length * 0.2);
this.validateImages = new Array();
this.validateLabels = new Array();
for(let i=0;i<maxVal;i++){
if(status){
this.validateImages.push(this.trainImages.pop());
this.validateLabels.push(this.trainImagesLabels.pop());
status = false;
}else{
this.validateImages.push(this.trainImages.shift());
this.validateLabels.push(this.trainImagesLabels.shift());
status = true;
}
}
//construct the testData
this.testImages = new Array();
this.testLabels = new Array();
for(let i=0;i<maxVal;i++){
if(status){
this.testImages.push(this.trainImages.pop());
this.testLabels.push(this.trainImagesLabels.pop());
status = false;
}else{
this.testImages.push(this.trainImages.shift());
this.testLabels.push(this.trainImagesLabels.shift());
status = true;
}
}
//shuffle
let val = MnistData.shuffleSwap(this.validateImages,this.validateLabels);
this.validateImages = val.arr1;
this.validateLabels = val.arr2;
let train = MnistData.shuffleSwap(this.trainImages,this.trainImagesLabels);
this.trainImages = train.arr1;
this.trainImagesLabels = train.arr2;
}
getTrainData() {
const xs = tf.tensor4d(this.trainImages);
const labels = tf.oneHot(tf.tensor1d(this.trainImagesLabels,'int32'),this.classNum);
return {xs, labels};
}
getValData() {
const xs = tf.tensor4d(this.validateImages);
const labels = tf.oneHot(tf.tensor1d(this.validateLabels,'int32'),this.classNum);
return {xs, labels};
}
getTestData() {
const xs = tf.tensor4d(this.testImages);
const labels = tf.oneHot(tf.tensor1d(this.testLabels,'int32'),this.classNum);
return {xs, labels};
}
}
I added some pictures at the beginning.
//getclassNum
function getClassNum(files) {
let classArr = new Array();
let dirArr = new Array();
let imageNum = 0;
for (let i = 0; i < files.length; i++) {
if (files[i].type.split('/')[0] == 'image' && files[i].type.split('/')[1] == 'jpeg') {
dirArr = files[i].webkitRelativePath.split('/');
let currentClassIndex = dirArr.length - 2;
let isExist = false;
if (currentClassIndex <= 0)
isExist = true;
else {
imageNum++;
}
if (classArr == null) {
classArr.push(dirArr[currentClassIndex]);
}
for (let j = 0; j < classArr.length; j++) {
if (classArr[j] == dirArr[currentClassIndex]) {
isExist = true;
}
}
if (!isExist) {
classArr.push(dirArr[currentClassIndex]);
}
}
}
let classNum = classArr.length;
return {classNum, imageNum, classArr};
}
//get nested array
function getDataset(files, classArr,imgNum) {
let trainLabelArr = new Array();
let trainDataArr = new Array();
for (let i = 0; i < files.length; i++) {
if (files[i].type.split('/')[0] == 'image'&& files[i].type.split('/')[1] == 'jpeg') {
let dirArr = files[i].webkitRelativePath.split('/');
let currentClassIndex = dirArr.length - 2;
if (currentClassIndex >= 0) {
for(let j=0;j<classArr.length;j++){
if(dirArr[currentClassIndex]==classArr[j]){
let reader = new FileReader();
reader.readAsDataURL(files[i]);
reader.onload = function () {
document.getElementById('image').setAttribute( 'src', reader.result);
let tensor= tf.browser.fromPixels(document.getElementById('image'));
let nest = tensor.arraySync();
trainDataArr.push(nest);
trainLabelArr.push(j);
}
}
}
}
}
}
return{trainDataArr,trainLabelArr,trainDataLength}
}
//getfiles
async function fileChange(that) {
let files = that.files;
let container = getClassNum(files);
let data = getDataset(files, container.classArr,container.imageNum);
let trainDataArr = data.trainDataArr;
let trainLabelArr = data.trainLabelArr;
setTimeout(function () {
localforage.setItem('dataset',trainDataArr,function (err,result) {
});
localforage.setItem('datasetLabel',trainLabelArr,function (err,result) {
});
localforage.setItem('modelId',modelId,function (err,result) {
});
localforage.setItem('classNum',container.classNum,function (err,result) {
});
},container.imageNum * 10);
}
}
Let me answer my question. After a day of testing, I found that this model needs a lot of data. Each category requires at least 1,000 images. If there is not enough training data, the model can only output one result. Moreover, this model performs very well in recognizing objects with fewer characters such as letters and signs, and not very well in recognizing animals or natural environments.

Resources