I'm getting the following error in my React application using enigma.js (https://qlik.dev/apis/javascript/enigmajs). I'm trying to initialize a WebSocket connection and I'm getting the error: "Failed to construct 'WebSocket': The subprotocol '[object Object]' is invalid".
The WebSocket connection URL is correct, as it can be tested with https://catwalk.core.qlik.com/?engine_url=wss://sense-demo.qlik.com/app/133dab5d-8f56-4d40-b3e0-a6b401391bde, which returns the data. You can verify this by editing the URL, which will then return an error.
The code is:
async init() {
  const appId = "133dab5d-8f56-4d40-b3e0-a6b401391bde";
  const url =
    "wss://sense-demo.qlik.com/app/133dab5d-8f56-4d40-b3e0-a6b401391bde";
  const session = enigma.create({
    schema,
    createSocket: () => new WebSocket(url, {}),
  });
  const global = await session.open();
  const app = await global.openDoc(appId);
  const appLayout = await app.getAppLayout();
  console.log(appLayout);
}
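For reference, the second argument to the browser's WebSocket constructor is the subprotocol list (a string or an array of strings), not an options object, so the empty object above gets stringified to '[object Object]' and rejected. A minimal sketch of the createSocket factory without that argument would be:
// Minimal sketch: omit the second argument unless an actual subprotocol is required.
const session = enigma.create({
  schema,
  createSocket: () => new WebSocket(url),
});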
I found the solution:
qDoc.config.js
const enigma = require('enigma.js');
const schema = require('enigma.js/schemas/12.20.0.json');
const SenseUtilities = require('enigma.js/sense-utilities');
const config = {
  host: 'sense-demo.qlik.com',
  secure: true,
  port: 443,
  prefix: '',
  appId: '133dab5d-8f56-4d40-b3e0-a6b401391bde',
};

const url = SenseUtilities.buildUrl(config);

async function init() {
  const session = enigma.create({
    schema,
    url,
    suspendOnClose: true,
  });
  const global = await session.open();
  const app = await global.openDoc(config.appId);
  const appLayout = await app.getAppLayout();
  console.log(appLayout);
}

init();

const session = enigma.create({ schema, url, suspendOnClose: true });

// open doc and return promise which will resolve to doc
export const openDoc = () => (
  session.open().then((global) => global.openDoc(config.appId))
);

// close session
export const closeSession = () => (
  session.close()
);
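For illustration, here is a hypothetical React component consuming those helpers. The import path, component name, and layout handling are assumptions for the sketch, not part of the original project; only openDoc, closeSession, and getAppLayout come from the code above.
// Hypothetical usage of the helpers exported from qDoc.config.js.
import React, { useEffect, useState } from 'react';
import { openDoc, closeSession } from './qDoc.config';

function AppLayout() {
  const [layout, setLayout] = useState(null);

  useEffect(() => {
    // Open the session/doc, fetch the layout, and close the session on unmount.
    openDoc()
      .then((app) => app.getAppLayout())
      .then(setLayout)
      .catch(console.error);
    return () => { closeSession(); };
  }, []);

  return <pre>{layout ? JSON.stringify(layout, null, 2) : 'Loading…'}</pre>;
}

export default AppLayout;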
INSTRUCTIONS
Download this project
Delete the package-lock.json file
npm i
npm run-script dev
The solution is explained here
https://github.com/qlik-oss/enigma.js/issues/889
Related
I am currently working on a React front-end for Amazon AWS S3. The goal of the project is to allow a user to upload multiple images to an S3 bucket, and then call a Lambda function to send the uploaded images to Amazon Rekognition for labeling. The results will then be returned in a CSV file that the user can download as the output.
However, I am currently encountering an issue where I am getting the following error message:
Failed to compile.
export 'downloadCSV' (reexported as 'downloadCSV') was not found in './rekognitionActions' (possible exports: addFacesToCollection, createCollection, detectLabels, getLabeledResults, handleFileUpload, indexFaces, parseCSV, searchFaces, searchFacesByImage, uploadImages, uploadImagesAndGetLabels)
ERROR in ./src/components/actions/index.js 5:0-84
export 'downloadCSV' (reexported as 'downloadCSV') was not found in './rekognitionActions' (possible exports: addFacesToCollection, createCollection, detectLabels, getLabeledResults, handleFileUpload, indexFaces, parseCSV, searchFaces, searchFacesByImage, uploadImages, uploadImagesAndGetLabels)
ERROR in ./src/components/actions/rekognitionActions.js 4:0-38
Module not found: Error: Can't resolve 'fs' in 'C:\Users\luisj\Desktop\awsapp\awsapp\src\components\actions'
I am also receiving
Module not found: Error: Can't resolve 'fs' in 'C:\Users\luisj\Desktop\awsapp\awsapp\src\components\actions'
I am unsure of what is causing this issue and would greatly appreciate any help in resolving it.
rekognitionActions.js
import AWS from 'aws-sdk';
import { createReadStream } from 'fs';
import Papa from 'papaparse';

const rekognition = new AWS.Rekognition({
  accessKeyId: process.env.REACT_APP_AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.REACT_APP_AWS_SECRET_ACCESS_KEY,
});

const s3 = new AWS.S3({
  accessKeyId: process.env.REACT_APP_AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.REACT_APP_AWS_SECRET_ACCESS_KEY,
});

export const detectLabels = async (image) => {
  const params = {
    Image: {
      S3Object: {
        Bucket: process.env.REACT_APP_AWS_S3_BUCKET,
        Name: image,
      },
    },
    MaxLabels: 10,
    MinConfidence: 80,
  };
  const data = await rekognition.detectLabels(params).promise();
  return data.Labels;
};

export const createCollection = async (collectionId) => {
  const params = {
    CollectionId: collectionId,
  };
  await rekognition.createCollection(params).promise();
};

export const indexFaces = async (collectionId, image) => {
  const params = {
    CollectionId: collectionId,
    Image: {
      S3Object: {
        Bucket: process.env.REACT_APP_AWS_S3_BUCKET,
        Name: image,
      },
    },
  };
  const data = await rekognition.indexFaces(params).promise();
  return data.FaceRecords;
};

export const searchFacesByImage = async (collectionId, image) => {
  const params = {
    CollectionId: collectionId,
    Image: {
      S3Object: {
        Bucket: process.env.REACT_APP_AWS_S3_BUCKET,
        Name: image,
      },
    },
    MaxFaces: 10,
    FaceMatchThreshold: 80,
  };
  const data = await rekognition.searchFacesByImage(params).promise();
  return data.FaceMatches;
};

export const uploadImages = async (images) => {
  const uploadPromises = images.map(async (image) => {
    const params = {
      Bucket: process.env.REACT_APP_AWS_S3_BUCKET,
      Key: image.name,
      Body: image,
    };
    await s3.upload(params).promise();
  });
  await Promise.all(uploadPromises);
};

export const getLabeledResults = async (images) => {
  const labelPromises = images.map(async (image) => {
    const labels = await detectLabels(image.name);
    return { imageName: image.name, labels };
  });
  const labeledResults = await Promise.all(labelPromises);
  return labeledResults;
};

export const uploadImagesAndGetLabels = async (images) => {
  await uploadImages(images);
  const labeledResults = await getLabeledResults(images);
  return labeledResults;
};

export const parseCSV = async (file) => {
  return new Promise((resolve, reject) => {
    Papa.parse(createReadStream(file), {
      header: true,
      complete: (results) => {
        resolve(results.data);
      },
      error: (error) => {
        reject(error);
      },
    });
  });
};

export const addFacesToCollection = async (collectionId, images) => {
  const indexFacePromises = images.map(async (image) => {
    const indexedFaces = await indexFaces(collectionId, image.name);
    return { imageName: image.name, indexedFaces };
  });
  const indexedResults = await Promise.all(indexFacePromises);
  return indexedResults;
};

export const searchFaces = async (collectionId, images) => {
  const searchFacePromises = images.map(async (image) => {
    const faceMatches = await searchFacesByImage(collectionId, image.name);
    return { imageName: image.name, faceMatches };
  });
  const searchResults = await Promise.all(searchFacePromises);
  return searchResults;
};

export const handleFileUpload = async (file, collectionId) => {
  try {
    const images = await parseCSV(file);
    await uploadImagesAndGetLabels(images);
    await createCollection(collectionId);
    const indexedResults = await addFacesToCollection(collectionId, images);
    const searchResults = await searchFaces(collectionId, images);
    return { indexedResults, searchResults };
  } catch (error) {
    throw error;
  }
};
index.js
import * as rekognitionActions from './rekognitionActions';
import * as otherActions from './otherActions';
import { uploadImages, getLabeledResults, downloadCSV } from './actions';
export const { uploadImages, getLabeledResults, downloadCSV } = rekognitionActions;
export const { otherAction1, otherAction2 } = otherActions;
I have made my full project available on my GitHub repository, which can be accessed HERE
I am trying to create a React front-end for Amazon AWS S3. The goal of this project is to allow a user to upload multiple images to an S3 bucket, and then call a Lambda function to send the uploaded images to Amazon Rekognition for labeling. The results are then returned in a CSV file that the user can download as the output.
I have created the necessary components and actions for this project, but when I try to compile the code, I am getting an error:
"Module not found: Error: Can't resolve './components/actions' in 'C:\Users\luisj\Desktop\awsapp\awsapp\src\components'". I am also getting an error that says "export 'downloadCSV' (reexported as 'downloadCSV') was not found in './rekognitionActions' (possible exports: addFacesToCollection, createCollection, detectLabels, getLabeledResults, handleFileUpload, indexFaces, parseCSV, searchFaces, searchFacesByImage, uploadImages, uploadImagesAndGetLabels)". Additionally, there is an error that says "Module not found: Error: Can't resolve 'fs' in 'C:\Users\luisj\Desktop\awsapp\awsapp\src\components\actions'".
I was expecting the code to compile without any errors, and for the application to function as intended. However, the errors I am receiving are preventing me from moving forward with the project.
I have tried re-organizing the file structure, double checking the imports and exports in the code, and making sure that all necessary dependencies are installed, but I am still encountering these errors.
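For what it's worth, two of the compile errors point at browser constraints rather than file layout: 'fs' is a Node-only module that webpack cannot bundle for the browser, and downloadCSV is re-exported from index.js but never defined in rekognitionActions.js. Below is a rough sketch of browser-safe replacements, assuming Papa.parse is fed the File object directly and that downloadCSV is meant to save a generated CSV string; both are assumptions, since the original downloadCSV isn't shown.
// Hypothetical browser-side helpers (assumptions, not the original implementation).
import Papa from 'papaparse';

// Papa.parse accepts a File/Blob directly in the browser, so 'fs' is not needed.
export const parseCSV = (file) =>
  new Promise((resolve, reject) => {
    Papa.parse(file, {
      header: true,
      complete: (results) => resolve(results.data),
      error: (error) => reject(error),
    });
  });

// Build a CSV string from result rows and trigger a download via a Blob URL.
export const downloadCSV = (rows, filename = 'results.csv') => {
  const csv = Papa.unparse(rows);
  const blob = new Blob([csv], { type: 'text/csv;charset=utf-8;' });
  const link = document.createElement('a');
  link.href = URL.createObjectURL(blob);
  link.download = filename;
  link.click();
  URL.revokeObjectURL(link.href);
};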
Below is how I create the client.
import { create as ipfsHttpClient } from 'ipfs-http-client';
const projectId = 'xx';
const projectSecret = 'xx';
const auth = `Basic ${Buffer.from(`${projectId}:${projectSecret}`).toString('base64')}`;
const options = {
  host: 'ipfs.infura.io',
  protocol: 'https',
  port: 5001,
  apiPath: '/ipfs/api/v0',
  headers: {
    authorization: auth,
  },
};
const dedicatedEndPoint = 'https://xx.infura-ipfs.io';
const client = ipfsHttpClient(options);
Here is the function that is called from the front-end; it takes in a file, uploads it to IPFS, and returns the URL. Please note that ipfsHttpClient() is just the create function.
const uploadToIPFS = async (file) => {
  try {
    const added = await client.add({ content: file });
    const url = `${dedicatedEndPoint}${added.path}`;
    return url;
  } catch (error) {
    console.log('Error uploading file to IPFS: ', error);
  }
};
The error I am getting is
POST https://ipfs.infura.io:5001/ipfs/api/v0/add?stream-channels=true&progress=false 403 (Forbidden)
When I console.log the error, it says the IPFS method is not supported.
On the IPFS forum, I have seen someone say that the add function does not work anymore, but I have also seen people using it successfully. I'm not sure what's wrong here.
Here is how I call the function on the front-end:
const { uploadToIPFS } = useContext(NFTContext);

// function called from useDropzone
const onDrop = useCallback(async (acceptedFile) => {
  const url = await uploadToIPFS(acceptedFile[0]);
  setFileUrl(url);
}, []);
All the above code is correct and the error was from Next.js
Needed to add
images: {
  domains: ['xx.infura-ipfs.io'],
},
to the next.config.js file.
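For context, here is a minimal next.config.js sketch showing where that block lives. The module.exports wrapper is an assumed standard Next.js config shape; only the images block comes from the answer, and 'xx.infura-ipfs.io' is the placeholder gateway from above.
// Minimal next.config.js sketch (assumed file shape).
module.exports = {
  images: {
    // Allow next/image to load files served from the dedicated Infura gateway.
    domains: ['xx.infura-ipfs.io'],
  },
};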
I have resolved this problem.
First, make sure you have installed buffer:
npm install --save buffer
Then import it in your file:
import { Buffer } from 'buffer';
Then it works successfully.
import { create } from "ipfs-http-client";
import { Buffer } from "buffer";

const projectId = "YOUR_INFURA_PROJECT_ID";
const projectSecret = "YOUR_INFURA_PROJECT_SECRET";
const auth = `Basic ${Buffer.from(`${projectId}:${projectSecret}`).toString(
  "base64"
)}`;

const client = create({
  host: "ipfs.infura.io",
  port: 5001,
  protocol: "https",
  apiPath: "/api/v0",
  headers: {
    authorization: auth,
  },
});
const uploadFiles = async (e) => {
  e.preventDefault();
  setUploading(true);
  if (text !== "") {
    try {
      const added = await client.add(text);
      setDescriptionUrl(added.path);
    } catch (error) {
      toast.warn("error to uploading text");
    }
  }
};
///component
function Home() {
  const [show, setShow] = useState([{ name: '', info: '', airingDate: '', poster: '' }]);

  useEffect(() => {
    fetch("/home")
      //.then(res => res.json())
      .then(res => res.text())
      .then(text => console.log(text));
  });

  return (
    <div>
      {show.map(a =>
        <div>
          <h2>{a.title}</h2>
        </div>
      )}
    </div>
  );
}
/////index.js
const TvShows = require("./models/TvShows");
const express = require("express");
const app = express();
const mongoose = require("mongoose");
const dotenv = require("dotenv");
const authRoute = require("./routes/auth");
const { application } = require("express");
const userRoute = require("./routes/users");
const commentRoute = require("./routes/comments");
const tvshowsRoute = require("./routes/tvshows");
const cors = require("cors");

app.use(cors());
console.log(".");
dotenv.config();
app.use(express.json());

mongoose.connect(process.env.MONGO_URL, {
  useCreateIndex: true,
  useNewUrlParser: true,
  useUnifiedTopology: true,
}).then(console.log("connected to mongoDB"));

app.use("/auth", authRoute);
app.use("/users", userRoute);
app.use("/comments", commentRoute);
app.post("/api/home", tvshowsRoute);
app.use("/api/home", tvshowsRoute);

/*
app.get('/api/home', (req, res) => {
  TvShows.find().then((result) => {
    res.send(result);
  })
})
*/

/*
app.use("/", (req, res) => {
  console.log("main url")
})*/

app.listen("3001", () => {
  console.log("backend running");
});
//////route
const router = require("express").Router();
const TvShows = require("../models/TvShows");

router.post("/api/home", async (req, res) => {
  console.log("here");
  try {
    const newTvShow = new TvShows({
      title: req.body.title,
      poster: req.body.poster,
      info: req.body.info,
    });
    const savedTvShows = await newTvShow.save();
    res.status(200).json(savedTvShows);
  } catch (err) {
    res.status(500).json(err);
  }
});

router.route("/api/home").get((req, res) => {
  TvShows.find()
    .then(foundShows => res.json(foundShows));
});

module.exports = router;
When I change res.json to res.text, I see my index.html page in the console instead of the data I want to fetch from MongoDB. This is probably because I didn't use /api/ in the root URL, but I couldn't figure out where I should write it. I tried, but it didn't work. It would be great if someone could help. Thank you so much.
Indeed, you are fetching the /home page of your front-end app.
Assuming the API is on a different server, you would need to call the address of that server.
If you have a local setup with a Node.js server and a React app running separately, you should have them run on two different ports.
If your React app is on http://localhost:3000 (the default), change your API to listen on 3001; then, in your React code above, you can use the full URI
http://localhost:3001/api/home
in your fetch call.
I'm making a lot of assumptions about how you have this set up, based on my own experience of local development for similar problems.
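A sketch of the adjusted fetch call under that assumption (the port and path come from the advice above; parsing the JSON into setShow assumes the API returns the array of shows directly, which isn't confirmed by the original post):
// Sketch: point the fetch at the Express server's port directly.
useEffect(() => {
  fetch("http://localhost:3001/api/home")
    .then(res => res.json())
    .then(data => setShow(data))
    .catch(err => console.error(err));
}, []);
Note that the Express server already calls app.use(cors()), so the cross-origin request from port 3000 should be allowed.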
Hello, I am trying to get a dynamic proxy going in React and Express. I do not want to configure the proxy in package.json because I need it to be dynamic. I have tried the following, using setupProxy.js per the CRA documentation.
TypeError: Cannot read property 'split' of null
[1] at required (/Users/chadlew/Desktop/SC.CRM.React/client/node_modules/requires-port/index.js:13:23)
[1] at Object.common.setupOutgoing (/Users/chadlew/Desktop/SC.CRM.React/client/node_modules/http-proxy/lib/http-proxy/common.js:101:7)
[1] at Array.stream (/Users/chadlew/Desktop/SC.CRM.React/client/node_modules/http-proxy/lib/http-proxy/passes/web-incoming.js:127:14)
[1] at ProxyServer.<anonymous> (/Users/chadlew/Desktop/SC.CRM.React/client/node_modules/http-proxy/lib/http-proxy/index.js:81:21)
[1] at HttpProxyMiddleware.middleware (/Users/chadlew/Desktop/SC.CRM.React/client/node_modules/http-proxy-middleware/dist/http-proxy-middleware.js:22:32)
This is the error I'm getting every time.
And here is my code:
setupProxy.js
const { createProxyMiddleware } = require('http-proxy-middleware');

module.exports = function (app) {
  app.use(
    '/api', // You can pass in an array too eg. ['/api', '/another/path']
    createProxyMiddleware({
      target: process.env.REACT_APP_PROXY_HOST,
      changeOrigin: true,
    })
  );
};
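One common cause of the "'split' of null" error inside requires-port is a proxy target whose protocol cannot be parsed, for example when REACT_APP_PROXY_HOST is unset or lacks an http:// or https:// scheme, so http-proxy ends up with protocol: null. A guarded sketch of setupProxy.js under that assumption (the fallback URL is a placeholder, not part of the original setup):
const { createProxyMiddleware } = require('http-proxy-middleware');

module.exports = function (app) {
  // Assumption: REACT_APP_PROXY_HOST should be a full URL including its scheme.
  const target = process.env.REACT_APP_PROXY_HOST || 'http://localhost:5000';
  if (!/^https?:\/\//.test(target)) {
    throw new Error(`REACT_APP_PROXY_HOST must include http:// or https://, got: ${target}`);
  }
  app.use(
    '/api', // You can pass in an array too eg. ['/api', '/another/path']
    createProxyMiddleware({
      target,
      changeOrigin: true,
    })
  );
};
Remember that CRA only reads .env at dev-server startup, so the variable has to be defined before running npm start.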
Here is the React code:
import React, { useState, useEffect } from 'react';
import GoogleLogin from 'react-google-login';

const Login = ({ history }) => {
  const [authData, setAuthData] = useState({});

  useEffect(() => {
    if (Object.keys(authData).length > 0) {
      (async () => {
        const res = await fetch("/api/auth/login", {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify(authData)
        });
        // This data would be set into app level state (react-context api) and be accessible everywhere
        const { message, isAuth, userData } = await res.json();
      })();
    }
  }, [authData, history]);

  const successHandler = data => {
    setAuthData(data);
    history.push("/home");
  };

  const failureHandler = (data) => setAuthData(data);

  return (
    <div className="login-page">
      <h1 style={{ marginBottom: '1rem' }}>Welcome</h1>
      <GoogleLogin
        clientId={process.env.REACT_APP_GOOGLE_CLIENT_ID}
        buttonText="Login with Google"
        onSuccess={successHandler}
        onFailure={failureHandler}
        cookiePolicy={'single_host_origin'}
      />
    </div>
  );
};

export default Login;
Whenever I log in with Google, I get the error message and the proxy ultimately does not work. Any suggestions would be highly appreciated.
I see there are a lot of posts on this subject already, so I apologize if this is a repeat.
What is strange and possibly unique (I don't know) is that the server seems to be functioning and executing the API call properly.
I have a React front end with an Express backend being hosted on an AWS EC2 instance. As said above, when my front end makes an axios.post request, the server does everything it is supposed to, but I get two errors back. One is
OPTIONS http://us-west-1.compute.amazonaws.com:3000 net::ERR_CONNECTION_REFUSED
The other is
Error: Network Error
at createError (createError.js:17)
at XMLHttpRequest.handleError (xhr.js:87)
The React code is:
import React from "react";
import PaypalExpressBtn from "react-paypal-express-checkout";
import axios from "axios";

export default class Pay extends React.Component {
  constructor(props) {
    super(props);
    this.state = {
      items: {}
    };
  }

  render() {
    const onSuccess = payment => {
      axios
        .post("http://compute.amazonaws.com:3000/", {
          value: this.props.value,
          fileName: this.props.fileName,
          hash: this.props.hash
        })
        .then(response => console.log(response.data))
        .catch(function (error) {
          console.log(error);
        });
      console.log(payment);
    };

    let env = "sandbox"; // you can set here to 'production' for production
    let currency = "USD"; // or you can set this value from your props or state
    let total = 3.33; // same as above, this is the total amount (based on

    const client = {
      sandbox:
        "...key...",
      production: "YOUR-PRODUCTION-APP-ID"
    };

    return (
      <div>
        <PaypalExpressBtn
          onSuccess={onSuccess}
        />
      </div>
    );
  }
}
The Express code is:
const express = require("express");
const app = express();
const Tx = require("ethereumjs-tx");
var cors = require('cors');
const Web3 = require("web3");
const web3 = new Web3(
  "https://ropsten.infura.io/v3/d55489f8ea264a1484c293b05ed7eb85"
);

app.use(cors());

const abi = [...]
const contractAddress = "0x15E1ff7d97CB0D7C054D19bCF579e3147FC9009b";
const myAccount = "0x59f568176e21EF86017EfED3660625F4397A2ecE";
const privateKey1 = new Buffer(
  "...privateKey...",
  "hex"
);

app.post("/", function (req, res, next) {
  var hashValue = req.body.hash,
    fileName = req.body.fileName,
    value = req.body.value;

  const contract = new web3.eth.Contract(abi, contractAddress, {
    from: myAccount
    // gas: '50000'
  });

  web3.eth.getTransactionCount(myAccount, (err, txCount) => {
    // Smart contract data
    const data = contract.methods
      .setHashValue(value + " " + fileName + " " + hashValue)
      .encodeABI();

    // Build the transaction
    const txObject = {
      nonce: web3.utils.toHex(txCount),
      gasLimit: web3.utils.toHex(1000000),
      gasPrice: 20000000000,
      data: data,
      from: myAccount,
      to: contractAddress
    };

    // Sign the transaction
    const tx = new Tx(txObject);
    const serializedTx = tx.serialize();
    // const raw = '0x' + serializedTx.toString('hex')

    // Broadcast the transaction
    web3.eth
      .sendSignedTransaction("0x" + serializedTx.toString("hex"))
      .on("receipt", console.log);

    next();
  });
});

app.listen(3000, () => console.log("listening on 3000"));
I would reiterate that the server is broadcasting the Ethereum transaction as intended. The reason I am asking is that I do not want errors, and I am checking whether this is part of a larger issue I'm having with a JSON return call.
Any help is appreciated. Thanks!
I resolved this by adding a res.json()
web3.eth
  .sendSignedTransaction("0x" + serializedTx.toString("hex"))
  .on("receipt", console.log, res.json);
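A slightly more explicit variant of the same idea, sketched rather than taken from the original answer: send the receipt back to the client inside the listener so axios receives a response instead of a hung request, and report failures as well.
// Sketch: respond to the client once the transaction receipt arrives.
web3.eth
  .sendSignedTransaction("0x" + serializedTx.toString("hex"))
  .on("receipt", (receipt) => {
    console.log(receipt);
    res.json(receipt);
  })
  .on("error", (err) => res.status(500).json({ error: err.message }));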