Receive object datatype from Back4App in React

I have an issue with receiving the object datatype from the Back4App database in the column "expiration". I have no issue receiving the other datatypes (String, Array, ...), but I can't destructure the object into day, month and year.
I have the following code where I receive the other datatypes and save them in a useState hook:
const [ideaTable, setIdeaTable] = useState();

async function fetchIdeas() {
  const query = new Parse.Query("Idea");
  try {
    const ideas = await query.find();
    console.log("Parse Objects: ", ideas);
    const destructuredIdeas = destructureIdeas(ideas);
    setIdeaTable(destructuredIdeas);
    // note: this still logs the old state; setIdeaTable updates asynchronously
    console.log("from readIdeas: ", ideaTable);
    return true;
  } catch (error) {
    alert(`Error ${error.message}`);
    return false;
  }
}
function destructure(idea) {
  return {
    id: idea.id,
    title: idea.get("title"),
    section: idea.get("tags"),
    author: idea.get("author"),
    /* expiration: JSON.stringify(idea.get("expiration")) */
    expiration: idea.get("expiration")
  };
}

function destructureIdeas(ideas) {
  return ideas.map(destructure);
}
Here I just call "expiration: idea.get("expiration")", as in the code above. So I think I need to restructure the expiration object to get the day, month and year... but how?
If I do this: "expiration: JSON.stringify(idea.get("expiration"))", I get the whole object as one JSON string (screenshot omitted).
So, what to do if I want to display for example: "14.12.2021"?

I found the answer myself:
expiration: idea.get("expiration").day
expiration: idea.get("expiration").month
expiration: idea.get("expiration").year
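To display it as "14.12.2021", the three fields can be combined into one string inside destructure. A minimal sketch (it assumes the stored object always has numeric day, month and year keys; padStart is only there for zero-padding):

function formatExpiration(expiration) {
  // e.g. { day: 14, month: 12, year: 2021 } -> "14.12.2021"
  const day = String(expiration.day).padStart(2, "0");
  const month = String(expiration.month).padStart(2, "0");
  return `${day}.${month}.${expiration.year}`;
}

// in destructure:
// expiration: formatExpiration(idea.get("expiration"))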


NextJS Global Variable with Assignment

I'm new to Next.js and trying to figure out how to create a global variable that I could assign a different value at any time. Could someone give a simple example? (I know a global might not be the best approach, but I would still like to know how to set one up.)
Let's say:
_app.js
NAME = "Ana" // GLOBAL VARIABLE
page_A.js
console.log(NAME) // "Ana"
NAME = "Ben"
page_B.js
console.log(NAME) // "Ben"
Try using environment variables:
/next.config.js
module.exports = {
  env: {
    customKey: 'my-value',
  },
}
/pages/page_A.js
function Page() {
  return <h1>The value of customKey is: {process.env.customKey}</h1>
}
export default Page
but you cannot change its contents, except by editing next.config.js directly.
Next.js has no special mechanism for the kind of mutable global variable you want. You can achieve it with:
A state management tool, like React Redux
React Context (see the sketch below)
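For illustration, a minimal Context-based sketch (the names GlobalProvider and useGlobal are made up for this example, not a Next.js API):

// globalContext.js
import { createContext, useContext, useState } from "react";

const GlobalContext = createContext(null);

export function GlobalProvider({ children }) {
  // holds the "global" value; calling setName re-renders all consumers
  const [name, setName] = useState("Ana");
  return (
    <GlobalContext.Provider value={{ name, setName }}>
      {children}
    </GlobalContext.Provider>
  );
}

export const useGlobal = () => useContext(GlobalContext);

Wrap the app with GlobalProvider in _app.js; any page can then read name or call setName("Ben") via useGlobal().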
It's not like it's impossible. I created a file called _customGlobals.jsx and put this in as its content:
String.prototype.title = function () {
  const sliced = this.slice(1);
  return (
    this.charAt(0).toUpperCase() +
    (sliced.toUpperCase() === sliced ? sliced.toLowerCase() : sliced)
  );
};
and imported it in _app.jsx like this:
import "./_customGlobals";
So now I can call this function on any string anywhere in my project like this:
"this is a title".title()
A database is designed for exactly this purpose, but installing a whole DB for one variable is not wise. Instead, you can do it with a JSON file: add a variable to the file and use a function to update it. Here is a simple function for this:
const fs = require('fs');

function updateJSONFile(filePath, updates) {
  fs.readFile(filePath, 'utf8', function (err, data) {
    if (err) {
      console.error(err);
      return;
    }
    let json = JSON.parse(data);
    for (let key in updates) {
      json[key] = updates[key];
    }
    fs.writeFile(filePath, JSON.stringify(json, null, 2), 'utf8', function (err) {
      if (err) {
        console.error(err);
      }
    });
  });
}
So, use it like this:
updateJSONFile('file.json', { name: 'John Doe', age: 30 });
You can create another function to read it dynamically:
function readJSONFile(filePath, callback) {
  fs.readFile(filePath, 'utf8', function (err, data) {
    if (err) {
      return callback(err);
    }
    let json;
    try {
      json = JSON.parse(data);
    } catch (error) {
      return callback(error);
    }
    return callback(null, json);
  });
}
and you can use it like this:
readJSONFile('file.json', function (err, json) {
  if (err) {
    return console.error(err);
  }
  console.log(json);
});
Note that fs.readFile is asynchronous, so the result has to be consumed through the callback rather than a plain return value.
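If blocking I/O is acceptable, for example a small config read at startup, a synchronous sketch avoids the callback entirely (this uses only the standard fs API):

const fs = require('fs');

function readJSONFileSync(filePath) {
  // throws if the file is missing or contains invalid JSON
  return JSON.parse(fs.readFileSync(filePath, 'utf8'));
}

const config = readJSONFileSync('file.json');
console.log(config.name);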

relay prisma graphql update store

I am trying to get prisma and relay working. Here's my repo:
https://github.com/jamesmbowler/prisma-relay-todo
It's a simple todo list. I am able to add the todos, but the UI does not update. When I refresh, the todo is there.
All of the examples of updating the store, that I can find, use a "parent" to the object that is being updated / created.
See https://facebook.github.io/relay/docs/en/mutations.html#using-updater-and-optimisticupdater
Also, the "updater configs" also requires a "parentID". https://facebook.github.io/relay/docs/en/mutations.html#updater-configs
From relay-runtime's RelayConnectionHandler.js comment here:
https://github.com/facebook/relay/blob/master/packages/relay-runtime/handlers/connection/RelayConnectionHandler.js#L232
store => {
  const user = store.get('<id>');
  const friends = RelayConnectionHandler.getConnection(user, 'FriendsFragment_friends');
  const edge = store.create('<edge-id>', 'FriendsEdge');
  RelayConnectionHandler.insertEdgeAfter(friends, edge);
}
Is it possible to update the store without having a "parent"? I just have todos, with no parent.
Again, creating the record works, and gives this response:
{ "data" : {
"createTodo" : {
"id" : "cjpdbivhc00050903ud6bkl3x",
"name" : "testing",
"complete" : false
} } }
Here's my updater function
updater: store => {
  const payload = store.getRootField('createTodo');
  const conn = ConnectionHandler.getConnection(store.get(payload.getDataID()), 'Todos_todoesConnection');
  ConnectionHandler.insertEdgeAfter(conn, payload, cursor);
},
I have done a console.log(conn), and it is undefined.
Please help.
----Edit----
Thanks to Denis, I think one problem is solved - that of the ConnectionHandler.
But, I still can't get the ui to update. Here's what I've tried in the updater function:
const payload = store.getRootField('createTodo');
const clientRoot = store.get('client:root');
const conn = ConnectionHandler.getConnection(clientRoot, 'Todos_todoesConnection');
ConnectionHandler.createEdge(store, conn, payload, 'TodoEdge');
I've also tried this:
const payload = store.getRootField('createTodo');
const clientRoot = store.get('client:root');
const conn = ConnectionHandler.getConnection(clientRoot, 'Todos_todoesConnection');
ConnectionHandler.insertEdgeAfter(conn, payload);
My data shape is different from their example, as I don't have the 'todoEdge', and 'node' inside my returned data (see above).
todoEdge {
  cursor
  node {
    complete
    id
    text
  }
}
How do I getLinkedRecord, like this?
const newEdge = payload.getLinkedRecord('todoEdge');
If query is the parent, the parentID will be client:root.
Take a look at this: https://github.com/facebook/relay/blob/1d72862fa620a9db69d6219d5aa562054d9b93c7/packages/react-relay/classic/store/RelayStoreConstants.js#L18
Also at this issue: https://github.com/facebook/relay/issues/2157#issuecomment-385009482
Create a const ROOT_ID = 'client:root'; and pass ROOT_ID as your parentID. Also check the name of your connection in the updater; it has to exactly match the name used where you declared the query.
UPDATE:
Actually, you can import ROOT_ID from relay-runtime:
import { ROOT_ID } from 'relay-runtime';
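Applied to the question's updater, that would look roughly like this (a sketch; it assumes the connection really is declared as Todos_todoesConnection on the root query, and uses createEdge because the mutation payload has no todoEdge field):

import { ConnectionHandler, ROOT_ID } from 'relay-runtime';

updater: store => {
  const payload = store.getRootField('createTodo');
  const clientRoot = store.get(ROOT_ID);
  const conn = ConnectionHandler.getConnection(clientRoot, 'Todos_todoesConnection');
  if (conn) {
    // createEdge returns the new edge record, which insertEdgeAfter expects
    const edge = ConnectionHandler.createEdge(store, conn, payload, 'TodoEdge');
    ConnectionHandler.insertEdgeAfter(conn, edge);
  }
},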
UPDATE 2:
Your edit was not very clear to me, but I will provide an example of how it should work, OK? After your mutation runs, you first access its data by using getRootField, just like you are doing. So, if I have a mutation like:
mutation UserAddMutation($input: UserAddInput!) {
  UserAdd(input: $input) {
    userEdge {
      node {
        name
        id
        age
      }
    }
    error
  }
}
You will do:
const newEdge = store.getRootField('UserAdd').getLinkedRecord('userEdge');

connectionUpdater({
  store,
  parentId: ROOT_ID,
  connectionName: 'UserAdd_users',
  edge: newEdge,
  before: true,
});
This connectionUpdater is a helper function that looks likes this:
export function connectionUpdater({ store, parentId, connectionName, edge, before, filters }) {
  if (edge) {
    if (!parentId) {
      // eslint-disable-next-line
      console.log('maybe you forgot to pass a parentId: ');
      return;
    }
    const parentProxy = store.get(parentId);
    const connection = ConnectionHandler.getConnection(parentProxy, connectionName, filters);
    if (!connection) {
      // eslint-disable-next-line
      console.log('maybe this connection is not in relay store yet:', connectionName);
      return;
    }
    const newEndCursorOffset = connection.getValue('endCursorOffset');
    connection.setValue(newEndCursorOffset + 1, 'endCursorOffset');
    const newCount = connection.getValue('count');
    connection.setValue(newCount + 1, 'count');
    if (before) {
      ConnectionHandler.insertEdgeBefore(connection, edge);
    } else {
      ConnectionHandler.insertEdgeAfter(connection, edge);
    }
  }
}
Hope it helps :)

How to for loop all documents in a collection - Azure CosmosDB - Nodejs

I have looked around at a few answers/questions regarding this issue but yet to find a solution.
I have a collection with documents (simplified) as such:
{
  "id": 123,
  "stuff": "abc",
  "array": [
    {
      "id2": 456,
      "properties": [
        {
          "id3": 789,
          "important": true
        }
      ]
    }
  ]
}
I want to check, for each document in my collection, for each object within "array", for each entry in "properties", whether it has "important": true, for example. Then return:
"id": 123,
"id2": 456,
"id3": 789
I have tried using:
client.queryDocuments(self.collection._self, querySpec).toArray(function (err, results) {
  if (err) {
    callback(err);
  } else {
    callback(null, results[0]);
  }
});
But the issue is an array has a maximum character limit. If my collection has millions of documents, this would presumably be exceeded. (Javascript Increase max array size)
Or am I misunderstanding the above question? Is it talking about the number of elements in an array (where each element can itself have unlimited character length)?
Thus I am looking for a for-loop-esque solution, where each document is returned, I do my analysis, then move to the next / do them in parallel.
Any insight would be greatly appreciated.
But the issue is an array has a maximum character limit. If my collection has millions of documents, this would presumably be exceeded. (Javascript Increase max array size)
Based on my research, the longest possible array in JS can have 2^32 - 1 = 4,294,967,295 (about 4.29 billion) elements, which is more than enough for your millions of documents. However, you certainly can't query such a huge volume of data in one shot; that's not feasible.
Whether because of throughput constraints (RU settings) or query-efficiency factors, you should batch large volumes of data anyway.
Thus I am looking for a for-loop-esque solution, where each document is returned, I do my analysis, then move to the next / do them in parallel.
Maybe you could use the v2 JS SDK for the Cosmos DB SQL API. Please refer to the sample code:
const cosmos = require('@azure/cosmos');
const CosmosClient = cosmos.CosmosClient;

const endpoint = "https://***.documents.azure.com:443/"; // Add your endpoint
const masterKey = "***"; // Add the masterkey of the endpoint
const client = new CosmosClient({ endpoint, auth: { masterKey } });
const databaseId = "db";
const containerId = "coll";

async function run() {
  const { container, database } = await init();
  const querySpec = {
    query: "SELECT r.id,r._ts FROM root r"
  };
  const queryOptions = {
    maxItemCount: -1
  };
  const queryIterator = await container.items.query(querySpec, queryOptions);
  while (queryIterator.hasMoreResults()) {
    const { result: results, headers } = await queryIterator.executeNext();
    if (results === undefined) {
      // no more results
      break;
    }
    console.log(results);
    console.log(headers);
    // do what you want to do with this page of documents
  }
}

async function init() {
  const { database } = await client.databases.createIfNotExists({ id: databaseId });
  const { container } = await database.containers.createIfNotExists({ id: containerId });
  return { database, container };
}

run().catch(err => {
  console.error(err);
});
For more details about the continuation token, please refer to my previous case. If you have any concerns, please let me know.
I am using the Cosmos DB SQL API Node.js library. I am unable to find the continuation token in this library so that I can return it to the client. The idea is to get it back from the client for the next pagination request.
I have working code which iterates multiple times to get all the documents. What changes will be required here to get the continuation token?
function queryCollectionPaging() {
  return new Promise((resolve, reject) => {
    function executeNextWithRetry(iterator, callback) {
      iterator.executeNext(function (err, results, responseHeaders) {
        if (err) {
          return callback(err, null);
        }
        else {
          documents = documents.concat(results);
          if (iterator.hasMoreResults()) {
            executeNextWithRetry(iterator, callback);
          }
          else {
            callback();
          }
        }
      });
    }

    let options = {
      maxItemCount: 1,
      enableCrossPartitionQuery: true
    };

    let documents = [];
    let iterator = client.queryDocuments(collectionUrl, 'SELECT r.partitionkey, r.documentid, r._ts FROM root r WHERE r.partitionkey in ("user1", "user2") ORDER BY r._ts', options);

    executeNextWithRetry(iterator, function (err, result) {
      if (err) {
        reject(err);
      }
      else {
        console.log(documents);
        resolve(documents);
      }
    });
  });
};
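One way to surface the token, sketched under the assumption that this is the classic documentdb-style SDK, where the token arrives in the 'x-ms-continuation' response header and is fed back in through options.continuation: fetch a single page per call and hand the token to the client.

function queryOnePage(continuationToken, callback) {
  let options = {
    maxItemCount: 100,
    enableCrossPartitionQuery: true,
    continuation: continuationToken // undefined on the first call
  };
  let iterator = client.queryDocuments(collectionUrl, 'SELECT * FROM root r', options);
  iterator.executeNext(function (err, results, responseHeaders) {
    if (err) {
      return callback(err);
    }
    // return the token with the page; the client sends it back for the next page
    callback(null, results, responseHeaders['x-ms-continuation']);
  });
}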

Moment.js is converting Date to Today date using React Big Calendar

I've got a problem where the date is being converted to today's date even though the date is being parsed by Moment. I believe I am just using the script incorrectly; I'm not familiar with Moment. Any help would be appreciated.
export function getEvents(callback) {
  request
    .get(url)
    .end((err, resp) => {
      if (!err) {
        const events = [];
        JSON.parse(resp.text).items.map((event) => {
          events.push({
            start: moment(event.start.date).toDate() || moment(event.start.dateTime),
            end: moment(event.end.date).toDate() || moment(event.end.dateTime),
            title: event.summary,
          })
        });
        callback(events)
      }
    })
}
(The question included screenshots of the trace: the "start" date arriving from Google Calendar in a time format, the conversion in the script, and the resulting date at call time.)
The fix is in how the Moment API is called. Use this:
JSON.parse(resp.text).items.map((event) => {
  var start = moment(event.start.dateTime, 'YYYY-MM-DD HH:mm').toDate();
  var end = moment(event.end.dateTime, 'YYYY-MM-DD HH:mm').toDate();
  events.push({
    start: start,
    end: end,
    title: event.summary,
  })
});
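For what it's worth, the likely reason the original code produced today's date: timed Google Calendar events carry dateTime rather than date, so event.start.date is undefined, and moment(undefined) behaves like moment(), i.e. it returns the current date and time. Since that is truthy, the || fallback never fires. A small illustration (the event object here is hypothetical):

const event = { start: { dateTime: "2021-12-14T10:00:00Z" } };
moment(event.start.date).toDate();     // today's date; undefined acts like moment()
moment(event.start.dateTime).toDate(); // the actual start date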
After looking into the resp.text you provided in the comments, I created the following parse method to parse your response the way you wanted. The response passed to this method is the resp.text you supplied in the comments.
import moment from 'moment'

const parseResponse = (response) => {
  const events = []
  response.forEach(obj => {
    obj.items.forEach(item => {
      events.push({
        start: moment(item.start.dateTime),
        end: moment(item.end.dateTime),
        title: item.summary
      })
    })
  })
  return events
}
Note: check the codesandbox.io/s/ywpznzrmv9 sandbox if you want to look into the Moment workaround. You can get rid of the first forEach block if resp.text holds only one array of objects, like:
const parseResponse = (response) => {
  const events = []
  response[0].items.forEach(item => {
    events.push({
      start: moment(item.start.dateTime),
      end: moment(item.end.dateTime),
      title: item.summary
    })
  })
  return events
}
Note: if you stick to using JSON.parse() then change map to forEach; map creates a new array, which is garbage that you won't need in your case.

Deeper levels in reducer getting overwritten

I have an action that fetches some data from a database using multiple calls. I am trying to create the correct structure for my data in the reducer, but the deeper levels of the object just get overwritten.
React call:
_.forEach(['cat1', 'cat2', 'cat3', 'cat4'], (category) => {
  _.forEach(['day1', 'day2', 'day3'], (day) => {
    this.props.fetchSingleResult('2018', 'USA', day, category)
  })
})
My action:
export function fetchSingleResult(year, venue, day, category) {
  // Get race result from a single category and day
  const request = axios.get('/api/races/singleresult', {
    params: {
      year: year,
      venue: venue,
      day: day,
      category: category
    }
  });
  return {
    type: FETCH_SINGLE_RESULT,
    payload: request,
    meta: { year: year, venue: venue, day: day, category: category }
  };
}
My reducer:
case FETCH_SINGLE_RESULT:
  return {
    ...state,
    [action.meta.year]: {
      ...state[action.meta.year],
      [action.meta.venue]: {
        ...state[action.meta.venue],
        [action.meta.category]: {
          ...state[action.meta.category],
          [action.meta.day]: action.payload.data
        }
      }
    }
  };
The output of this is an object that only has the last day and category as keys, i.e.
{2018: {'USA': {'cat4': {'day3' : data}}}}
instead of having all categories and days like:
{2018:
  {'USA':
    {'cat1': {'day1': data, 'day2': data, 'day3': data},
     'cat2': {'day1': data, 'day2': data, 'day3': data},
     'cat3': {'day1': data, 'day2': data, 'day3': data},
     'cat4': {'day1': data, 'day2': data, 'day3': data}
    }
  }
}
Any ideas?
There are a lot of years and a lot of venues.
The way you're using the spread operator is causing that result: every inner spread reads from the top-level state instead of from the level above it, so the nested objects get rebuilt from scratch each time. Try:
case FETCH_SINGLE_RESULT:
  const { year, venue, category, day } = action.meta;
  // merge into a fresh object so the existing state object is not mutated
  return _.merge({}, state, { [year]: { [venue]: { [category]: { [day]: action.payload.data } } } });
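For reference, a spread-only version of the fix might look like this (a sketch; each level spreads the object one level up, with || {} guards for keys that don't exist yet):

case FETCH_SINGLE_RESULT: {
  const { year, venue, category, day } = action.meta;
  const yearState = state[year] || {};
  const venueState = yearState[venue] || {};
  const categoryState = venueState[category] || {};
  return {
    ...state,
    [year]: {
      ...yearState,
      [venue]: {
        ...venueState,
        [category]: {
          ...categoryState,
          [day]: action.payload.data
        }
      }
    }
  };
}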
Look, first of all, this level of nesting is very difficult to maintain while respecting immutability in React. You should consider changing your endpoint's response structure.
const getNewState = (state, action) => {
  const newState = { ...state }
  const { year, venue, day, category } = action.meta
  const data = action.payload.data
  if (newState[year]) {
    const currentYearValue = { ...newState[year] }
    if (currentYearValue[venue]) {
      const currentVenueValue = { ...currentYearValue[venue] }
      if (currentVenueValue[category]) {
        // copy at every level so the original state is never mutated
        currentVenueValue[category] = {
          ...currentVenueValue[category],
          [day]: data,
        }
      }
      currentYearValue[venue] = currentVenueValue
    }
    newState[year] = currentYearValue
  }
  return newState
}
I only handled one flow in the code above; you also need to take care of the other scenarios, like when the key does not yet exist at some level.
I went on to code the solution for this, but found it overly complex to code.
