How to get the Blob image preview in my Uppy Custom setup - reactjs

I'm learning React and I'm using Uppy so the user can select files for upload.
When the user has selected files, they are hidden by setting showSelectedFiles={false}.
I use my own component to show the selected files, and I get the files using this:
.on("file-added", (file) => {
const { setFile } = props;
setFile(file);
const newList = this.state.files.concat({ file });
this.setState({
files: { newList },
});
});
For each file added to the Dashboard, setFile(file) sends the file object to my custom view. The problem is that the preview image Blob that the Dashboard generates automatically is not present at this stage.
How can I get the files into my custom GUI, including the preview image Blob?
I'm new to React and JavaScript so please be gentle:)
Complete code:
import React from "react";
import "@uppy/status-bar/dist/style.css";
import "@uppy/drag-drop/dist/style.css";
import "@uppy/progress-bar/dist/style.css";
import "./styles.css";
import "@uppy/core/dist/style.css";
import "@uppy/dashboard/dist/style.css";
const Uppy = require("@uppy/core");
// const Dashboard = require("@uppy/dashboard");
const GoogleDrive = require("@uppy/google-drive");
const Dropbox = require("@uppy/dropbox");
const Instagram = require("@uppy/instagram");
const Webcam = require("@uppy/webcam");
const Tus = require("@uppy/tus");
const ThumbnailGenerator = require("@uppy/thumbnail-generator");
const {
  Dashboard,
  DashboardModal,
  DragDrop,
  ProgressBar,
} = require("@uppy/react");
class DashboardUppy extends React.Component {
constructor(props) {
super(props);
this.form = React.createRef();
this.state = {
showInlineDashboard: false,
open: false,
files: [],
};
this.uppy = new Uppy({
id: "uppy1",
autoProceed: false,
debug: true,
allowMultipleUploads: true,
proudlyDisplayPoweredByUppy: true,
restrictions: {
// maxFileSize: 1000000,
maxNumberOfFiles: 100,
minNumberOfFiles: 1,
allowedFileTypes: null,
},
onBeforeFileAdded: (currentFile, files) => {
console.log(files);
const modifiedFile = Object.assign({}, currentFile, {
name: currentFile.name + Date.now(),
});
if (!currentFile.type) {
// log to console
this.uppy.log(`Skipping file because it has no type`);
// show error message to the user
this.uppy.info(`Skipping file because it has no type`, "error", 500);
return false;
}
return modifiedFile;
},
})
.use(Tus, { endpoint: "https://master.tus.io/files/" })
.use(GoogleDrive, { companionUrl: "https://companion.uppy.io" })
.use(Dropbox, {
companionUrl: "https://companion.uppy.io",
})
.use(Instagram, {
companionUrl: "https://companion.uppy.io",
})
.use(Webcam, {
onBeforeSnapshot: () => Promise.resolve(),
countdown: false,
modes: ["video-audio", "video-only", "audio-only", "picture"],
mirror: true,
facingMode: "user",
locale: {
strings: {
// Shown before a picture is taken when the `countdown` option is set.
smile: "Smile!",
// Used as the label for the button that takes a picture.
// This is not visibly rendered but is picked up by screen readers.
takePicture: "Take a picture",
// Used as the label for the button that starts a video recording.
// This is not visibly rendered but is picked up by screen readers.
startRecording: "Begin video recording",
// Used as the label for the button that stops a video recording.
// This is not visibly rendered but is picked up by screen readers.
stopRecording: "Stop video recording",
// Title on the “allow access” screen
allowAccessTitle: "Please allow access to your camera",
// Description on the “allow access” screen
allowAccessDescription:
"In order to take pictures or record video with your camera, please allow camera access for this site.",
},
},
}).use(ThumbnailGenerator, {
thumbnailWidth: 200,
// thumbnailHeight: 200 // optional, use either width or height,
waitForThumbnailsBeforeUpload: true
})
.on("thumbnail:generated", (file, preview) => {
const img = document.createElement("img");
img.src = preview;
img.width = 100;
document.body.appendChild(img);
})
.on("file-added", (file) => {
const { setFile } = props;
setFile(file);
const newList = this.state.files.concat({ file });
this.setState({
files: newList,
});
});
}
componentWillUnmount() {
this.uppy.close();
}
render() {
const { files } = this.state;
this.uppy.on("complete", (result) => {
console.log(
"Upload complete! We’ve uploaded these files:",
result.successful
);
});
return (
<div>
<div>
<Dashboard
uppy={this.uppy}
plugins={["GoogleDrive", "Webcam", "Dropbox", "Instagram"]}
metaFields={[
{ id: "name", name: "Name", placeholder: "File name" },
]}
open={this.state.open}
target={document.body}
onRequestClose={() => this.setState({ open: false })}
showSelectedFiles={false}
/>
</div>
</div>
);
}
}
export default DashboardUppy;

Ran into this problem as well because I wanted to use the image preview to figure out the aspect ratio of the underlying image.
If you're using the Dashboard or ThumbnailGenerator plugin for Uppy, an event is emitted for every file once its thumbnail has been generated:
uppy.on('thumbnail:generated', (file, preview) => {
  const img = new Image();
  img.src = preview;
  img.onload = () => {
    const aspect_ratio = img.width / img.height;
    // Remove image if the aspect ratio is too weird.
    // TODO: notify user.
    if (aspect_ratio > 1.8) {
      uppy.removeFile(file.id);
    }
  };
});
I realize, though, that you are already listening for this event in your code. So to answer your question: just put your logic there instead of in file-added.
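For example, here is a minimal sketch of how that could look in the DashboardUppy constructor above, assuming the custom view reads previews from this.state.files and that the files are stored directly (rather than wrapped in { file }) so they can be matched by id later:
// Inside the constructor, replacing the current file-added handler (sketch only).
this.uppy
  .on("file-added", (file) => {
    // The preview is not available yet at this point.
    this.setState((prevState) => ({
      files: prevState.files.concat(file),
    }));
  })
  .on("thumbnail:generated", (file, preview) => {
    // Merge the generated preview (a blob URL) into the matching file in state.
    this.setState((prevState) => ({
      files: prevState.files.map((f) =>
        f.id === file.id ? { ...f, preview } : f
      ),
    }));
    // If the parent needs the preview too, this is the place to pass it up,
    // e.g. props.setFile({ ...file, preview });
  });
ThumbnailGenerator also writes the preview onto Uppy's own copy of the file, so reading this.uppy.getFile(file.id).preview at this point should give the same value.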

Related

videojs player seek buttons work in other browsers but not in Chrome. The video starts again on clicking the progress bar

I'm fetching a video from an S3 bucket using Django (backend) and then playing the video in React (frontend), but the seek controls work in other browsers and not in Chrome. The video starts again when clicking the progress bar or pressing the arrow keys. I don't understand what the problem is.
Here is my Django code for fetching the video:
import mimetypes
import tempfile

from django.http import HttpResponse
from rest_framework.views import APIView

# download_assessment_file is the project's own S3 helper (not shown here).

class GenericFileOperation(APIView):
    def get(self, request, material_type=None, course_id=None, format=None):
        key = request.GET.get('key')
        download = request.GET.get('download')
        if key is not None:
            s3_obj = download_assessment_file(key)
            splited_path = key.split("/")
            file_name = splited_path[len(splited_path) - 1]
            contents = s3_obj['Body'].read()
            temp = tempfile.TemporaryFile()
            temp.write(contents)
            temp.seek(0)
            mime_type, _ = mimetypes.guess_type(key)
            response = HttpResponse(temp, content_type=mime_type)
            if download is not None:
                response['Content-Disposition'] = "attachment; filename=%s" % (
                    file_name)
            response['X-Frame-Options'] = "*"
            return response
Here is my code for video.js in React:
import React, { useEffect, useRef } from 'react'
import VideoJs from 'video.js'
import 'video.js/dist/video-js.css';
import "videojs-hotkeys";
import 'videojs-seek-buttons'
const videoJsOptions = {
controls: true,
autoplay: false,
fluid: true,
loop: false,
playbackRates: [0.5, 1, 1.5, 2],
aspectRatio: '12:5',
plugins: {
seekButtons: {
forward: 30,
back: 10
},
hotkeys: {}
}
}
const VideoPlayer = ({ url, fileType }) => {
const videoContainer = useRef()
useEffect(() => {
videoContainer.current.innerHTML = `
<div data-vjs-player>
<video data-setup='{"fluid": true, "autoplay":true,"html5": {"vhs":
{"overrideNative": true}, "nativeAudioTracks": false, "nativeVideoTracks": false}}' class="video-js" />
</div>
`
const player = VideoJs(videoContainer.current.querySelector('video'), videoJsOptions, async () => {
player.src({ src: url, type: fileType })
})
// When destruct dispose the player
return () => player.dispose()
}, [url, fileType])
return <div ref={videoContainer} />
}
export default VideoPlayer
Thank you in advance.
I tried using different video.js plugins but nothing solves the problem. Please help if you have any ideas.
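One thing worth checking (this is an assumption on my part, not something confirmed above): Chrome relies on HTTP Range requests (206 Partial Content responses) for seeking, and a backend that always returns the whole file with a plain 200 produces exactly this restart-on-seek behaviour. A quick probe from the browser console, with a placeholder URL:
// Probe the video endpoint with a Range header; a seek-friendly backend
// should answer 206 with a Content-Range header instead of a full 200.
const videoUrl = "https://your-backend.example.com/api/file?key=YOUR_KEY"; // placeholder
fetch(videoUrl, { headers: { Range: "bytes=0-1" } }).then((res) => {
  console.log("status:", res.status); // 206 => range requests supported
  console.log("Accept-Ranges:", res.headers.get("Accept-Ranges"));
  console.log("Content-Range:", res.headers.get("Content-Range"));
});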

Problem with STUN/TURN servers in a WebRTC video app made with the MERN stack

I have hosted a peer-to-peer meeting React app on Netlify. I use PeerJS for the video. Everything is working as expected except the video: for some networks the remote person's video works, and for others it does not. I looked it up and found that it may be a STUN/TURN issue. I then added all of the STUN/TURN servers below to my code, but the video still does not get set up in some cases. I am attaching the code for the video and the link to the site.
import React,{useEffect,useState} from 'react';
import {io} from "socket.io-client";
import {useParams} from 'react-router-dom';
import {Grid} from "@material-ui/core";
import Peer from 'peerjs';
var connectionOptions = {
"force new connection" : true,
"reconnectionAttempts": "Infinity",
"timeout" : 10000,
"transports" : ["websocket"]
};
const Videobox = ({isVideoMute,isAudioMute}) => {
var myPeer = new Peer(
{
config: {'iceServers': [
{urls:'stun:stun01.sipphone.com'},
{urls:'stun:stun.ekiga.net'},
{urls:'stun:stun.fwdnet.net'},
{urls:'stun:stun.ideasip.com'},
{urls:'stun:stun.iptel.org'},
{urls:'stun:stun.rixtelecom.se'},
{urls:'stun:stun.schlund.de'},
{urls:'stun:stun.l.google.com:19302'},
{urls:'stun:stun1.l.google.com:19302'},
{urls:'stun:stun2.l.google.com:19302'},
{urls:'stun:stun3.l.google.com:19302'},
{urls:'stun:stun4.l.google.com:19302'},
{urls:'stun:stunserver.org'},
{urls:'stun:stun.softjoys.com'},
{urls:'stun:stun.voiparound.com'},
{urls:'stun:stun.voipbuster.com'},
{urls:'stun:stun.voipstunt.com'},
{urls:'stun:stun.voxgratia.org'},
{urls:'stun:stun.xten.com'},
{
urls: 'turn:numb.viagenie.ca',
credential: 'muazkh',
username: 'webrtc@live.com'
},
{
urls: 'turn:192.158.29.39:3478?transport=udp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
},
{
urls: 'turn:192.158.29.39:3478?transport=tcp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
}
]} /* Sample servers, please use appropriate ones */
}
);
const peers = {}
const [socket, setSocket] = useState()
const {id:videoId} = useParams();
const videoGrid = document.getElementById('video-grid')
useEffect(()=> {
const s=io("https://weconnectbackend.herokuapp.com",connectionOptions);
setSocket(s);
return () => {
s.disconnect();
}
},[])
// let myVideoStream;
const [myVideoStream, setmyVideoStream] = useState()
const muteUnmute = () => {
const enabled = myVideoStream.getAudioTracks()[0].enabled;
if (enabled) {
myVideoStream.getAudioTracks()[0].enabled = false;
//setUnmuteButton();
} else {
//setMuteButton();
myVideoStream.getAudioTracks()[0].enabled = true;
}
}
const playStop = () => {
//console.log('object')
let enabled = myVideoStream.getVideoTracks()[0].enabled;
if (enabled) {
myVideoStream.getVideoTracks()[0].enabled = false;
//setPlayVideo()
} else {
//setStopVideo()
myVideoStream.getVideoTracks()[0].enabled = true;
}
}
useEffect(() => {
if(myVideoStream)
playStop()
}, [isVideoMute])
useEffect(() => {
if(myVideoStream)
muteUnmute()
}, [isAudioMute])
useEffect(() => {
if(socket== null)
return;
myPeer.on('open',id=>{
socket.emit('join-room',videoId,id);
})
const myVideo = document.createElement('video')
myVideo.muted = true
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(stream => {
// myVideoStream = stream;
window.localStream=stream;
setmyVideoStream(stream);
console.log(myVideoStream,"myvideostream");
addVideoStream(myVideo, stream)
myPeer.on('call', call => {
call.answer(stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
})
socket.on('user-connected',userId =>{
connectToNewUser(userId, stream)
})
socket.on('user-disconnected', userId => {
if (peers[userId]) peers[userId].close()
})
})
}, [socket,videoId])
function addVideoStream(video, stream) {
video.srcObject = stream
video.addEventListener('loadedmetadata', () => {
video.play()
})
videoGrid.append(video)
}
function connectToNewUser(userId, stream) {
const call = myPeer.call(userId, stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
call.on('close', () => {
video.remove()
})
peers[userId] = call
}
return (
<div id="video-grid" className="videoStyleFromDiv">
{/* <Video srcObject={srcObject}/> */}
</div>
)
}
export default Videobox
Website Link
The TURN servers you are using, at least the ones taken from https://www.html5rocks.com/en/tutorials/webrtc/infrastructure/, have been out of commission for a couple of years.
Copying credentials from random places is not how TURN works; you will need to run your own server.
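If you do run your own TURN server (for example a coturn instance), the PeerJS config from the question shrinks to something like the sketch below; the hostname and credentials are placeholders, not working values:
// Placeholder hostname and credentials: replace with your own TURN deployment.
const myPeer = new Peer({
  config: {
    iceServers: [
      { urls: "stun:stun.l.google.com:19302" },
      {
        urls: "turn:turn.example.com:3478",
        username: "your-username",
        credential: "your-credential",
      },
    ],
  },
});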

Using OpenLayers in React

I am a student creating a large project that displays data and information on maps using OpenLayers and React. At this point I have a basic two-page project where the map is the central piece of the application. Each page has one main component that contains its sidebar, the Viewer component that contains the map itself, and some additional components specific to that page.
The Viewer class is a separate component that I wrote for every page. This component handles all the user interaction with the page. It also sends information to and receives information from the main component. I chose to create a separate Viewer class for each page because of the different ways each page works. They might load different things or handle interactions completely differently. If I had one Viewer class used by every page, it would become too large and have too many if statements to check what code to run for each page.
Here is an example of the homepage Viewer. In the constructor I create the map and add some refs for a popup window that shows up when the user clicks on one of the features on the map. This popup window shows some basic information about the feature and lets the user add it to his list of features.
In componentDidMount I first make the Map class add the boundaries layer and pass it a function to call when one of the features of this layer has been clicked. I also add the overlay window for the popup to the map. It will be shown when a feature has been clicked.
resetMapLayers is a function that is called on each render. Here I make the map check which background TileLayer to use and whether it should show the top layer or not.
featureSelected() is the function that handles a click event on a feature of the top layer. It creates a popup with the basic information of the feature.
closePopup is called when the popup has been closed. It makes the map deselect the clicked feature; otherwise it would stay selected after the popup is closed. It also removes the Overlay (popup) from the screen.
addFeature() is the function that is called when a user chooses to add the feature to his list of features, which he can do in the popup window of that feature.
import React, { Component } from "react";
import { connect } from "react-redux";
import Map from "../Map/Map";
import "ol/ol.css";
import styles from "./Viewer.module.scss";
import Overlay from "ol/Overlay";
import Button from "../UI/Button/Button";
class MainViewer extends Component {
constructor(props) {
super(props);
Map.createNewMap();
this.popup = React.createRef();
this.popupContent = React.createRef();
}
componentDidMount() {
Map.addBoundriesLayer(this.featureSelected);
Map.map.setTarget("map");
let container = this.popup.current;
let overlay = new Overlay({
element: container,
autoPan: true,
autoPanAnimation: {
duration: 250,
},
});
this.overlay = overlay;
Map.map.addOverlay(overlay);
}
resetMapLayers() {
Map.setBackgroundTileLayer(this.props.type);
Map.togglePlotBoundriesLayers(this.props.plotBoundriesState);
}
featureSelected = (event, select) => {
if (event.selected[0]) {
this.selectedFeature = event.selected[0];
let selectedFeature = {
id: event.selected[0].id_,
gewasgroepnaam: event.selected[0].getProperties().GEWASGROEP,
gewasnaam: event.selected[0].getProperties().LBLHFDTLT,
oppervlak: (event.selected[0].getProperties().OPPERVL / 10000).toFixed(
2
),
coords: event.selected[0].getProperties().geometry.extent_,
};
let content = this.popupContent.current;
content.innerHTML =
"<p><strong>Name: </strong>" +
selectedFeature.gewasgroepnaam +
"</p>" +
"<p><strong>Exact Name: </strong>" +
selectedFeature.gewasnaam +
"</p>" +
"<p><strong>Area: </strong>" +
selectedFeature.oppervlak +
" ha</p>";
this.overlay.setPosition(event.mapBrowserEvent.coordinate);
}
};
closePopup() {
this.overlay.setPosition(undefined);
Map.clearSelect();
return false;
}
addFeature() {
this.overlay.setPosition(undefined);
Map.clearSelect();
this.props.featureAddedHandler(this.selectedFeature);
}
render() {
this.resetMapLayers();
return (
<div>
<div id="map" className={styles.Map}></div>
<div ref={this.popup} className={styles.OlPopup}>
<div className={styles.OlPopupButtonsDiv}>
<Button
btnType="Danger"
className={[styles.PopupButton, styles.ClosePopupButton].join(
" "
)}
clicked={() => this.closePopup()}
>
Annuleer
</Button>
<Button
btnType="Success"
className={[styles.PopupButton, styles.AddPopupButton].join(" ")}
clicked={() => this.addFeature()}
>
Voeg Toe
</Button>
</div>
<div ref={this.popupContent}></div>
</div>
</div>
);
}
}
const mapStateToProps = (state) => {
return {
type: state.mapDetails.type,
plotBoundriesState: state.mapDetails.state,
};
};
export default connect(mapStateToProps)(MainViewer);
The Map class contains the actual map. It deals with all the map-specific concerns: adding layers to the map, creating the initial map, adding map interactions, and so on.
In the constructor I call the methods to initially create a basic map with a TileLayer background.
The createMap and createNewMap methods let me create the map object itself.
createBackgroundLayerGroups() creates the tile layers used as the background layer. It can be either OSM or Bing Maps; the visibility property lets me handle which one to show.
clearAllBoundriesLayers() deletes all the boundaries layers that can be put on top of the tile layer. It deletes every layer and clears the select interaction that has been added to these layers. I do this so that the layers are deleted when I change page.
addBoundriesLayer lets me set and add a boundaries layer. This is the vector layer that is put on top of the TileLayer.
addUsersPlotBoundriesLayer does the same as addBoundriesLayer, but for all the layers that the user has. This function is called on another page that only shows the features of the user.
setExtentOfMapByUserFeaters lets me set the extent of the map either to a given extent or to the extent of the user's features.
setInteractionForPlotBoundriesLayer adds the select interaction for the plot boundaries layer.
setHoverInteractionForUserPlotBoundries adds the hover interaction for the plotUserBoundriesLayer.
clearSelect clears all the selected features, so they are no longer highlighted.
setBackgroundTileLayer lets me show a specific background TileLayer.
togglePlotBoundriesLayers lets me hide or show the vector layer that is currently shown.
import Map from "ol/Map";
import TileWMS from "ol/source/TileWMS";
import TileLayer from "ol/layer/Tile";
import View from "ol/View";
import OSM from "ol/source/OSM";
import BingMaps from "ol/source/BingMaps";
import VectorSource from "ol/source/Vector";
import { bbox as bboxStrategy } from "ol/loadingstrategy";
import GeoJSON from "ol/format/GeoJSON";
import { Vector, Group, Tile } from "ol/layer";
import Select from "ol/interaction/Select";
import { Feature } from "ol";
import { Polygon } from "ol/geom";
import { Fill, Stroke, Style } from "ol/style";
class OlMap {
constructor() {
this.createNewMap();
this.createBackgroundLayerGroups();
}
createNewMap() {
this.map = this.createMap();
}
createMap() {
return new Map({
target: null,
layers: [],
view: new View({
center: [594668.0262129545, 6602083.305674396],
maxZoom: 19,
zoom: 14,
}),
});
}
createBackgroundLayerGroups() {
this.layersOSM = new Group({
layers: [
new Tile({
source: new OSM(),
}),
new Tile({
source: new BingMaps({
imagerySet: "Aerial",
key: process.env.REACT_APP_BING_MAPS,
}),
visible: false,
}),
],
});
}
clearAllBoundriesLayers() {
this.map.getLayers().forEach((layer) => {
if (
layer.get("name") === "plotBoundriesLayer" ||
layer.get("name") === "plotUserBoundriesLayer"
) {
layer.getSource().clear();
this.map.removeLayer(layer);
}
});
if (this.select) {
this.select.getFeatures().clear();
}
}
addBoundriesLayer(featureSelected) {
this.clearAllBoundriesLayers();
let vectorSource = new VectorSource({
format: new GeoJSON(),
minScale: 15000000,
loader: function (extent, resolution, projection) {
/*
Link for the DLV
let url = process.env.REACT_APP_MAP_API +
extent.join(",") +
",EPSG:3857";
*/ let url = process.env.REACT_APP_MAP_API +
extent.join(",") +
",EPSG:3857";
// */
let xhr = new XMLHttpRequest();
xhr.open("GET", url);
let onError = function () {
vectorSource.removeLoadedExtent(extent);
};
xhr.onerror = onError;
xhr.onload = function () {
if (xhr.status === 200) {
let features = vectorSource
.getFormat()
.readFeatures(xhr.responseText);
features.forEach(function (feature) {
//ID for the DLV
//feature.setId(feature.get("OBJ_ID"));
feature.setId(feature.get("OIDN"));
});
vectorSource.addFeatures(features);
} else {
onError();
}
};
xhr.send();
},
strategy: bboxStrategy,
});
let vector = new Vector({
//minZoom: 13,
source: vectorSource,
});
this.setInteractionForPlotBoundriesLayer(vector, featureSelected);
vector.set("name", "plotBoundriesLayer");
this.map.addLayer(vector);
}
addUsersPlotBoundriesLayer(featureSelected, featureHovered, newFeatures) {
this.clearAllBoundriesLayers();
if (newFeatures.length > 0) {
let vectorSource = new VectorSource({
format: new GeoJSON(),
minScale: 15000000,
strategy: bboxStrategy,
});
newFeatures.forEach((newFeature) => {
let feature = new Feature({
geometry: new Polygon([newFeature.geometry]),
});
feature.setId(newFeature.plotId);
vectorSource.addFeature(feature);
});
let vector = new Vector({
//minZoom: 13,
source: vectorSource,
});
this.setInteractionForPlotBoundriesLayer(vector, featureSelected);
this.setHoverInteractionForUserPlotBoundries(vector, featureHovered);
vector.set("name", "plotUserBoundriesLayer");
this.plotsExtent = vectorSource.getExtent();
this.map.addLayer(vector);
}
}
setExtentOfMapByUserFeaters(extent) {
if (extent === undefined) {
if (this.plotsExtent !== undefined && this.plotsExtent[0] !== Infinity) {
this.map.getView().fit(this.plotsExtent);
}
} else {
this.map.getView().fit(extent);
}
}
setInteractionForPlotBoundriesLayer(layer, featureSelected) {
this.select = new Select({
layers: [layer],
});
this.select.on("select", (event) => featureSelected(event, this.select));
this.map.addInteraction(this.select);
}
setHoverInteractionForUserPlotBoundries(layer, featureHovered) {
this.hoveredFeature = null;
let defaultStyle = new Style({
stroke: new Stroke({
width: 2,
color: "#9c1616",
}),
fill: new Fill({ color: "#c04e4e" }),
});
let hoveredStyle = new Style({
stroke: new Stroke({
width: 2,
color: "#9c1616",
}),
fill: new Fill({ color: "#9c1616" }),
});
this.map.on("pointermove", (e) => {
layer
.getSource()
.getFeatures()
.forEach((feature) => {
feature.setStyle(defaultStyle);
});
let newFeature = null;
this.map.forEachFeatureAtPixel(e.pixel, (f) => {
newFeature = f;
newFeature.setStyle(hoveredStyle);
return true;
});
if (newFeature) {
if (
this.hoveredFeature === null ||
this.hoveredFeature !== newFeature
) {
this.hoveredFeature = newFeature;
featureHovered(this.hoveredFeature.id_);
}
} else {
if (this.hoveredFeature !== null) {
this.hoveredFeature = null;
featureHovered(null);
}
}
});
}
hoveredSideBarFeatureHandler(hoveredFeatureId) {
let defaultStyle = new Style({
stroke: new Stroke({
width: 2,
color: "#9c1616",
}),
fill: new Fill({ color: "#c04e4e" }),
});
let hoveredStyle = new Style({
stroke: new Stroke({
width: 2,
color: "#9c1616",
}),
fill: new Fill({ color: "#9c1616" }),
});
this.map.getLayers().forEach((layer) => {
if (layer.get("name") === "plotUserBoundriesLayer") {
layer
.getSource()
.getFeatures()
.forEach((feature) => {
if (feature.id_ === hoveredFeatureId) {
feature.setStyle(hoveredStyle);
} else {
feature.setStyle(defaultStyle);
}
});
}
});
}
clearSelect() {
this.select.getFeatures().clear();
}
setBackgroundTileLayer(type) {
if (this.backgroundTileType === null) {
this.backgroundTileType = "OPENSTREETMAP";
}
if (this.map.getLayers().getArray().length === 0) {
this.map.setLayerGroup(this.layersOSM);
} else {
if (this.backgroundTileType !== type) {
this.backgroundTileType = type;
console.log(this.map.getLayers());
this.map.getLayers().getArray()[0].setVisible(false);
this.map.getLayers().getArray()[1].setVisible(false);
if (type === "OPENSTREETMAP") {
this.map.getLayers().getArray()[0].setVisible(true);
} else if (type === "BING MAPS") {
this.map.getLayers().getArray()[1].setVisible(true);
}
}
}
}
togglePlotBoundriesLayers(state) {
if (this.plotBoundriesState === null) {
this.plotBoundriesState = true;
}
if (this.plotBoundriesState !== state) {
this.plotBoundriesState = state;
this.map.getLayers().forEach((layer) => {
if (layer.get("name") === "plotBoundriesLayer") {
layer.setVisible(state);
}
if (layer.get("name") === "plotUserBoundriesLayer") {
console.log(state);
layer.setVisible(state);
}
});
}
}
addTileLayer(url) {
const wmsLayer = new TileLayer({
source: new TileWMS({
url,
params: {
TILED: true,
},
crossOrigin: "Anonymous",
}),
});
this.map.addLayer(wmsLayer);
}
}
export default new OlMap();
At this point I am wondering if I am doing things well, and what could be done differently or better to optimize my code. This will help me in the long run, so I don't get stuck with bad code I created at the beginning of the project.
Many thanks in advance!
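One small, concrete example of the kind of cleanup that is possible (a sketch, not a full review): the default and hover styles in setHoverInteractionForUserPlotBoundries and hoveredSideBarFeatureHandler are built twice with identical values, so they could be hoisted into shared constants:
// Shared style constants, built once and reused by both hover handlers.
import { Fill, Stroke, Style } from "ol/style";

const DEFAULT_PLOT_STYLE = new Style({
  stroke: new Stroke({ width: 2, color: "#9c1616" }),
  fill: new Fill({ color: "#c04e4e" }),
});

const HOVERED_PLOT_STYLE = new Style({
  stroke: new Stroke({ width: 2, color: "#9c1616" }),
  fill: new Fill({ color: "#9c1616" }),
});

// Inside OlMap, both handlers can then do:
// feature.setStyle(isHovered ? HOVERED_PLOT_STYLE : DEFAULT_PLOT_STYLE);
Sharing OpenLayers Style instances like this is safe, since setStyle only stores a reference to the style object.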

multiple image upload one by one with delete and change option with preview

I am trying to upload images one by one, with a change and delete option for each uploaded image, in a multiple-image view with React and Apollo Client. I can't get a clear idea of how to do this easily and I'm quite confused.
Please can anyone help me with this?
**updated**
Hi, I am now using react-dropzone-component. So far I have multiple file upload with a delete option only.
I can send the files to the server (Node, using multipart form data); the server creates the file and stores the path (path name only) in the database. But I can't show the image files on the front end from the path I get back from the back end.
const initialState = {
files: [],
imagePreviewUrl: []
};
class Image extends React.Component {
constructor(props) {
super(props);
this.state = { ...initialState };
}
componentWillMount() {
let {match, data} = this.props;
const id = match.params.id && match.params.id.slice(1);
if (id) {
let currentProduct = (data && data.getProduct) && data.getProduct.find((data) => {
return data.id == id;
});
this.setState({
imagePreviewUrl: currentProduct.images
});
}
}
handleAdd(file) {
console.log(file)
var allFiles = this.state.files;
allFiles = allFiles.concat([file]);
this.setState({
files: allFiles
});
}
handleRemove(file) {
let allFiles = this.state.files;
this.state.files.forEach((itr, i) => {
if (itr.upload.uuid == file.upload.uuid) {
allFiles.splice(i, 1)
}
});
this.setState({
files: allFiles
});
console.log(this.state.files, allFiles, file)
}
render() {
let {match, classes, data} = this.props;
let {imagePreviewUrl} = this.state;
const id = match.params.id && match.params.id.slice(1);
var self = this;
return (
<GridContainer>
<DropzoneComponent
config={{
postUrl: 'no-url',
iconFiletypes: ['.jpg', '.png', '.gif'],
showFiletypeIcon: true
}}
eventHandlers=
{{
addedfile: (file) => this.handleAdd(file),
removedfile: (file) => this.handleRemove(file),
init: (dropzone) => {
console.log(dropzone)
}
}}
djsConfig={{
autoProcessQueue: false,
addRemoveLinks: true,
previewTemplate: ReactDOMServer.renderToStaticMarkup(
...<img data-dz-thumbnail="true" /> ...)}} />
</GridContainer>
);
}
}
export default withStyles(style)(Image);
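As a rough sketch of the missing preview rendering (everything project-specific here is an assumption: IMAGE_BASE_URL is a placeholder for wherever the uploaded files are actually served from), the paths kept in state, e.g. imagePreviewUrl, could be mapped to img tags:
import React from "react";

// Placeholder: the host that actually serves the uploaded files.
const IMAGE_BASE_URL = "https://your-api.example.com";

// Renders one <img> per stored path (e.g. this.state.imagePreviewUrl).
const ImagePreviews = ({ paths }) => (
  <div>
    {paths.map((path) => (
      <img
        key={path}
        src={`${IMAGE_BASE_URL}/${path}`}
        alt="uploaded preview"
        style={{ width: 100, margin: 4 }}
      />
    ))}
  </div>
);

export default ImagePreviews;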

How to set custom DropzoneJS progress value?

I am using dropzone-react-component, which is simply a React wrapper of DropzoneJS.
I added an event that handles uploading like this:
addedfile: file => handleFileUpload(file),
However, the upload progress bar simply fills up and marks the upload as complete, while I can see that the upload is actually still in progress.
So my question is: how am I supposed to set my own value for the upload percentage and make it work with Dropzone's own styling?
This is the React component:
constructor(props) {
super(props);
this.componentConfig = {
iconFiletypes: ['.jpg', '.png', '.gif', '.pdf'],
showFiletypeIcon: true,
postUrl: 'no-url',
};
this.eventHandlers = {
addedfile: file => handleFileUpload(file),
};
this.djsConfig = {
autoProcessQueue: false,
dictDefaultMessage: 'Déposez un fichier ici ou cliquez pour en choisir un',
maxFilesize: 200, // MB
clickable: true, // Lets you click the dropzone
acceptedFiles: 'image/*,application/pdf',
renameFileName: this.props.fileRename || 'myFile',
parallelUploads: 1,
uploadMultiple: false,
};
}
render() {
  return (
    <DropzoneComponent
      config={this.componentConfig}
      eventHandlers={this.eventHandlers}
      djsConfig={this.djsConfig}
    />
  );
}
And this is the code used to upload the file, which uses the package Meteor Slingshot:
const handleFileUpload = (file) => {
var uploader = new Slingshot.Upload("myFileUploads");
uploader.send(file, function (error, downloadUrl) {
if (error) {
// Log service detailed response.
console.log(error);
} else {
console.log(downloadUrl);
}
});
let computation = Tracker.autorun(() => {
if (!isNaN(uploader.progress())) {
console.log(uploader.progress());
}
});
};
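One approach that might work (a sketch, assuming the wrapper's init event hands over the underlying Dropzone instance, as it does in the other examples above): keep a reference to that instance and feed Slingshot's reactive progress into Dropzone's uploadprogress event, so Dropzone's own styled progress bar follows the real upload.
import React from "react";
import DropzoneComponent from "react-dropzone-component";
// Slingshot and Tracker come from the Meteor packages, as in the question.

class UploadZone extends React.Component {
  constructor(props) {
    super(props);
    this.componentConfig = {
      postUrl: "no-url",
      showFiletypeIcon: true,
      iconFiletypes: [".jpg", ".png", ".gif", ".pdf"],
    };
    this.djsConfig = { autoProcessQueue: false };
    this.eventHandlers = {
      // Keep the underlying Dropzone instance so we can emit events on it.
      init: (dropzone) => { this.dropzone = dropzone; },
      addedfile: (file) => this.handleFileUpload(file),
    };
  }

  handleFileUpload = (file) => {
    const uploader = new Slingshot.Upload("myFileUploads");
    uploader.send(file, (error, downloadUrl) => {
      if (error) {
        console.log(error);
      } else {
        console.log(downloadUrl);
        this.dropzone.emit("complete", file); // mark the file as finished in the UI
      }
    });
    Tracker.autorun((computation) => {
      const progress = uploader.progress(); // Slingshot reports 0..1
      if (!isNaN(progress)) {
        // Dropzone expects a percentage here and updates its styled bar itself.
        this.dropzone.emit("uploadprogress", file, progress * 100);
        if (progress === 1) computation.stop();
      }
    });
  };

  render() {
    return (
      <DropzoneComponent
        config={this.componentConfig}
        eventHandlers={this.eventHandlers}
        djsConfig={this.djsConfig}
      />
    );
  }
}

export default UploadZone;
Since Dropzone's default preview template reacts to uploadprogress, emitting it manually should drive the built-in progress bar without any extra styling work.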
