Not able to interact with canvas elements in fabric.js - reactjs

I am new to fabric.js and am trying to add text to the canvas. The element is added to the canvas, but I am unable to interact with it. I am using React on the frontend.
import React, { useContext } from "react";
import canvasContext from "../../context/canvasContext";
import { fabric } from "fabric";

const AddText = () => {
  const { canvas } = useContext(canvasContext);

  const onAddText = () => {
    const textBox = canvas.add(
      new fabric.Text("Tap To Edit", {
        left: 100,
        top: 100,
        fontFamily: "arial black",
        fill: "#333",
        fontSize: 50,
      })
    );
    canvas.add(textBox);
  };

  return <div onClick={onAddText}>Add Text</div>;
};

export default AddText;
These are my fabric.js settings. Is there a property I am missing? Do I have to enable interaction with a specific setting?
import { useContext, useLayoutEffect } from "react";
import { fabric } from "fabric";
import canvasContext from "../../context/canvasContext";

const canvasStyle = {
  border: "3px solid black",
};

export default function CanvasApp() {
  const { setCanvas } = useContext(canvasContext);

  useLayoutEffect(() => {
    const canvas = new fabric.Canvas("canvas", {
      height: 800,
      width: 1200,
      selectionLineWidth: 1,
      controlsAboveOverlay: true,
      centeredScaling: true,
    });
    canvas.renderAll();
    setCanvas(canvas);
  }, []);

  return <canvas id="canvas" style={canvasStyle} />;
}
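For what it's worth, two things stand out; these are assumptions rather than a confirmed fix. First, canvas.add() returns the canvas itself, so the second canvas.add(textBox) call adds the canvas to itself instead of the text object. Second, if the app runs under React 18 StrictMode, the useLayoutEffect runs twice and stacks a second, empty Fabric canvas over the one holding your objects, which swallows mouse events. A minimal sketch of both changes, using fabric.IText so the text is also editable on double-click:

// AddText: create the object first, then add it exactly once.
const onAddText = () => {
  const textBox = new fabric.IText("Tap To Edit", {
    left: 100,
    top: 100,
    fontFamily: "arial black",
    fill: "#333",
    fontSize: 50,
  });
  canvas.add(textBox);
  canvas.setActiveObject(textBox);
};

// CanvasApp: dispose the canvas on unmount so a remount (e.g. under
// StrictMode) does not leave a stale, non-interactive canvas on top.
useLayoutEffect(() => {
  const canvas = new fabric.Canvas("canvas", {
    height: 800,
    width: 1200,
    selectionLineWidth: 1,
    controlsAboveOverlay: true,
    centeredScaling: true,
  });
  setCanvas(canvas);
  return () => {
    canvas.dispose();
  };
}, []);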

Related

fontSize in Treemap react-apexcharts not working

I'm using react-apexcharts to render a treemap, and I'm setting fontSize inside dataLabels, but it isn't working.

dataLabels: {
  //enabled: true,
  style: {
    fontSize: "20px",
  },
Here is the screenshot
I have also used

return `${textList[0]}
${textList[1]}
${textList[2]}`

to return the string on multiple lines, but that doesn't seem to be working either.
Below is the full code
import React from 'react';
//import ReactApexChart from 'react-apexcharts';
import { ApexOptions } from "apexcharts";
import dynamic from 'next/dynamic';
import { getTenantDetails } from "utils/assetOverview";
const ReactApexChart = dynamic(() => import('react-apexcharts'), { ssr: false });
type datapointType={
value: number
seriesIndex: number
dataPointIndex: number
}
type tenantType={
x:string,
y:number
}
const TreeMap : React.FC<{areaChecked:boolean,assetCode:string}> = ({areaChecked,assetCode}) => {
let {tenantList} = getTenantDetails(assetCode);
let tenants = [] as tenantType[]
tenants = tenantList && tenantList.map((tenant)=>{
if(areaChecked){
return {
"x":`${tenant.name}\n${tenant.area}ft (${tenant.percentage}%)\n${tenant.years}`,
"y":tenant.area
}
}else{
return {
"x":`${tenant.name}\n$${tenant.revenue} (${tenant.percentage}%)\n${tenant.years}`,
"y":tenant.revenue
}
}
})
const series =[
{
data:tenants
}
]
const options:ApexOptions = {
legend: {
show: false
},
colors:['#002776'],
chart: {
height: 260,
toolbar:{
show:false
}
},
plotOptions: {
treemap: {
enableShades: false
}
},
dataLabels: {
//enabled: true,
style: {
fontSize: "20px",
},
formatter: function(text:string,o:datapointType){
let textList = text.split("\n");
return `${textList[0]}
${textList[1]}
${textList[2]}`
}
}
}
return (
<React.Fragment>
<ReactApexChart options={options} series={series} type="treemap" height={260} />
</React.Fragment>
);
}
export default TreeMap;
I also tried setting the font size in the global.scss file, but then the text in each rectangle overflows. Here is the screenshot.

.apexcharts-data-labels > text {
  font-size: 10px !important;
}
So I tried to add

.apexcharts-data-labels > text {
  overflow-wrap: break-word !important;
}

but that doesn't seem to work either.
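One thing worth trying, offered as an assumption rather than a verified fix: ApexCharts' dataLabels formatter can return an array of strings, and each entry is then rendered on its own line, which avoids embedding raw newlines in the template literal. A minimal sketch of the relevant option:

dataLabels: {
  style: {
    fontSize: "20px",
  },
  // Returning an array of strings asks ApexCharts to draw one line per entry.
  formatter: function (text) {
    return text.split("\n");
  },
},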

How can I send a canvas with a filter through openvidu?

While working on a project that uses WebRTC, I found code that applies a filter using face_mesh. The code is as follows:
import { FaceMesh } from "@mediapipe/face_mesh";
import React, { useRef, useEffect, createRef } from "react";
import * as Facemesh from "@mediapipe/face_mesh";
import * as cam from "@mediapipe/camera_utils";
import Webcam from "react-webcam";
import "../stream/StreamComponent.css";
function Filter() {
const webcamRef = useRef(null);
const canvasRef = useRef(null);
const connect = window.drawConnectors;
var camera = null;
function onResults(results) {
// const video = webcamRef.current.video;
const videoWidth = webcamRef.current.video.videoWidth;
const videoHeight = webcamRef.current.video.videoHeight;
const videoRef = createRef();
console.log(videoRef);
console.log(connect);
// Set canvas width
canvasRef.current.width = videoWidth;
canvasRef.current.height = videoHeight;
const canvasElement = canvasRef.current;
const canvasCtx = canvasElement.getContext("2d");
canvasCtx.save();
canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
canvasCtx.drawImage(
results.image,
0,
0,
canvasElement.width,
canvasElement.height
);
if (results.multiFaceLandmarks) {
for (const landmarks of results.multiFaceLandmarks) {
connect(canvasCtx, landmarks, Facemesh.FACEMESH_TESSELATION, {
color: "#C0C0C070",
lineWidth: 1,
});
connect(canvasCtx, landmarks, Facemesh.FACEMESH_RIGHT_EYE, {
color: "#FF3030",
});
connect(canvasCtx, landmarks, Facemesh.FACEMESH_RIGHT_EYEBROW, {
color: "#FF3030",
});
connect(canvasCtx, landmarks, Facemesh.FACEMESH_LEFT_EYE, {
color: "#30FF30",
});
connect(canvasCtx, landmarks, Facemesh.FACEMESH_LEFT_EYEBROW, {
color: "#30FF30",
});
connect(canvasCtx, landmarks, Facemesh.FACEMESH_FACE_OVAL, {
color: "#E0E0E0",
});
connect(canvasCtx, landmarks, Facemesh.FACEMESH_LIPS, {
color: "#E0E0E0",
});
}
}
canvasCtx.restore();
}
// }
// setInterval(())
useEffect(() => {
const faceMesh = new FaceMesh({
locateFile: (file) => {
return `https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/${file}`;
},
});
faceMesh.setOptions({
maxNumFaces: 1,
minDetectionConfidence: 0.5,
minTrackingConfidence: 0.5,
});
faceMesh.onResults(onResults);
if (
typeof webcamRef.current !== "undefined" &&
webcamRef.current !== null
) {
camera = new cam.Camera(webcamRef.current.video, {
onFrame: async () => {
await faceMesh.send({ image: webcamRef.current.video });
},
width: 640,
height: 480,
});
camera.start();
}
}, []);
return (
<center>
<div className="Filter">
<Webcam
ref={webcamRef}
style={{
position: "absolute",
marginLeft: "auto",
marginRight: "auto",
left: 0,
right: 0,
textAlign: "center",
zindex: 9,
width: 640,
height: 480,
}}
/>{" "}
<canvas
ref={canvasRef}
className="output_canvas"
style={{
position: "absolute",
marginLeft: "auto",
marginRight: "auto",
left: 0,
right: 0,
textAlign: "center",
zindex: 9,
width: 640,
height: 480,
}}
></canvas>
</div>
</center>
);
}
export default Filter;
So I'm trying to send the filtered canvas through OpenVidu.
The OpenVidu code that displays the default camera stream is as follows:
import React, { Component } from "react";
import "./StreamComponent.css";
export default class OvVideoComponent extends Component {
constructor(props) {
super(props);
this.videoRef = React.createRef();
}
componentDidMount() {
if (this.props && this.props.user.streamManager && !!this.videoRef) {
console.log("PROPS: ", this.props);
this.props.user.getStreamManager().addVideoElement(this.videoRef.current);
}
}
componentDidUpdate(props) {
if (props && !!this.videoRef) {
this.props.user.getStreamManager().addVideoElement(this.videoRef.current);
}
}
render() {
return (
<video
autoPlay={true}
id={"video-" + this.props.user.getStreamManager().stream.streamId}
ref={this.videoRef}
muted={this.props.mutedSound}
/>
);
}
}
I think I need to apply a canvas object or canvas.captureStream() to this.props.user.getStreamManager().addVideoElement(), but I don't know how to do it.
It's my first time using OpenVidu, so I'm running into a lot of difficulties.
I'm sorry if the explanation isn't detailed enough.
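A possible approach, sketched under the assumption that the session is created with openvidu-browser and that the filtered frames are drawn onto the canvas shown above (the publisher wiring here is illustrative, not taken from the code in the question): capture the canvas as a MediaStream and hand its video track to initPublisher as a custom videoSource, since OpenVidu accepts a MediaStreamTrack there.

// Minimal sketch: publish the filtered canvas instead of the raw webcam.
const OV = new OpenVidu();
const canvasStream = canvasRef.current.captureStream(30); // 30 fps
const publisher = OV.initPublisher(undefined, {
  audioSource: undefined, // default microphone
  videoSource: canvasStream.getVideoTracks()[0], // track coming from the canvas
  publishAudio: true,
  publishVideo: true,
});
// Once connected to the session elsewhere in the app:
// session.publish(publisher);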

React native reanimated v2 Scale + Opacity withRepeat

I am trying to animate a View's scale and opacity together with React Native Reanimated v2, but I am not able to control them with withRepeat.
The code below only repeats the scaling, not the opacity. How can I control both the opacity and the scale of the view with withRepeat? I want both animations applied to the view in a loop.
import React, { useState } from 'react';
import { View, TouchableWithoutFeedback } from 'react-native';
import Animated,
{ withRepeat, useSharedValue, interpolate, useAnimatedStyle, useDerivedValue, withTiming }
from 'react-native-reanimated'
import Styles from './Styles';
function LoopApp() {
const [state, setState] = useState(0);
const scaleAnimation = useSharedValue(1);
const animationOpacityView = useSharedValue(1);
scaleAnimation.value = withRepeat(withTiming(2.5, { duration: 2000 }), -1, true);
//animationOpacityView.value = withRepeat(0, -1, true);
const debug = useDerivedValue(() => {
// console.log(scaleAnimation.value);
return scaleAnimation.value;
});
const growingViewStyle = useAnimatedStyle(() => {
return {
transform: [{ scale: scaleAnimation.value }],
opacity: withTiming(animationOpacityView.value, {
duration: 1500
}, () => {
animationOpacityView.value = 0.99
})
};
});
return (
<View style={Styles.container}>
<Animated.View style={[Styles.viewStyle, growingViewStyle]} />
</View>
);
}
export default LoopApp;
Style.js
import {DevSettings, Dimensions, I18nManager} from 'react-native';
import Constants from '../../common/Constants';
const Screen = {
width: Dimensions.get('window').width,
height: Dimensions.get('window').height,
};
export default {
container: {
flex: 1,
justifyContent: "center",
alignItems: "center",
},
viewStyle: {
backgroundColor: '#19a35c',
width: Screen.width * 0.0364,
height: Screen.width * 0.0364,
borderRadius: 100,
},
};
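A minimal sketch of one way to repeat both values, assuming the standard Reanimated v2 hooks (the durations are illustrative): start both shared values with withRepeat inside a useEffect so they are not restarted on every render, and read them together in the animated style.

import React, { useEffect } from 'react';
import { View } from 'react-native';
import Animated, {
  useSharedValue,
  useAnimatedStyle,
  withRepeat,
  withTiming,
} from 'react-native-reanimated';
import Styles from './Styles';

function LoopApp() {
  const scale = useSharedValue(1);
  const opacity = useSharedValue(1);

  useEffect(() => {
    // -1 repeats forever; true reverses each cycle (1 -> 2.5 -> 1 -> ...).
    scale.value = withRepeat(withTiming(2.5, { duration: 2000 }), -1, true);
    opacity.value = withRepeat(withTiming(0, { duration: 2000 }), -1, true);
  }, []);

  const growingViewStyle = useAnimatedStyle(() => ({
    transform: [{ scale: scale.value }],
    opacity: opacity.value,
  }));

  return (
    <View style={Styles.container}>
      <Animated.View style={[Styles.viewStyle, growingViewStyle]} />
    </View>
  );
}

export default LoopApp;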

How to test a component that receives a ref as props?

I want to snapshot-test a component in React using react-test-renderer. The component I want to test receives a ref from another component as a prop, and it relies on a function implemented by the component that the ref points to:
import React from "react";
import { makeStyles, Paper, Typography } from "@material-ui/core";
import { INodeInfoProps } from "./interfaces";
const useStyles = makeStyles({
container: {
position: "absolute",
padding: 10,
maxHeight: 600,
width: 400,
overflowWrap: "break-word",
"& p": {
fontSize: 12,
},
},
channels: {
display: "flex",
},
channelsComponent: {
marginLeft: 5,
},
});
export const NodeInfo: React.FC<INodeInfoProps> = ({ graphRef, info }) => {
const classes = useStyles();
const getDivCoords = () => {
if (graphRef.current) {
const nodeCoordinates = graphRef.current.graph2ScreenCoords(
info?.x || 0,
info?.y || 0,
info?.z || 0
);
return {
top: nodeCoordinates.y + 20,
left: nodeCoordinates.x,
};
}
return { top: 0, left: 0 };
};
if (info && graphRef.current) {
return (
<Paper
className={classes.container}
style={{
top: getDivCoords().top,
left: getDivCoords().left,
}}
>
<Typography>Pubkey: {info.publicKey}</Typography>
<Typography>Alias: {info.alias}</Typography>
</Paper>
);
}
return null;
};
So the function graph2ScreenCoords is implemented by the component whose ref my component receives as a prop.
My test component would look like this:
import React from "react";
import renderer from "react-test-renderer"
import {NodeInfo} from "../index";
it('should render each node info', () => {
const info = {
publicKey: "test123",
alias: "test",
color: "#fff",
visible: true,
links: [
{
channelId: "123",
node1: "test123",
node2: "test345",
capacity: "10000",
color: "#fff"
}
]
}
const tree = renderer.create(<NodeInfo graphRef={} info={info}/>).toJSON()
expect(tree).toMatchSnapshot();
})
But I need to pass the ref to the component under test so it can access the function graph2ScreenCoords.
What is the right way to do this? Should I render the other component in my test, create a ref, and pass it as a prop? Should I mock the ref? How?
Thanks in advance.
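One common approach, sketched here as a suggestion rather than a confirmed answer: since a ref is just an object with a current property, the test can pass a plain mock whose graph2ScreenCoords is a jest.fn, which keeps the snapshot independent of the real graph component.

// Hypothetical mock ref for the test above; the returned coordinates are arbitrary.
const graphRef = {
  current: {
    graph2ScreenCoords: jest.fn().mockReturnValue({ x: 100, y: 50 }),
  },
};

const tree = renderer
  .create(<NodeInfo graphRef={graphRef} info={info} />)
  .toJSON();
expect(tree).toMatchSnapshot();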

React Native Expo: Camera in ViewPager

I am creating a ViewPager with a Camera inside one of its Views. When the ViewPager first renders the views everything is fine, but when the ViewPager changes pages and then comes back to the camera page, the camera does not appear again. How can I solve this? Is there a way to render the camera asynchronously?
This is my ViewPager:
import React from 'react';
import ViewPager from '@react-native-community/viewpager';
import { View } from 'react-native';
const Pager = ({
pages,
initalPage,
onPageSelected,
onPageScrollStateChanged,
onPageScroll
}) => {
return (
<ViewPager
style={{flex: 1}}
initialPage={initalPage}
onPageSelected={(e)=>onPageSelected && onPageSelected(e.nativeEvent)}
onPageScrollStateChanged={(e)=>onPageScrollStateChanged && onPageScrollStateChanged(e.nativeEvent)}
onPageScroll={(e)=>onPageScroll && onPageScroll(e.nativeEvent)}
>
{
pages.map((page,i)=>
<View key={i} style={{flex: 1}}>
{
page
}
</View>
)
}
</ViewPager>
)
}
export default Pager;
This is my Camera Page:
import React, { useEffect, Suspense, useState } from 'react';
import { StyleSheet, View } from 'react-native';
import { Camera } from 'expo-camera';
import { LinearGradient } from 'expo-linear-gradient';
import { color } from '../../utils';
const ScannerView = ({
isInView,
}) => {
let camera = null;
const [hasPermission, setHasPermission] = useState(null);
const [cameraRatio, setCameraRatio] = useState('1:1');
useEffect(()=>{
(async () => {
const { status } = await Camera.requestPermissionsAsync();
setHasPermission(status === 'granted');
})();
}, []);
const handleCameraReady = () => {
camera.getSupportedRatiosAsync().then((data)=>{
setCameraRatio(data[data.length-1]);
});
}
const handleBarcodeScanned = (data) => {
console.log(data);
}
const handleThis = (err) => {
console.log("EVENT",err.nativeEvent)
}
const renderCamera = (
<Camera
ref={ref=>{
camera=ref;
}}
style={styles.camera}
type={Camera.Constants.Type.back}
focusable={true}
ratio={cameraRatio}
onCameraReady={handleCameraReady}
onBarCodeScanned={handleBarcodeScanned}
onMountError={handleThis}
/>
)
return (
<View style={styles.container}>
{
(hasPermission) &&
<Suspense>
{renderCamera}
</Suspense>
}
<LinearGradient
colors={['transparent', color.neutral80]}
style={styles.gradient}
/>
</View>
)
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: 'black',
alignItems: 'stretch'
},
camera: {
flex: 1,
backgroundColor: 'red',
},
gradient: {
flex: 1,
height: 220,
position: 'absolute',
bottom: 0,
left: 0,
right: 0
}
})
export default ScannerView;
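A pattern that often helps here, sketched under the assumption that the parent tracks the active page through onPageSelected and passes it down as the isInView prop that ScannerView already declares: only mount the Camera while its page is actually visible, so it is torn down and re-created every time the page is revisited.

// In ScannerView's return, gate the camera on both permission and visibility.
return (
  <View style={styles.container}>
    {hasPermission && isInView && renderCamera}
    <LinearGradient
      colors={['transparent', color.neutral80]}
      style={styles.gradient}
    />
  </View>
);

// In the parent (illustrative wiring, not from the question's code):
// <Pager
//   pages={[<ScannerView isInView={activePage === 0} />, otherPage]}
//   onPageSelected={(e) => setActivePage(e.position)}
// />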
