I have a 320x40 .png sprite sheet that I am trying to animate inside my canvas, for a total of 10 frames. Each individual animation frame is a 32x40 section of the png file. I'll cut to the chase and show the png animation code for the character:
export default function SpriteAni(ctx, charAnim, currentFrame) {
  const charSprite = new Image();
  charSprite.src = `./images/char_${charAnim}.png`;
  const posx = 320;
  const posy = 200;
  const sheet_width = 32;
  const sheet_height = 40;
  const sprite_width = sheet_width * 1.5;
  const sprite_height = sheet_height * 1.5;
  ctx.drawImage(charSprite, currentFrame * sheet_width, 0, sheet_width, sheet_height, posx, posy, sprite_width * 1.5, sprite_height * 1.5);
}
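To make sure I'm describing the slicing right, here is a minimal sketch of how I understand the drawImage frame slicing is supposed to work when the sheet is preloaded once up front (the spriteCache/getSheet/drawCharFrame names are just for illustration, not my real code):
// Minimal sketch: preload the sheet once, then draw one 32x40 frame per call.
const spriteCache = {}; // illustrative cache, keyed by animation name

function getSheet(charAnim) {
  if (!spriteCache[charAnim]) {
    const img = new Image();
    img.src = `./images/char_${charAnim}.png`;
    spriteCache[charAnim] = img;
  }
  return spriteCache[charAnim];
}

function drawCharFrame(ctx, charAnim, currentFrame) {
  const sheet = getSheet(charAnim);
  if (!sheet.complete) return; // skip drawing until the sheet has loaded
  // source rect: frame N starts at x = N * 32 on the 320x40 sheet
  ctx.drawImage(sheet, currentFrame * 32, 0, 32, 40, 320, 200, 48, 60);
}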
Here's the relevant code for drawing this to the screen inside my main Canvas.jsx file:
import { useEffect, useRef, useState } from "react";
import SpriteAni from '../SpriteAni.jsx';

// Canvas and game logic for display
const Canvas = ({ width, height }) => {
  const canvasRef = useRef(null);
  const requestIdRef = useRef(null);
  let frame = 0; // frames canvas is drawn to screen
  const [currentFrame, setCurrentFrame] = useState(0); // used to determine current frame for current Wario animation
  const [charAnim, setCharAnim] = useState('walk');

  // Char animation frame counts
  const walk_frames = 9;

  const drawFrame = () => {
    const ctx = canvasRef.current.getContext('2d');
    // clear screen each frame
    ctx.clearRect(0, 0, constants.CANVAS_WIDTH, constants.CANVAS_HEIGHT);
    // draw graphics
    handleBackground(ctx);
    SpriteAni(ctx, charAnim, currentFrame);
    if (currentFrame >= walk_frames) setCurrentFrame(0);
    else setCurrentFrame(currentFrame + 1);
  }

  const tick = () => {
    if (!canvasRef.current) return;
    drawFrame();
    frame++;
    requestAnimationFrame(tick);
  }

  useEffect(() => {
    requestIdRef.current = requestAnimationFrame(tick);
    return () => {
      cancelAnimationFrame(requestIdRef.current);
    };
  }, []);

  return (
    <canvas
      width={width}
      height={height}
      style={canvasStyle}
      ref={canvasRef}
    />
  );
}

export default Canvas;

const canvasStyle = {
  border: "1px solid black",
  position: "absolute",
};
What I'm especially confused about is that I can delete all of the "if (currentFrame >= ${charAnim}_frames)..." code underneath SpriteAni in the drawFrame function and nothing changes. The animation seems to act completely independently of any of that and just races through all 10 frames before vanishing off the screen within about a second. I don't understand how that's even possible, since the only thing inside SpriteAni is a drawImage call, and the only variable that should be changing each frame is currentFrame.
How is it animating anything independently like that? I'm baffled by what's going on here. Any help greatly appreciated!
I'm attempting to use a design built with React-Three-Fiber as the background of my application.
The canvas appears and runs the animation in the Chrome and Firefox dev tools on desktop, but fails to show on mobile. I've tried Firefox, Chrome, and Safari on my iPhone and a few other phones, to no avail.
It is a fairly basic animation, so it shouldn't be too much for the browser to handle.
I've included the proper meta tag in index.html, as found in the documentation.
The code below it is contained within its own component.
<meta
  name="viewport"
  content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0"
/>
import * as THREE from "three";
import React, { Suspense, useMemo, useRef } from "react";
import styled from "styled-components";
import { Canvas, useFrame } from "@react-three/fiber";
import vertexShader from "./Shaders/VertexShader";
import fragmentShader from "./Shaders/FragmentShader";

export default function Animation() {
  // universal points ref
  const points = useRef();
  const radius = 2;

  const Planet = () => {
    const distance = 2;
    const count = 20000;

    const particlesPosition = useMemo(() => {
      // create the Float32Array to store the point positions
      const positions = new Float32Array(count * 3);
      for (let i = 0; i < count; i++) {
        const theta = THREE.MathUtils.randFloatSpread(360);
        const phi = THREE.MathUtils.randFloatSpread(360);
        // generate random values for x, y, z
        let x = distance * Math.sin(theta) * Math.cos(phi);
        let y = distance * Math.sin(theta) * Math.sin(phi);
        let z = distance * Math.cos(theta);
        // add each value to the array
        positions.set([x, y, z], i * 3);
      }
      return positions;
    }, [count]);

    const uniforms = useMemo(
      () => ({
        uTime: {
          value: 0.8,
        },
        uRadius: {
          value: radius,
        },
      }),
      []
    );

    useFrame((state) => {
      const { clock } = state;
      // adding 110 starts the animation 110s ahead of the animation start time
      points.current.material.uniforms.uTime.value = clock.elapsedTime + 110;
    });

    return (
      <points ref={points}>
        <bufferGeometry>
          <bufferAttribute // bufferAttribute lets you set the position attribute of the geometry
            attach="attributes-position" // makes the data fed to bufferAttribute accessible under the position attribute
            count={particlesPosition.length / 3} // number of particles
            array={particlesPosition}
            itemSize={3} // number of values from the particlesPosition array per item/vertex; 3 because position is X, Y, Z
          />
        </bufferGeometry>
        <shaderMaterial
          depthWrite={false}
          fragmentShader={fragmentShader}
          vertexShader={vertexShader}
          uniforms={uniforms}
        />
      </points>
    );
  };

  return (
    // Canvas takes the place of all the boilerplate needed to get three.js running with React
    <Wrapper>
      <Canvas className="canvas">
        <ambientLight intensity={0.5} />
        <Suspense>
          <Planet />
        </Suspense>
      </Canvas>
    </Wrapper>
  );
}

const Wrapper = styled.div`
  position: relative;
  background: inherit;
  height: 100vh;
  width: 100vw;
`;
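In case it helps narrow things down, here is a minimal sketch (not something I currently have in the app) of the kind of logging I could add to see whether the WebGL context is even created on the phone; onCreated hands back the underlying renderer:
// Minimal sketch: onCreated receives the react-three-fiber state, including the THREE.WebGLRenderer.
<Canvas
  className="canvas"
  onCreated={({ gl }) => {
    console.log("WebGL renderer created");
    // fires if the browser drops the WebGL context later on
    gl.domElement.addEventListener("webglcontextlost", () => {
      console.log("WebGL context lost");
    });
  }}
>
  {/* existing scene goes here */}
</Canvas>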
Basically, I have some code that takes an image and draws it on the canvas. However, when I use an image link instead of an imported image, the app breaks and the page turns white, with an error in the console saying "process is not defined".
This is the code with the image imported directly:
import { useState, useEffect, useRef, useCallback } from 'react'
import image from '../images/leaf-spot-disease.jpg'

const Testing = () => {
  const [imageLoaded, setImageLoaded] = useState(false);

  const handleImageLoad = useCallback(() => {
    setImageLoaded(true);
  }, []);

  const canvasRef = useRef(null)
  const imageRef = useRef(null)

  useEffect(() => {
    if (imageLoaded) {
      const image = imageRef.current
      const canvas = canvasRef.current
      canvas.width = 100
      canvas.height = 100
      const context = canvas.getContext('2d')
      context.drawImage(image, 0, 0, canvas.width, canvas.height)
      console.log(context.getImageData(0, 0, canvas.width, canvas.height).data.length)
    }
  }, [imageLoaded])

  return (
    <>
      <img src={image} alt="" className={styles.theImage} ref={imageRef} onLoad={handleImageLoad} />
      <canvas className={styles.canvasCont} ref={canvasRef}>
      </canvas>
    </>
  )
}
It works perfectly the way shown above, but the moment I use a link instead of importing the image, like in the example below, the app breaks:
const image = "https://storage.googleapis.com/object-cut-images/54062ac7-59dd-4684-baf4-840e6c5a2c3c.png"
With the imported image it works perfectly; with the URL above nothing gets drawn on the canvas, and the page turns completely white with that "process is not defined" error.
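A separate thing I expect to run into with a remote URL (unrelated to the "process is not defined" error, and assuming the image host actually sends CORS headers): calling getImageData after drawing a cross-origin image taints the canvas unless the img element opts into CORS, roughly like this sketch:
{/* Rough sketch: request the image with CORS so getImageData is not blocked by a tainted canvas */}
<img
  src={image}
  crossOrigin="anonymous"
  alt=""
  className={styles.theImage}
  ref={imageRef}
  onLoad={handleImageLoad}
/>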
Hello everyone, I am creating my own website using React and I want to include a 3D object in it.
To do that I am using three.js with a WebGL renderer. It usually works fine, but SOMETIMES my 3D object does not render and I get a black frame instead (see the pictures). The bug only happens occasionally and I can't work out what triggers it. It only occurs on Firefox; Chrome works fine.
3d object bug
3d object working
I know how to work around it on my own computer (I have to enable graphics performance in Firefox's config panel), yet it sometimes worked perfectly fine without that setting enabled. My fear is that if I deploy my website, this bug will hit other users.
(Edit: after some research this is EXACTLY the issue https://bugzilla.mozilla.org/show_bug.cgi?id=1712486 but I don't get how to solve it.)
I am pretty sure this is due to the WebGL renderer, which sometimes seems to have trouble loading textures with my graphics card... not sure why.
Can anyone help me find a fix (for every user, not only my computer), or is there a way to detect when this rendering bug happens so that I can display an image instead?
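The sort of check I have in mind is something like this rough sketch (not code I currently have): test whether a WebGL context can be created at all, and listen for the context being lost so I could swap in a fallback image:
// Rough sketch: detect missing WebGL support and watch for a lost context.
function canUseWebGL() {
  const canvas = document.createElement('canvas');
  // getContext returns null when WebGL is unavailable or blocked
  return !!(canvas.getContext('webgl') || canvas.getContext('experimental-webgl'));
}

function watchContextLoss(renderer, onLost) {
  // renderer.domElement is the canvas three.js draws into
  renderer.domElement.addEventListener('webglcontextlost', (event) => {
    event.preventDefault();
    onLost(); // e.g. flip some state to show a static image instead
  });
}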
Here is the code that generates my object:
import React from 'react';
import './object3d.css';
import { useState, useEffect, useRef } from "react";
import * as THREE from "three";
import { OrbitControls } from 'three/addons/controls/OrbitControls.js';
import { GLTFLoader } from 'three/addons/loaders/GLTFLoader.js';
import { RGBELoader } from 'three/addons/loaders/RGBELoader.js';

var sat;

function loadGLTFModel(scene, glbPath) {
  return new Promise((resolve, reject) => {
    const loader = new RGBELoader();
    loader.load('royal_esplanade_1k.hdr', (texture) => {
      texture.mapping = THREE.EquirectangularReflectionMapping;
      scene.environment = texture;
      const loader2 = new GLTFLoader();
      loader2.load(glbPath, function (gltf) {
        sat = gltf.scene;
        scene.add(sat);
        resolve(sat);
      });
    },
    undefined,
    function (error) {
      console.log(error);
      reject(error);
    });
  });
}

const Object = ({ path_file }) => {
  const refContainer = useRef();
  const [loading, setLoading] = useState(true);
  const [renderer, setRenderer] = useState();

  useEffect(() => {
    const { current: container } = refContainer;
    if (container && !renderer) {
      const scW = container.clientWidth;
      const scH = container.clientHeight;
      //console.log(scW);
      const renderer = new THREE.WebGLRenderer({
        antialias: true,
        alpha: true,
        premultipliedAlpha: false
      });
      renderer.setPixelRatio(window.devicePixelRatio);
      renderer.setSize(scW, scH);
      renderer.outputEncoding = THREE.sRGBEncoding;
      container.appendChild(renderer.domElement);
      setRenderer(renderer);

      const scene = new THREE.Scene();
      const scale = 25;
      const camera = new THREE.OrthographicCamera(
        -scale,
        scale,
        scale,
        -scale,
        0.01,
        50000
      );
      camera.position.y = 25;
      camera.position.x = 2;
      camera.position.z = 0;
      camera.rotation.y = 45 * Math.PI / 180;
      camera.rotation.z = 45 * Math.PI / 180;

      const ambientLight = new THREE.AmbientLight(0xcccccc, 1);
      scene.add(ambientLight);

      loadGLTFModel(scene, path_file, {}).then((sat) => {
        animate(sat);
        setLoading(false);
      });

      let req = null;
      const animate = () => {
        req = requestAnimationFrame(animate);
        sat.rotation.y += 0.01;
        renderer.render(scene, camera);
      };

      return () => {
        cancelAnimationFrame(req);
        renderer.dispose();
      };
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  return (
    <div className='container_3d'
      ref={refContainer}
    >
      {loading && (
        <span style={{ left: "50%", top: "50%" }}>
          Loading...
        </span>
      )}
    </div>
  );
};

const Object3dsat = ({ path_file }) => {
  return (
    <Object path_file={path_file} />
  )
}

export default Object3dsat
I noticed that when the bug occurs I get multiple lines like this in the failure logs:
"RenderDXGITextureHost Failed to open shared texture, hr=0x80070057"
Moreover, my config:
GPU 1: AMD Radeon RX Vega 10
GPU 2: Nvidia GeForce RTX 2060
CPU: AMD Ryzen 7 3750H with Radeon Vega Mobile Gfx
and I am using Firefox version 105.0.1.
(I also noticed that the GPU used for web rendering is the AMD Radeon GPU, but it was working even with this GPU.)
I hope you can help me. Thank you for your help!
I have an animated background in my React app that uses canvas and requestAnimationFrame, and I am trying to have its moving particles interact with the mouse pointer. Every solution I try, however, ranges from significantly slowing down the animation the moment I start moving the mouse to pretty much crashing the browser.
The structure of the animated background component goes something like this:
<BackgroundParentComponent> // Gets mounted only once
  <Canvas> // Reusable canvas, updates every frame
    // v - dozens of moving particles (just canvas drawing logic, no JSX return).
    // Each particle calculates its next-frame state every frame.
    {particlesArray.map(particle => <Particle />)}
  </Canvas>
</BackgroundParentComponent>
I have tried moving the event listeners to every level of the component tree, wrapping them in a custom hook that holds the value in a useRef to avoid re-renders, and throttling the mousemove listener so that it does not fire as often... nothing seems to help. This is my custom hook right now:
const useMousePosition = () => {
  const mousePosition = useRef({ x: null, y: null });

  useEffect(() => {
    window.addEventListener('mousemove', throttle(200, (event) => {
      mousePosition.current = { x: event.x, y: event.y };
    }))
  });

  useEffect(() => {
    window.addEventListener('mouseout', throttle(500, () => {
      mousePosition.current = { x: null, y: null };
    }));
  });

  return mousePosition.current;
}

const throttle = (delay: number, fn: (...args: any[]) => void) => {
  let shouldWait = false;
  return (...args: any[]) => {
    if (shouldWait) return;
    fn(...args);
    shouldWait = true;
    setTimeout(() => shouldWait = false, delay);
    return;
    // return fn(...args);
  }
}
For reference, my canvas component responsible for the animation looks roughly like this:
const AnimatedCanvas = ({ children, dimensions }) => {
  const canvasRef = useRef(null);
  const [renderingContext, setRenderingContext] = useState(null);
  const [frameCount, setFrameCount] = useState(0);

  // Initialize Canvas
  useEffect(() => {
    if (!canvasRef.current) return;
    const canvas = canvasRef.current;
    canvas.width = dimensions.width;
    canvas.height = dimensions.height;
    const canvas2DContext = canvas.getContext('2d');
    setRenderingContext(canvas2DContext);
  }, [dimensions]);

  // make component re-render every frame
  useEffect(() => {
    const frameId = requestAnimationFrame(() => {
      setFrameCount(frameCount + 1);
    });
    return () => { cancelAnimationFrame(frameId) };
  }, [frameCount, setFrameCount]);

  // clear canvas with each render to erase previous frame
  if (renderingContext !== null) {
    renderingContext.clearRect(0, 0, dimensions.width, dimensions.height);
  }

  return (
    <Canvas2dContext.Provider value={renderingContext}>
      <FrameContext.Provider value={frameCount}>
        <canvas ref={canvasRef}>
          {children}
        </canvas>
      </FrameContext.Provider>
    </Canvas2dContext.Provider>
  );
};
The mapped <Particle/> components are fed to the above canvas component as children:
const Particle = (props) => {
  const canvas = useContext(Canvas2dContext);
  useContext(FrameContext); // only here to force the particle to re-render each frame after the canvas is cleared

  // lots of state-calculating logic here

  // This is where I need to know the mouse position every (or every few) frames
  // in order to modify each particle's behaviour when it is near the pointer.

  canvas.beginPath();
  // canvas drawing logic

  return null;
}
Just to clarify, the animation is always moving regardless of whether the mouse is idle; I've seen other solutions that only work for animations triggered exclusively by mouse movement.
Is there any performant way of accessing the mouse position each frame in the mapped Particle components without choking the browser? Is there a better way of handling this kind of interactive animation with React?
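What I am imagining (a rough sketch with made-up names, not code I have yet) is a single mousemove listener that writes into a ref, shared through context, so each particle can read the latest pointer position inside its frame logic without triggering any React state updates:
import { createContext, useContext, useEffect, useRef } from 'react';

// Rough sketch: one listener writes to a plain ref; particles read it every frame.
const MousePositionContext = createContext(null);

const MouseProvider = ({ children }) => {
  const mouseRef = useRef({ x: null, y: null });

  useEffect(() => {
    const onMove = (event) => {
      mouseRef.current = { x: event.clientX, y: event.clientY };
    };
    window.addEventListener('mousemove', onMove);
    return () => window.removeEventListener('mousemove', onMove);
  }, []);

  return (
    <MousePositionContext.Provider value={mouseRef}>
      {children}
    </MousePositionContext.Provider>
  );
};

// Inside a Particle: reading the ref does not cause a re-render.
// const mouseRef = useContext(MousePositionContext);
// const { x, y } = mouseRef.current;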
I'm currently working on a pet web project with React, using a container that's 16:9 and scales with the current viewport. The idea is based on a pen I created with vanilla JS (https://codepen.io/AllBitsEqual/pen/PgMrgm), and that version is already working like a charm.
function adjustScreen() {...}
adjustScreen()
[...]
const resizeHandler = function () { adjustScreen() }
window.addEventListener('resize', helpers.debounce(resizeHandler, 250, this))
I've now written a script that sits in my App.jsx, (un)binds itself via useEffect, and calculates the current size of the viewport to adjust the container whenever the viewport changes (debounced for performance). I'm also using media queries to adjust the size and font size of elements in the container, which works OK but isn't much fun to work with.
I want to expand on this idea and change the font size of the <html> element in the same function that calculates the current container size, so that I can use rem to scale font sizes and other elements based on my calculated root font size.
Is there a valid and future-proof way of changing the font-size style of my <html> tag via React, either via a style tag or the style attribute?
For now I've resorted to using document.documentElement and style.fontSize to achieve what I wanted, but I'm not 100% sure this is the best solution. I'll see if I can find or get a better solution before I accept my own answer as the best one...
I'm using useState for the game dimensions, and within the useEffect I attach the listener to resize events, which I debounce a bit for performance reasons.
const App = () => {
  const game_outerDOMNode = useRef(null)
  const rootElement = document.documentElement

  function getWindowDimensions() {
    const { innerWidth: width, innerHeight: height } = window
    return {
      width,
      height,
    }
  }

  const [gameDimensions, setGameDimensions] = useState({ width: 0, height: 0 })

  useEffect(() => {
    function adjustScreen() {
      const game_outer = game_outerDOMNode.current
      const ratioHeight = 1 / 1.78
      const ratioWidth = 1.78
      const { width: vw, height: vh } = getWindowDimensions()
      const width = (vw > vh)
        ? (vh * ratioWidth <= vw)
          ? (vh * ratioWidth)
          : (vw)
        : (vh * ratioHeight <= vw)
          ? (vh * ratioHeight)
          : (vw)
      const height = (vw > vh)
        ? (vw * ratioHeight <= vh)
          ? (vw * ratioHeight)
          : (vh)
        : (vw * ratioWidth <= vh)
          ? (vw * ratioWidth)
          : (vh)
      const longestSide = height > width ? height : width
      const fontSize = longestSide / 37.5 // my calculated global base size
      setGameDimensions({ width, height })
      rootElement.style.fontSize = `${fontSize}px`
    }

    const debouncedResizeHandler = debounce(200, () => {
      adjustScreen()
    })

    adjustScreen()
    window.addEventListener('resize', debouncedResizeHandler)
    return () => window.removeEventListener('resize', debouncedResizeHandler)
  }, [rootElement.style.fontSize])

  const { width: gameWidth, height: gameHeight } = gameDimensions

  return (
    <div
      className="game__outer"
      ref={game_outerDOMNode}
      style={{ width: gameWidth, height: gameHeight }}
    >
      <div className="game__inner">
        {my actual game code}
      </div>
    </div>
  )
}
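For completeness, the point of setting the root font size this way is that anything inside the container can then be sized in rem; a purely illustrative element (the name and class are made up) would look like:
// Illustrative only: rem units now scale with the calculated game size.
const ScoreLabel = () => (
  <div className="game__score" style={{ fontSize: '1.5rem', padding: '0.5rem' }}>
    Score
  </div>
)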