How to properly set up Azure Media Player in React.js?

I'm currently integrating a React component with Azure Media Player. I followed the documentation: first I added the required CDN URLs to the index.html file, then I added the sample code into the App. The problem is that it throws the error: 'amp' is not defined no-undef
videoPlayer.js
class videoPlayer extends Component {
  render() {
    const myOptions = {
      "nativeControlsForTouch": false,
      controls: true,
      autoplay: true,
      width: "640",
      height: "400",
    }
    const myPlayer = amp("azuremediaplayer", myOptions);
    myPlayer.src([
      {
        "src": "https://devpflmedia-uswe.streaming.media.azure.net//d5f1a8b6-0d52-4e62-addc-aee7bafe408d/097cee43-6822-49bd-84f5-9f6efb05.ism/manifest",
        "type": "application/vnd.ms-sstr+xml"
      }
    ]);
    return (
      <video id="azuremediaplayer" class="azuremediaplayer amp-default-skin amp-big-play-centered" tabindex="0"></video>
    )
  }
}
How can I fix this?

When I use amp this way, the on('progress') callback works for me. Good luck!
import * as React from "react";
import loader from "./loader";
import { RefObject } from "react";
import './videoPlayer.css';

const DEFAULT_SKIN = "amp-flush";
const DEFAULT_RATIO = [16, 9];
const DEFAULT_OPTIONS = {
  controls: true,
  autoplay: true,
  muted: true,
  logo: {
    enabled: false
  },
};

declare const window: any;

export interface IVideoPlayerProps {
  readonly src: { src: string; }[];
  readonly options: any;
  readonly skin: string;
  readonly className: string;
  readonly adaptRatio: Array<number>;
}

export default class VideoPlayer extends React.PureComponent<IVideoPlayerProps, {}> {
  public static defaultProps = {
    skin: DEFAULT_SKIN,
    className: "",
    adaptRatio: DEFAULT_RATIO,
    options: DEFAULT_OPTIONS,
  }

  videoNode: RefObject<any>;
  player: any;
  initialization: any;

  constructor(props: IVideoPlayerProps) {
    super(props);
    this.videoNode = React.createRef();
  }

  componentWillUnmount() {
    this._destroyPlayer();
  }

  componentDidMount() {
    const { skin } = this.props;
    this.initialization = loader(skin).then(() => {
      this._createPlayer();
      this._setVideo();
    });
  }

  componentDidUpdate(prevProps: IVideoPlayerProps) {
    if (prevProps.src !== this.props.src) {
      this.initialization.then(() => this._setVideo());
    }
  }

  _destroyPlayer() {
    this.player && this.player.dispose();
  }

  _setVideo() {
    const { src } = this.props;
    this.player.src(src);
  }

  _createPlayer() {
    this.player = window.amp(this.videoNode.current, this.props.options);
    this.player.on("progress", () => alert('on progress called'));
  }

  render(): JSX.Element {
    return (
      <div>
        <video ref={this.videoNode} />
      </div>
    );
  }
}
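Usage is then just a matter of passing the source array as the src prop. A minimal sketch (the URL is the one from the question; only src is required by the prop type above):

// Minimal usage sketch of the component above.
<VideoPlayer
  src={[
    { src: "https://devpflmedia-uswe.streaming.media.azure.net//d5f1a8b6-0d52-4e62-addc-aee7bafe408d/097cee43-6822-49bd-84f5-9f6efb05.ism/manifest" },
  ]}
/>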
Here is the loader function as well. I load the player this way because I may need to use it in a (possibly) offline environment.
export default (skin = 'amp-flush') => {
  return new Promise((resolve, _) => {
    if (document.querySelector('#amp-azure')) {
      // video player is already rendered
      return resolve()
    }
    let scriptTag = document.createElement('script')
    let linkTag = document.createElement('link')
    linkTag.rel = 'stylesheet'
    scriptTag.id = 'amp-azure'
    scriptTag.src = '//amp.azure.net/libs/amp/2.1.5/azuremediaplayer.min.js'
    linkTag.href = `//amp.azure.net/libs/amp/2.1.5/skins/${skin}/azuremediaplayer.min.css`
    document.body.appendChild(scriptTag)
    document.head.insertBefore(linkTag, document.head.firstChild)
    scriptTag.onload = () => resolve({ skin: skin })
  })
}
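If you truly need to run without the CDN, one option (my assumption, not part of the original answer) is to point the same loader at a locally bundled copy of the player files:

// Hypothetical offline variant: the /vendor/amp/ paths are illustrative and assume
// you ship the Azure Media Player files with your own build output.
export default (skin = 'amp-flush') => {
  return new Promise((resolve) => {
    if (document.querySelector('#amp-azure')) {
      return resolve()
    }
    const scriptTag = document.createElement('script')
    const linkTag = document.createElement('link')
    linkTag.rel = 'stylesheet'
    scriptTag.id = 'amp-azure'
    scriptTag.src = '/vendor/amp/azuremediaplayer.min.js'               // local copy (assumed path)
    linkTag.href = `/vendor/amp/skins/${skin}/azuremediaplayer.min.css` // local copy (assumed path)
    document.body.appendChild(scriptTag)
    document.head.insertBefore(linkTag, document.head.firstChild)
    scriptTag.onload = () => resolve({ skin })
  })
}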

Related

How to cluster polygons with react-leaflet?

I'm looking for a way to cluster polygons using react-leaflet v4 and react-leaflet-markercluster. I have not found any up-to-date examples of how I can achieve this, so I'm hoping I might get some help here.
Any example code to get me started would be a great help!
This will probably not solve your problem directly, but it hopefully shows that using markercluster is rather simple. The only thing you need is a createMarkerCluster function.
clusterProps has a field for polygonOptions:
/*
* Options to pass when creating the L.Polygon(points, options) to show the bounds of a cluster.
* Defaults to empty
*/
polygonOptions?: PolylineOptions | undefined;
Since you are now using a plain Leaflet plugin, there is more information available online; these two links might help you configure polygonOptions:
How to make MarkerClusterGroup cluster polygons
https://gis.stackexchange.com/questions/197882/is-it-possible-to-cluster-polygons-in-leaflet
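For instance, polygonOptions can be added to the options object passed to the cluster group. A small sketch (the style values are placeholders, and showCoverageOnHover has to stay true for the coverage polygon to appear on hover):

// Illustrative only: polygonOptions styles the L.Polygon drawn for a cluster's bounds.
const clusterProps = {
  showCoverageOnHover: true,
  polygonOptions: {
    color: "#3388ff",
    weight: 2,
    fillOpacity: 0.2,
  },
};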
Below is my general code to make marker clusters work with React:
import { createPathComponent } from "@react-leaflet/core";
import L, { LeafletMouseEventHandlerFn } from "leaflet";
import "leaflet.markercluster";
import { ReactElement, useMemo } from "react";
import { Building, BuildingStore, Circle } from "tabler-icons-react";
import { createLeafletIcon } from "./utils";
import styles from "./LeafletMarkerCluster.module.css";
import "leaflet.markercluster/dist/MarkerCluster.css";

type ClusterType = { [key in string]: any };

type ClusterEvents = {
  onClick?: LeafletMouseEventHandlerFn;
  onDblClick?: LeafletMouseEventHandlerFn;
  onMouseDown?: LeafletMouseEventHandlerFn;
  onMouseUp?: LeafletMouseEventHandlerFn;
  onMouseOver?: LeafletMouseEventHandlerFn;
  onMouseOut?: LeafletMouseEventHandlerFn;
  onContextMenu?: LeafletMouseEventHandlerFn;
};

// Leaflet is badly typed; if more props are needed, add them to the interface.
// Look in this file to see what is available:
// node_modules/@types/leaflet.markercluster/index.d.ts
// MarkerClusterGroupOptions
export interface LeafletMarkerClusterProps {
  spiderfyOnMaxZoom?: boolean;
  children: React.ReactNode;
  size?: number;
  icon?: ReactElement;
}

const createMarkerCluster = (
  {
    children: _c,
    size = 30,
    icon = <Circle size={size} />,
    ...props
  }: LeafletMarkerClusterProps,
  context: any
) => {
  const markerIcons = {
    default: <Circle size={size} />,
    property: <Building size={size} />,
    business: <BuildingStore size={size} />,
  } as { [key in string]: ReactElement };

  const clusterProps: ClusterType = {
    iconCreateFunction: (cluster: any) => {
      const markers = cluster.getAllChildMarkers();
      const types = markers.reduce(
        (
          acc: { [x: string]: number },
          marker: {
            key: string;
            options: { icon: { options: { className: string } } };
          }
        ) => {
          const key = marker?.key || "";
          const type =
            marker.options.icon.options.className || key.split("-")[0];
          const increment = (key.split("-")[1] as unknown as number) || 1;
          if (type in markerIcons) {
            return { ...acc, [type]: (acc[type] || 0) + increment };
          }
          return { ...acc, default: (acc.default || 0) + increment };
        },
        {}
      ) as { [key in string]: number };

      const typeIcons = Object.entries(types).map(([type, count], index) => {
        if (count > 0) {
          const typeIcon = markerIcons[type];
          return (
            <div key={`${type}-${count}`} style={{ display: "flex" }}>
              <span>{typeIcon}</span>
              <span style={{ width: "max-content" }}>{count}</span>
            </div>
          );
        }
      });

      const iconWidth = typeIcons.length * size;
      return createLeafletIcon(
        <div style={{ display: "flex" }} className={"cluster-marker"}>
          {typeIcons}
        </div>,
        iconWidth,
        undefined,
        iconWidth,
        30
      );
    },
    showCoverageOnHover: false,
    animate: true,
    animateAddingMarkers: false,
    removeOutsideVisibleBounds: false,
  };

  const clusterEvents: ClusterType = {};
  // Splitting props and events into different objects
  Object.entries(props).forEach(([propName, prop]) =>
    propName.startsWith("on")
      ? (clusterEvents[propName] = prop)
      : (clusterProps[propName] = prop)
  );

  const instance = new (L as any).MarkerClusterGroup(clusterProps);

  instance.on("spiderfied", (e: any) => {
    e.cluster._icon?.classList.add(styles.spiderfied);
  });
  instance.on("unspiderfied", (e: any) => {
    e.cluster._icon?.classList.remove(styles.spiderfied);
  });

  // This is not used at the moment, but could be used to add events to the cluster.
  // Initializing event listeners
  Object.entries(clusterEvents).forEach(([eventAsProp, callback]) => {
    const clusterEvent = `cluster${eventAsProp.substring(2).toLowerCase()}`;
    instance.on(clusterEvent, callback);
  });

  return {
    instance,
    context: {
      ...context,
      layerContainer: instance,
    },
  };
};

const updateMarkerCluster = (instance: any, props: any, prevProps: any) => {};

const LeafletMarkerCluster = createPathComponent(
  createMarkerCluster,
  updateMarkerCluster
);

const LeafletMarkerClusterWrapper: React.FC<LeafletMarkerClusterProps> = ({
  children,
  ...props
}) => {
  const markerCluster = useMemo(() => {
    return <LeafletMarkerCluster>{children}</LeafletMarkerCluster>;
  }, [children]);

  return <>{markerCluster}</>;
};

export default LeafletMarkerClusterWrapper;
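A usage sketch of the wrapper inside a map (the tile URL and marker positions are placeholders; the surrounding components are the usual react-leaflet v4 ones):

// Illustrative usage only.
import { MapContainer, TileLayer, Marker } from "react-leaflet";
import LeafletMarkerClusterWrapper from "./LeafletMarkerCluster";

const Map = () => (
  <MapContainer center={[51.5, -0.09]} zoom={13} style={{ height: 480 }}>
    <TileLayer url="https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png" />
    <LeafletMarkerClusterWrapper>
      <Marker position={[51.5, -0.09]} />
      <Marker position={[51.51, -0.1]} />
    </LeafletMarkerClusterWrapper>
  </MapContainer>
);

export default Map;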
Below is my function to create a marker icon from React elements:
import { divIcon } from "leaflet";
import { ReactElement } from "react";
import { renderToString } from "react-dom/server";

export const createLeafletIcon = (
  icon: ReactElement,
  size: number,
  className?: string,
  width: number = size,
  height: number = size
) => {
  return divIcon({
    html: renderToString(icon),
    iconSize: [width, height],
    iconAnchor: [width / 2, height],
    popupAnchor: [0, -height],
    className: className ? className : "",
  });
};
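For a single marker you can then do something like this (the icon component and position are placeholders):

// Illustrative usage only.
import { Marker } from "react-leaflet";
import { Building } from "tabler-icons-react";
import { createLeafletIcon } from "./utils";

<Marker position={[51.5, -0.09]} icon={createLeafletIcon(<Building size={30} />, 30)} />;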

Azure Media Player events not working in React component

I tried to create a React component to play Azure Media Services content. It plays fine, but it does not capture events. This is the code of my component:
import React, { useEffect, useRef } from 'react';
import { Helmet } from 'react-helmet';

export interface AzureMediaPlayerProps {
  videoUrl: string;
}

const AzureMediaPlayer = (props: AzureMediaPlayerProps): JSX.Element => {
  const { videoUrl } = props;
  const videoRef = useRef<HTMLVideoElement>(null);

  const clearListener = (): void => {
    videoRef.current?.removeEventListener('load', (): void => { });
    videoRef.current?.removeEventListener('progress', (): void => { });
  };

  const addListener = (): void => {
    videoRef.current?.addEventListener('load', (ev): void => { console.log(ev); });
    videoRef.current?.addEventListener('progress', (ev): void => { console.log(ev); });
  };

  useEffect((): void => {
    addListener();
    return clearListener();
  }, [videoRef]);

  return (
    <>
      <Helmet>
        <link href="//amp.azure.net/libs/amp/2.3.7/skins/amp-default/azuremediaplayer.min.css" rel="stylesheet" />
        <script src="//amp.azure.net/libs/amp/2.3.7/azuremediaplayer.min.js" />
      </Helmet>
      <video
        id="vid1"
        className="azuremediaplayer amp-default-skin"
        autoPlay
        controls
        width="100%"
        data-setup='{"nativeControlsForTouch": false}'
        ref={videoRef}
      >
        <source src={videoUrl} type="application/vnd.ms-sstr+xml" />
        <p className="amp-no-js">
          To view this video please enable JavaScript, and consider upgrading to a web browser that supports HTML5 video
        </p>
      </video>
    </>
  );
};

AzureMediaPlayer.displayName = 'AzureMediaPlayer';

export default AzureMediaPlayer;
I also tried:
<video
  id="vid1"
  className="azuremediaplayer amp-default-skin"
  autoPlay
  controls
  width="100%"
  data-setup='{"nativeControlsForTouch": false}'
  ref={videoRef}
  onProgress={(ev): void => { console.log(ev); }}
>
  <source src={videoUrl} type="application/vnd.ms-sstr+xml" />
  <p className="amp-no-js">
    To view this video please enable JavaScript, and consider upgrading to a web browser that supports HTML5 video
  </p>
</video>
But I was not able to get the events of the player. Can anybody please help me? How can I get the player events in React?
Thanks a lot.
I recommend that you don't try to use the AMP player in a React component directly. It was not written to be componentized.
Try looking at a more modern player like Shaka or HLS.js that may already have a React wrapper for it.
For example -
https://www.npmjs.com/package/shaka-player-react
https://github.com/matvp91/shaka-player-react
Or a more commercial player that supports React like Theo Player or Bitmovin
https://docs.theoplayer.com/getting-started/02-frameworks/02-react/00-getting-started.md
https://github.com/bitmovin/bitmovin-player-web-samples
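For example, the shaka-player-react wrapper boils down to roughly this (a sketch based on its README; the stream URL is a placeholder):

// Rough sketch per the shaka-player-react README; the URL is a placeholder.
import ShakaPlayer from 'shaka-player-react';
import 'shaka-player/dist/controls.css';

const Player = (): JSX.Element => (
  <ShakaPlayer autoPlay src="https://example.com/video.ism/manifest(format=mpd-time-csf)" />
);

export default Player;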
This is the final version of VideoJsPlayer.tsx:
import { Box } from 'grommet';
import React, { useEffect, useRef, useState } from 'react';
import videojs, { VideoJsPlayerOptions } from 'video.js';
import 'video.js/dist/video-js.css';

export interface VideoJsPlayerProps {
  videoUrl?: string;
  transcriptionUrl?: string;
  type?: string;
  saveProgress: (minute: number) => Promise<void>;
  completeProgress: (progress: string) => Promise<void>;
  startingTime?: number;
}

const VideoJsPlayer = (props: VideoJsPlayerProps): JSX.Element => {
  const {
    videoUrl, transcriptionUrl, type, saveProgress, completeProgress, startingTime = 0,
  } = props;
  let player;
  const videoRef = useRef<HTMLVideoElement>(null);
  const [minutes, setMinutes] = useState<number>(0);

  const getMin = (sec: number): number => Math.floor(sec / 60);

  const updateMinutes = (time: string): void => {
    const secs = parseInt(time, 10);
    const min = getMin(secs);
    if (min > minutes) {
      setMinutes(min);
    }
  };

  const videoJsOptions = {
    autoplay: true,
    controls: true,
    responsive: true,
    fill: true,
    nativeControlsForTouch: false,
    playbackRates: [0.5, 1, 1.5, 2],
    sources: [
      {
        src: videoUrl as string,
        type: type as string,
      },
    ],
  } as VideoJsPlayerOptions;

  useEffect((): any => {
    const videoElement = videoRef.current;
    // mount
    if (videoElement) {
      // @ts-ignore
      player = videojs(
        videoElement,
        videoJsOptions,
        () => {
          player.addRemoteTextTrack({
            kind: 'captions',
            src: transcriptionUrl || '',
            label: 'English',
            language: 'en',
            srcLang: 'en',
            default: true,
          }, false);
          player.on('progress', (): void => {
            updateMinutes(player.currentTime());
          });
          player.on('ended', (): void => {
            completeProgress(player.currentTime());
          });
        },
      );
      // set starting time
      player.currentTime(startingTime);
    }
    // unmount
    return (): void => {
      if (player) {
        player.dispose();
      }
    };
  }, []);

  useEffect((): void => {
    if (minutes > 0) {
      saveProgress(minutes);
    }
  }, [minutes]);

  return (
    <Box
      fill="horizontal"
      align="center"
      background="yellow"
      style={{
        height: 700,
      }}
    >
      <video
        className="video-js vjs-big-play-centered"
        ref={videoRef}
      />
    </Box>
  );
};

VideoJsPlayer.displayName = 'VideoJsPlayer';

export default VideoJsPlayer;
The only detail is the URL to play: it must end with /manifest(format=mpd-time-csf) (the MPEG-DASH manifest format).
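In other words, something like this (the streaming host, asset id, and MIME type are placeholders I am assuming, not values from the original answer):

// Illustrative only: host and asset id are placeholders.
const videoUrl = 'https://myaccount-usea.streaming.media.azure.net/00000000-0000-0000-0000-000000000000/video.ism/manifest(format=mpd-time-csf)';
const type = 'application/dash+xml';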

Gutenberg - Call google map render function in save after DOM has been rendered

I have a bit of a dilemma.
In the save function I need to call a function renderMap that renders a dynamic Google map. However, I need to call it after the DOM has been rendered, and I can't seem to find a solution for this. I realised you can't use a class component (and therefore the React lifecycle) for the save function, so I'm stuck. It does work for the edit function, though. What are the possibilities?
import { __ } from '@wordpress/i18n';
import { registerBlockType } from '@wordpress/blocks';
import { PluginDocumentSettingPanel } from '@wordpress/edit-post';
import { Component } from '@wordpress/element';

const renderMap = function() {
  let googleMap = document.getElementById('google-map')
  let map
  map = new google.maps.Map(googleMap, {
    center: { lat: 37.79406, lng: -122.4002 },
    zoom: 14,
    disableDefaultUI: true,
  })
}

registerBlockType( 'splash-blocks/google-maps', {
  title: __('Google maps locations', 'google-maps'),
  icon: 'megaphone',
  category: 'common',
  keyword: [
    __( 'Display Google maps locations' ),
  ],
  atrributes: {
    markers: {
      type: 'object'
    },
    address: {
      type: 'string',
      default: 'xxxxxxxxx',
    },
    api_key: {
      type: 'string',
      default: 'xxxxxxxxx',
    }
  },
  edit: class extends Component {
    constructor(props) {
      super(props)
    }
    componentDidMount() {
      renderMap()
    }
    render() {
      const { attributes, setAttributes } = this.props
      return (
        <div id='google-map'>
        </div>
      )
    }
  },
  save: props => {
    const {
      className,
      attributes: { mapHTML }
    } = props;
    renderMap()
    return (
      <div id='google-map'>
      </div>
    )
  }
})
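One possibility (my suggestion, not part of the original post): since save() only produces static serialized markup and React lifecycle hooks never run there, keep save() returning just the <div id='google-map'></div> placeholder and initialize the map from a separate script loaded on the front end. A minimal sketch, assuming a hypothetical frontend.js you enqueue for the block:

// frontend.js (hypothetical file enqueued on the front end for this block).
// Runs after the DOM is parsed and applies the same renderMap logic to the saved markup.
document.addEventListener('DOMContentLoaded', () => {
  const googleMap = document.getElementById('google-map')
  if (!googleMap || typeof google === 'undefined') return
  new google.maps.Map(googleMap, {
    center: { lat: 37.79406, lng: -122.4002 },
    zoom: 14,
    disableDefaultUI: true,
  })
})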

onImageLoad callback react js

I'm using react-image-gallery: https://www.npmjs.com/package/react-image-gallery
I'm trying to set a useState variable in onImageLoad, but it's not working.
The docs say to use a callback, and I tried using one, but I don't think I was doing it correctly for a functional component.
Could someone show me how to create the proper callback for the onImageLoad prop?
The docs say: onImageLoad: Function, callback(event)
import React, { useEffect, useState } from "react";
import ImageGallery from 'react-image-gallery';
import "react-image-gallery/styles/css/image-gallery.css";

const Job = (props) => {
  const { job } = props;
  const [image, setImage] = useState([]);
  const [showImages, setShowImages] = useState(false);

  useEffect(() => {
    async function onLoad() {
      try {
        const downloadedImage = await getImage(job.jobId);
        setImage(downloadedImage);
      } catch (e) {
        alert(e);
      }
    }
    onLoad();
  }, []);

  return (
    {showImages ? (
      <div style={{width: "95%"}}>
        <ImageGallery items={image} onImageLoad={() => setShowImages(true)}/>
      </div>
    ):(
      <div>
        <IonSpinner name="crescent" />
      </div>
    )}
  );
Example given on the package website:
import React from 'react';
import ReactDOM from 'react-dom';
import ImageGallery from '../src/ImageGallery';

const PREFIX_URL = 'https://raw.githubusercontent.com/xiaolin/react-image-gallery/master/static/';

class App extends React.Component {
  constructor() {
    super();
    this.state = {
      showIndex: false,
      showBullets: true,
      infinite: true,
      showThumbnails: true,
      showFullscreenButton: true,
      showGalleryFullscreenButton: true,
      showPlayButton: true,
      showGalleryPlayButton: true,
      showNav: true,
      isRTL: false,
      slideDuration: 450,
      slideInterval: 2000,
      slideOnThumbnailOver: false,
      thumbnailPosition: 'bottom',
      showVideo: {},
    };
    this.images = [
      {
        thumbnail: `${PREFIX_URL}4v.jpg`,
        original: `${PREFIX_URL}4v.jpg`,
        embedUrl: 'https://www.youtube.com/embed/4pSzhZ76GdM?autoplay=1&showinfo=0',
        description: 'Render custom slides within the gallery',
        renderItem: this._renderVideo.bind(this)
      },
      {
        original: `${PREFIX_URL}image_set_default.jpg`,
        thumbnail: `${PREFIX_URL}image_set_thumb.jpg`,
        imageSet: [
          {
            srcSet: `${PREFIX_URL}image_set_cropped.jpg`,
            media: '(max-width: 1280px)',
          },
          {
            srcSet: `${PREFIX_URL}image_set_default.jpg`,
            media: '(min-width: 1280px)',
          }
        ]
      },
      {
        original: `${PREFIX_URL}1.jpg`,
        thumbnail: `${PREFIX_URL}1t.jpg`,
        originalClass: 'featured-slide',
        thumbnailClass: 'featured-thumb',
        description: 'Custom class for slides & thumbnails'
      },
    ].concat(this._getStaticImages());
  }

  _onImageLoad(event) {
    console.debug('loaded image', event.target.src);
  }

  render() {
    return (
      <section className='app'>
        <ImageGallery
          ref={i => this._imageGallery = i}
          items={this.images}
          onImageLoad={this._onImageLoad}
        />
      </section>
    );
  }
}

ReactDOM.render(<App/>, document.getElementById('container'));
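Translated to a functional component, the same callback can be passed inline or as a named handler. A minimal sketch (the handler name and the items prop are illustrative, not from the package docs):

// Minimal functional-component sketch of wiring onImageLoad.
import React, { useState } from 'react';
import ImageGallery from 'react-image-gallery';
import "react-image-gallery/styles/css/image-gallery.css";

const Gallery = ({ items }) => {
  const [firstImageLoaded, setFirstImageLoaded] = useState(false);

  const handleImageLoad = (event) => {
    console.debug('loaded image', event.target.src);
    setFirstImageLoaded(true); // safe to update state from the callback
  };

  // The gallery stays mounted so the callback can actually fire.
  return (
    <div style={{ opacity: firstImageLoaded ? 1 : 0.3 }}>
      <ImageGallery items={items} onImageLoad={handleImageLoad} />
    </div>
  );
};

export default Gallery;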

How to display the graph after it is stabilized (vis.js)?

I am rendering a graph using the vis network implementation as shown here. Right now the graph takes some time to stabilize after being rendered, but I want the network to be stabilized before it is displayed. I tried using the stabilization options under the physics module, but I could not achieve it.
The following is my Graph Component.
import { default as React, Component } from 'react';
import isEqual from 'lodash/isEqual';
import differenceWith from 'lodash/differenceWith';
import vis from 'vis';
import uuid from 'uuid';
import PropTypes from 'prop-types';

class Graph extends Component {
  constructor(props) {
    super(props);
    const { identifier } = props;
    this.updateGraph = this.updateGraph.bind(this);
    this.state = {
      identifier: identifier !== undefined ? identifier : uuid.v4()
    };
  }

  componentDidMount() {
    this.edges = new vis.DataSet();
    this.edges.add(this.props.graph.edges);
    this.nodes = new vis.DataSet();
    this.nodes.add(this.props.graph.nodes);
    this.updateGraph();
  }

  shouldComponentUpdate(nextProps, nextState) {
    let nodesChange = !isEqual(this.nodes.get(), nextProps.graph.nodes);
    let edgesChange = !isEqual(this.edges.get(), nextProps.graph.edges);
    let optionsChange = !isEqual(this.props.options, nextProps.options);
    let eventsChange = !isEqual(this.props.events, nextProps.events);

    if (nodesChange) {
      const idIsEqual = (n1, n2) => n1.id === n2.id;
      const nodesRemoved = differenceWith(this.nodes.get(), nextProps.graph.nodes, idIsEqual);
      const nodesAdded = differenceWith(nextProps.graph.nodes, this.nodes.get(), idIsEqual);
      const nodesChanged = differenceWith(differenceWith(nextProps.graph.nodes, this.nodes.get(), isEqual), nodesAdded);
      this.patchNodes({ nodesRemoved, nodesAdded, nodesChanged });
    }
    if (edgesChange) {
      const edgesRemoved = differenceWith(this.edges.get(), nextProps.graph.edges, isEqual);
      const edgesAdded = differenceWith(nextProps.graph.edges, this.edges.get(), isEqual);
      this.patchEdges({ edgesRemoved, edgesAdded });
    }
    if (optionsChange) {
      this.Network.setOptions(nextProps.options);
    }
    if (eventsChange) {
      let events = this.props.events || {}
      for (let eventName of Object.keys(events))
        this.Network.off(eventName, events[eventName])
      events = nextProps.events || {}
      for (let eventName of Object.keys(events))
        this.Network.on(eventName, events[eventName])
    }
    return false;
  }

  componentDidUpdate() {
    this.updateGraph();
  }

  patchEdges({ edgesRemoved, edgesAdded }) {
    this.edges.remove(edgesRemoved);
    this.edges.add(edgesAdded);
  }

  patchNodes({ nodesRemoved, nodesAdded, nodesChanged }) {
    this.nodes.remove(nodesRemoved);
    this.nodes.add(nodesAdded);
    this.nodes.update(nodesChanged);
  }

  updateGraph() {
    let options = this.props.options;
    this.Network = new vis.Network(
      this.refs.nw,
      Object.assign(
        {},
        this.props.graph,
        {
          edges: this.edges,
          nodes: this.nodes
        }
      ),
      options
    );
    if (this.props.getNetwork) {
      this.props.getNetwork(this.Network)
    }
    // Add user provided events to network
    let events = this.props.events || {};
    for (let eventName of Object.keys(events)) {
      this.Network.on(eventName, events[eventName]);
    }
  }

  render() {
    return (<div ref="nw" style={{ width: '100%', height: '480px' }} />);
  }
}

Graph.defaultProps = {
  graph: {},
  style: { width: '100%', height: '480px' }
};

Graph.propTypes = {
  graph: PropTypes.object,
  style: PropTypes.object,
  getNetwork: PropTypes.func
};

export default Graph;
This is my options object
let options = {
  layout: {
    hierarchical: false
  },
  autoResize: false,
  edges: {
    smooth: false,
    color: '#000000',
    width: 0.5,
    arrows: {
      to: {
        enabled: true,
        scaleFactor: 0.5
      }
    }
  }
};
Any help would be greatly appreciated.
Thanks in advance!
You mention that you tried enabling stabilization without success. The following in the options should work:
physics: {
  stabilization: {
    enabled: true,
    iterations: 5000 // YMMV
  }
}
Is this different from what you tried?
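If you also want to keep the canvas hidden until stabilization finishes, vis fires a stabilizationIterationsDone event you can hook into. A rough sketch (the container handling is illustrative; network is the vis.Network instance, e.g. this.Network in the component above):

// Rough sketch: hide the container until vis reports stabilization is done.
container.style.visibility = 'hidden';
network.once('stabilizationIterationsDone', () => {
  network.fit(); // optionally fit the stabilized graph into view
  container.style.visibility = 'visible';
});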
