From 10b191ddc1bd81f8d3f5157aa3648fe934f1dd8f Mon Sep 17 00:00:00 2001
From: Silas <63477128+SilasStokes@users.noreply.github.com>
Date: Fri, 7 Jul 2023 10:12:29 -0700
Subject: [PATCH] Updated to React hooks
---
.gitignore | 2 ++
src/App.js | 60 ++++++++++++++++--------------------
src/AudioAnalyser.js | 69 ++++++++++++++++++++----------------------
src/AudioVisualiser.js | 62 +++++++++++++++++--------------------
4 files changed, 89 insertions(+), 104 deletions(-)
diff --git a/.gitignore b/.gitignore
index d30f40e..566dce1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,7 @@
# See https://help.github.com/ignore-files/ for more about ignoring files.
+.vscode
+
# dependencies
/node_modules
diff --git a/src/App.js b/src/App.js
index 1ebb8ff..e4ad5e0 100644
--- a/src/App.js
+++ b/src/App.js
@@ -1,48 +1,40 @@
-import React, { Component } from 'react';
-import AudioAnalyser from './AudioAnalyser';
+import React, { useState } from "react";
+import AudioAnalyser from "./AudioAnalyser";
-class App extends Component {
- constructor(props) {
- super(props);
- this.state = {
- audio: null
- };
- this.toggleMicrophone = this.toggleMicrophone.bind(this);
- }
+function App() {
+ const [audio, setAudio] = useState(null);
- async getMicrophone() {
+ const getMicrophone = async () => {
const audio = await navigator.mediaDevices.getUserMedia({
audio: true,
- video: false
+ video: false,
});
- this.setState({ audio });
- }
+ setAudio(audio);
+ };
- stopMicrophone() {
- this.state.audio.getTracks().forEach(track => track.stop());
- this.setState({ audio: null });
- }
+ const stopMicrophone = () => {
+ audio.getTracks().forEach((track) => track.stop());
+ setAudio(null);
+ };
- toggleMicrophone() {
- if (this.state.audio) {
- this.stopMicrophone();
+ const toggleMicrophone = () => {
+ if (audio) {
+ stopMicrophone();
} else {
- this.getMicrophone();
+ getMicrophone();
}
- }
+ };
-  render() {
-    return (
-      <div className="App">
-        <div className="controls">
-          <button onClick={this.toggleMicrophone}>
-            {this.state.audio ? 'Stop microphone' : 'Get microphone'}
-          </button>
-        </div>
-        {this.state.audio ? <AudioAnalyser audio={this.state.audio} /> : ''}
-      </div>
-    );
-  }
+  return (
+    <div className="App">
+      <div className="controls">
+        <button onClick={toggleMicrophone}>
+          {audio ? "Stop microphone" : "Get microphone"}
+        </button>
+      </div>
+      {audio ? <AudioAnalyser audio={audio} /> : ""}
+    </div>
+  );
}
export default App;
diff --git a/src/AudioAnalyser.js b/src/AudioAnalyser.js
index 1ba7457..0e0a30d 100644
--- a/src/AudioAnalyser.js
+++ b/src/AudioAnalyser.js
@@ -1,38 +1,35 @@
-import React, { Component } from 'react';
-import AudioVisualiser from './AudioVisualiser';
-
-class AudioAnalyser extends Component {
- constructor(props) {
- super(props);
- this.state = { audioData: new Uint8Array(0) };
- this.tick = this.tick.bind(this);
- }
-
- componentDidMount() {
- this.audioContext = new (window.AudioContext ||
- window.webkitAudioContext)();
- this.analyser = this.audioContext.createAnalyser();
- this.dataArray = new Uint8Array(this.analyser.frequencyBinCount);
- this.source = this.audioContext.createMediaStreamSource(this.props.audio);
- this.source.connect(this.analyser);
- this.rafId = requestAnimationFrame(this.tick);
- }
-
- tick() {
- this.analyser.getByteTimeDomainData(this.dataArray);
- this.setState({ audioData: this.dataArray });
- this.rafId = requestAnimationFrame(this.tick);
- }
-
- componentWillUnmount() {
- cancelAnimationFrame(this.rafId);
- this.analyser.disconnect();
- this.source.disconnect();
- }
-
-  render() {
-    return <AudioVisualiser audioData={this.state.audioData} />;
-  }
-}
+import React, { useState, useEffect } from "react";
+import AudioVisualiser from "./AudioVisualiser";
+
+const AudioAnalyser = ({ audio }) => {
+ const [audioData, setAudioData] = useState(new Uint8Array(0));
+
+ useEffect(() => {
+ const audioContext = new (window.AudioContext || window.webkitAudioContext)();
+ const analyser = audioContext.createAnalyser();
+ // const dataArray = new Uint8Array(analyser.frequencyBinCount);
+ const source = audioContext.createMediaStreamSource(audio);
+ source.connect(analyser);
+
+ const tick = () => {
+ const dataArray = new Uint8Array(analyser.frequencyBinCount);
+ analyser.getByteTimeDomainData(dataArray);
+ // setAudioData(new Uint8Array(dataArray));
+ setAudioData(dataArray);
+ rafId = requestAnimationFrame(tick);
+ };
+
+ let rafId = requestAnimationFrame(tick);
+
+ return () => {
+ cancelAnimationFrame(rafId);
+ analyser.disconnect();
+ source.disconnect();
+ };
+ }, [audio]);
+
+  return <AudioVisualiser audioData={audioData} />;
+};
+
export default AudioAnalyser;
diff --git a/src/AudioVisualiser.js b/src/AudioVisualiser.js
index 0235822..565f256 100644
--- a/src/AudioVisualiser.js
+++ b/src/AudioVisualiser.js
@@ -1,42 +1,36 @@
-import React, { Component } from 'react';
+import React, { useRef, useEffect } from 'react';
-class AudioVisualiser extends Component {
- constructor(props) {
- super(props);
- this.canvas = React.createRef();
- }
+const AudioVisualiser = ({ audioData }) => {
+ const canvasRef = useRef();
- componentDidUpdate() {
- this.draw();
- }
+ useEffect(() => {
+ const draw = () => {
+ const canvas = canvasRef.current;
+ const height = canvas.height;
+ const width = canvas.width;
+ const context = canvas.getContext('2d');
+ let x = 0;
+ const sliceWidth = (width * 1.0) / audioData.length;
- draw() {
- const { audioData } = this.props;
- const canvas = this.canvas.current;
- const height = canvas.height;
- const width = canvas.width;
- const context = canvas.getContext('2d');
- let x = 0;
- const sliceWidth = (width * 1.0) / audioData.length;
+ context.lineWidth = 2;
+ context.strokeStyle = '#000000';
+ context.clearRect(0, 0, width, height);
- context.lineWidth = 2;
- context.strokeStyle = '#000000';
- context.clearRect(0, 0, width, height);
+ context.beginPath();
+ context.moveTo(0, height / 2);
+ for (const item of audioData) {
+ const y = (item / 255.0) * height;
+ context.lineTo(x, y);
+ x += sliceWidth;
+ }
+ context.lineTo(x, height / 2);
+ context.stroke();
+ };
- context.beginPath();
- context.moveTo(0, height / 2);
- for (const item of audioData) {
- const y = (item / 255.0) * height;
- context.lineTo(x, y);
- x += sliceWidth;
- }
- context.lineTo(x, height / 2);
- context.stroke();
- }
+ draw();
+ }, [audioData]);
-  render() {
-    return <canvas width="300" height="300" ref={this.canvas} />;
-  }
-}
+  return <canvas width="300" height="300" ref={canvasRef} />;
+};
export default AudioVisualiser;