From b3d6eaa3a0b126712eae25c1b91925d030a2d900 Mon Sep 17 00:00:00 2001
From: geireann
Date: Thu, 7 Apr 2022 18:06:40 -0400
Subject: added RecordingView

---
 .../views/nodes/RecordingBox/RecordingView.tsx | 326 +++++++++++++++++++++
 1 file changed, 326 insertions(+)
 create mode 100644 src/client/views/nodes/RecordingBox/RecordingView.tsx

diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
new file mode 100644
index 000000000..15f8c8626
--- /dev/null
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -0,0 +1,326 @@
+import * as React from 'react';
+import "./RecordingView.scss";
+import { ReactElement, useCallback, useEffect, useRef, useState } from "react";
+import { ProgressBar } from "./ProgressBar";
+import { MdBackspace } from 'react-icons/md';
+import { FaCheckCircle } from 'react-icons/fa';
+import { IconContext } from "react-icons";
+
+enum RecordingStatus {
+    Recording,
+    Stopped,
+    Paused
+}
+
+interface VideoSegment {
+    chunks: Blob[];
+    endTime: number;
+}
+
+// maximum recording length in timer ticks; one tick is 10 ms, so 1000 ticks = 10 s
+const MAXTIME = 1000;
+
+export function RecordingView() {
+
+    const [recording, setRecording] = useState(false);
+    const recordingTimerRef = useRef(0);
+    const [recordingTimer, setRecordingTimer] = useState(0); // unit is 0.01 second
+    const [playing, setPlaying] = useState(false);
+    const [progress, setProgress] = useState(0);
+    const [speed, setSpeed] = useState(1);
+    const [muted, setMuted] = useState(false);
+
+    const [videos, setVideos] = useState<VideoSegment[]>([]);
+    const [currentVid, setCurrentVid] = useState(0);
+    const recorder = useRef<MediaRecorder | null>(null);
+    const videoElementRef = useRef<HTMLVideoElement | null>(null);
+
+    const [finished, setFinished] = useState(false);
+
+    const DEFAULT_MEDIA_CONSTRAINTS = {
+        video: {
+            width: 1280,
+            height: 720,
+        },
+        audio: {
+            echoCancellation: true,
+            noiseSuppression: true,
+            sampleRate: 44100
+        }
+    };
+
+    useEffect(() => {
+        if (finished) {
+            // stitch the chunks of every recorded segment into one blob and play it back
+            let allVideoChunks: Blob[] = [];
+            videos.forEach((vid) => {
+                allVideoChunks = allVideoChunks.concat(vid.chunks);
+            });
+
+            const blob = new Blob(allVideoChunks, {
+                type: 'video/webm'
+            });
+            const blobUrl = URL.createObjectURL(blob);
+
+            videoElementRef.current!.srcObject = null;
+            videoElementRef.current!.src = blobUrl;
+            videoElementRef.current!.muted = false;
+        }
+    }, [finished]);
+
+    useEffect(() => {
+        // check if the browser supports media devices on first load
+        if (!navigator.mediaDevices) {
+            console.log('This browser does not support getUserMedia.');
+            return;
+        }
+        console.log('This device has the correct media devices.');
+    }, []);
+
+    useEffect(() => {
+        // get access to the video element on every render
+        videoElementRef.current = document.getElementById('video') as HTMLVideoElement;
+    });
+
+    // useEffect(() => {
+    //     if (playing) {
+    //         videoElement!.srcObject = null
+    //         // videoElement!.src = videos[currentVid].url
+    //         videoElement!.muted = false
+    //         videoElement!.play()
+    //     } else {
+    //         videoElement!.pause();
+    //     }
+    // }, [playing, videoElement]);
+
+    useEffect(() => {
+        // drive the 10 ms recording timer; the cleanup clears the interval
+        // whenever `recording` flips or the component unmounts
+        let interval: ReturnType<typeof setInterval> | null = null;
+        if (recording) {
+            interval = setInterval(() => {
+                setRecordingTimer(unit => unit + 1);
+            }, 10);
+        }
+        return () => { if (interval) clearInterval(interval); };
+    }, [recording]);
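+
+    // Worked example of the units above: the interval fires every 10 ms, so
+    // recordingTimer === 250 represents 2.5 s of recording and maps to a
+    // progress value of 250 / MAXTIME * 100 = 25%.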
+
+    useEffect(() => {
+        setVideoProgressHelper(recordingTimer);
+        recordingTimerRef.current = recordingTimer;
+    }, [recordingTimer]);
+
+    const setVideoProgressHelper = (progress: number) => {
+        const newProgress = (progress / MAXTIME) * 100;
+        setProgress(newProgress);
+    };
+
+    const startShowingStream = async (mediaConstraints = DEFAULT_MEDIA_CONSTRAINTS) => {
+        const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
+
+        // preview the live camera stream, muted to avoid audio feedback
+        videoElementRef.current!.src = "";
+        videoElementRef.current!.srcObject = stream;
+        videoElementRef.current!.muted = true;
+
+        return stream;
+    };
+
+    const record = async () => {
+        const stream = await startShowingStream();
+        recorder.current = new MediaRecorder(stream);
+
+        // temporary chunks for the segment currently being recorded
+        let chunks: Blob[] = [];
+        recorder.current.ondataavailable = (event: BlobEvent) => {
+            // store the video chunks as it is recording
+            if (event.data.size > 0) {
+                chunks.push(event.data);
+            }
+        };
+
+        recorder.current.onstart = () => {
+            setRecording(true);
+        };
+
+        recorder.current.onstop = () => {
+            // if we have a last portion, append it to the video pieces
+            if (chunks.length > 0) {
+                setVideos(videos => [...videos, { chunks: chunks, endTime: recordingTimerRef.current }]);
+            }
+
+            // reset the temporary chunks
+            chunks = [];
+            setRecording(false);
+            setFinished(true);
+        };
+
+        // recording paused: append the current portion to the video pieces
+        recorder.current.onpause = () => {
+            setVideos(videos => [...videos, { chunks: chunks, endTime: recordingTimerRef.current }]);
+
+            // reset the temporary chunks
+            chunks = [];
+            setRecording(false);
+        };
+
+        recorder.current.onresume = async () => {
+            await startShowingStream();
+            setRecording(true);
+        };
+
+        // emit a dataavailable event every 200 ms while recording
+        recorder.current.start(200);
+    };
+
+    const stop = () => {
+        if (recorder.current && recorder.current.state !== "inactive") {
+            recorder.current.stop();
+            // recorder.current.stream.getTracks().forEach((track: any) => track.stop())
+        }
+    };
+
+    const pause = () => {
+        if (recorder.current && recorder.current.state === "recording") {
+            recorder.current.pause();
+        }
+    };
+
+    const startOrResume = () => {
+        if (!recorder.current || recorder.current.state === "inactive") {
+            record();
+        } else if (recorder.current.state === "paused") {
+            recorder.current.resume();
+        }
+    };
+
+    const playSegment = (idx: number) => {
+        // build a playable blob URL from one recorded segment's chunks
+        const currentChunks = videos[idx].chunks;
+
+        const blob = new Blob(currentChunks, {
+            type: 'video/webm'
+        });
+        const blobUrl = URL.createObjectURL(blob);
+
+        videoElementRef.current!.srcObject = null;
+        videoElementRef.current!.src = blobUrl;
+        videoElementRef.current!.muted = false;
+    };
+
+    const clearPrevious = () => {
+        // drop the newest segment and rewind the timer to the end of the previous one
+        const numVideos = videos.length;
+        setRecordingTimer(numVideos === 1 ? 0 : videos[numVideos - 2].endTime);
+        setVideoProgressHelper(numVideos === 1 ? 0 : videos[numVideos - 2].endTime);
+        setVideos(videos.filter((_, idx) => idx !== numVideos - 1));
+    };
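+
+    // For example, with two recorded segments ending at ticks [300, 700],
+    // clearPrevious() drops the second and rewinds the timer to tick 300
+    // (i.e. 3 s), so the next recording continues from the end of the first.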
+
+    // const handleVideoProgress = (event: any) => {
+    //     const manualChange = Number(event.target.value);
+    //     videoElement!.currentTime = (videoElement!.duration / 100) * manualChange;
+    //     setProgress(manualChange)
+    // };
+
+    // const handleVideoSpeed = (event: any) => {
+    //     const newSpeed = Number(event.target.value);
+    //     videoElement!.playbackRate = newSpeed;
+    //     setSpeed(newSpeed)
+    // };
+
+    const handleOnTimeUpdate = () => {
+        if (playing) {
+            // currentTime is in seconds; convert to 10 ms ticks before computing progress
+            setVideoProgressHelper(videoElementRef.current!.currentTime * 100);
+        }
+    };
+
+    const millisecondToMinuteSecond = (milliseconds: number) => {
+        const toTwoDigit = (digit: number) => {
+            return String(digit).length === 1 ? "0" + digit : String(digit);
+        };
+        const minutes = Math.floor((milliseconds % (1000 * 60 * 60)) / (1000 * 60));
+        const seconds = Math.floor((milliseconds % (1000 * 60)) / 1000);
+        return toTwoDigit(minutes) + " : " + toTwoDigit(seconds);
+    };
+
+    useEffect(() => {
+        // debug: log each segment's end position as a percentage of MAXTIME
+        console.log(videos.map((elt) => (elt.endTime / MAXTIME) * 100));
+    }, [videos]);
+
+    return (
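+        /* The JSX committed here was lost when this patch was extracted (the
+           markup was stripped along with other angle-bracketed spans). The sketch
+           below is a plausible reconstruction wired to the handlers above, not
+           the original markup: the id="video" is required by the effect that
+           re-queries the element on each render, but all class names and the
+           ProgressBar props are assumptions. */
+        <div className="recording-view">
+            <video id="video" autoPlay playsInline onTimeUpdate={handleOnTimeUpdate} />
+            <ProgressBar progress={progress} segments={videos.map(vid => (vid.endTime / MAXTIME) * 100)} />
+            <div className="recording-controls">
+                <span>{millisecondToMinuteSecond(recordingTimer * 10)}</span>
+                <button onClick={recording ? pause : startOrResume}>{recording ? "Pause" : "Record"}</button>
+                <IconContext.Provider value={{ size: "24px" }}>
+                    <MdBackspace onClick={clearPrevious} />
+                    <FaCheckCircle onClick={stop} />
+                </IconContext.Provider>
+            </div>
+        </div>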
+    );
+}
\ No newline at end of file
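
Usage note: the patch does not show where RecordingView is mounted. A minimal
host view might look like the sketch below; the import path matches the file
added above, but the host component itself is illustrative, not part of the
commit.

    import * as React from 'react';
    import { RecordingView } from './RecordingBox/RecordingView';

    // Hypothetical parent: RecordingView manages capture, segmenting, and
    // playback state internally, so the host only provides layout.
    export const RecordingHost = () => (
        <div style={{ width: 1280, height: 720 }}>
            <RecordingView />
        </div>
    );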