Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
-rw-r--r-- | src/client/views/nodes/AudioBox.tsx | 613 |
1 file changed, 326 insertions(+), 287 deletions(-)
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index c42c2306a..8437736ae 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -1,31 +1,29 @@
-import React = require("react");
-import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import { action, computed, IReactionDisposer, observable, runInAction } from "mobx";
-import { observer } from "mobx-react";
-import { DateField } from "../../../fields/DateField";
-import { Doc, DocListCast } from "../../../fields/Doc";
-import { ComputedField } from "../../../fields/ScriptField";
-import { Cast, DateCast, NumCast } from "../../../fields/Types";
-import { AudioField, nullAudio } from "../../../fields/URLField";
-import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents } from "../../../Utils";
-import { DocUtils } from "../../documents/Documents";
-import { Networking } from "../../Network";
-import { CurrentUserUtils } from "../../util/CurrentUserUtils";
-import { DragManager } from "../../util/DragManager";
-import { undoBatch } from "../../util/UndoManager";
-import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline";
-import { ContextMenu } from "../ContextMenu";
-import { ContextMenuProps } from "../ContextMenuItem";
-import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent";
-import "./AudioBox.scss";
-import { FieldView, FieldViewProps } from "./FieldView";
-
+import React = require('react');
+import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
+import { action, computed, IReactionDisposer, observable, runInAction } from 'mobx';
+import { observer } from 'mobx-react';
+import { DateField } from '../../../fields/DateField';
+import { Doc, DocListCast } from '../../../fields/Doc';
+import { ComputedField } from '../../../fields/ScriptField';
+import { Cast, DateCast, NumCast } from '../../../fields/Types';
+import { AudioField, nullAudio } from '../../../fields/URLField';
+import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents } from '../../../Utils';
+import { DocUtils } from '../../documents/Documents';
+import { Networking } from '../../Network';
+import { DragManager } from '../../util/DragManager';
+import { undoBatch } from '../../util/UndoManager';
+import { CollectionStackedTimeline, TrimScope } from '../collections/CollectionStackedTimeline';
+import { ContextMenu } from '../ContextMenu';
+import { ContextMenuProps } from '../ContextMenuItem';
+import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from '../DocComponent';
+import './AudioBox.scss';
+import { FieldView, FieldViewProps } from './FieldView';
 /**
  * AudioBox
  * Main component: AudioBox.tsx
  * Supporting Components: CollectionStackedTimeline, AudioWaveform
- * 
+ *
  * AudioBox is a node that supports the recording and playback of audio files in Dash.
  * When an audio file is imported into Dash, it is immediately rendered as an AudioBox document.
  * When a blank AudioBox node is created in Dash, audio recording controls are displayed and the user can start a recording which can be paused or stopped, and can use dictation to create a text transcript.
@@ -34,24 +32,23 @@ import { FieldView, FieldViewProps } from "./FieldView";
  * User can trim audio: nondestructive, just sets new bounds for playback and rendering timeline
  */
-
 // used as a wrapper class for MediaStream from MediaDevices API
 declare class MediaRecorder {
     constructor(e: any); // whatever MediaRecorder has
 }
 
 enum media_state {
-    PendingRecording = "pendingRecording",
-    Recording = "recording",
-    Paused = "paused",
-    Playing = "playing"
+    PendingRecording = 'pendingRecording',
+    Recording = 'recording',
+    Paused = 'paused',
+    Playing = 'playing',
 }
-
 @observer
 export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps>() {
-
-    public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
+    public static LayoutString(fieldKey: string) {
+        return FieldView.LayoutString(AudioBox, fieldKey);
+    }
 
     public static Enabled = false;
 
     static topControlsHeight = 30; // height of upper controls above timeline
@@ -73,27 +70,41 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     @observable _muted: boolean = false;
     @observable _paused: boolean = false; // is recording paused
     // @observable rawDuration: number = 0; // computed from the length of the audio element when loaded
-    @computed get recordingStart() { return DateCast(this.dataDoc[this.fieldKey + "-recordingStart"])?.date.getTime(); }
-    @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } // bcz: shouldn't be needed since it's computed from audio element
+    @computed get recordingStart() {
+        return DateCast(this.dataDoc[this.fieldKey + '-recordingStart'])?.date.getTime();
+    }
+    @computed get rawDuration() {
+        return NumCast(this.dataDoc[`${this.fieldKey}-duration`]);
+    } // bcz: shouldn't be needed since it's computed from audio element
     // mehek: not 100% sure but i think due to the order in which things are loaded this is necessary ^^
     // if you get rid of it and set the value to 0 the timeline and waveform will set their bounds incorrectly
-    @computed get miniPlayer() { return this.props.PanelHeight() < 50; } // used to collapse timeline when node is shrunk
-    @computed get links() { return DocListCast(this.dataDoc.links); }
-    @computed get mediaState() { return this.dataDoc.mediaState as media_state; }
-    @computed get path() { // returns the path of the audio file
-        const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || "";
-        return path === nullAudio ? "" : path;
+    @computed get miniPlayer() {
+        return this.props.PanelHeight() < 50;
+    } // used to collapse timeline when node is shrunk
+    @computed get links() {
+        return DocListCast(this.dataDoc.links);
+    }
+    @computed get mediaState() {
+        return this.dataDoc.mediaState as media_state;
+    }
+    @computed get path() {
+        // returns the path of the audio file
+        const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || '';
+        return path === nullAudio ? '' : path;
+    }
+    set mediaState(value) {
+        this.dataDoc.mediaState = value;
     }
-    set mediaState(value) { this.dataDoc.mediaState = value; }
-
-    @computed get timeline() { return this._stackedTimeline; } // returns CollectionStackedTimeline ref
+    @computed get timeline() {
+        return this._stackedTimeline;
+    } // returns CollectionStackedTimeline ref
 
     componentWillUnmount() {
         this.removeCurrentlyPlaying();
         this._dropDisposer?.();
-        Object.values(this._disposers).forEach((disposer) => disposer?.());
+        Object.values(this._disposers).forEach(disposer => disposer?.());
         this.mediaState === media_state.Recording && this.stopRecording();
     }
@@ -110,14 +121,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
         }
     }
 
-
     getLinkData(l: Doc) {
         let la1 = l.anchor1 as Doc;
         let la2 = l.anchor2 as Doc;
-        const linkTime =
-            this.timeline?.anchorStart(la2) ||
-            this.timeline?.anchorStart(la1) ||
-            0;
+        const linkTime = this.timeline?.anchorStart(la2) || this.timeline?.anchorStart(la1) || 0;
         if (Doc.AreProtosEqual(la1, this.dataDoc)) {
             la1 = l.anchor2 as Doc;
             la2 = l.anchor1 as Doc;
@@ -126,20 +133,17 @@
     }
 
     getAnchor = () => {
-        return CollectionStackedTimeline.createAnchor(
-            this.rootDoc,
-            this.dataDoc,
-            this.annotationKey,
-            "_timecodeToShow" /* audioStart */,
-            "_timecodeToHide" /* audioEnd */,
-            this._ele?.currentTime ||
-            Cast(this.props.Document._currentTimecode, "number", null) ||
-            (this.mediaState === media_state.Recording
-                ? (Date.now() - (this.recordingStart || 0)) / 1000
-                : undefined)
-        ) || this.rootDoc;
-    }
-
+        return (
+            CollectionStackedTimeline.createAnchor(
+                this.rootDoc,
+                this.dataDoc,
+                this.annotationKey,
+                '_timecodeToShow' /* audioStart */,
+                '_timecodeToHide' /* audioEnd */,
+                this._ele?.currentTime || Cast(this.props.Document._currentTimecode, 'number', null) || (this.mediaState === media_state.Recording ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined)
+            ) || this.rootDoc
+        );
+    };
 
     // updates timecode and shows it in timeline, follows links at time
     @action
@@ -148,24 +152,23 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
             this.links
                 .map(l => this.getLinkData(l))
                 .forEach(({ la1, la2, linkTime }) => {
-                    if (linkTime > NumCast(this.layoutDoc._currentTimecode) &&
-                        linkTime < this._ele!.currentTime) {
+                    if (linkTime > NumCast(this.layoutDoc._currentTimecode) && linkTime < this._ele!.currentTime) {
                         Doc.linkFollowHighlight(la1);
                     }
                 });
             this.layoutDoc._currentTimecode = this._ele.currentTime;
             this.timeline?.scrollToTime(NumCast(this.layoutDoc._currentTimecode));
         }
-    }
+    };
 
     // play back the audio from seekTimeInSeconds, fullPlay tells whether clip is being played to end vs link range
     @action
     playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
         clearTimeout(this._play); // abort any previous clip ending
-        if (Number.isNaN(this._ele?.duration)) { // audio element isn't loaded yet... wait 1/2 second and try again
+        if (Number.isNaN(this._ele?.duration)) {
+            // audio element isn't loaded yet... wait 1/2 second and try again
             setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
-        }
-        else if (this.timeline && this._ele && AudioBox.Enabled) {
+        } else if (this.timeline && this._ele && AudioBox.Enabled) {
             // trimBounds override requested playback bounds
             const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd);
             const start = Math.max(this.timeline.trimStart, seekTimeInSeconds);
@@ -175,21 +178,18 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                 this._ele.play();
                 this.mediaState = media_state.Playing;
                 this.addCurrentlyPlaying();
-                this._play = setTimeout(
-                    () => {
-                        // need to keep track of if end of clip is reached so on next play, clip restarts
-                        if (fullPlay) this._finished = true;
-                        // removes from currently playing if playback has reached end of range marker
-                        else this.removeCurrentlyPlaying();
-                        this.Pause();
-                    },
-                    (end - start) * 1000);
+                this._play = setTimeout(() => {
+                    // need to keep track of if end of clip is reached so on next play, clip restarts
+                    if (fullPlay) this._finished = true;
+                    // removes from currently playing if playback has reached end of range marker
+                    else this.removeCurrentlyPlaying();
+                    this.Pause();
+                }, (end - start) * 1000);
             } else {
                 this.Pause();
             }
         }
-    }
-
+    };
 
     // removes from currently playing display
     @action
@@ -198,7 +198,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
             const index = CollectionStackedTimeline.CurrentlyPlaying.indexOf(this.layoutDoc);
             index !== -1 && CollectionStackedTimeline.CurrentlyPlaying.splice(index, 1);
         }
-    }
+    };
 
     // adds doc to currently playing display
     @action
@@ -209,8 +209,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
         if (CollectionStackedTimeline.CurrentlyPlaying.indexOf(this.layoutDoc) === -1) {
             CollectionStackedTimeline.CurrentlyPlaying.push(this.layoutDoc);
         }
-    }
-
+    };
 
     // update the recording time
     updateRecordTime = () => {
@@ -220,13 +219,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
             this.layoutDoc._currentTimecode = (new Date().getTime() - this._recordStart - this._pausedTime) / 1000;
             }
         }
-    }
+    };
 
     // starts recording
     recordAudioAnnotation = async () => {
         this._stream = await navigator.mediaDevices.getUserMedia({ audio: true });
         this._recorder = new MediaRecorder(this._stream);
-        this.dataDoc[this.fieldKey + "-recordingStart"] = new DateField();
+        this.dataDoc[this.fieldKey + '-recordingStart'] = new DateField();
         DocUtils.ActiveRecordings.push(this);
         this._recorder.ondataavailable = async (e: any) => {
             const [{ result }] = await Networking.UploadFilesToServer(e.data);
@@ -235,11 +234,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
             }
         };
         this._recordStart = new Date().getTime();
-        runInAction(() => this.mediaState = media_state.Recording);
+        runInAction(() => (this.mediaState = media_state.Recording));
         setTimeout(this.updateRecordTime);
         this._recorder.start();
         setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour
-    }
+    };
 
     // stops recording
     @action
@@ -249,52 +248,59 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
         this._recorder = undefined;
         const now = new Date().getTime();
         this._paused && (this._pausedTime += now - this._pauseStart);
-        this.dataDoc[this.fieldKey + "-duration"] = (now - this._recordStart - this._pausedTime) / 1000;
+        this.dataDoc[this.fieldKey + '-duration'] = (now - this._recordStart - this._pausedTime) / 1000;
         this.mediaState = media_state.Paused;
         this._stream?.getAudioTracks()[0].stop();
         const ind = DocUtils.ActiveRecordings.indexOf(this);
         ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
         }
-    }
-
+    };
 
     // context menu
     specificContextMenu = (e: React.MouseEvent): void => {
         const funcs: ContextMenuProps[] = [];
         funcs.push({
-            description: (this.layoutDoc.hideAnchors ? "Don't hide" : "Hide") + " anchors",
-            event: e => this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors,
-            icon: "expand-arrows-alt",
+            description: (this.layoutDoc.hideAnchors ? "Don't hide" : 'Hide') + ' anchors',
+            event: e => (this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors),
+            icon: 'expand-arrows-alt',
         });
         funcs.push({
-            description: (this.layoutDoc.dontAutoFollowLinks ? "" : "Don't") + " follow links when encountered",
-            event: e => this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks,
-            icon: "expand-arrows-alt",
+            description: (this.layoutDoc.dontAutoFollowLinks ? '' : "Don't") + ' follow links when encountered',
+            event: e => (this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks),
+            icon: 'expand-arrows-alt',
         });
         funcs.push({
-            description: (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") + " play when link is selected",
-            event: e => this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks,
-            icon: "expand-arrows-alt",
+            description: (this.layoutDoc.dontAutoPlayFollowedLinks ? '' : "Don't") + ' play when link is selected',
+            event: e => (this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks),
+            icon: 'expand-arrows-alt',
        });
        funcs.push({
-            description: (this.layoutDoc.autoPlayAnchors ? "Don't auto" : "Auto") + " play anchors onClick",
-            event: e => this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors,
-            icon: "expand-arrows-alt",
+            description: (this.layoutDoc.autoPlayAnchors ? "Don't auto" : 'Auto') + ' play anchors onClick',
+            event: e => (this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors),
+            icon: 'expand-arrows-alt',
        });
 
        ContextMenu.Instance?.addItem({
-            description: "Options...",
+            description: 'Options...',
            subitems: funcs,
-            icon: "asterisk",
+            icon: 'asterisk',
        });
-    }
-
+    };
 
     // button for starting and stopping the recording
     Record = (e: React.PointerEvent) => {
-        e.button === 0 && !e.ctrlKey && setupMoveUpEvents(this, e, returnFalse, returnFalse, action(() => {
-            this._recorder ? this.stopRecording() : this.recordAudioAnnotation();
-        }), false);
-    }
+        e.button === 0 &&
+            !e.ctrlKey &&
+            setupMoveUpEvents(
+                this,
+                e,
+                returnFalse,
+                returnFalse,
+                action(() => {
+                    this._recorder ? this.stopRecording() : this.recordAudioAnnotation();
+                }),
+                false
+            );
+    };
 
     // for play button
     Play = (e?: any) => {
@@ -314,7 +320,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
 
             this.playFrom(start, this.timeline.trimEnd, true);
         }
-    }
+    };
 
     // pause play back
     @action
@@ -327,60 +333,73 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
             if (!this._finished) clearTimeout(this._play);
             this.removeCurrentlyPlaying();
         }
-    }
+    };
 
     // for dictation button, creates a text document for dictation
     onFile = (e: any) => {
-        setupMoveUpEvents(this, e, returnFalse, returnFalse, action(() => {
-            const newDoc = CurrentUserUtils.GetNewTextDoc(
-                "",
-                NumCast(this.rootDoc.x),
-                NumCast(this.rootDoc.y) +
-                NumCast(this.layoutDoc._height) +
-                10,
-                NumCast(this.layoutDoc._width),
-                2 * NumCast(this.layoutDoc._height)
-            );
-            Doc.GetProto(newDoc).recordingSource = this.dataDoc;
-            Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.fieldKey}-recordingStart"]`);
-            Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction("self.recordingSource.mediaState");
-            if (DocListCast(CurrentUserUtils.MyOverlayDocs?.data).includes(this.rootDoc)) {
-                newDoc.x = this.rootDoc.x;
-                newDoc.y = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
-                Doc.AddDocToList(CurrentUserUtils.MyOverlayDocs, undefined, newDoc);
-            } else {
-                this.props.addDocument?.(newDoc);
-            }
-        }), false);
-    }
-
+        setupMoveUpEvents(
+            this,
+            e,
+            returnFalse,
+            returnFalse,
+            action(() => {
+                const newDoc = DocUtils.GetNewTextDoc('', NumCast(this.rootDoc.x), NumCast(this.rootDoc.y) + NumCast(this.layoutDoc._height) + 10, NumCast(this.layoutDoc._width), 2 * NumCast(this.layoutDoc._height));
+                Doc.GetProto(newDoc).recordingSource = this.dataDoc;
+                Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.fieldKey}-recordingStart"]`);
+                Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction('self.recordingSource.mediaState');
+                if (DocListCast(Doc.MyOverlayDocs?.data).includes(this.rootDoc)) {
+                    newDoc.x = this.rootDoc.x;
+                    newDoc.y = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
+                    Doc.AddDocToList(Doc.MyOverlayDocs, undefined, newDoc);
+                } else {
+                    this.props.addDocument?.(newDoc);
+                }
+            }),
+            false
+        );
+    };
 
     // sets <audio> ref for updating time
     setRef = (e: HTMLAudioElement | null) => {
-        e?.addEventListener("timeupdate", this.timecodeChanged);
-        e?.addEventListener("ended", () => { this._finished = true; this.Pause(); });
+        e?.addEventListener('timeupdate', this.timecodeChanged);
+        e?.addEventListener('ended', () => {
+            this._finished = true;
+            this.Pause();
+        });
         this._ele = e;
-    }
-
+    };
 
     // pause the time during recording phase
     recordPause = (e: React.PointerEvent) => {
-        setupMoveUpEvents(this, e, returnFalse, returnFalse, action(() => {
-            this._pauseStart = new Date().getTime();
-            this._paused = true;
-            this._recorder.pause();
-        }), false);
-    }
+        setupMoveUpEvents(
+            this,
+            e,
+            returnFalse,
+            returnFalse,
+            action(() => {
+                this._pauseStart = new Date().getTime();
+                this._paused = true;
+                this._recorder.pause();
+            }),
+            false
+        );
+    };
 
     // continue the recording
     recordPlay = (e: React.PointerEvent) => {
-        setupMoveUpEvents(this, e, returnFalse, returnFalse, action(() => {
-            this._paused = false;
-            this._pausedTime += new Date().getTime() - this._pauseStart;
-            this._recorder.resume();
-        }), false);
-    }
-
+        setupMoveUpEvents(
+            this,
+            e,
+            returnFalse,
+            returnFalse,
+            action(() => {
+                this._paused = false;
+                this._pausedTime += new Date().getTime() - this._pauseStart;
+                this._recorder.resume();
+            }),
+            false
+        );
+    };
 
     // plays link
     playLink = (link: Doc) => {
@@ -392,8 +411,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
             }
         } else {
             this.links
-                .filter((l) => l.anchor1 === link || l.anchor2 === link)
-                .forEach((l) => {
+                .filter(l => l.anchor1 === link || l.anchor2 === link)
+                .forEach(l => {
                     const { la1, la2 } = this.getLinkData(l);
                     const startTime = this.timeline?.anchorStart(la1) || this.timeline?.anchorStart(la2);
                     const endTime = this.timeline?.anchorEnd(la1) || this.timeline?.anchorEnd(la2);
@@ -406,17 +425,14 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                 }
             });
         }
-    }
-
+    };
 
     @action
-    timelineWhenChildContentsActiveChanged = (isActive: boolean) =>
-        this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive)
+    timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged((this._isAnyChildContentActive = isActive));
 
-    timelineScreenToLocal = () =>
-        this.props.ScreenToLocalTransform().translate(0, -AudioBox.bottomControlsHeight)
+    timelineScreenToLocal = () => this.props.ScreenToLocalTransform().translate(0, -AudioBox.bottomControlsHeight);
 
-    setPlayheadTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time;
+    setPlayheadTime = (time: number) => (this._ele!.currentTime = this.layoutDoc._currentTimecode = time);
 
     playing = () => this.mediaState === media_state.Playing;
 
@@ -424,7 +440,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
 
     // timeline dimensions
     timelineWidth = () => this.props.PanelWidth();
-    timelineHeight = () => (this.props.PanelHeight() - (AudioBox.topControlsHeight + AudioBox.bottomControlsHeight));
+    timelineHeight = () => this.props.PanelHeight() - (AudioBox.topControlsHeight + AudioBox.bottomControlsHeight);
 
     // ends trim, hides trim controls and displays new clip
     @undoBatch
@@ -432,32 +448,38 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
         this.Pause();
         this.setPlayheadTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0));
         this.timeline?.StopTrimming();
-    }
+    };
 
     // displays trim controls to start trimming clip
     startTrim = (scope: TrimScope) => {
         this.Pause();
         this.timeline?.StartTrimming(scope);
-    }
+    };
 
     // for trim button, double click displays full clip, single displays curr trim bounds
     onClipPointerDown = (e: React.PointerEvent) => {
         e.stopPropagation();
-        this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
-            if (doubleTap) {
-                this.startTrim(TrimScope.All);
-            } else if (this.timeline) {
-                this.Pause();
-                this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip);
-            }
-        }));
-    }
-
+        this.timeline &&
+            setupMoveUpEvents(
+                this,
+                e,
+                returnFalse,
+                returnFalse,
+                action((e: PointerEvent, doubleTap?: boolean) => {
+                    if (doubleTap) {
+                        this.startTrim(TrimScope.All);
+                    } else if (this.timeline) {
+                        this.Pause();
+                        this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip);
+                    }
+                })
+            );
+    };
 
     // for zoom slider, sets timeline waveform zoom
     zoom = (zoom: number) => {
         this.timeline?.setZoom(zoom);
-    }
+    };
 
     // for volume slider sets volume
     @action
@@ -469,7 +491,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
             this.toggleMute();
             }
         }
-    }
+    };
 
     // toggles audio muted
     @action
@@ -478,135 +500,156 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
         this._muted = !this._muted;
         this._ele.muted = this._muted;
         }
-    }
-
+    };
 
     setupTimelineDrop = (r: HTMLDivElement | null) => {
         if (r && this.timeline) {
             this._dropDisposer?.();
-            this._dropDisposer = DragManager.MakeDropTarget(r,
+            this._dropDisposer = DragManager.MakeDropTarget(
+                r,
                 (e, de) => {
                     const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y);
                     de.complete.docDragData && this.timeline.internalDocDrop(e, de, de.complete.docDragData, xp);
                 },
-                this.layoutDoc, undefined);
+                this.layoutDoc,
+                undefined
+            );
         }
-    }
-
+    };
 
     // UI for recording, initially displayed when new audio created in Dash
     @computed get recordingControls() {
-        return <div className="audiobox-recorder">
-            <div className="audiobox-dictation" onPointerDown={this.onFile}>
-                <FontAwesomeIcon
-                    size="2x"
-                    icon="file-alt" />
-            </div>
-            {[media_state.Recording, media_state.Playing].includes(this.mediaState) ?
-                <div className="recording-controls" onClick={e => e.stopPropagation()}>
-                    <div className="record-button" onPointerDown={this.Record}>
-                        <FontAwesomeIcon
-                            size="2x"
-                            icon="stop" />
-                    </div>
-                    <div className="record-button" onPointerDown={this._paused ? this.recordPlay : this.recordPause}>
-                        <FontAwesomeIcon
-                            size="2x"
-                            icon={this._paused ? "play" : "pause"} />
+        return (
+            <div className="audiobox-recorder">
+                <div className="audiobox-dictation" onPointerDown={this.onFile}>
+                    <FontAwesomeIcon size="2x" icon="file-alt" />
+                </div>
+                {[media_state.Recording, media_state.Playing].includes(this.mediaState) ? (
+                    <div className="recording-controls" onClick={e => e.stopPropagation()}>
+                        <div className="record-button" onPointerDown={this.Record}>
+                            <FontAwesomeIcon size="2x" icon="stop" />
+                        </div>
+                        <div className="record-button" onPointerDown={this._paused ? this.recordPlay : this.recordPause}>
+                            <FontAwesomeIcon size="2x" icon={this._paused ? 'play' : 'pause'} />
+                        </div>
+                        <div className="record-timecode">{formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))}</div>
                     </div>
-                    <div className="record-timecode">
-                        {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))}
+                ) : (
+                    <div className="audiobox-start-record" onPointerDown={this.Record}>
+                        <FontAwesomeIcon icon="microphone" />
+                        RECORD
                     </div>
-                </div>
-                :
-                <div className="audiobox-start-record" onPointerDown={this.Record}>
-                    <FontAwesomeIcon icon="microphone" />
-                    RECORD
-                </div>}
-        </div>;
+                )}
+            </div>
+        );
     }
 
     // UI for playback, displayed for imported or recorded clips, hides timeline and collapses controls when node is shrunk vertically
     @computed get playbackControls() {
-        return <div className="audiobox-file" style={{
-            pointerEvents: this._isAnyChildContentActive || this.props.isContentActive() ? "all" : "none",
-            flexDirection: this.miniPlayer ? "row" : "column",
-            justifyContent: this.miniPlayer ? "flex-start" : "space-between"
-        }}>
-            <div className="audiobox-controls">
-                <div className="controls-left">
-                    <div className="audiobox-button"
-                        title={this.mediaState === media_state.Paused ? "play" : "pause"}
-                        onPointerDown={this.mediaState === media_state.Paused ? this.Play : (e) => { e.stopPropagation(); this.Pause(); }}>
-                        <FontAwesomeIcon icon={this.mediaState === media_state.Paused ? "play" : "pause"} size={"1x"} />
+        return (
+            <div
+                className="audiobox-file"
+                style={{
+                    pointerEvents: this._isAnyChildContentActive || this.props.isContentActive() ? 'all' : 'none',
+                    flexDirection: this.miniPlayer ? 'row' : 'column',
+                    justifyContent: this.miniPlayer ? 'flex-start' : 'space-between',
+                }}>
+                <div className="audiobox-controls">
+                    <div className="controls-left">
+                        <div
+                            className="audiobox-button"
+                            title={this.mediaState === media_state.Paused ? 'play' : 'pause'}
+                            onPointerDown={
+                                this.mediaState === media_state.Paused
+                                    ? this.Play
+                                    : e => {
+                                          e.stopPropagation();
+                                          this.Pause();
+                                      }
+                            }>
+                            <FontAwesomeIcon icon={this.mediaState === media_state.Paused ? 'play' : 'pause'} size={'1x'} />
+                        </div>
+
+                        {!this.miniPlayer && (
+                            <div className="audiobox-button" title={this.timeline?.IsTrimming !== TrimScope.None ? 'finish' : 'trim'} onPointerDown={this.onClipPointerDown}>
+                                <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'check' : 'cut'} size={'1x'} />
+                            </div>
+                        )}
                     </div>
-
-                    {!this.miniPlayer &&
-                        <div className="audiobox-button"
-                            title={this.timeline?.IsTrimming !== TrimScope.None ? "finish" : "trim"}
-                            onPointerDown={this.onClipPointerDown}>
-                            <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? "check" : "cut"} size={"1x"} />
-                        </div>}
-                </div>
-                <div className="controls-right">
-                    <div className="audiobox-button"
-                        title={this._muted ? "unmute" : "mute"}
-                        onPointerDown={(e) => { e.stopPropagation(); this.toggleMute(); }}>
-                        <FontAwesomeIcon icon={this._muted ? "volume-mute" : "volume-up"} />
+                    <div className="controls-right">
+                        <div
+                            className="audiobox-button"
+                            title={this._muted ? 'unmute' : 'mute'}
+                            onPointerDown={e => {
+                                e.stopPropagation();
+                                this.toggleMute();
+                            }}>
+                            <FontAwesomeIcon icon={this._muted ? 'volume-mute' : 'volume-up'} />
+                        </div>
+                        <input
+                            type="range"
+                            step="0.1"
+                            min="0"
+                            max="1"
+                            value={this._muted ? 0 : this._volume}
+                            className="toolbar-slider volume"
+                            onPointerDown={(e: React.PointerEvent) => {
+                                e.stopPropagation();
+                            }}
+                            onChange={(e: React.ChangeEvent<HTMLInputElement>) => this.setVolume(Number(e.target.value))}
+                        />
                     </div>
-                    <input type="range" step="0.1" min="0" max="1" value={this._muted ? 0 : this._volume}
-                        className="toolbar-slider volume"
-                        onPointerDown={(e: React.PointerEvent) => { e.stopPropagation(); }}
-                        onChange={(e: React.ChangeEvent<HTMLInputElement>) => this.setVolume(Number(e.target.value))}
-                    />
-                </div>
-            </div>
-
-            <div className="audiobox-playback" style={{ width: this.miniPlayer ? 0 : "100%" }}>
-                <div className="audiobox-timeline">
-                    {this.renderTimeline}
                 </div>
-            </div>
-
-            {this.audio}
 
-            <div className="audiobox-timecodes">
-                <div className="timecode-current">
-                    {this.timeline && formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.timeline.clipStart)))}
+                <div className="audiobox-playback" style={{ width: this.miniPlayer ? 0 : '100%' }}>
+                    <div className="audiobox-timeline">{this.renderTimeline}</div>
                 </div>
-                {this.miniPlayer ?
-                    <div>/</div>
-                    :
-                    <div className="bottom-controls-middle">
-                        <FontAwesomeIcon icon="search-plus" />
-                        <input type="range" step="0.1" min="1" max="5" value={this.timeline?._zoomFactor}
-                            className="toolbar-slider" id="zoom-slider"
-                            onPointerDown={(e: React.PointerEvent) => { e.stopPropagation(); }}
-                            onChange={(e: React.ChangeEvent<HTMLInputElement>) => { this.zoom(Number(e.target.value)); }}
-                        />
-                    </div>}
-                <div className="timecode-duration">
-                    {this.timeline && formatTime(Math.round(this.timeline.clipDuration))}
+
+                {this.audio}
+
+                <div className="audiobox-timecodes">
+                    <div className="timecode-current">{this.timeline && formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.timeline.clipStart)))}</div>
+                    {this.miniPlayer ? (
+                        <div>/</div>
+                    ) : (
+                        <div className="bottom-controls-middle">
+                            <FontAwesomeIcon icon="search-plus" />
+                            <input
+                                type="range"
+                                step="0.1"
+                                min="1"
+                                max="5"
+                                value={this.timeline?._zoomFactor}
+                                className="toolbar-slider"
+                                id="zoom-slider"
+                                onPointerDown={(e: React.PointerEvent) => {
+                                    e.stopPropagation();
+                                }}
+                                onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
+                                    this.zoom(Number(e.target.value));
+                                }}
+                            />
+                        </div>
+                    )}
+
+                    <div className="timecode-duration">{this.timeline && formatTime(Math.round(this.timeline.clipDuration))}</div>
                 </div>
             </div>
-
-
-        </div>;
+        );
     }
 
     // gets CollectionStackedTimeline
     @computed get renderTimeline() {
         return (
             <CollectionStackedTimeline
-                ref={action((r: any) => this._stackedTimeline = r)}
-                {...OmitKeys(this.props, ["CollectionFreeFormDocumentView"]).omit}
+                ref={action((r: any) => (this._stackedTimeline = r))}
+                {...OmitKeys(this.props, ['CollectionFreeFormDocumentView']).omit}
                 fieldKey={this.annotationKey}
-                dictationKey={this.fieldKey + "-dictation"}
+                dictationKey={this.fieldKey + '-dictation'}
                 mediaPath={this.path}
                 renderDepth={this.props.renderDepth + 1}
-                startTag={"_timecodeToShow" /* audioStart */}
-                endTag={"_timecodeToHide" /* audioEnd */}
+                startTag={'_timecodeToShow' /* audioStart */}
+                endTag={'_timecodeToHide' /* audioEnd */}
                 bringToFront={emptyFunction}
                 CollectionView={undefined}
                 playFrom={this.playFrom}
@@ -631,26 +674,22 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
 
     // returns the html audio element
     @computed get audio() {
-        return <audio ref={this.setRef}
-            className={`audiobox-control${this.props.isContentActive() ? "-interactive" : ""}`}
-            onLoadedData={action(e =>
-                (this._ele?.duration && this._ele?.duration !== Infinity) &&
-                (this.dataDoc[this.fieldKey + "-duration"] = this._ele.duration)
-            )}
-        >
-            <source src={this.path} type="audio/mpeg" />
-            Not supported.
-        </audio>;
+        return (
+            <audio
+                ref={this.setRef}
+                className={`audiobox-control${this.props.isContentActive() ? '-interactive' : ''}`}
+                onLoadedData={action(e => this._ele?.duration && this._ele?.duration !== Infinity && (this.dataDoc[this.fieldKey + '-duration'] = this._ele.duration))}>
+                <source src={this.path} type="audio/mpeg" />
+                Not supported.
+            </audio>
+        );
     }
 
     render() {
-        return <div
-            ref={this.setupTimelineDrop}
-            className="audiobox-container"
-            onContextMenu={this.specificContextMenu}
-            style={{ pointerEvents: this.layoutDoc._lockedPosition ? "none" : undefined }}
-        >
-            {!this.path ? this.recordingControls : this.playbackControls}
-        </div>;
+        return (
+            <div ref={this.setupTimelineDrop} className="audiobox-container" onContextMenu={this.specificContextMenu} style={{ pointerEvents: this.layoutDoc._lockedPosition ? 'none' : undefined }}>
+                {!this.path ? this.recordingControls : this.playbackControls}
+            </div>
+        );
    }
 }
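Most of the churn in this diff is Prettier-style reformatting (double quotes to single quotes, one argument per line); the substantive change is in onFile, which now calls DocUtils.GetNewTextDoc and Doc.MyOverlayDocs in place of the removed CurrentUserUtils import. The recording methods (recordAudioAnnotation, recordPause, recordPlay, stopRecording) are thin wrappers over the browser's MediaRecorder API, which the file still stubs out with "declare class MediaRecorder" because older TypeScript DOM typings lacked it. For context, a minimal standalone sketch of that underlying pattern, where uploadToServer is a hypothetical stand-in for Dash's Networking.UploadFilesToServer:

    // Sketch of the MediaRecorder flow that recordAudioAnnotation wraps.
    // Assumes modern TypeScript DOM typings that include MediaRecorder/BlobEvent.
    async function recordClip(uploadToServer: (blob: Blob) => Promise<void>) {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        const recorder = new MediaRecorder(stream);
        const chunks: Blob[] = [];

        // Fired with a chunk of encoded audio; AudioBox uploads each chunk as it arrives.
        recorder.ondataavailable = (e: BlobEvent) => chunks.push(e.data);

        // Fired after stop(); assemble the chunks into one playable blob.
        recorder.onstop = async () => {
            await uploadToServer(new Blob(chunks, { type: recorder.mimeType }));
            stream.getAudioTracks()[0].stop(); // release the microphone
        };

        recorder.start();
        // pause()/resume() correspond to AudioBox's recordPause/recordPlay buttons;
        // the component also tracks paused time itself to keep its timecode accurate.
        setTimeout(() => recorder.stop(), 60 * 60 * 1000); // stop after an hour, as AudioBox does
        return recorder;
    }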
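In the same spirit, playFrom supplies range playback that HTMLMediaElement lacks natively: clamp the requested range to the trim bounds, seek, play, and schedule the pause that ends the clip (the stored timeout id is what clearTimeout(this._play) aborts on the next call). A reduced sketch of the pattern against a plain HTMLAudioElement, with trimStart/trimEnd standing in for the timeline's trim bounds:

    // Sketch of playFrom's range-playback pattern; returns the timeout id so a
    // later call can clearTimeout() to abort this clip ending, as AudioBox does.
    function playRange(ele: HTMLAudioElement, seek: number, end: number, trimStart: number, trimEnd: number): number | undefined {
        if (Number.isNaN(ele.duration)) return undefined; // metadata not loaded yet; AudioBox retries after 500ms
        const start = Math.max(trimStart, seek); // trim bounds override the requested range
        const stop = Math.min(trimEnd, end);
        if (start > stop) return undefined; // nothing to play
        ele.currentTime = start;
        ele.play();
        // HTMLMediaElement has no "play until time t" API, so schedule the pause manually.
        return window.setTimeout(() => ele.pause(), (stop - start) * 1000);
    }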