| author | Sophie Zhang <sophie_zhang@brown.edu> | 2023-09-18 17:40:01 -0400 |
|---|---|---|
| committer | Sophie Zhang <sophie_zhang@brown.edu> | 2023-09-18 17:40:01 -0400 |
| commit | 013f25f01e729feee5db94900c61f4be4dd46869 (patch) | |
| tree | 765dd5f2e06d6217ca79438e1098cefc8da627bf /src/client/views/nodes/AudioBox.tsx | |
| parent | f5e765adff1e7b32250eb503c9724a4ac99117f3 (diff) | |
| parent | 84aa8806a62e2e957e8281d7d492139e3d8225f2 (diff) | |
Merge branch 'master' into sophie-report-manager
Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/client/views/nodes/AudioBox.tsx | 97 |
1 file changed, 71 insertions, 26 deletions
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 6558d215a..8d80f1364 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -1,14 +1,15 @@
 import React = require('react');
 import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
+import { Tooltip } from '@material-ui/core';
 import { action, computed, IReactionDisposer, observable, runInAction } from 'mobx';
 import { observer } from 'mobx-react';
 import { DateField } from '../../../fields/DateField';
-import { Doc, DocListCast } from '../../../fields/Doc';
+import { Doc } from '../../../fields/Doc';
 import { ComputedField } from '../../../fields/ScriptField';
 import { Cast, DateCast, NumCast } from '../../../fields/Types';
 import { AudioField, nullAudio } from '../../../fields/URLField';
 import { emptyFunction, formatTime, returnFalse, setupMoveUpEvents } from '../../../Utils';
-import { DocUtils } from '../../documents/Documents';
+import { Docs, DocUtils } from '../../documents/Documents';
 import { Networking } from '../../Network';
 import { DragManager } from '../../util/DragManager';
 import { LinkManager } from '../../util/LinkManager';
@@ -18,6 +19,7 @@
 import { ContextMenu } from '../ContextMenu';
 import { ContextMenuProps } from '../ContextMenuItem';
 import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from '../DocComponent';
 import './AudioBox.scss';
+import { DocFocusOptions } from './DocumentView';
 import { FieldView, FieldViewProps } from './FieldView';
 import { PinProps, PresBox } from './trails';
@@ -66,14 +68,14 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     _stream: MediaStream | undefined; // passed to MediaRecorder, records device input audio
     _play: any = null; // timeout for playback
-    @observable _stackedTimeline: any; // CollectionStackedTimeline ref
+    @observable _stackedTimeline: CollectionStackedTimeline | null | undefined; // CollectionStackedTimeline ref
     @observable _finished: boolean = false; // has playback reached end of clip
     @observable _volume: number = 1;
     @observable _muted: boolean = false;
     @observable _paused: boolean = false; // is recording paused
     // @observable rawDuration: number = 0; // computed from the length of the audio element when loaded
     @computed get recordingStart() {
-        return DateCast(this.dataDoc[this.fieldKey + '-recordingStart'])?.date.getTime();
+        return DateCast(this.dataDoc[this.fieldKey + '_recordingStart'])?.date.getTime();
     }
     @computed get rawDuration() {
         return NumCast(this.dataDoc[`${this.fieldKey}_duration`]);
     }
@@ -134,16 +136,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     }
     getAnchor = (addAsAnnotation: boolean, pinProps?: PinProps) => {
-        const anchor =
-            CollectionStackedTimeline.createAnchor(
-                this.rootDoc,
-                this.dataDoc,
-                this.annotationKey,
-                this._ele?.currentTime || Cast(this.props.Document._layout_currentTimecode, 'number', null) || (this.mediaState === media_state.Recording ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined),
-                undefined,
-                undefined,
-                addAsAnnotation
-            ) || this.rootDoc;
+        const timecode = Cast(this.layoutDoc._layout_currentTimecode, 'number', null);
+        const anchor = addAsAnnotation
+            ? CollectionStackedTimeline.createAnchor(
+                  this.rootDoc,
+                  this.dataDoc,
+                  this.annotationKey,
+                  this._ele?.currentTime || Cast(this.props.Document._layout_currentTimecode, 'number', null) || (this.mediaState === media_state.Recording ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined),
+                  undefined,
+                  undefined,
+                  addAsAnnotation
+              ) || this.rootDoc
+            : Docs.Create.ConfigDocument({ title: '#' + timecode, _timecodeToShow: timecode, annotationOn: this.rootDoc });
+        PresBox.pinDocView(anchor, { pinDocLayout: pinProps?.pinDocLayout, pinData: { ...(pinProps?.pinData ?? {}), temporal: true } }, this.rootDoc);
         return anchor;
     };
@@ -230,10 +235,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     recordAudioAnnotation = async () => {
         this._stream = await navigator.mediaDevices.getUserMedia({ audio: true });
         this._recorder = new MediaRecorder(this._stream);
-        this.dataDoc[this.fieldKey + '-recordingStart'] = new DateField();
+        this.dataDoc[this.fieldKey + '_recordingStart'] = new DateField();
         DocUtils.ActiveRecordings.push(this);
         this._recorder.ondataavailable = async (e: any) => {
-            const [{ result }] = await Networking.UploadFilesToServer({file: e.data});
+            const [{ result }] = await Networking.UploadFilesToServer({ file: e.data });
             if (!(result instanceof Error)) {
                 this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
             }
@@ -359,9 +364,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
             returnFalse,
             action(() => {
                 const newDoc = DocUtils.GetNewTextDoc('', NumCast(this.rootDoc.x), NumCast(this.rootDoc.y) + NumCast(this.layoutDoc._height) + 10, NumCast(this.layoutDoc._width), 2 * NumCast(this.layoutDoc._height));
-                Doc.GetProto(newDoc).recordingSource = this.dataDoc;
-                Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.fieldKey}-recordingStart"]`);
-                Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction('self.recordingSource.mediaState');
+                const textField = Doc.LayoutFieldKey(newDoc);
+                Doc.GetProto(newDoc)[`${textField}_recordingSource`] = this.dataDoc;
+                Doc.GetProto(newDoc)[`${textField}_recordingStart`] = ComputedField.MakeFunction(`self.${textField}_recordingSource.${this.fieldKey}_recordingStart`);
+                Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction(`self.${textField}_recordingSource.mediaState`);
                 if (Doc.IsInMyOverlay(this.rootDoc)) {
                     newDoc.overlayX = this.rootDoc.x;
                     newDoc.overlayY = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
@@ -417,7 +423,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     };
     // plays link
-    playLink = (link: Doc) => {
+    playLink = (link: Doc, options: DocFocusOptions) => {
         if (link.annotationOn === this.rootDoc) {
             if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
                 this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link));
             }
@@ -472,8 +478,34 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     };
     // for trim button, double click displays full clip, single displays curr trim bounds
+    onResetPointerDown = (e: React.PointerEvent) => {
+        e.stopPropagation();
+        this.timeline &&
+            setupMoveUpEvents(
+                this,
+                e,
+                returnFalse,
+                returnFalse,
+                action(e => {
+                    if (this.timeline?.IsTrimming !== TrimScope.None) {
+                        this.timeline?.CancelTrimming();
+                    } else {
+                        this.beginEndtime = this.timeline?.trimEnd;
+                        this.beginStarttime = this.timeline?.trimStart;
+                        this.startTrim(TrimScope.All);
+                    }
+                })
+            );
+    };
+
+    beginEndtime: number | undefined;
+    beginStarttime: number | undefined;
+
+    // for trim button, double click displays full clip, single displays curr trim bounds
     onClipPointerDown = (e: React.PointerEvent) => {
         e.stopPropagation();
+        this.beginEndtime = this.timeline?.trimEnd;
+        this.beginStarttime = this.timeline?.trimStart;
         this.timeline &&
             setupMoveUpEvents(
                 this,
@@ -524,7 +556,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                 r,
                 (e, de) => {
                     const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y);
-                    de.complete.docDragData && this.timeline.internalDocDrop(e, de, de.complete.docDragData, xp);
+                    de.complete.docDragData && this.timeline?.internalDocDrop(e, de, de.complete.docDragData, xp);
                 },
                 this.layoutDoc,
                 undefined
@@ -586,9 +618,22 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                         </div>
                         {!this.miniPlayer && (
-                            <div className="audiobox-button" title={this.timeline?.IsTrimming !== TrimScope.None ? 'finish' : 'trim'} onPointerDown={this.onClipPointerDown}>
-                                <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'check' : 'cut'} size={'1x'} />
-                            </div>
+                            <>
+                                <Tooltip title={<>trim audio</>}>
+                                    <div className="audiobox-button" onPointerDown={this.onClipPointerDown}>
+                                        <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'check' : 'cut'} size={'1x'} />
+                                    </div>
+                                </Tooltip>
+                                {this.timeline?.IsTrimming == TrimScope.None && !NumCast(this.layoutDoc.clipStart) && NumCast(this.layoutDoc.clipEnd) === this.rawDuration ? (
+                                    <></>
+                                ) : (
+                                    <Tooltip title={<>{this.timeline?.IsTrimming !== TrimScope.None ? 'Cancel trimming' : 'Edit original timeline'}</>}>
+                                        <div className="audiobox-button" onPointerDown={this.onResetPointerDown}>
+                                            <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'cancel' : 'arrows-left-right'} size={'1x'} />
+                                        </div>
+                                    </Tooltip>
+                                )}
+                            </>
                         )}
                     </div>
                     <div className="controls-right">
@@ -653,12 +698,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     @computed get renderTimeline() {
        return (
            <CollectionStackedTimeline
-                ref={action((r: any) => (this._stackedTimeline = r))}
+                ref={action((r: CollectionStackedTimeline | null) => (this._stackedTimeline = r))}
                 {...this.props}
                 CollectionFreeFormDocumentView={undefined}
                 dataFieldKey={this.fieldKey}
                 fieldKey={this.annotationKey}
-                dictationKey={this.fieldKey + '-dictation'}
+                dictationKey={this.fieldKey + '_dictation'}
                 mediaPath={this.path}
                 renderDepth={this.props.renderDepth + 1}
                 startTag={'_timecodeToShow' /* audioStart */}
