Diffstat (limited to 'src/client/views/nodes/DocumentView.tsx')
-rw-r--r--  src/client/views/nodes/DocumentView.tsx | 90
1 file changed, 41 insertions(+), 49 deletions(-)
diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx
index 40592c2cd..bbeacef88 100644
--- a/src/client/views/nodes/DocumentView.tsx
+++ b/src/client/views/nodes/DocumentView.tsx
@@ -38,7 +38,7 @@ import { DocComponent, ViewBoxInterface } from '../DocComponent';
import { EditableView } from '../EditableView';
import { GestureOverlay } from '../GestureOverlay';
import { LightboxView } from '../LightboxView';
-import { StyleProp } from '../StyleProvider';
+import { AudioAnnoState, StyleProp } from '../StyleProvider';
import { DocumentContentsView, ObserverJsxParser } from './DocumentContentsView';
import { DocumentLinksButton } from './DocumentLinksButton';
import './DocumentView.scss';
@@ -1004,49 +1004,41 @@ export class DocumentViewInternal extends DocComponent<FieldViewProps & Document
public static recordAudioAnnotation(dataDoc: Doc, field: string, onRecording?: (stop: () => void) => void, onEnd?: () => void) {
let gumStream: any;
let recorder: any;
- navigator.mediaDevices
- .getUserMedia({
- audio: true,
- })
- .then(function (stream) {
- let audioTextAnnos = Cast(dataDoc[field + '_audioAnnotations_text'], listSpec('string'), null);
- if (audioTextAnnos) audioTextAnnos.push('');
- else audioTextAnnos = dataDoc[field + '_audioAnnotations_text'] = new List<string>(['']);
- DictationManager.Controls.listen({
- interimHandler: value => (audioTextAnnos[audioTextAnnos.length - 1] = value),
- continuous: { indefinite: false },
- }).then(results => {
- if (results && [DictationManager.Controls.Infringed].includes(results)) {
- DictationManager.Controls.stop();
- }
- onEnd?.();
- });
-
- gumStream = stream;
- recorder = new MediaRecorder(stream);
- recorder.ondataavailable = async (e: any) => {
- const [{ result }] = await Networking.UploadFilesToServer({ file: e.data });
- if (!(result instanceof Error)) {
- const audioField = new AudioField(result.accessPaths.agnostic.client);
- const audioAnnos = Cast(dataDoc[field + '_audioAnnotations'], listSpec(AudioField), null);
- if (audioAnnos === undefined) {
- dataDoc[field + '_audioAnnotations'] = new List([audioField]);
- } else {
- audioAnnos.push(audioField);
- }
- }
- };
- //runInAction(() => (dataDoc.audioAnnoState = 'recording'));
- recorder.start();
- const stopFunc = () => {
- recorder.stop();
- DictationManager.Controls.stop(false);
- runInAction(() => (dataDoc.audioAnnoState = 'stopped'));
- gumStream.getAudioTracks()[0].stop();
- };
- if (onRecording) onRecording(stopFunc);
- else setTimeout(stopFunc, 5000);
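+            // Ask the browser for microphone access; the setup below runs once the user grants it.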
+ navigator.mediaDevices.getUserMedia({ audio: true }).then(function (stream) {
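+            // Keep a parallel list of dictation transcripts and open a fresh entry for this recording.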
+ let audioTextAnnos = Cast(dataDoc[field + '_audioAnnotations_text'], listSpec('string'), null);
+ if (audioTextAnnos) audioTextAnnos.push('');
+ else audioTextAnnos = dataDoc[field + '_audioAnnotations_text'] = new List<string>(['']);
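+            // Stream interim speech-to-text results into the newest transcript entry; stop listening if dictation reports Infringed.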
+ DictationManager.Controls.listen({
+ interimHandler: value => (audioTextAnnos[audioTextAnnos.length - 1] = value),
+ continuous: { indefinite: false },
+ }).then(results => {
+ if (results && [DictationManager.Controls.Infringed].includes(results)) {
+ DictationManager.Controls.stop();
+ }
+ onEnd?.();
+            });
+
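+            // Remember the stream so its audio track can be stopped when recording ends.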
+ gumStream = stream;
+ recorder = new MediaRecorder(stream);
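+            // MediaRecorder delivers the recorded blob once stop() is called; upload it and append the resulting AudioField.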
+ recorder.ondataavailable = async (e: any) => {
+ const [{ result }] = await Networking.UploadFilesToServer({ file: e.data });
+ if (!(result instanceof Error)) {
+ const audioField = new AudioField(result.accessPaths.agnostic.client);
+ const audioAnnos = Cast(dataDoc[field + '_audioAnnotations'], listSpec(AudioField), null);
+ if (audioAnnos) audioAnnos.push(audioField);
+ else dataDoc[field + '_audioAnnotations'] = new List([audioField]);
+ }
+ };
+ recorder.start();
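+            // stopFunc tears everything down: the recorder, the dictation session, and the microphone track itself.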
+ const stopFunc = () => {
+ recorder.stop();
+ DictationManager.Controls.stop(false);
+ dataDoc.audioAnnoState = AudioAnnoState.stopped;
+ gumStream.getAudioTracks()[0].stop();
+ };
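+            // Hand the stop control to the caller if one was provided, otherwise auto-stop after 5 seconds.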
+ if (onRecording) onRecording(stopFunc);
+ else setTimeout(stopFunc, 5000);
+ });
}
}
@@ -1231,25 +1223,25 @@ export class DocumentView extends DocComponent<DocumentViewProps>() {
public playAnnotation = () => {
const self = this;
- const audioAnnoState = this.dataDoc.audioAnnoState ?? 'stopped';
+ const audioAnnoState = this.dataDoc.audioAnnoState ?? AudioAnnoState.stopped;
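+        // Default to the stopped state when no playback state has been saved on the doc.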
const audioAnnos = Cast(this.dataDoc[this.LayoutFieldKey + '_audioAnnotations'], listSpec(AudioField), null);
const anno = audioAnnos?.lastElement();
if (anno instanceof AudioField) {
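+            // Toggle playback of the newest annotation: stopped -> start it, playing -> stop it.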
switch (audioAnnoState) {
- case 'stopped':
+ case AudioAnnoState.stopped:
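+                    // Start playback via howler.js and keep the Howl handle on the doc so it can be stopped later.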
this.dataDoc[AudioPlay] = new Howl({
src: [anno.url.href],
format: ['mp3'],
autoplay: true,
loop: false,
volume: 0.5,
- onend: action(() => (self.dataDoc.audioAnnoState = 'stopped')),
+ onend: action(() => (self.dataDoc.audioAnnoState = AudioAnnoState.stopped)),
});
- this.dataDoc.audioAnnoState = 'playing';
+ this.dataDoc.audioAnnoState = AudioAnnoState.playing;
break;
- case 'playing':
+ case AudioAnnoState.playing:
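+                    // Already playing: stop the current Howl and reset the state.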
this.dataDoc[AudioPlay]?.stop();
- this.dataDoc.audioAnnoState = 'stopped';
+ this.dataDoc.audioAnnoState = AudioAnnoState.stopped;
break;
}
}