From e4791f989024410443059bee424606c0567bc5f7 Mon Sep 17 00:00:00 2001 From: bobzel Date: Mon, 22 Mar 2021 00:30:21 -0400 Subject: fixed audio/video synchronization for screengrab videos. fixed following isLinkButtons in stackedTimelines. Made links from screenGrab videos to the document when presented in the lightbox. --- src/client/views/LightboxView.tsx | 1 + 1 file changed, 1 insertion(+) (limited to 'src/client/views/LightboxView.tsx') diff --git a/src/client/views/LightboxView.tsx b/src/client/views/LightboxView.tsx index 731d46502..48b8ca341 100644 --- a/src/client/views/LightboxView.tsx +++ b/src/client/views/LightboxView.tsx @@ -116,6 +116,7 @@ export class LightboxView extends React.Component { const target = LightboxView._docTarget = LightboxView._future?.pop(); const docView = target && DocumentManager.Instance.getLightboxDocumentView(target); if (docView && target) { + DocUtils.MakeLinkToActiveAudio(target); docView.focus(target, { originalTarget: target, willZoom: true, scale: 0.9 }); if (LightboxView._history?.lastElement().target !== target) LightboxView._history?.push({ doc, target }); } else { -- cgit v1.2.3-70-g09d2 From 8f8a12f6e81482d6cbc4789c3b7f74015f33f423 Mon Sep 17 00:00:00 2001 From: bobzel Date: Wed, 24 Mar 2021 03:50:41 -0400 Subject: fixes for audiotags so that they can be navigated to when focusing. fixes to dictation to start/stop properly. added "RecordingEvent" to trigger dictation waypoints. --- src/client/util/DictationManager.ts | 28 ++++---- src/client/views/GlobalKeyHandler.ts | 2 +- src/client/views/LightboxView.tsx | 3 +- src/client/views/nodes/ScreenshotBox.tsx | 2 +- .../views/nodes/formattedText/FormattedTextBox.tsx | 84 ++++++++++++---------- src/client/views/nodes/formattedText/nodes_rts.ts | 8 ++- 6 files changed, 73 insertions(+), 54 deletions(-) (limited to 'src/client/views/LightboxView.tsx') diff --git a/src/client/util/DictationManager.ts b/src/client/util/DictationManager.ts index c6b654dda..f00cdce1e 100644 --- a/src/client/util/DictationManager.ts +++ b/src/client/util/DictationManager.ts @@ -84,7 +84,13 @@ export namespace DictationManager { terminators: string[]; } + let pendingListen: Promise | string | undefined; + export const listen = async (options?: Partial) => { + if (pendingListen instanceof Promise) return pendingListen.then(pl => innerListen(options)); + return innerListen(options); + } + const innerListen = async (options?: Partial) => { let results: string | undefined; const overlay = options?.useOverlay; } try { - results = await listenImpl(options); + results = await (pendingListen = listenImpl(options)); + pendingListen = undefined; if (results) { Utils.CopyText(results); if (overlay) { } options?.tryExecute && await DictationManager.Commands.execute(results); } } catch (e) { + console.log(e); if (overlay) { DictationOverlay.Instance.isListening = false; DictationOverlay.Instance.dictatedPhrase = results = `dictation error: ${"error" in e ? e.error : "unknown error"}`; } @@ -146,7 +154,8 @@ recognizer.onerror = (e: any) => { // e is SpeechRecognitionError but where is that defined?
if (!(indefinite && e.error === "no-speech")) { recognizer.stop(); - reject(e); + resolve(e); + //reject(e); } }; @@ -158,8 +167,8 @@ export namespace DictationManager { recognizer.abort(); return complete(); } - handler && handler(current); - isManuallyStopped && complete(); + !isManuallyStopped && handler?.(current); + //isManuallyStopped && complete(); }; recognizer.onend = (e: Event) => { @@ -168,7 +177,7 @@ export namespace DictationManager { } if (current) { - sessionResults.push(current); + !isManuallyStopped && sessionResults.push(current); current = undefined; } recognizer.start(); @@ -199,14 +208,7 @@ export namespace DictationManager { } isListening = false; isManuallyStopped = true; - salvageSession ? recognizer.stop() : recognizer.abort(); - // let main = MainView.Instance; - // if (main.dictationOverlayVisible) { - // main.cancelDictationFade(); - // main.dictationOverlayVisible = false; - // main.dictationSuccess = undefined; - // setTimeout(() => main.dictatedPhrase = placeholder, 500); - // } + recognizer.stop(); // salvageSession ? recognizer.stop() : recognizer.abort(); }; const synthesize = (e: SpeechRecognitionEvent, delimiter?: string) => { diff --git a/src/client/views/GlobalKeyHandler.ts b/src/client/views/GlobalKeyHandler.ts index f387546af..671c0c507 100644 --- a/src/client/views/GlobalKeyHandler.ts +++ b/src/client/views/GlobalKeyHandler.ts @@ -132,7 +132,7 @@ export class KeyManager { SelectionManager.DeselectAll(); LightboxView.SetLightboxDoc(undefined); } - DictationManager.Controls.stop(); + // DictationManager.Controls.stop(); GoogleAuthenticationManager.Instance.cancel(); SharingManager.Instance.close(); GroupManager.Instance.close(); diff --git a/src/client/views/LightboxView.tsx b/src/client/views/LightboxView.tsx index 48b8ca341..5715b62b0 100644 --- a/src/client/views/LightboxView.tsx +++ b/src/client/views/LightboxView.tsx @@ -116,7 +116,8 @@ export class LightboxView extends React.Component { const target = LightboxView._docTarget = LightboxView._future?.pop(); const docView = target && DocumentManager.Instance.getLightboxDocumentView(target); if (docView && target) { - DocUtils.MakeLinkToActiveAudio(target); + const l = DocUtils.MakeLinkToActiveAudio(target); + l && (Cast(l.anchor2, Doc, null).backgroundColor = "lightgreen"); docView.focus(target, { originalTarget: target, willZoom: true, scale: 0.9 }); if (LightboxView._history?.lastElement().target !== target) LightboxView._history?.push({ doc, target }); } else { diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx index a481cbbc5..8e1a43fd1 100644 --- a/src/client/views/nodes/ScreenshotBox.tsx +++ b/src/client/views/nodes/ScreenshotBox.tsx @@ -218,7 +218,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent { let linkTime; let linkAnchor; + let link; DocListCast(this.dataDoc.links).forEach((l, i) => { const anchor = (l.anchor1 as Doc).annotationOn ? l.anchor1 as Doc : (l.anchor2 as Doc).annotationOn ? 
(l.anchor2 as Doc) : undefined; if (anchor && (anchor.annotationOn as Doc).audioState === "recording") { linkTime = NumCast(anchor._timecodeToShow /* audioStart */); linkAnchor = anchor; + link = l; } }); if (this._editorView && linkTime) { @@ -636,14 +638,16 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp } breakupDictation = () => { - if (this._editorView) { + if (this._editorView && this._recording) { this.stopDictation(true); this._break = true; const state = this._editorView.state; const to = state.selection.to; const updated = TextSelection.create(state.doc, to, to); - this._editorView!.dispatch(this._editorView!.state.tr.setSelection(updated).insertText("\n", to)); - if (this._recording) setTimeout(() => this.recordDictation(), 500); + this._editorView.dispatch(state.tr.setSelection(updated).insertText("\n", to)); + if (this._recording) { + this.recordDictation(); + } } } recordDictation = () => { @@ -659,25 +663,29 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp stopDictation = (abort: boolean) => DictationManager.Controls.stop(!abort); setDictationContent = (value: string) => { - if (this._editorView) { - const state = this._editorView.state; - if (this._recordingStart) { - let from = this._editorView.state.selection.from; - if (this._break) { - const l = DocUtils.MakeLinkToActiveAudio(this.rootDoc, false); - if (!l) return; - const anchor = (l.anchor1 as Doc).annotationOn ? l.anchor1 as Doc : (l.anchor2 as Doc).annotationOn ? (l.anchor2 as Doc) : undefined; - if (!anchor) return; - const timeCode = NumCast(anchor._timecodeToShow /* audioStart */); - const audiotag = this._editorView.state.schema.nodes.audiotag.create({ timeCode, audioId: anchor[Id] }); - this._editorView.dispatch(this._editorView.state.tr.insert(state.selection.from, audiotag)); - from = this._editorView.state.doc.content.size; - this._editorView.dispatch(this._editorView.state.tr.setSelection(TextSelection.create(this._editorView.state.tr.doc, from))); - } - this._break = false; - const tr = this._editorView.state.tr.insertText(value); - this._editorView.dispatch(tr.setSelection(TextSelection.create(tr.doc, from, tr.doc.content.size)).scrollIntoView()); + if (this._editorView && this._recordingStart) { + if (this._break) { + const textanchor = Docs.Create.TextanchorDocument({ title: "dictation anchor" }); + this.addDocument(textanchor); + const link = DocUtils.MakeLinkToActiveAudio(textanchor, false); + if (!link) return; + const audioanchor = Cast(link.anchor2, Doc, null); + if (!audioanchor) return; + audioanchor.backgroundColor = "tan"; + const audiotag = this._editorView.state.schema.nodes.audiotag.create({ + timeCode: NumCast(audioanchor._timecodeToShow), + audioId: audioanchor[Id], + textId: textanchor[Id] + }); + Doc.GetProto(textanchor).title = "dictation:" + audiotag.attrs.timeCode; + const tr = this._editorView.state.tr.insert(this._editorView.state.doc.content.size, audiotag); + const tr2 = tr.setSelection(TextSelection.create(tr.doc, tr.doc.content.size)); + this._editorView.dispatch(tr.setSelection(TextSelection.create(tr2.doc, tr2.doc.content.size))); } + const from = this._editorView.state.selection.from; + this._break = false; + const tr = this._editorView.state.tr.insertText(value); + this._editorView.dispatch(tr.setSelection(TextSelection.create(tr.doc, from, tr.doc.content.size)).scrollIntoView()); } } @@ -709,14 +717,14 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp return anchorDoc ?? 
this.rootDoc; } - scrollFocus = (doc: Doc, smooth: boolean) => { - const anchorId = doc[Id]; + scrollFocus = (textAnchor: Doc, smooth: boolean) => { + const textAnchorId = textAnchor[Id]; const findAnchorFrag = (frag: Fragment, editor: EditorView) => { const nodes: Node[] = []; let hadStart = start !== 0; frag.forEach((node, index) => { const examinedNode = findAnchorNode(node, editor); - if (examinedNode?.node.textContent) { + if (examinedNode?.node && (examinedNode.node.textContent || examinedNode.node.type === this._editorView?.state.schema.nodes.audiotag)) { nodes.push(examinedNode.node); !hadStart && (start = index + examinedNode.start); hadStart = true; @@ -725,28 +733,35 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp return { frag: Fragment.fromArray(nodes), start }; }; const findAnchorNode = (node: Node, editor: EditorView) => { + if (node.type === this._editorView?.state.schema.nodes.audiotag) { + if (node.attrs.textId === textAnchorId) { + return { node, start: 0 }; + } + return undefined; + } if (!node.isText) { const content = findAnchorFrag(node.content, editor); return { node: node.copy(content.frag), start: content.start }; } const marks = [...node.marks]; const linkIndex = marks.findIndex(mark => mark.type === editor.state.schema.marks.linkAnchor); - return linkIndex !== -1 && marks[linkIndex].attrs.allAnchors.find((item: { href: string }) => anchorId === item.href.replace(/.*\/doc\//, "")) ? { node, start: 0 } : undefined; + return linkIndex !== -1 && marks[linkIndex].attrs.allAnchors.find((item: { href: string }) => textAnchorId === item.href.replace(/.*\/doc\//, "")) ? { node, start: 0 } : undefined; }; let start = 0; - if (this._editorView && anchorId) { + if (this._editorView && textAnchorId) { const editor = this._editorView; const ret = findAnchorFrag(editor.state.doc.content, editor); - if (ret.frag.size > 2 && ret.start >= 0) { + const content = (ret.frag as any)?.content; + if ((ret.frag.size > 2 || (content?.length && content[0].type === this._editorView.state.schema.nodes.audiotag)) && ret.start >= 0) { smooth && (this._focusSpeed = 500); let selection = TextSelection.near(editor.state.doc.resolve(ret.start)); // default to near the start if (ret.frag.firstChild) { selection = TextSelection.between(editor.state.doc.resolve(ret.start), editor.state.doc.resolve(ret.start + ret.frag.firstChild.nodeSize)); // bcz: looks better to not have the target selected } editor.dispatch(editor.state.tr.setSelection(new TextSelection(selection.$from, selection.$from)).scrollIntoView()); - const escAnchorId = anchorId[0] >= '0' && anchorId[0] <= '9' ? `\\3${anchorId[0]} ${anchorId.substr(1)}` : anchorId; + const escAnchorId = textAnchorId[0] >= '0' && textAnchorId[0] <= '9' ? `\\3${textAnchorId[0]} ${textAnchorId.substr(1)}` : textAnchorId; addStyleSheetRule(FormattedTextBox._highlightStyleSheet, `${escAnchorId}`, { background: "yellow" }); setTimeout(() => this._focusSpeed = undefined, this._focusSpeed); setTimeout(() => clearStyleSheetRules(FormattedTextBox._highlightStyleSheet), Math.max(this._focusSpeed || 0, 1500)); @@ -768,7 +783,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. 
this.props.contentsActive?.(this.active); this._cachedLinks = DocListCast(this.Document.links); - this._disposers.breakupDictation = reaction(() => DocumentManager.Instance.RecordingEvent, () => this.breakupDictation()); + this._disposers.breakupDictation = reaction(() => DocumentManager.Instance.RecordingEvent, this.breakupDictation); this._disposers.autoHeight = reaction(() => this.autoHeight, autoHeight => autoHeight && this.tryUpdateScrollHeight()); this._disposers.scrollHeight = reaction(() => ({ scrollHeight: this.scrollHeight, autoHeight: this.autoHeight, width: NumCast(this.layoutDoc._width) }), ({ width, scrollHeight, autoHeight }) => width && autoHeight && this.resetNativeHeight(scrollHeight) @@ -838,7 +853,6 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp this._disposers.selected = reaction(() => this.props.isSelected(), action((selected) => { - this._recording = false; if (RichTextMenu.Instance?.view === this._editorView && !selected) { RichTextMenu.Instance?.updateMenu(undefined, undefined, undefined); } @@ -847,15 +861,13 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp if (!this.props.dontRegisterView) { this._disposers.record = reaction(() => this._recording, () => { + this.stopDictation(true); if (this._recording) { - setTimeout(action(() => { - this.stopDictation(true); - setTimeout(() => this.recordDictation(), 500); - }), 500); - } else setTimeout(() => this.stopDictation(true), 0); + this.recordDictation(); + } }, - { fireImmediately: true } ); + if (this._recording) setTimeout(() => this.recordDictation()); } var quickScroll: string | undefined = ""; this._disposers.scroll = reaction(() => NumCast(this.layoutDoc._scrollTop), diff --git a/src/client/views/nodes/formattedText/nodes_rts.ts b/src/client/views/nodes/formattedText/nodes_rts.ts index df93fc117..2fe0a67cb 100644 --- a/src/client/views/nodes/formattedText/nodes_rts.ts +++ b/src/client/views/nodes/formattedText/nodes_rts.ts @@ -28,14 +28,17 @@ export const nodes: { [index: string]: NodeSpec } = { group: "block", attrs: { timeCode: { default: 0 }, - audioId: { default: "" } + audioId: { default: "" }, + textId: { default: "" } }, toDOM(node) { return ['audiotag', { + class: node.attrs.textId, // style: see FormattedTextBox.scss "data-timecode": node.attrs.timeCode, "data-audioid": node.attrs.audioId, + "data-textid": node.attrs.textId, }, formatAudioTime(node.attrs.timeCode.toString()) ]; @@ -45,7 +48,8 @@ export const nodes: { [index: string]: NodeSpec } = { tag: "audiotag", getAttrs(dom: any) { return { timeCode: dom.getAttribute("data-timecode"), - audioId: dom.getAttribute("data-audioid") + audioId: dom.getAttribute("data-audioid"), + textId: dom.getAttribute("data-textid") }; } }, -- cgit v1.2.3-70-g09d2 From d0515c81be9f4292eaf165762ce15e7bc8d1737a Mon Sep 17 00:00:00 2001 From: bobzel Date: Thu, 25 Mar 2021 02:39:52 -0400 Subject: moved dictation view to be a component of the screenshotbox --- src/client/util/CurrentUserUtils.ts | 4 +- src/client/util/LinkManager.ts | 6 +- src/client/views/LightboxView.tsx | 8 +-- .../collections/CollectionStackedTimeline.tsx | 3 +- src/client/views/nodes/ScreenshotBox.tsx | 77 ++++++++++++++++------ src/client/views/nodes/VideoBox.tsx | 4 +- src/fields/util.ts | 2 +- 7 files changed, 70 insertions(+), 34 deletions(-) (limited to 'src/client/views/LightboxView.tsx') diff --git a/src/client/util/CurrentUserUtils.ts b/src/client/util/CurrentUserUtils.ts index 05e560f51..0fb32970a 100644 --- 
a/src/client/util/CurrentUserUtils.ts +++ b/src/client/util/CurrentUserUtils.ts @@ -423,7 +423,7 @@ export class CurrentUserUtils { ((doc.emptyScript as Doc).proto as Doc)["dragFactory-count"] = 0; } if (doc.emptyScreenshot === undefined) { - doc.emptyScreenshot = Docs.Create.ScreenshotDocument("", { _width: 400, _height: 200, title: "screen snapshot", system: true, cloneFieldFilter: new List(["system"]) }); + doc.emptyScreenshot = Docs.Create.ScreenshotDocument("", { _fitWidth: true, _width: 400, _height: 200, title: "screen snapshot", system: true, cloneFieldFilter: new List(["system"]) }); } if (doc.emptyAudio === undefined) { doc.emptyAudio = Docs.Create.AudioDocument(nullAudio, { _width: 200, title: "audio recording", system: true, cloneFieldFilter: new List(["system"]) }); @@ -453,7 +453,7 @@ export class CurrentUserUtils { { toolTip: "Tap to create a progressive slide", title: "Slide", icon: "file", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptySlide as Doc, noviceMode: true }, { toolTip: "Tap to create a cat image in a new pane, drag for a cat image", title: "Image", icon: "cat", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyImage as Doc }, { toolTip: "Tap to create a comparison box in a new pane, drag for a comparison box", title: "Compare", icon: "columns", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyComparison as Doc, noviceMode: true }, - { toolTip: "Tap to create a screen grabber in a new pane, drag for a screen grabber", title: "Grab", icon: "photo-video", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyScreenshot as Doc }, + { toolTip: "Tap to create a screen grabber in a new pane, drag for a screen grabber", title: "Grab", icon: "photo-video", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyScreenshot as Doc, noviceMode: true }, { toolTip: "Tap to create an audio recorder in a new pane, drag for an audio recorder", title: "Audio", icon: "microphone", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyAudio as Doc, noviceMode: true }, { toolTip: "Tap to create a button in a new pane, drag for a button", title: "Button", icon: "bolt", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyButton as Doc }, { toolTip: "Tap to create a presentation in a new pane, drag for a presentation", title: "Trails", icon: "pres-trail", click: 'openOnRight(Doc.UserDoc().activePresentation = copyDragFactory(this.dragFactory))', drag: `Doc.UserDoc().activePresentation = copyDragFactory(this.dragFactory)`, dragFactory: doc.emptyPresentation as Doc, noviceMode: true }, diff --git a/src/client/util/LinkManager.ts b/src/client/util/LinkManager.ts index bd27c9e56..159011516 100644 --- a/src/client/util/LinkManager.ts +++ b/src/client/util/LinkManager.ts @@ -135,14 +135,14 @@ export class LinkManager { // follows a link - if the target is on screen, it highlights/pans to it. 
- // if the target isn't onscreen, then it will open up the target in a tab, on the right, or in place + // if the target isn't onscreen, then it will open up the target in the lightbox, or in place // depending on the followLinkLocation property of the source (or the link itself as a fallback); public static FollowLink = (linkDoc: Opt, sourceDoc: Doc, docViewProps: DocumentViewSharedProps, altKey: boolean, zoom: boolean = false) => { const batch = UndoManager.StartBatch("follow link click"); // open up target if it's not already in view ... const createViewFunc = (doc: Doc, followLoc: string, finished?: Opt<() => void>) => { const createTabForTarget = (didFocus: boolean) => new Promise(res => { - const where = LightboxView.LightboxDoc ? "lightbox" : StrCast(sourceDoc.followLinkLocation) || followLoc; + const where = LightboxView.LightboxDoc ? "lightbox" : StrCast(sourceDoc.followLinkLocation, followLoc); docViewProps.addDocTab(doc, where); setTimeout(() => { const targDocView = DocumentManager.Instance.getFirstDocumentView(doc); @@ -195,7 +195,7 @@ export class LinkManager { const containerDoc = Cast(target.annotationOn, Doc, null) || target; const targetContext = Cast(containerDoc?.context, Doc, null); const targetNavContext = !Doc.AreProtosEqual(targetContext, currentContext) ? targetContext : undefined; - DocumentManager.Instance.jumpToDocument(target, zoom, (doc, finished) => createViewFunc(doc, StrCast(linkDoc.followLinkLocation, "add:right"), finished), targetNavContext, linkDoc, undefined, sourceDoc, finished); + DocumentManager.Instance.jumpToDocument(target, zoom, (doc, finished) => createViewFunc(doc, StrCast(linkDoc.followLinkLocation, "lightbox"), finished), targetNavContext, linkDoc, undefined, sourceDoc, finished); } } else { finished?.(); diff --git a/src/client/views/LightboxView.tsx b/src/client/views/LightboxView.tsx index 5715b62b0..84738112f 100644 --- a/src/client/views/LightboxView.tsx +++ b/src/client/views/LightboxView.tsx @@ -114,11 +114,11 @@ export class LightboxView extends React.Component { @action public static Next() { const doc = LightboxView._doc!; const target = LightboxView._docTarget = LightboxView._future?.pop(); - const docView = target && DocumentManager.Instance.getLightboxDocumentView(target); - if (docView && target) { - const l = DocUtils.MakeLinkToActiveAudio(target); + const targetDocView = target && DocumentManager.Instance.getLightboxDocumentView(target); + if (targetDocView && target) { + const l = DocUtils.MakeLinkToActiveAudio(targetDocView.ComponentView?.getAnchor?.() || target); l && (Cast(l.anchor2, Doc, null).backgroundColor = "lightgreen"); - docView.focus(target, { originalTarget: target, willZoom: true, scale: 0.9 }); + targetDocView.focus(target, { originalTarget: target, willZoom: true, scale: 0.9 }); if (LightboxView._history?.lastElement().target !== target) LightboxView._history?.push({ doc, target }); } else { if (!target && LightboxView.path.length) { diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index de75a3c4a..db02ab986 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -19,6 +19,7 @@ import "./CollectionStackedTimeline.scss"; import { Transform } from "../../util/Transform"; import { LinkManager } from "../../util/LinkManager"; import { computedFn } from "mobx-utils"; +import { LightboxView } from "../LightboxView"; type PanZoomDocument = 
makeInterface<[]>; const PanZoomDocument = makeInterface(); @@ -306,7 +307,7 @@ class StackedTimelineAnchor extends React.Component componentDidMount() { this._disposer = reaction(() => this.props.currentTimecode(), (time) => { - if (DocListCast(this.props.mark.links).length && + if (!LightboxView.LightboxDoc && DocListCast(this.props.mark.links).length && time > NumCast(this.props.mark[this.props.startTag]) && time < NumCast(this.props.mark[this.props.endTag]) && this._lastTimecode < NumCast(this.props.mark[this.props.startTag])) { diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx index dba730d12..d1da2fcd5 100644 --- a/src/client/views/nodes/ScreenshotBox.tsx +++ b/src/client/views/nodes/ScreenshotBox.tsx @@ -24,6 +24,7 @@ import { FieldView, FieldViewProps } from './FieldView'; import "./ScreenshotBox.scss"; import { VideoBox } from "./VideoBox"; import { TraceMobx } from "../../../fields/util"; +import { FormattedTextBox } from "./formattedText/FormattedTextBox"; declare class MediaRecorder { constructor(e: any, options?: any); // whatever MediaRecorder has } @@ -40,6 +41,10 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent { const startTime = Cast(this.layoutDoc._currentTimecode, "number", null) || (this._videoRec ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined); return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow", "_timecodeToHide", @@ -59,6 +64,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent { + if (this.dataDoc[this.fieldKey + "-dictation"]) return; const dictationText = CurrentUserUtils.GetNewTextDoc("", NumCast(this.rootDoc.x), NumCast(this.rootDoc.y) + NumCast(this.layoutDoc._height) + 10, NumCast(this.layoutDoc._width), 2 * NumCast(this.layoutDoc._height)); @@ -133,32 +142,58 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent [this.content]; + videoPanelHeight = () => NumCast(this.dataDoc[this.fieldKey + "-nativeHeight"], 1) / NumCast(this.dataDoc[this.fieldKey + "-nativeWidth"], 1) * this.props.PanelWidth(); + formattedPanelHeight = () => Math.max(0, this.props.PanelHeight() - this.videoPanelHeight()) render() { TraceMobx(); return
- - {this.contentFunc} - +
+ + {this.contentFunc} +
+
+ +
{!this.props.isSelected() ? (null) :
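The dictation area that ScreenshotBox gains in this commit is sized by first giving the video its aspect-correct height and handing whatever remains of the panel to the dictation text box (see videoPanelHeight/formattedPanelHeight above). A minimal standalone sketch of that arithmetic, illustrative only, with plain parameters in place of Dash's "<fieldKey>-nativeWidth"/"-nativeHeight" dataDoc lookups:

// Illustrative sketch mirroring videoPanelHeight()/formattedPanelHeight() above.
function splitPanel(panelWidth: number, panelHeight: number, nativeWidth: number, nativeHeight: number) {
    const videoHeight = nativeHeight / (nativeWidth || 1) * panelWidth;   // video keeps its native aspect ratio
    const dictationHeight = Math.max(0, panelHeight - videoHeight);       // leftover space shows the dictation text
    return { videoHeight, dictationHeight };
}

// Example: a 1920x1080 screen grab in a 400x400 panel leaves 400 - 225 = 175px for dictation text.
const { videoHeight, dictationHeight } = splitPanel(400, 400, 1920, 1080);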
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 5a60f9312..4e03589d6 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -182,7 +182,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent { const url = this.choosePath(Utils.prepend(relative)); - const width = this.layoutDoc._width || 0; + const width = this.layoutDoc._width || 1; const height = this.layoutDoc._height || 0; const imageSummary = Docs.Create.ImageDocument(url, { _nativeWidth: Doc.NativeWidth(this.layoutDoc), _nativeHeight: Doc.NativeHeight(this.layoutDoc), @@ -548,7 +548,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent [this.youtubeVideoId ? this.youtubeContent : this.content]; scaling = () => this.props.scaling?.() || 1; panelWidth = () => this.props.PanelWidth() * this.heightPercent / 100; - panelHeight = () => this.layoutDoc._fitWidth ? this.panelWidth() / Doc.NativeAspect(this.rootDoc) : this.props.PanelHeight() * this.heightPercent / 100; + panelHeight = () => this.layoutDoc._fitWidth ? this.panelWidth() / (Doc.NativeAspect(this.rootDoc) || 1) : this.props.PanelHeight() * this.heightPercent / 100; screenToLocalTransform = () => { const offset = (this.props.PanelWidth() - this.panelWidth()) / 2 / this.scaling(); return this.props.ScreenToLocalTransform().translate(-offset, 0).scale(100 / this.heightPercent); diff --git a/src/fields/util.ts b/src/fields/util.ts index 6c9c9d45c..ea91cc057 100644 --- a/src/fields/util.ts +++ b/src/fields/util.ts @@ -19,7 +19,7 @@ function _readOnlySetter(): never { throw new Error("Documents can't be modified in read-only mode"); } -const tracing = true; +const tracing = false; export function TraceMobx() { tracing && trace(); } -- cgit v1.2.3-70-g09d2 From c7619302a639c61096249362167e75dca6dbb376 Mon Sep 17 00:00:00 2001 From: bobzel Date: Thu, 25 Mar 2021 15:41:06 -0400 Subject: added a dictation area to the stackedTimeline view for A/V. Now dictation annotations scroll in place. 
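The "scroll in place" behavior rests on a MobX reaction (see the StackedTimelineAnchor change below) that watches the current playback timecode and fires once when the playhead crosses into an anchor's time range. A simplified sketch of that trigger pattern, using illustrative names rather than the component's actual props:

import { reaction, IReactionDisposer } from "mobx";

// Calls `follow` once each time playback enters the anchor's (start, end) range from before `start`.
// The returned disposer should be called on unmount, as the real anchor component does in componentWillUnmount.
function watchAnchor(currentTime: () => number, start: number, end: number, follow: () => void): IReactionDisposer {
    let lastTime = -1;
    return reaction(
        () => currentTime(),
        time => {
            if (time > start && time < end && lastTime < start) follow(); // playhead just crossed into the anchor
            lastTime = time;
        });
}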
--- src/client/documents/Documents.ts | 2 +- src/client/views/LightboxView.tsx | 2 +- .../collections/CollectionStackedTimeline.tsx | 29 +++++++++++++++++++--- src/client/views/nodes/ScreenshotBox.tsx | 2 +- src/client/views/nodes/VideoBox.tsx | 3 +++ .../views/nodes/formattedText/FormattedTextBox.tsx | 3 ++- 6 files changed, 34 insertions(+), 7 deletions(-) (limited to 'src/client/views/LightboxView.tsx') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 4e05793d4..9406b374e 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -1051,7 +1051,7 @@ export namespace DocUtils { export function MakeLinkToActiveAudio(doc: Doc, broadcastEvent = true) { broadcastEvent && runInAction(() => DocumentManager.Instance.RecordingEvent = DocumentManager.Instance.RecordingEvent + 1); return DocUtils.ActiveRecordings.map(audio => - DocUtils.MakeLink({ doc: doc }, { doc: audio.getAnchor() || audio.props.Document }, "recording link", "recording timeline")).lastElement(); + DocUtils.MakeLink({ doc: doc }, { doc: audio.getAnchor() || audio.props.Document }, "recording link", "recording timeline")); } export function MakeLink(source: { doc: Doc }, target: { doc: Doc }, linkRelationship: string = "", description: string = "", id?: string, allowParCollectionLink?: boolean, showPopup?: number[]) { diff --git a/src/client/views/LightboxView.tsx b/src/client/views/LightboxView.tsx index 84738112f..07ebe5fa4 100644 --- a/src/client/views/LightboxView.tsx +++ b/src/client/views/LightboxView.tsx @@ -116,7 +116,7 @@ export class LightboxView extends React.Component { const target = LightboxView._docTarget = LightboxView._future?.pop(); const targetDocView = target && DocumentManager.Instance.getLightboxDocumentView(target); if (targetDocView && target) { - const l = DocUtils.MakeLinkToActiveAudio(targetDocView.ComponentView?.getAnchor?.() || target); + const l = DocUtils.MakeLinkToActiveAudio(targetDocView.ComponentView?.getAnchor?.() || target).lastElement(); l && (Cast(l.anchor2, Doc, null).backgroundColor = "lightgreen"); targetDocView.focus(target, { originalTarget: target, willZoom: true, scale: 0.9 }); if (LightboxView._history?.lastElement().target !== target) LightboxView._history?.push({ doc, target }); diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 2dcdf58c2..66b74277b 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -7,7 +7,7 @@ import { List } from "../../../fields/List"; import { listSpec, makeInterface } from "../../../fields/Schema"; import { ComputedField, ScriptField } from "../../../fields/ScriptField"; import { Cast, NumCast } from "../../../fields/Types"; -import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents, StopEvent } from "../../../Utils"; +import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents, StopEvent, returnOne } from "../../../Utils"; import { Docs } from "../../documents/Documents"; import { Scripting } from "../../util/Scripting"; import { SelectionManager } from "../../util/SelectionManager"; @@ -20,6 +20,7 @@ import { Transform } from "../../util/Transform"; import { LinkManager } from "../../util/LinkManager"; import { computedFn } from "mobx-utils"; import { LightboxView } from "../LightboxView"; +import { FormattedTextBox } from "../nodes/formattedText/FormattedTextBox"; type 
PanZoomDocument = makeInterface<[]>; const PanZoomDocument = makeInterface(); @@ -234,7 +235,8 @@ export class CollectionStackedTimeline extends CollectionSubView this.currentTime; render() { const timelineContentWidth = this.props.PanelWidth(); - const timelineContentHeight = this.props.PanelHeight(); + const timelineContentHeight = this.props.PanelHeight() * 2 / 3; + const dictationHeight = this.props.PanelHeight() / 3; const overlaps: { anchorStartTime: number, anchorEndTime: number, level: number }[] = []; const drawAnchors = this.childDocs.map(anchor => ({ level: this.getLevel(anchor, overlaps), anchor })); const maxLevel = overlaps.reduce((m, o) => Math.max(m, o.level), 0) + 2; @@ -268,6 +270,25 @@ export class CollectionStackedTimeline extends CollectionSubView; })} {this.selectionContainer} +
+ dictationHeight} + isAnnotationOverlay={true} + select={emptyFunction} + active={returnFalse} + scaling={returnOne} + xMargin={25} + yMargin={10} + whenActiveChanged={emptyFunction} + removeDocument={returnFalse} + moveDocument={returnFalse} + addDocument={returnFalse} + CollectionView={undefined} + renderDepth={this.props.renderDepth + 1}> + +
+
; } @@ -307,7 +328,9 @@ class StackedTimelineAnchor extends React.Component componentDidMount() { this._disposer = reaction(() => this.props.currentTimecode(), (time) => { - if (!Doc.AreProtosEqual(LightboxView.LightboxDoc, this.props.layoutDoc) && DocListCast(this.props.mark.links).length && + const dictationDoc = Cast(this.props.layoutDoc["data-dictation"], Doc, null); + const isDictation = dictationDoc && DocListCast(this.props.mark.links).some(link => Cast(link.anchor1, Doc, null)?.annotationOn === dictationDoc); + if ((isDictation || !Doc.AreProtosEqual(LightboxView.LightboxDoc, this.props.layoutDoc)) && DocListCast(this.props.mark.links).length && time > NumCast(this.props.mark[this.props.startTag]) && time < NumCast(this.props.mark[this.props.endTag]) && this._lastTimecode < NumCast(this.props.mark[this.props.startTag])) { diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx index d1da2fcd5..6c019d6f2 100644 --- a/src/client/views/nodes/ScreenshotBox.tsx +++ b/src/client/views/nodes/ScreenshotBox.tsx @@ -117,7 +117,6 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent
, +
+ +
, VideoBox._showControls ? (null) : [ //
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx index 369f5ab39..aaf3a938e 100644 --- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx +++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx @@ -667,7 +667,8 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp if (this._break) { const textanchor = Docs.Create.TextanchorDocument({ title: "dictation anchor" }); this.addDocument(textanchor); - const link = DocUtils.MakeLinkToActiveAudio(textanchor, false); + const link = DocUtils.MakeLinkToActiveAudio(textanchor, false).lastElement(); + link && (Doc.GetProto(link).isDictation = true); if (!link) return; const audioanchor = Cast(link.anchor2, Doc, null); if (!audioanchor) return; -- cgit v1.2.3-70-g09d2
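For readers unfamiliar with ProseMirror, the nodes_rts.ts changes above follow its standard pattern for persisting node attributes: toDOM writes them out as data- attributes and parseDOM's getAttrs reads them back, so the new textId survives serialization and copy/paste just like timeCode and audioId. A stripped-down sketch of that round trip (the real spec also renders a formatted time label and carries styling):

import { NodeSpec } from "prosemirror-model";

// Minimal audiotag-style block node: attributes round-trip through data- attributes.
const audiotagSketch: NodeSpec = {
    group: "block",
    attrs: { timeCode: { default: 0 }, audioId: { default: "" }, textId: { default: "" } },
    toDOM: node => ["audiotag", {
        "data-timecode": node.attrs.timeCode,
        "data-audioid": node.attrs.audioId,
        "data-textid": node.attrs.textId,
    }, String(node.attrs.timeCode)],              // the real node renders formatAudioTime(timeCode) here
    parseDOM: [{
        tag: "audiotag",
        getAttrs: (dom: any) => ({
            timeCode: Number(dom.getAttribute("data-timecode")) || 0,
            audioId: dom.getAttribute("data-audioid") || "",
            textId: dom.getAttribute("data-textid") || "",
        }),
    }],
};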