Diffstat (limited to 'src/fields')
 -rw-r--r--  src/fields/DateField.ts        |  14
 -rw-r--r--  src/fields/Doc.ts              | 355
 -rw-r--r--  src/fields/InkField.ts         |   5
 -rw-r--r--  src/fields/List.ts             |   4
 -rw-r--r--  src/fields/RichTextField.ts    |  26
 -rw-r--r--  src/fields/RichTextUtils.ts    |  10
 -rw-r--r--  src/fields/ScriptField.ts      |   2
 -rw-r--r--  src/fields/URLField.ts         |   9
 -rw-r--r--  src/fields/documentSchemas.ts  |   1
 -rw-r--r--  src/fields/util.ts             |   3
 10 files changed, 233 insertions, 196 deletions
diff --git a/src/fields/DateField.ts b/src/fields/DateField.ts
index 26f51b2d3..2ea619bd9 100644
--- a/src/fields/DateField.ts
+++ b/src/fields/DateField.ts
@@ -1,11 +1,11 @@
-import { Deserializable } from "../client/util/SerializationHelper";
-import { serializable, date } from "serializr";
-import { ObjectField } from "./ObjectField";
-import { Copy, ToScriptString, ToString } from "./FieldSymbols";
-import { scriptingGlobal, ScriptingGlobals } from "../client/util/ScriptingGlobals";
+import { Deserializable } from '../client/util/SerializationHelper';
+import { serializable, date } from 'serializr';
+import { ObjectField } from './ObjectField';
+import { Copy, ToScriptString, ToString } from './FieldSymbols';
+import { scriptingGlobal, ScriptingGlobals } from '../client/util/ScriptingGlobals';
@scriptingGlobal
-@Deserializable("date")
+@Deserializable('date')
export class DateField extends ObjectField {
@serializable(date())
readonly date: Date;
@@ -24,7 +24,7 @@ export class DateField extends ObjectField {
}
[ToScriptString]() {
- return `new DateField(new Date(${this.date.toISOString()}))`;
+ return `new DateField(new Date("${this.date.toISOString()}"))`;
}
[ToString]() {
return this.date.toLocaleString();
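
For illustration only (not part of the commit): a minimal sketch of why the added quotes matter, assuming DateField's constructor takes a Date as the emitted script implies.

    import { DateField } from './DateField';
    import { ToScriptString } from './FieldSymbols';

    // Hypothetical date value; toISOString() returns a string, so the emitted script must quote it.
    const d = new DateField(new Date('2021-06-01T12:00:00.000Z'));
    console.log(d[ToScriptString]());
    // before: new DateField(new Date(2021-06-01T12:00:00.000Z))    <- not a parseable expression
    // after:  new DateField(new Date("2021-06-01T12:00:00.000Z"))  <- evaluates back to an equivalent DateField
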
diff --git a/src/fields/Doc.ts b/src/fields/Doc.ts
index de94ed5db..b8ac8fb5d 100644
--- a/src/fields/Doc.ts
+++ b/src/fields/Doc.ts
@@ -10,11 +10,12 @@ import { scriptingGlobal, ScriptingGlobals } from '../client/util/ScriptingGloba
import { SelectionManager } from '../client/util/SelectionManager';
import { afterDocDeserialize, autoObject, Deserializable, SerializationHelper } from '../client/util/SerializationHelper';
import { UndoManager } from '../client/util/UndoManager';
+import { decycle } from '../decycler/decycler';
import { DashColor, incrementTitleCopy, intersectRect, Utils } from '../Utils';
import { DateField } from './DateField';
import { Copy, HandleUpdate, Id, OnUpdate, Parent, Self, SelfProxy, ToScriptString, ToString, Update } from './FieldSymbols';
-import { InkTool } from './InkField';
-import { List } from './List';
+import { InkField, InkTool } from './InkField';
+import { List, ListFieldName } from './List';
import { ObjectField } from './ObjectField';
import { PrefetchProxy, ProxyField } from './Proxy';
import { FieldId, RefField } from './RefField';
@@ -22,15 +23,19 @@ import { RichTextField } from './RichTextField';
import { listSpec } from './Schema';
import { ComputedField, ScriptField } from './ScriptField';
import { Cast, DocCast, FieldValue, NumCast, StrCast, ToConstructor } from './Types';
-import { AudioField, ImageField, MapField, PdfField, VideoField, WebField } from './URLField';
+import { AudioField, CsvField, ImageField, PdfField, VideoField, WebField } from './URLField';
import { deleteProperty, GetEffectiveAcl, getField, getter, makeEditable, makeReadOnly, normalizeEmail, setter, SharingPermissions, updateFunction } from './util';
import JSZip = require('jszip');
-
+import * as JSZipUtils from '../JSZipUtils';
export namespace Field {
export function toKeyValueString(doc: Doc, key: string): string {
- const onDelegate = Object.keys(doc).includes(key);
+ const onDelegate = Object.keys(doc).includes(key.replace(/^_/, ''));
const field = ComputedField.WithoutComputed(() => FieldValue(doc[key]));
- return !Field.IsField(field) ? '' : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : Field.toScriptString(field));
+ return !Field.IsField(field)
+ ? key.startsWith('_')
+ ? '='
+ : ''
+ : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : field instanceof ScriptField ? `$=${field.script.originalScript}` : Field.toScriptString(field));
}
export function toScriptString(field: Field): string {
switch (typeof field) {
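
As a hedged illustration of the strings toKeyValueString can now produce (the document, field values, and scripts below are hypothetical):

    import { Doc, Field } from './Doc';

    declare const doc: Doc; // hypothetical document
    Field.toKeyValueString(doc, 'title');   // e.g. "my title"           -- plain fields go through Field.toScriptString
    Field.toKeyValueString(doc, '_width');  // e.g. =250                 -- '=' marks a value found on the delegate (underscore keys are checked without the '_')
    Field.toKeyValueString(doc, 'count');   // e.g. :=self.data.length   -- ComputedFields keep the ':=' script prefix
    Field.toKeyValueString(doc, 'onClick'); // e.g. $=toggleDetail(self) -- ScriptFields now serialize with a '$=' prefix
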
@@ -95,6 +100,7 @@ export const HighlightSym = Symbol('Highlight');
export const DataSym = Symbol('Data');
export const LayoutSym = Symbol('Layout');
export const FieldsSym = Symbol('Fields');
+export const CssSym = Symbol('Css');
export const AclSym = Symbol('Acl');
export const DirectLinksSym = Symbol('DirectLinks');
export const AclUnset = Symbol('AclUnset');
@@ -340,6 +346,7 @@ export class Doc extends RefField {
@observable private ___fieldKeys: any = {};
/// all of the raw acl's that have been set on this document. Use GetEffectiveAcl to determine the actual ACL of the doc for editing
@observable public [AclSym]: { [key: string]: symbol } = {};
+ @observable public [CssSym]: number = 0; // incrementer denoting a change to CSS layout
@observable public [DirectLinksSym]: Set<Doc> = new Set();
@observable public [AnimationSym]: Opt<Doc>;
@observable public [HighlightSym]: boolean = false;
@@ -517,20 +524,13 @@ export namespace Doc {
}
export async function SetInPlace(doc: Doc, key: string, value: Field | undefined, defaultProto: boolean) {
if (key.startsWith('_')) key = key.substring(1);
- const hasProto = doc.proto instanceof Doc;
+ const hasProto = Doc.GetProto(doc) !== doc ? Doc.GetProto(doc) : undefined;
const onDeleg = Object.getOwnPropertyNames(doc).indexOf(key) !== -1;
const onProto = hasProto && Object.getOwnPropertyNames(doc.proto).indexOf(key) !== -1;
if (onDeleg || !hasProto || (!onProto && !defaultProto)) {
doc[key] = value;
} else doc.proto![key] = value;
}
- export async function SetOnPrototype(doc: Doc, key: string, value: Field) {
- const proto = Object.getOwnPropertyNames(doc).indexOf('isPrototype') === -1 ? doc.proto : doc;
-
- if (proto) {
- proto[key] = value;
- }
- }
export function GetAllPrototypes(doc: Doc): Doc[] {
const protos: Doc[] = [];
let d: Opt<Doc> = doc;
@@ -701,89 +701,113 @@ export namespace Doc {
return bestAlias ?? Doc.MakeAlias(doc);
}
- export async function makeClone(
- doc: Doc,
- cloneMap: Map<string, Doc>,
- linkMap: Map<Doc, Doc>,
- rtfs: { copy: Doc; key: string; field: RichTextField }[],
- exclusions: string[],
- topLevelExclusions: string[],
- dontCreate: boolean,
- asBranch: boolean
- ): Promise<Doc> {
+ // this lists all the tag ids in a RichTextField's data that may contain document ids.
+ // if a document is cloned, we need to make sure to clone all of these referenced documents as well.
+ export const DocsInTextFieldIds = ['audioId', 'textId', 'anchorId', 'docId'];
+ export async function makeClone(doc: Doc, cloneMap: Map<string, Doc>, linkMap: Map<string, Doc>, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], pruneDocs: Doc[], cloneLinks: boolean): Promise<Doc> {
if (Doc.IsBaseProto(doc)) return doc;
if (cloneMap.get(doc[Id])) return cloneMap.get(doc[Id])!;
- const copy = dontCreate ? (asBranch ? Cast(doc.branchMaster, Doc, null) || doc : doc) : new Doc(undefined, true);
+ const copy = new Doc(undefined, true);
cloneMap.set(doc[Id], copy);
- const fieldExclusions = doc.type === DocumentType.MARKER ? exclusions.filter(ex => ex !== 'annotationOn') : exclusions;
- const filter = [...fieldExclusions, ...topLevelExclusions, ...Cast(doc.cloneFieldFilter, listSpec('string'), [])];
+ const filter = [...exclusions, ...StrListCast(doc.cloneFieldFilter)];
await Promise.all(
Object.keys(doc).map(async key => {
if (filter.includes(key)) return;
- const assignKey = (val: any) => !dontCreate && (copy[key] = val);
+ const assignKey = (val: any) => (copy[key] = val);
const cfield = ComputedField.WithoutComputed(() => FieldValue(doc[key]));
const field = ProxyField.WithoutProxy(() => doc[key]);
const copyObjectField = async (field: ObjectField) => {
const list = await Cast(doc[key], listSpec(Doc));
const docs = list && (await DocListCastAsync(list))?.filter(d => d instanceof Doc);
if (docs !== undefined && docs.length) {
- const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, [], dontCreate, asBranch)));
- !dontCreate && assignKey(new List<Doc>(clones));
- } else if (doc[key] instanceof Doc) {
- assignKey(key.includes('layout[') ? undefined : key.startsWith('layout') ? (doc[key] as Doc) : await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, [], dontCreate, asBranch)); // reference documents except copy documents that are expanded template fields
+ const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks)));
+ assignKey(new List<Doc>(clones));
} else {
- !dontCreate && assignKey(ObjectField.MakeCopy(field));
+ assignKey(ObjectField.MakeCopy(field));
if (field instanceof RichTextField) {
- if (field.Data.includes('"audioId":') || field.Data.includes('"textId":') || field.Data.includes('"anchorId":')) {
+ if (DocsInTextFieldIds.some(id => field.Data.includes(`"${id}":`))) {
+ const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => '(' + exp + ')').join('|') + ')":"([a-z-A-Z0-9_]*)"', 'g');
+ const rawdocids = field.Data.match(docidsearch);
+ const docids = rawdocids?.map((str: string) =>
+ DocsInTextFieldIds.reduce((output, exp) => output.replace(new RegExp(`${exp}":`, 'g'), ''), str)
+ .replace(/"/g, '')
+ .trim()
+ );
+ const results = docids && (await DocServer.GetRefFields(docids));
+ const docs = results && Array.from(Object.keys(results)).map(key => DocCast(results[key]));
+ docs?.map(doc => doc && Doc.makeClone(doc, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks));
rtfs.push({ copy, key, field });
}
}
}
};
- if (key === 'proto') {
- if (doc[key] instanceof Doc) {
- assignKey(await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, [], dontCreate, asBranch));
- }
- } else if (key === 'anchor1' || key === 'anchor2') {
- if (doc[key] instanceof Doc) {
- assignKey(await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, [], true, asBranch));
- }
- } else {
- if (field instanceof RefField) {
- assignKey(field);
- } else if (cfield instanceof ComputedField) {
- !dontCreate && assignKey(cfield[Copy]());
- // ComputedField.MakeFunction(cfield.script.originalScript));
- } else if (field instanceof ObjectField) {
- await copyObjectField(field);
- } else if (field instanceof Promise) {
- debugger; //This shouldn't happen...
+ const docAtKey = doc[key];
+ if (docAtKey instanceof Doc) {
+ if (pruneDocs.includes(docAtKey)) {
+ // prune doc and do nothing
+ } else if (!Doc.IsSystem(docAtKey) && (key.startsWith('layout') || ['context', 'annotationOn', 'proto'].includes(key) || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
+ assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks));
} else {
- assignKey(field);
+ assignKey(docAtKey);
}
+ } else if (field instanceof RefField) {
+ assignKey(field);
+ } else if (cfield instanceof ComputedField) {
+ assignKey(cfield[Copy]()); // ComputedField.MakeFunction(cfield.script.originalScript));
+ } else if (field instanceof ObjectField) {
+ await copyObjectField(field);
+ } else if (field instanceof Promise) {
+ debugger; //This shouldn't happen...
+ } else {
+ assignKey(field);
}
})
);
- for (const link of Array.from(doc[DirectLinksSym])) {
- const linkClone = await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, [], dontCreate, asBranch);
- linkMap.set(link, linkClone);
- }
- if (!dontCreate) {
- Doc.SetInPlace(copy, 'title', (asBranch ? 'BRANCH: ' : 'CLONE: ') + doc.title, true);
- asBranch ? (copy.branchOf = doc) : (copy.cloneOf = doc);
- if (!Doc.IsPrototype(copy)) {
- Doc.AddDocToList(doc, 'branches', Doc.GetProto(copy));
+ Array.from(doc[DirectLinksSym]).forEach(async link => {
+ if (
+ cloneLinks ||
+ ((cloneMap.has(DocCast(link.anchor1)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor1)?.annotationOn)?.[Id])) && (cloneMap.has(DocCast(link.anchor2)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor2)?.annotationOn)?.[Id])))
+ ) {
+ linkMap.set(link[Id], await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks));
}
- cloneMap.set(doc[Id], copy);
- }
+ });
+ Doc.SetInPlace(copy, 'title', 'CLONE: ' + doc.title, true);
+ copy.cloneOf = doc;
+ cloneMap.set(doc[Id], copy);
+
Doc.AddFileOrphan(copy);
return copy;
}
- export async function MakeClone(doc: Doc, dontCreate: boolean = false, asBranch = false, cloneMap: Map<string, Doc> = new Map()) {
- const linkMap = new Map<Doc, Doc>();
+ export function repairClone(clone: Doc, cloneMap: Map<string, Doc>, visited: Set<Doc>) {
+ if (visited.has(clone)) return;
+ visited.add(clone);
+ Object.keys(clone)
+ .filter(key => key !== 'cloneOf')
+ .map(key => {
+ const docAtKey = DocCast(clone[key]);
+ if (docAtKey && !Doc.IsSystem(docAtKey)) {
+ if (!Array.from(cloneMap.values()).includes(docAtKey)) {
+ if (cloneMap.has(docAtKey[Id])) {
+ clone[key] = cloneMap.get(docAtKey[Id]);
+ } else clone[key] = undefined;
+ } else {
+ repairClone(docAtKey, cloneMap, visited);
+ }
+ }
+ });
+ }
+ export function MakeClones(docs: Doc[], cloneLinks: boolean) {
+ const cloneMap = new Map<string, Doc>();
+ return docs.map(doc => Doc.MakeClone(doc, cloneLinks, cloneMap));
+ }
+
+ export async function MakeClone(doc: Doc, cloneLinks = true, cloneMap: Map<string, Doc> = new Map()) {
+ const linkMap = new Map<string, Doc>();
const rtfMap: { copy: Doc; key: string; field: RichTextField }[] = [];
- const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf', 'branches', 'branchOf'], ['context'], dontCreate, asBranch);
- Array.from(linkMap.entries()).map((links: Doc[]) => LinkManager.Instance.addLink(links[1], true));
+ const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf'], doc.context ? [DocCast(doc.context)] : [], cloneLinks);
+ const repaired = new Set<Doc>();
+ const linkedDocs = Array.from(linkMap.values());
+ linkedDocs.map((link: Doc) => LinkManager.Instance.addLink(link, true));
rtfMap.map(({ copy, key, field }) => {
const replacer = (match: any, attr: string, id: string, offset: any, string: any) => {
const mapped = cloneMap.get(id);
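
A standalone sketch of the id-extraction step makeClone now performs on rich text fields; the sample Data string is made up, and in the real code the extracted ids are fetched via DocServer.GetRefFields and cloned alongside the document.

    // Plain TypeScript; no Dash imports are needed for this illustration.
    const DocsInTextFieldIds = ['audioId', 'textId', 'anchorId', 'docId'];
    const sampleData = '{"type":"dashField","attrs":{"docId":"doc_123"}},{"type":"audiotag","attrs":{"audioId":"aud_456"}}';

    const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => '(' + exp + ')').join('|') + ')":"([a-z-A-Z0-9_]*)"', 'g');
    const rawdocids = sampleData.match(docidsearch); // ['docId":"doc_123"', 'audioId":"aud_456"']
    const docids = rawdocids?.map(str =>
        DocsInTextFieldIds.reduce((output, exp) => output.replace(new RegExp(`${exp}":`, 'g'), ''), str)
            .replace(/"/g, '')
            .trim()
    );
    console.log(docids); // ['doc_123', 'aud_456']
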
@@ -793,70 +817,80 @@ export namespace Doc {
const mapped = cloneMap.get(id);
return href + (mapped ? mapped[Id] : id);
};
- const regex = `(${Doc.localServerPath()})([^"]*)`;
- const re = new RegExp(regex, 'g');
- copy[key] = new RichTextField(field.Data.replace(/("textId":|"audioId":|"anchorId":)"([^"]+)"/g, replacer).replace(re, replacer2), field.Text);
+ const re = new RegExp(`(${Doc.localServerPath()})([^"]*)`, 'g');
+ const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => `"${exp}":`).join('|') + ')"([^"]+)"', 'g');
+ copy[key] = new RichTextField(field.Data.replace(docidsearch, replacer).replace(re, replacer2), field.Text);
});
- return { clone: copy, map: cloneMap };
+ const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs];
+ clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired));
+ return { clone: copy, map: cloneMap, linkMap };
}
- export async function Zip(doc: Doc) {
- // const a = document.createElement("a");
- // const url = Utils.prepend(`/downloadId/${this.props.Document[Id]}`);
- // a.href = url;
- // a.download = `DocExport-${this.props.Document[Id]}.zip`;
- // a.click();
- const { clone, map } = await Doc.MakeClone(doc, true);
+ export async function Zip(doc: Doc, zipFilename = 'dashExport.zip') {
+ const { clone, map, linkMap } = await Doc.MakeClone(doc);
+ const proms = new Set<string>();
function replacer(key: any, value: any) {
- if (['branchOf', 'cloneOf', 'context', 'cursors'].includes(key)) return undefined;
- else if (value instanceof Doc) {
- if (key !== 'field' && Number.isNaN(Number(key))) {
- const __fields = value[FieldsSym]();
- return { id: value[Id], __type: 'Doc', fields: __fields };
- } else {
- return { fieldId: value[Id], __type: 'proxy' };
- }
- } else if (value instanceof ScriptField) return { script: value.script, __type: 'script' };
- else if (value instanceof RichTextField) return { Data: value.Data, Text: value.Text, __type: 'RichTextField' };
- else if (value instanceof ImageField) return { url: value.url.href, __type: 'image' };
- else if (value instanceof PdfField) return { url: value.url.href, __type: 'pdf' };
- else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' };
- else if (value instanceof VideoField) return { url: value.url.href, __type: 'video' };
- else if (value instanceof WebField) return { url: value.url.href, __type: 'web' };
- else if (value instanceof MapField) return { url: value.url.href, __type: 'map' };
- else if (value instanceof DateField) return { date: value.toString(), __type: 'date' };
- else if (value instanceof ProxyField) return { fieldId: value.fieldId, __type: 'proxy' };
- else if (value instanceof Array && key !== 'fields') return { fields: value, __type: 'list' };
- else if (value instanceof ComputedField) return { script: value.script, __type: 'computed' };
- else return value;
+ if (key && ['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined;
+ if (value instanceof ImageField) {
+ const extension = value.url.href.replace(/.*\./, '');
+ proms.add(value.url.href.replace('.' + extension, '_o.' + extension));
+ return SerializationHelper.Serialize(value);
+ }
+ if (value instanceof PdfField || value instanceof AudioField || value instanceof VideoField) {
+ proms.add(value.url.href);
+ return SerializationHelper.Serialize(value);
+ }
+ if (
+ value instanceof Doc ||
+ value instanceof ScriptField ||
+ value instanceof RichTextField ||
+ value instanceof InkField ||
+ value instanceof CsvField ||
+ value instanceof WebField ||
+ value instanceof DateField ||
+ value instanceof ProxyField ||
+ value instanceof ComputedField
+ ) {
+ return SerializationHelper.Serialize(value);
+ }
+ if (value instanceof Array && key !== ListFieldName && key !== InkField.InkDataFieldName) return { fields: value, __type: 'list' };
+ return value;
}
const docs: { [id: string]: any } = {};
+ const links: { [id: string]: any } = {};
Array.from(map.entries()).forEach(f => (docs[f[0]] = f[1]));
- const docString = JSON.stringify({ id: doc[Id], docs }, replacer);
+ Array.from(linkMap.entries()).forEach(l => (links[l[0]] = l[1]));
+ const jsonDocs = JSON.stringify({ id: clone[Id], docs, links }, decycle(replacer));
const zip = new JSZip();
-
- zip.file('doc.json', docString);
-
- // // Generate a directory within the Zip file structure
- // var img = zip.folder("images");
-
- // // Add a file to the directory, in this case an image with data URI as contents
- // img.file("smile.gif", imgData, {base64: true});
-
- // Generate the zip file asynchronously
- zip.generateAsync({ type: 'blob' }).then((content: any) => {
- // Force down of the Zip file
- saveAs(content, doc.title + '.zip'); // glr: Possibly change the name of the document to match the title?
- });
- }
- //
- // Determines whether the layout needs to be expanded (as a template).
- // template expansion is rquired when the layout is a template doc/field and there's a datadoc which isn't equal to the layout template
- //
- export function WillExpandTemplateLayout(layoutDoc: Doc, dataDoc?: Doc) {
- return (layoutDoc.isTemplateForField || layoutDoc.isTemplateDoc) && dataDoc && layoutDoc !== dataDoc;
+ var count = 0;
+ const promArr = Array.from(proms).filter(url => url.startsWith(window.location.origin));
+ if (!promArr.length) {
+ zip.file('docs.json', jsonDocs);
+ zip.generateAsync({ type: 'blob' }).then(content => saveAs(content, zipFilename));
+ } else
+ promArr.forEach((url, i) => {
+ // load each asset file and add it to the zip
+ JSZipUtils.getBinaryContent(url, (err: any, data: any) => {
+ if (err) throw err; // or handle the error
+ // // Generate a directory within the Zip file structure
+ // const assets = zip.folder("assets");
+ // assets.file(filename, data, {binary: true});
+ const assetPathOnServer = promArr[i].replace(window.location.origin + '/', '').replace(/\//g, '%%%');
+ zip.file(assetPathOnServer, data, { binary: true });
+ console.log(' => ' + url);
+ if (++count === promArr.length) {
+ zip.file('docs.json', jsonDocs);
+ zip.generateAsync({ type: 'blob' }).then(content => saveAs(content, zipFilename));
+ // const a = document.createElement("a");
+ // const url = Utils.prepend(`/downloadId/${this.props.Document[Id]}`);
+ // a.href = url;
+ // a.download = `DocExport-${this.props.Document[Id]}.zip`;
+ // a.click();
+ }
+ });
+ });
}
const _pendingMap: Map<string, boolean> = new Map();
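
A condensed sketch of the asset handling the rewritten Zip() performs; the asset URL is hypothetical, and saveAs is assumed to be the file-saver helper already used by this file.

    import JSZip = require('jszip');
    import * as JSZipUtils from '../JSZipUtils';
    import { saveAs } from 'file-saver'; // assumption: same helper the surrounding file uses

    const zip = new JSZip();
    const assetUrl = window.location.origin + '/files/images/sample_o.png'; // hypothetical URL gathered into `proms` by the replacer
    JSZipUtils.getBinaryContent(assetUrl, (err: any, data: any) => {
        if (err) throw err;
        // slashes in the server path are flattened to '%%%' so each asset sits at the top level of the archive
        const assetPathOnServer = assetUrl.replace(window.location.origin + '/', '').replace(/\//g, '%%%');
        zip.file(assetPathOnServer, data, { binary: true });
        zip.file('docs.json', '{}'); // the real code writes the decycled clone/link maps here
        zip.generateAsync({ type: 'blob' }).then(content => saveAs(content, 'dashExport.zip'));
    });
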
@@ -864,44 +898,34 @@ export namespace Doc {
// Returns an expanded template layout for a target data document if there is a template relationship
// between the two. If so, the layoutDoc is expanded into a new document that inherits the properties
// of the original layout while allowing for individual layout properties to be overridden in the expanded layout.
- // templateArgs should be equivalent to the layout key that generates the template since that's where the template parameters are stored in ()'s at the end of the key.
- // NOTE: the template will have references to "@params" -- the template arguments will be assigned to the '@params' field
- // so that when the @params key is accessed, it will be rewritten as the key that is stored in the 'params' field and
- // the derefence will then occur on the rootDocument (the original document).
- // in the future, field references could be written as @<someparam> and then arguments would be passed in the layout key as:
- // layout_mytemplate(somparam=somearg).
- // then any references to @someparam would be rewritten as accesses to 'somearg' on the rootDocument
- export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc, templateArgs?: string) {
- const args = templateArgs?.match(/\(([a-zA-Z0-9._\-]*)\)/)?.[1].replace('()', '') || StrCast(templateLayoutDoc.PARAMS);
- if ((!args && !WillExpandTemplateLayout(templateLayoutDoc, targetDoc)) || !targetDoc) return templateLayoutDoc;
-
- const templateField = StrCast(templateLayoutDoc.isTemplateForField); // the field that the template renders
+ export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc) {
+ // nothing to do if the layout isn't a template or we don't have a target that's different than the template
+ if (!targetDoc || templateLayoutDoc === targetDoc || (!templateLayoutDoc.isTemplateForField && !templateLayoutDoc.isTemplateDoc)) {
+ return templateLayoutDoc;
+ }
+
+ const templateField = StrCast(templateLayoutDoc.isTemplateForField, Doc.LayoutFieldKey(templateLayoutDoc)); // the field that the template renders
// First it checks if an expanded layout already exists -- if so it will be stored on the dataDoc
// using the template layout doc's id as the field key.
// If it doesn't find the expanded layout, then it makes a delegate of the template layout and
// saves it on the data doc indexed by the template layout's id.
//
- const params = args.split('=').length > 1 ? args.split('=')[0] : 'PARAMS';
- const layoutFielddKey = Doc.LayoutFieldKey(templateLayoutDoc);
- const expandedLayoutFieldKey = (templateField || layoutFielddKey) + '-layout[' + templateLayoutDoc[Id] + (args ? `(${args})` : '') + ']';
+ const expandedLayoutFieldKey = templateField + '-layout[' + templateLayoutDoc[Id] + ']';
let expandedTemplateLayout = targetDoc?.[expandedLayoutFieldKey];
if (templateLayoutDoc.resolvedDataDoc instanceof Promise) {
expandedTemplateLayout = undefined;
_pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
- } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey + args)) {
- if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc)) && templateLayoutDoc.PARAMS === StrCast(targetDoc.PARAMS)) {
+ } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey)) {
+ if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc))) {
expandedTemplateLayout = templateLayoutDoc; // reuse an existing template layout if its for the same document with the same params
} else {
templateLayoutDoc.resolvedDataDoc && (templateLayoutDoc = Cast(templateLayoutDoc.proto, Doc, null) || templateLayoutDoc); // if the template has already been applied (ie, a nested template), then use the template's prototype
if (!targetDoc[expandedLayoutFieldKey]) {
- _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey + args, true);
+ _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
setTimeout(
action(() => {
const newLayoutDoc = Doc.MakeDelegate(templateLayoutDoc, undefined, '[' + templateLayoutDoc.title + ']');
- // the template's arguments are stored in params which is derefenced to find
- // the actual field key where the parameterized template data is stored.
- newLayoutDoc[params] = args !== '...' ? args : ''; // ... signifies the layout has sub template(s) -- so we have to expand the layout for them so that they can get the correct 'rootDocument' field, but we don't need to reassign their params. it would be better if the 'rootDocument' field could be passed dynamically to avoid have to create instances
newLayoutDoc.rootDocument = targetDoc;
const dataDoc = Doc.GetProto(targetDoc);
newLayoutDoc.resolvedDataDoc = dataDoc;
@@ -910,7 +934,7 @@ export namespace Doc {
}
targetDoc[expandedLayoutFieldKey] = newLayoutDoc;
- _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey + args);
+ _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey);
})
);
}
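
A small sketch of the simplified cache key; the documents below are hypothetical placeholders.

    import { Doc } from './Doc';
    import { Id } from './FieldSymbols';
    import { StrCast } from './Types';

    declare const templateLayoutDoc: Doc; // hypothetical template layout
    declare const targetDoc: Doc;         // hypothetical target document
    const templateField = StrCast(templateLayoutDoc.isTemplateForField, Doc.LayoutFieldKey(templateLayoutDoc));
    const expandedLayoutFieldKey = templateField + '-layout[' + templateLayoutDoc[Id] + ']'; // e.g. 'text-layout[a1b2c3]'
    const cachedExpansion = targetDoc[expandedLayoutFieldKey]; // undefined until expandTemplateLayout stores the delegate here
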
@@ -923,11 +947,11 @@ export namespace Doc {
// otherwise, it just returns the childDoc
export function GetLayoutDataDocPair(containerDoc: Doc, containerDataDoc: Opt<Doc>, childDoc: Doc) {
if (!childDoc || childDoc instanceof Promise || !Doc.GetProto(childDoc)) {
- console.log('No, no, no!');
+ console.log('Warning: GetLayoutDataDocPair childDoc not defined');
return { layout: childDoc, data: childDoc };
}
- const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField && !childDoc.PARAMS) ? undefined : containerDataDoc;
- return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc, '(' + StrCast(containerDoc.PARAMS) + ')'), data: resolvedDataDoc };
+ const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField) ? undefined : containerDataDoc;
+ return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc), data: resolvedDataDoc };
}
export function Overwrite(doc: Doc, overwrite: Doc, copyProto: boolean = false): Doc {
@@ -1158,7 +1182,7 @@ export namespace Doc {
return doc[StrCast(doc.layoutKey, 'layout')];
}
export function LayoutFieldKey(doc: Doc): string {
- return StrCast(Doc.Layout(doc).layout).split("'")[1];
+ return StrCast(Doc.Layout(doc).layout).split("'")[1]; // bcz: TODO check on this. Used to always reference 'layout', now it uses the layout specified by the current layoutKey
}
export function NativeAspect(doc: Doc, dataDoc?: Doc, useDim?: boolean) {
return Doc.NativeWidth(doc, dataDoc, useDim) / (Doc.NativeHeight(doc, dataDoc, useDim) || 1);
@@ -1167,9 +1191,10 @@ export namespace Doc {
return !doc ? 0 : NumCast(doc._nativeWidth, NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeWidth'], useWidth ? doc[WidthSym]() : 0));
}
export function NativeHeight(doc?: Doc, dataDoc?: Doc, useHeight?: boolean) {
- const dheight = doc ? NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0) : 0;
- const nheight = doc ? (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]() : 0;
- return !doc ? 0 : NumCast(doc._nativeHeight, nheight || dheight);
+ if (!doc) return 0;
+ const nheight = (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]();
+ const dheight = NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0);
+ return NumCast(doc._nativeHeight, nheight || dheight);
}
export function SetNativeWidth(doc: Doc, width: number | undefined, fieldKey?: string) {
doc[(fieldKey ?? Doc.LayoutFieldKey(doc)) + '-nativeWidth'] = width;
@@ -1279,6 +1304,7 @@ export namespace Doc {
}
export function LinkEndpoint(linkDoc: Doc, anchorDoc: Doc) {
+ if (linkDoc.anchor2 === anchorDoc || (linkDoc.anchor2 as Doc).annotationOn) return '2';
return Doc.AreProtosEqual(anchorDoc, (linkDoc.anchor1 as Doc).annotationOn as Doc) || Doc.AreProtosEqual(anchorDoc, linkDoc.anchor1 as Doc) ? '1' : '2';
}
@@ -1337,7 +1363,7 @@ export namespace Doc {
});
}
export function UnBrushAllDocs() {
- brushManager.BrushedDoc.clear();
+ runInAction(() => brushManager.BrushedDoc.clear());
}
export function getDocTemplate(doc?: Doc) {
@@ -1362,7 +1388,7 @@ export namespace Doc {
if (typeof value === 'string') {
value = value.replace(`,${Utils.noRecursionHack}`, '');
}
- const fieldVal = key === '#' ? (StrCast(doc.tags).includes(':#' + value + ':') ? StrCast(doc.tags) : undefined) : doc[key];
+ const fieldVal = doc[key];
if (Cast(fieldVal, listSpec('string'), []).length) {
const vals = Cast(fieldVal, listSpec('string'), []);
const docs = vals.some(v => (v as any) instanceof Doc);
@@ -1370,7 +1396,7 @@ export namespace Doc {
return vals.some(v => v.includes(value)); // bcz: arghh: Todo: comparison should be parameterized as exact, or substring
}
const fieldStr = Field.toString(fieldVal as Field);
- return fieldStr.includes(value); // bcz: arghh: Todo: comparison should be parameterized as exact, or substring
+ return fieldStr.includes(value) || (value === String.fromCharCode(127) + '--undefined--' && fieldVal === undefined); // bcz: arghh: Todo: comparison should be parameterized as exact, or substring
}
export function deiconifyView(doc: Doc) {
@@ -1511,16 +1537,26 @@ export namespace Doc {
}
}
- export async function importDocument(file: File) {
+ ///
+ // Imports a previously exported zip file containing a set of documents and their assets (e.g., images, videos).
+ // The 'remap' parameter determines whether the ids of the loaded documents are kept as they were or remapped to new ids.
+ // If they are not remapped, loading the file will overwrite any existing documents with those ids.
+ //
+ export async function importDocument(file: File, remap = false) {
const upload = Utils.prepend('/uploadDoc');
const formData = new FormData();
if (file) {
formData.append('file', file);
- formData.append('remap', 'true');
+ formData.append('remap', remap.toString());
const response = await fetch(upload, { method: 'POST', body: formData });
const json = await response.json();
if (json !== 'error') {
- const doc = await DocServer.GetRefField(json);
+ const docs = await DocServer.GetRefFields(json.docids as string[]);
+ const doc = DocCast(await DocServer.GetRefField(json.id));
+ const links = await DocServer.GetRefFields(json.linkids as string[]);
+ Array.from(Object.keys(links))
+ .map(key => links[key])
+ .forEach(link => link instanceof Doc && LinkManager.Instance.addLink(link));
return doc;
}
}
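
A hedged usage sketch of the new signature; the File is assumed to come from a file picker, and with the default remap = false the imported documents keep their exported ids (overwriting any existing documents with those ids).

    import { Doc } from './Doc';

    // Hypothetical call site: re-import a zip produced by Doc.Zip().
    async function importExportedZip(file: File) {
        const imported = await Doc.importDocument(file, /* remap */ true); // pass true to assign fresh ids instead of overwriting
        return imported; // the root document; importDocument registers the contained links with LinkManager
    }
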
@@ -1668,8 +1704,11 @@ export namespace Doc {
}
}
+export function IdToDoc(id: string) {
+ return DocCast(DocServer.GetCachedRefField(id));
+}
ScriptingGlobals.add(function idToDoc(id: string): any {
- return DocServer.GetCachedRefField(id);
+ return IdToDoc(id);
});
ScriptingGlobals.add(function renameAlias(doc: any) {
return StrCast(Doc.GetProto(doc).title).replace(/\([0-9]*\)/, '') + `(${doc.aliasNumber})`;
diff --git a/src/fields/InkField.ts b/src/fields/InkField.ts
index a074098c1..22bea3927 100644
--- a/src/fields/InkField.ts
+++ b/src/fields/InkField.ts
@@ -1,5 +1,5 @@
import { Bezier } from 'bezier-js';
-import { createSimpleSchema, list, object, serializable } from 'serializr';
+import { alias, createSimpleSchema, list, object, serializable } from 'serializr';
import { ScriptingGlobals } from '../client/util/ScriptingGlobals';
import { Deserializable } from '../client/util/SerializationHelper';
import { Copy, ToScriptString, ToString } from './FieldSymbols';
@@ -64,7 +64,8 @@ const strokeDataSchema = createSimpleSchema({
@Deserializable('ink')
export class InkField extends ObjectField {
- @serializable(list(object(strokeDataSchema)))
+ public static InkDataFieldName = '__inkData';
+ @serializable(alias(InkField.InkDataFieldName, list(object(strokeDataSchema))))
readonly inkData: InkData;
constructor(data: InkData) {
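
A hedged sketch of what the alias changes on the wire; the stroke points are simplified and their exact shape is an assumption.

    import { serialize } from 'serializr';
    import { InkField } from './InkField';

    // With the alias, the stroke array serializes under '__inkData' rather than 'inkData',
    // which is the key the Zip replacer in Doc.ts special-cases via InkField.InkDataFieldName.
    const ink = new InkField([{ X: 0, Y: 0 }, { X: 10, Y: 12 }] as any); // point shape assumed for illustration
    console.log(Object.keys(serialize(ink))); // expected to include '__inkData'
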
diff --git a/src/fields/List.ts b/src/fields/List.ts
index 9c7794813..e33627be5 100644
--- a/src/fields/List.ts
+++ b/src/fields/List.ts
@@ -240,6 +240,7 @@ type ListUpdate<T> = ListSpliceUpdate<T> | ListIndexUpdate<T>;
type StoredType<T extends Field> = T extends RefField ? ProxyField<T> : T;
+export const ListFieldName = 'fields';
@Deserializable('list')
class ListImpl<T extends Field> extends ObjectField {
constructor(fields?: T[]) {
@@ -289,7 +290,8 @@ class ListImpl<T extends Field> extends ObjectField {
return this.__fields.map(toRealField);
}
- @serializable(alias('fields', list(autoObject(), { afterDeserialize: afterDocDeserialize })))
+ public static FieldDataName = 'fields';
+ @serializable(alias(ListFieldName, list(autoObject(), { afterDeserialize: afterDocDeserialize })))
private get __fields() {
return this.___fields;
}
diff --git a/src/fields/RichTextField.ts b/src/fields/RichTextField.ts
index d7edd4266..3e75a071f 100644
--- a/src/fields/RichTextField.ts
+++ b/src/fields/RichTextField.ts
@@ -1,11 +1,11 @@
-import { serializable } from "serializr";
-import { scriptingGlobal } from "../client/util/ScriptingGlobals";
-import { Deserializable } from "../client/util/SerializationHelper";
-import { Copy, ToScriptString, ToString } from "./FieldSymbols";
-import { ObjectField } from "./ObjectField";
+import { serializable } from 'serializr';
+import { scriptingGlobal } from '../client/util/ScriptingGlobals';
+import { Deserializable } from '../client/util/SerializationHelper';
+import { Copy, ToScriptString, ToString } from './FieldSymbols';
+import { ObjectField } from './ObjectField';
@scriptingGlobal
-@Deserializable("RichTextField")
+@Deserializable('RichTextField')
export class RichTextField extends ObjectField {
@serializable(true)
readonly Data: string;
@@ -13,14 +13,14 @@ export class RichTextField extends ObjectField {
@serializable(true)
readonly Text: string;
- constructor(data: string, text: string = "") {
+ constructor(data: string, text: string = '') {
super();
this.Data = data;
this.Text = text;
}
Empty() {
- return !(this.Text || this.Data.toString().includes("dashField") || this.Data.toString().includes("align"));
+ return !(this.Text || this.Data.toString().includes('dashField') || this.Data.toString().includes('align'));
}
[Copy]() {
@@ -28,14 +28,16 @@ export class RichTextField extends ObjectField {
}
[ToScriptString]() {
- return `new RichTextField("${this.Data.replace(/"/g, "\\\"")}", "${this.Text}")`;
+ return `new RichTextField("${this.Data.replace(/"/g, '\\"')}", "${this.Text}")`;
}
[ToString]() {
return this.Text;
}
public static DashField(fieldKey: string) {
- return new RichTextField(`{"doc":{"type":"doc","content":[{"type":"paragraph","attrs":{"align":null,"color":null,"id":null,"indent":null,"inset":null,"lineSpacing":null,"paddingBottom":null,"paddingTop":null},"content":[{"type":"dashField","attrs":{"fieldKey":"${fieldKey}","docid":""}}]}]},"selection":{"type":"text","anchor":2,"head":2},"storedMarks":[]}`, "");
+ return new RichTextField(
+ `{"doc":{"type":"doc","content":[{"type":"paragraph","attrs":{"align":null,"color":null,"id":null,"indent":null,"inset":null,"lineSpacing":null,"paddingBottom":null,"paddingTop":null},"content":[{"type":"dashField","attrs":{"fieldKey":"${fieldKey}","docId":""}}]}]},"selection":{"type":"text","anchor":2,"head":2},"storedMarks":[]}`,
+ ''
+ );
}
-
-}
\ No newline at end of file
+}
diff --git a/src/fields/RichTextUtils.ts b/src/fields/RichTextUtils.ts
index bf055cd8b..239b59e83 100644
--- a/src/fields/RichTextUtils.ts
+++ b/src/fields/RichTextUtils.ts
@@ -264,18 +264,18 @@ export namespace RichTextUtils {
const imageNode = (schema: any, image: ImageTemplate, textNote: Doc) => {
const { url: src, width, agnostic } = image;
- let docid: string;
+ let docId: string;
const guid = Utils.GenerateDeterministicGuid(agnostic);
const backingDocId = StrCast(textNote[guid]);
if (!backingDocId) {
const backingDoc = Docs.Create.ImageDocument(agnostic, { _width: 300, _height: 300 });
DocUtils.makeCustomViewClicked(backingDoc, Docs.Create.FreeformDocument);
- docid = backingDoc[Id];
- textNote[guid] = docid;
+ docId = backingDoc[Id];
+ textNote[guid] = docId;
} else {
- docid = backingDocId;
+ docId = backingDocId;
}
- return schema.node('image', { src, agnostic, width, docid, float: null, location: 'add:right' });
+ return schema.node('image', { src, agnostic, width, docId, float: null, location: 'add:right' });
};
const textNode = (schema: any, run: docs_v1.Schema$TextRun) => {
diff --git a/src/fields/ScriptField.ts b/src/fields/ScriptField.ts
index feb419597..2b8750714 100644
--- a/src/fields/ScriptField.ts
+++ b/src/fields/ScriptField.ts
@@ -114,7 +114,7 @@ export class ScriptField extends ObjectField {
}
[ToScriptString]() {
- return 'script field';
+ return this.script.originalScript;
}
[ToString]() {
return this.script.originalScript;
diff --git a/src/fields/URLField.ts b/src/fields/URLField.ts
index 00c78e231..8ac20b1e5 100644
--- a/src/fields/URLField.ts
+++ b/src/fields/URLField.ts
@@ -54,9 +54,6 @@ export const nullAudio = 'https://actions.google.com/sounds/v1/alarms/beep_short
@Deserializable('audio')
export class AudioField extends URLField {}
@scriptingGlobal
-@Deserializable('recording')
-export class RecordingField extends URLField {}
-@scriptingGlobal
@Deserializable('image')
export class ImageField extends URLField {}
@scriptingGlobal
@@ -69,14 +66,8 @@ export class PdfField extends URLField {}
@Deserializable('web')
export class WebField extends URLField {}
@scriptingGlobal
-@Deserializable('map')
-export class MapField extends URLField {}
-@scriptingGlobal
@Deserializable('csv')
export class CsvField extends URLField {}
@scriptingGlobal
@Deserializable('youtube')
export class YoutubeField extends URLField {}
-@scriptingGlobal
-@Deserializable('webcam')
-export class WebCamField extends URLField {}
diff --git a/src/fields/documentSchemas.ts b/src/fields/documentSchemas.ts
index 5b489a96c..b7fd06973 100644
--- a/src/fields/documentSchemas.ts
+++ b/src/fields/documentSchemas.ts
@@ -89,7 +89,6 @@ export const documentSchema = createSchema({
hideAllLinks: 'boolean', // whether all individual blue anchor dots should be hidden
linkDisplay: 'boolean', // whether a link connection should be shown between link anchor endpoints.
isLightbox: 'boolean', // whether the marked object will display addDocTab() calls that target "lightbox" destinations
- isLinkButton: 'boolean', // whether document functions as a link follow button to follow the first link on the document when clicked
layers: listSpec('string'), // which layers the document is part of
_lockedPosition: 'boolean', // whether the document can be moved (dragged)
_lockedTransform: 'boolean', // whether a freeformview can pan/zoom
diff --git a/src/fields/util.ts b/src/fields/util.ts
index 70d9ed61f..92f3a69eb 100644
--- a/src/fields/util.ts
+++ b/src/fields/util.ts
@@ -107,8 +107,11 @@ const _setterImpl = action(function (target: any, prop: string | symbol | number
redo: () => (receiver[prop] = value),
undo: () => {
const wasUpdate = receiver[UpdatingFromServer];
+ const wasForce = receiver[ForceServerWrite];
+ receiver[ForceServerWrite] = true; // needed since writes aren't propagated to server if UpdatingFromServerIsSet
receiver[UpdatingFromServer] = true; // needed if the event caused ACL's to change such that the doc is otherwise no longer editable.
receiver[prop] = curValue;
+ receiver[ForceServerWrite] = wasForce;
receiver[UpdatingFromServer] = wasUpdate;
},
prop: prop?.toString(),