From 2a313f28fcb8675223708b0657de7517a3281095 Mon Sep 17 00:00:00 2001 From: bobzel Date: Wed, 17 Apr 2024 12:27:21 -0400 Subject: restoring eslint - updates not complete yet --- src/server/ActionUtilities.ts | 71 +- src/server/ApiManagers/DataVizManager.ts | 15 +- src/server/ApiManagers/DeleteManager.ts | 2 +- src/server/ApiManagers/DownloadManager.ts | 385 ++++++----- src/server/ApiManagers/GeneralGoogleManager.ts | 39 +- src/server/ApiManagers/MongoStore.js | 414 ++++++++++++ src/server/ApiManagers/SearchManager.ts | 8 +- src/server/ApiManagers/SessionManager.ts | 61 +- src/server/ApiManagers/UploadManager.ts | 346 ++++------ src/server/ApiManagers/UserManager.ts | 60 +- src/server/Client.ts | 8 +- src/server/DashSession/DashSessionAgent.ts | 28 +- src/server/DashStats.ts | 241 ++++--- src/server/DashUploadUtils.ts | 748 ++++++++++----------- src/server/Message.ts | 91 ++- src/server/RouteManager.ts | 92 +-- src/server/SocketData.ts | 35 + src/server/apis/google/GoogleApiServerUtils.ts | 8 +- src/server/authentication/AuthenticationManager.ts | 2 +- src/server/authentication/DashUserModel.ts | 10 +- src/server/database.ts | 166 ++--- src/server/index.ts | 16 +- src/server/remapUrl.ts | 69 +- src/server/server_Initialization.ts | 251 +++---- src/server/websocket.ts | 380 +++++------ 25 files changed, 1963 insertions(+), 1583 deletions(-) create mode 100644 src/server/ApiManagers/MongoStore.js create mode 100644 src/server/SocketData.ts (limited to 'src/server') diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts index 55b50cc12..6f5b9272a 100644 --- a/src/server/ActionUtilities.ts +++ b/src/server/ActionUtilities.ts @@ -1,14 +1,14 @@ import { exec } from 'child_process'; import { Color, yellow } from 'colors'; import { createWriteStream, exists, mkdir, readFile, unlink, writeFile } from 'fs'; -import * as nodemailer from "nodemailer"; -import { MailOptions } from "nodemailer/lib/json-transport"; +import * as nodemailer from 'nodemailer'; +import { MailOptions } from 'nodemailer/lib/json-transport'; import * as path from 'path'; -import { rimraf } from "rimraf"; +import { rimraf } from 'rimraf'; import { ExecOptions } from 'shelljs'; import * as Mail from 'nodemailer/lib/mailer'; -const projectRoot = path.resolve(__dirname, "../../"); +const projectRoot = path.resolve(__dirname, '../../'); export function pathFromRoot(relative?: string) { if (!relative) { return projectRoot; @@ -18,7 +18,7 @@ export function pathFromRoot(relative?: string) { export async function fileDescriptorFromStream(path: string) { const logStream = createWriteStream(path); - return new Promise(resolve => logStream.on("open", resolve)); + return new Promise(resolve => logStream.on('open', resolve)); } export const command_line = (command: string, fromDirectory?: string) => { @@ -27,25 +27,25 @@ export const command_line = (command: string, fromDirectory?: string) => { if (fromDirectory) { options.cwd = fromDirectory ? path.resolve(projectRoot, fromDirectory) : projectRoot; } - exec(command, options, (err, stdout) => err ? reject(err) : resolve(stdout)); + exec(command, options, (err, stdout) => (err ? reject(err) : resolve(stdout))); }); }; export const read_text_file = (relativePath: string) => { const target = path.resolve(__dirname, relativePath); return new Promise((resolve, reject) => { - readFile(target, (err, data) => err ? reject(err) : resolve(data.toString())); + readFile(target, (err, data) => (err ? 
reject(err) : resolve(data.toString()))); }); }; export const write_text_file = (relativePath: string, contents: any) => { const target = path.resolve(__dirname, relativePath); return new Promise((resolve, reject) => { - writeFile(target, contents, (err) => err ? reject(err) : resolve()); + writeFile(target, contents, err => (err ? reject(err) : resolve())); }); }; -export type Messager = (outcome: { result: T | undefined, error: Error | null }) => string; +export type Messager = (outcome: { result: T | undefined; error: Error | null }) => string; export interface LogData { startMessage: string; @@ -56,22 +56,23 @@ export interface LogData { } let current = Math.ceil(Math.random() * 20); -export async function log_execution({ startMessage, endMessage, action, color }: LogData): Promise { - let result: T | undefined = undefined, error: Error | null = null; - const resolvedColor = color || `\x1b[${31 + ++current % 6}m%s\x1b[0m`; +export async function logExecution({ startMessage, endMessage, action, color }: LogData): Promise { + let result: T | undefined = undefined, + error: Error | null = null; + const resolvedColor = color || `\x1b[${31 + (++current % 6)}m%s\x1b[0m`; log_helper(`${startMessage}...`, resolvedColor); try { result = await action(); } catch (e: any) { error = e; } finally { - log_helper(typeof endMessage === "string" ? endMessage : endMessage({ result, error }), resolvedColor); + log_helper(typeof endMessage === 'string' ? endMessage : endMessage({ result, error }), resolvedColor); } return result; } function log_helper(content: string, color: Color | string) { - if (typeof color === "string") { + if (typeof color === 'string') { console.log(color, content); } else { console.log(color(content)); @@ -88,11 +89,11 @@ export function msToTime(duration: number) { minutes = Math.floor((duration / (1000 * 60)) % 60), hours = Math.floor((duration / (1000 * 60 * 60)) % 24); - const hoursS = (hours < 10) ? "0" + hours : hours; - const minutesS = (minutes < 10) ? "0" + minutes : minutes; - const secondsS = (seconds < 10) ? "0" + seconds : seconds; + const hoursS = hours < 10 ? '0' + hours : hours; + const minutesS = minutes < 10 ? '0' + minutes : minutes; + const secondsS = seconds < 10 ? '0' + seconds : seconds; - return hoursS + ":" + minutesS + ":" + secondsS + "." + milliseconds; + return hoursS + ':' + minutesS + ':' + secondsS + '.' + milliseconds; } export const createIfNotExists = async (path: string) => { @@ -112,13 +113,12 @@ export async function Prune(rootDirectory: string): Promise { export const Destroy = (mediaPath: string) => new Promise(resolve => unlink(mediaPath, error => resolve(error === null))); export namespace Email { - const smtpTransport = nodemailer.createTransport({ service: 'Gmail', auth: { user: 'browndashptc@gmail.com', - pass: 'TsarNicholas#2' - } + pass: 'TsarNicholas#2', + }, }); export interface DispatchOptions { @@ -135,16 +135,18 @@ export namespace Email { export async function dispatchAll({ to, subject, content, attachments }: DispatchOptions) { const failures: DispatchFailure[] = []; - await Promise.all(to.map(async recipient => { - let error: Error | null; - const resolved = attachments ? "length" in attachments ? attachments : [attachments] : undefined; - if ((error = await Email.dispatch({ to: recipient, subject, content, attachments: resolved })) !== null) { - failures.push({ - recipient, - error - }); - } - })); + await Promise.all( + to.map(async recipient => { + let error: Error | null; + const resolved = attachments ? 
('length' in attachments ? attachments : [attachments]) : undefined; + if ((error = await Email.dispatch({ to: recipient, subject, content, attachments: resolved })) !== null) { + failures.push({ + recipient, + error, + }); + } + }) + ); return failures.length ? failures : undefined; } @@ -153,10 +155,9 @@ export namespace Email { to, from: 'browndashptc@gmail.com', subject, - text: `Hello ${to.split("@")[0]},\n\n${content}`, - attachments + text: `Hello ${to.split('@')[0]},\n\n${content}`, + attachments, } as MailOptions; return new Promise(resolve => smtpTransport.sendMail(mailOptions, resolve)); } - -} \ No newline at end of file +} diff --git a/src/server/ApiManagers/DataVizManager.ts b/src/server/ApiManagers/DataVizManager.ts index 0d43130d1..88f22992d 100644 --- a/src/server/ApiManagers/DataVizManager.ts +++ b/src/server/ApiManagers/DataVizManager.ts @@ -1,14 +1,14 @@ -import { csvParser, csvToString } from "../DataVizUtils"; -import { Method, _success } from "../RouteManager"; -import ApiManager, { Registration } from "./ApiManager"; -import { Directory, serverPathToFile } from "./UploadManager"; import * as path from 'path'; +import { csvParser, csvToString } from '../DataVizUtils'; +import { Method, _success } from '../RouteManager'; +import { Directory, serverPathToFile } from '../SocketData'; +import ApiManager, { Registration } from './ApiManager'; export default class DataVizManager extends ApiManager { protected initialize(register: Registration): void { register({ method: Method.GET, - subscription: "/csvData", + subscription: '/csvData', secureHandler: async ({ req, res }) => { const uri = req.query.uri as string; @@ -19,8 +19,7 @@ export default class DataVizManager extends ApiManager { _success(res, parsedCsv); resolve(); }); - } + }, }); } - -} \ No newline at end of file +} diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts index c6c4ca464..9a9b807ae 100644 --- a/src/server/ApiManagers/DeleteManager.ts +++ b/src/server/ApiManagers/DeleteManager.ts @@ -1,5 +1,5 @@ import ApiManager, { Registration } from './ApiManager'; -import { Method, _permission_denied } from '../RouteManager'; +import { Method, _permissionDenied } from '../RouteManager'; import { WebSocket } from '../websocket'; import { Database } from '../database'; import { rimraf } from 'rimraf'; diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts index 2175b6db6..b105c825c 100644 --- a/src/server/ApiManagers/DownloadManager.ts +++ b/src/server/ApiManagers/DownloadManager.ts @@ -1,13 +1,13 @@ -import ApiManager, { Registration } from "./ApiManager"; -import { Method } from "../RouteManager"; -import RouteSubscriber from "../RouteSubscriber"; import * as Archiver from 'archiver'; import * as express from 'express'; -import { Database } from "../database"; -import * as path from "path"; -import { DashUploadUtils, SizeSuffix } from "../DashUploadUtils"; -import { publicDirectory } from ".."; -import { serverPathToFile, Directory } from "./UploadManager"; +import * as path from 'path'; +import { URL } from 'url'; +import { DashUploadUtils, SizeSuffix } from '../DashUploadUtils'; +import { Method } from '../RouteManager'; +import RouteSubscriber from '../RouteSubscriber'; +import { Directory, publicDirectory, serverPathToFile } from '../SocketData'; +import { Database } from '../database'; +import ApiManager, { Registration } from './ApiManager'; export type Hierarchy = { [id: string]: string | Hierarchy }; export type 
ZipMutator = (file: Archiver.Archiver) => void | Promise; @@ -16,147 +16,45 @@ export interface DocumentElements { title: string; } -export default class DownloadManager extends ApiManager { - - protected initialize(register: Registration): void { - - /** - * Let's say someone's using Dash to organize images in collections. - * This lets them export the hierarchy they've built to their - * own file system in a useful format. - * - * This handler starts with a single document id (interesting only - * if it's that of a collection). It traverses the database, captures - * the nesting of only nested images or collections, writes - * that to a zip file and returns it to the client for download. - */ - register({ - method: Method.GET, - subscription: new RouteSubscriber("imageHierarchyExport").add('docId'), - secureHandler: async ({ req, res }) => { - const id = req.params.docId; - const hierarchy: Hierarchy = {}; - await buildHierarchyRecursive(id, hierarchy); - return BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); - } - }); - - register({ - method: Method.GET, - subscription: new RouteSubscriber("downloadId").add("docId"), - secureHandler: async ({ req, res }) => { - return BuildAndDispatchZip(res, async zip => { - const { id, docs, files } = await getDocs(req.params.docId); - const docString = JSON.stringify({ id, docs }); - zip.append(docString, { name: "doc.json" }); - files.forEach(val => { - zip.file(publicDirectory + val, { name: val.substring(1) }); - }); - }); - } - }); - - register({ - method: Method.GET, - subscription: new RouteSubscriber("serializeDoc").add("docId"), - secureHandler: async ({ req, res }) => { - const { docs, files } = await getDocs(req.params.docId); - res.send({ docs, files: Array.from(files) }); - } - }); - - } - -} - -async function getDocs(id: string) { - const files = new Set(); - const docs: { [id: string]: any } = {}; - const fn = (doc: any): string[] => { - const id = doc.id; - if (typeof id === "string" && id.endsWith("Proto")) { - //Skip protos - return []; - } - const ids: string[] = []; - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { continue; } - const field = doc.fields[key]; - if (field === undefined || field === null) { continue; } - - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - ids.push(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - field.captures && ids.push(field.captures.fieldId); - } else if (field.__type === "list") { - ids.push(...fn(field)); - } else if (typeof field === "string") { - const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; - let match: string[] | null; - while ((match = re.exec(field)) !== null) { - ids.push(match[1]); - } - } else if (field.__type === "RichTextField") { - const re = /"href"\s*:\s*"(.*?)"/g; - let match: string[] | null; - while ((match = re.exec(field.Data)) !== null) { - const urlString = match[1]; - const split = new URL(urlString).pathname.split("doc/"); - if (split.length > 1) { - ids.push(split[split.length - 1]); - } - } - const re2 = /"src"\s*:\s*"(.*?)"/g; - while ((match = re2.exec(field.Data)) !== null) { - const urlString = match[1]; - const pathname = new URL(urlString).pathname; - files.add(pathname); - } - } else if (["audio", "image", "video", "pdf", "web", "map"].includes(field.__type)) { - const url = new URL(field.url); - const pathname = url.pathname; - files.add(pathname); - } - } - - if (doc.id) { - docs[doc.id] = doc; - } - return ids; - }; - await 
Database.Instance.visit([id], fn); - return { id, docs, files }; -} - /** - * This utility function factors out the process - * of creating a zip file and sending it back to the client - * by piping it into a response. - * - * Learn more about piping and readable / writable streams here! - * https://www.freecodecamp.org/news/node-js-streams-everything-you-need-to-know-c9141306be93/ - * - * @param res the writable stream response object that will transfer the generated zip file - * @param mutator the callback function used to actually modify and insert information into the zip instance + * This is a very specific utility method to help traverse the database + * to parse data and titles out of images and collections alone. + * + * We don't know if the document id given to is corresponds to a view document or a data + * document. If it's a data document, the response from the database will have + * a data field. If not, call recursively on the proto, and resolve with *its* data + * + * @param targetId the id of the Dash document whose data is being requests + * @returns the data of the document, as well as its title */ -export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMutator): Promise { - res.set('Content-disposition', `attachment;`); - res.set('Content-Type', "application/zip"); - const zip = Archiver('zip'); - zip.pipe(res); - await mutator(zip); - return zip.finalize(); +async function getData(targetId: string): Promise { + return new Promise((resolve, reject) => { + Database.Instance.getDocument(targetId, async (result: any) => { + const { data, proto, title } = result.fields; + if (data) { + if (data.url) { + resolve({ data: data.url, title }); + } else if (data.fields) { + resolve({ data: data.fields, title }); + } else { + reject(); + } + } else if (proto) { + getData(proto.fieldId).then(resolve, reject); + } else { + reject(); + } + }); + }); } /** * This function starts with a single document id as a seed, * typically that of a collection, and then descends the entire tree - * of image or collection documents that are reachable from that seed. + * of image or collection documents that are reachable from that seed. * @param seedId the id of the root of the subtree we're trying to capture, interesting only if it's a collection * @param hierarchy the data structure we're going to use to record the nesting of the collections and images as we descend - */ - -/* + Below is an example of the JSON hierarchy built from two images contained inside a collection titled 'a nested collection', following the general recursive structure shown immediately below { @@ -190,74 +88,175 @@ async function buildHierarchyRecursive(seedId: string, hierarchy: Hierarchy): Pr } /** - * This is a very specific utility method to help traverse the database - * to parse data and titles out of images and collections alone. - * - * We don't know if the document id given to is corresponds to a view document or a data - * document. If it's a data document, the response from the database will have - * a data field. If not, call recursively on the proto, and resolve with *its* data - * - * @param targetId the id of the Dash document whose data is being requests - * @returns the data of the document, as well as its title + * This utility function factors out the process + * of creating a zip file and sending it back to the client + * by piping it into a response. + * + * Learn more about piping and readable / writable streams here! 
+ * https://www.freecodecamp.org/news/node-js-streams-everything-you-need-to-know-c9141306be93/ + * + * @param res the writable stream response object that will transfer the generated zip file + * @param mutator the callback function used to actually modify and insert information into the zip instance */ -async function getData(targetId: string): Promise { - return new Promise((resolve, reject) => { - Database.Instance.getDocument(targetId, async (result: any) => { - const { data, proto, title } = result.fields; - if (data) { - if (data.url) { - resolve({ data: data.url, title }); - } else if (data.fields) { - resolve({ data: data.fields, title }); - } else { - reject(); - } - } else if (proto) { - getData(proto.fieldId).then(resolve, reject); - } else { - reject(); - } - }); - }); +export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMutator): Promise { + res.set('Content-disposition', `attachment;`); + res.set('Content-Type', 'application/zip'); + const zip = Archiver('zip'); + zip.pipe(res); + await mutator(zip); + return zip.finalize(); } /** - * + * * @param file the zip file to which we write the files * @param hierarchy the data structure from which we read, defining the nesting of the documents in the zip * @param prefix lets us create nested folders in the zip file by continually appending to the end * of the prefix with each layer of recursion. - * + * * Function Call #1 => "Dash Export" * Function Call #2 => "Dash Export/a nested collection" * Function Call #3 => "Dash Export/a nested collection/lowest level collection" * ... */ -async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise { - for (const documentTitle of Object.keys(hierarchy)) { - const result = hierarchy[documentTitle]; - // base case or leaf node, we've hit a url (image) - if (typeof result === "string") { - let path: string; - let matches: RegExpExecArray | null; - if ((matches = /\:\d+\/files\/images\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { - // image already exists on our server - path = serverPathToFile(Directory.images, matches[1]); +async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hierarchy, prefix = 'Dash Export'): Promise { + // eslint-disable-next-line no-restricted-syntax + for (const documentTitle in hierarchy) { + if (Object.prototype.hasOwnProperty.call(hierarchy, documentTitle)) { + const result = hierarchy[documentTitle]; + // base case or leaf node, we've hit a url (image) + if (typeof result === 'string') { + let fPath: string; + const matches = /:\d+\/files\/images\/(upload_[\da-z]{32}.*)/g.exec(result); + if (matches !== null) { + // image already exists on our server + fPath = serverPathToFile(Directory.images, matches[1]); + } else { + // the image doesn't already exist on our server (may have been dragged + // and dropped in the browser and thus hosted remotely) so we upload it + // to our server and point the zip file to it, so it can bundle up the bytes + // eslint-disable-next-line no-await-in-loop + const information = await DashUploadUtils.UploadImage(result); + fPath = information instanceof Error ? '' : information.accessPaths[SizeSuffix.Original].server; + } + // write the file specified by the path to the directory in the + // zip file given by the prefix. 
+ if (fPath) { + file.file(fPath, { name: documentTitle, prefix }); + } } else { - // the image doesn't already exist on our server (may have been dragged - // and dropped in the browser and thus hosted remotely) so we upload it - // to our server and point the zip file to it, so it can bundle up the bytes - const information = await DashUploadUtils.UploadImage(result); - path = information instanceof Error ? "" : information.accessPaths[SizeSuffix.Original].server; + // we've hit a collection, so we have to recurse + // eslint-disable-next-line no-await-in-loop + await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`); } - // write the file specified by the path to the directory in the - // zip file given by the prefix. - if (path) { - file.file(path, { name: documentTitle, prefix }); + } + } +} + +async function getDocs(id: string) { + const files = new Set(); + const docs: { [id: string]: any } = {}; + const fn = (doc: any): string[] => { + const { id } = doc; + if (typeof id === 'string' && id.endsWith('Proto')) { + // Skip protos + return []; + } + const ids: string[] = []; + // eslint-disable-next-line no-restricted-syntax + for (const key in doc.fields) { + // eslint-disable-next-line no-continue + if (!Object.prototype.hasOwnProperty.call(doc.fields, key)) continue; + + const field = doc.fields[key]; + // eslint-disable-next-line no-continue + if (field === undefined || field === null) continue; + + if (field.__type === 'proxy' || field.__type === 'prefetch_proxy') { + ids.push(field.fieldId); + } else if (field.__type === 'script' || field.__type === 'computed') { + field.captures && ids.push(field.captures.fieldId); + } else if (field.__type === 'list') { + ids.push(...fn(field)); + } else if (typeof field === 'string') { + const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w-]*)"/g; + for (let match = re.exec(field); match !== null; match = re.exec(field)) { + ids.push(match[1]); + } + } else if (field.__type === 'RichTextField') { + const re = /"href"\s*:\s*"(.*?)"/g; + for (let match = re.exec(field.data); match !== null; match = re.exec(field.Data)) { + const urlString = match[1]; + const split = new URL(urlString).pathname.split('doc/'); + if (split.length > 1) { + ids.push(split[split.length - 1]); + } + } + const re2 = /"src"\s*:\s*"(.*?)"/g; + for (let match = re2.exec(field.Data); match !== null; match = re2.exec(field.Data)) { + const urlString = match[1]; + const { pathname } = new URL(urlString); + files.add(pathname); + } + } else if (['audio', 'image', 'video', 'pdf', 'web', 'map'].includes(field.__type)) { + const { pathname } = new URL(field.url); + files.add(pathname); } - } else { - // we've hit a collection, so we have to recurse - await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`); } + + if (doc.id) { + docs[doc.id] = doc; + } + return ids; + }; + await Database.Instance.visit([id], fn); + return { id, docs, files }; +} + +export default class DownloadManager extends ApiManager { + protected initialize(register: Registration): void { + /** + * Let's say someone's using Dash to organize images in collections. + * This lets them export the hierarchy they've built to their + * own file system in a useful format. + * + * This handler starts with a single document id (interesting only + * if it's that of a collection). It traverses the database, captures + * the nesting of only nested images or collections, writes + * that to a zip file and returns it to the client for download. 
+ */ + register({ + method: Method.GET, + subscription: new RouteSubscriber('imageHierarchyExport').add('docId'), + secureHandler: async ({ req, res }) => { + const id = req.params.docId; + const hierarchy: Hierarchy = {}; + await buildHierarchyRecursive(id, hierarchy); + return BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); + }, + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber('downloadId').add('docId'), + secureHandler: async ({ req, res }) => + BuildAndDispatchZip(res, async zip => { + const { id, docs, files } = await getDocs(req.params.docId); + const docString = JSON.stringify({ id, docs }); + zip.append(docString, { name: 'doc.json' }); + files.forEach(val => { + zip.file(publicDirectory + val, { name: val.substring(1) }); + }); + }), + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber('serializeDoc').add('docId'), + secureHandler: async ({ req, res }) => { + const { docs, files } = await getDocs(req.params.docId); + res.send({ docs, files: Array.from(files) }); + }, + }); } -} \ No newline at end of file +} diff --git a/src/server/ApiManagers/GeneralGoogleManager.ts b/src/server/ApiManagers/GeneralGoogleManager.ts index f94b77cac..12913b1ef 100644 --- a/src/server/ApiManagers/GeneralGoogleManager.ts +++ b/src/server/ApiManagers/GeneralGoogleManager.ts @@ -1,51 +1,49 @@ -import ApiManager, { Registration } from "./ApiManager"; -import { Method, _permission_denied } from "../RouteManager"; -import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; -import RouteSubscriber from "../RouteSubscriber"; -import { Database } from "../database"; +import ApiManager, { Registration } from './ApiManager'; +import { Method } from '../RouteManager'; +import { GoogleApiServerUtils } from '../apis/google/GoogleApiServerUtils'; +import RouteSubscriber from '../RouteSubscriber'; +import { Database } from '../database'; const EndpointHandlerMap = new Map([ - ["create", (api, params) => api.create(params)], - ["retrieve", (api, params) => api.get(params)], - ["update", (api, params) => api.batchUpdate(params)], + ['create', (api, params) => api.create(params)], + ['retrieve', (api, params) => api.get(params)], + ['update', (api, params) => api.batchUpdate(params)], ]); export default class GeneralGoogleManager extends ApiManager { - protected initialize(register: Registration): void { - register({ method: Method.GET, - subscription: "/readGoogleAccessToken", + subscription: '/readGoogleAccessToken', secureHandler: async ({ user, res }) => { - const { credentials } = (await GoogleApiServerUtils.retrieveCredentials(user.id)); + const { credentials } = await GoogleApiServerUtils.retrieveCredentials(user.id); if (!credentials?.access_token) { return res.send(GoogleApiServerUtils.generateAuthenticationUrl()); } return res.send(credentials); - } + }, }); register({ method: Method.POST, - subscription: "/writeGoogleAccessToken", + subscription: '/writeGoogleAccessToken', secureHandler: async ({ user, req, res }) => { res.send(await GoogleApiServerUtils.processNewUser(user.id, req.body.authenticationCode)); - } + }, }); register({ method: Method.GET, - subscription: "/revokeGoogleAccessToken", + subscription: '/revokeGoogleAccessToken', secureHandler: async ({ user, res }) => { await Database.Auxiliary.GoogleAccessToken.Revoke(user.id); res.send(); - } + }, }); register({ method: Method.POST, - subscription: new RouteSubscriber("googleDocs").add("sector", "action"), + subscription: new 
RouteSubscriber('googleDocs').add('sector', 'action'), secureHandler: async ({ req, res, user }) => { const sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; const action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; @@ -61,8 +59,7 @@ export default class GeneralGoogleManager extends ApiManager { return; } res.send(undefined); - } + }, }); - } -} \ No newline at end of file +} diff --git a/src/server/ApiManagers/MongoStore.js b/src/server/ApiManagers/MongoStore.js new file mode 100644 index 000000000..28515fee4 --- /dev/null +++ b/src/server/ApiManagers/MongoStore.js @@ -0,0 +1,414 @@ +'use strict'; +var __createBinding = + (this && this.__createBinding) || + (Object.create + ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ('get' in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { + enumerable: true, + get: function () { + return m[k]; + }, + }; + } + Object.defineProperty(o, k2, desc); + } + : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); +var __setModuleDefault = + (this && this.__setModuleDefault) || + (Object.create + ? function (o, v) { + Object.defineProperty(o, 'default', { enumerable: true, value: v }); + } + : function (o, v) { + o['default'] = v; + }); +var __importStar = + (this && this.__importStar) || + function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== 'default' && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; +var __importDefault = + (this && this.__importDefault) || + function (mod) { + return mod && mod.__esModule ? mod : { default: mod }; + }; +Object.defineProperty(exports, '__esModule', { value: true }); +const console_1 = require('console'); +const util_1 = __importDefault(require('util')); +const session = __importStar(require('express-session')); +const mongodb_1 = require('mongodb'); +const debug_1 = __importDefault(require('debug')); +const debug = (0, debug_1.default)('connect-mongo'); +// eslint-disable-next-line @typescript-eslint/no-empty-function +const noop = () => {}; +const unit = a => a; +function defaultSerializeFunction(session) { + // Copy each property of the session to a new object + const obj = {}; + let prop; + for (prop in session) { + if (prop === 'cookie') { + // Convert the cookie instance to an object, if possible + // This gets rid of the duplicate object under session.cookie.data property + // @ts-ignore FIXME: + obj.cookie = session.cookie.toJSON + ? 
// @ts-ignore FIXME: + session.cookie.toJSON() + : session.cookie; + } else { + // @ts-ignore FIXME: + obj[prop] = session[prop]; + } + } + return obj; +} +function computeTransformFunctions(options) { + if (options.serialize || options.unserialize) { + return { + serialize: options.serialize || defaultSerializeFunction, + unserialize: options.unserialize || unit, + }; + } + if (options.stringify === false) { + return { + serialize: defaultSerializeFunction, + unserialize: unit, + }; + } + // Default case + return { + serialize: JSON.stringify, + unserialize: JSON.parse, + }; +} +class MongoStore extends session.Store { + constructor({ collectionName = 'sessions', ttl = 1209600, mongoOptions = {}, autoRemove = 'native', autoRemoveInterval = 10, touchAfter = 0, stringify = true, crypto, ...required }) { + super(); + this.crypto = null; + debug('create MongoStore instance'); + const options = { + collectionName, + ttl, + mongoOptions, + autoRemove, + autoRemoveInterval, + touchAfter, + stringify, + crypto: { + ...{ + secret: false, + algorithm: 'aes-256-gcm', + hashing: 'sha512', + encodeas: 'base64', + key_size: 32, + iv_size: 16, + at_size: 16, + }, + ...crypto, + }, + ...required, + }; + // Check params + (0, console_1.assert)(options.mongoUrl || options.clientPromise || options.client, 'You must provide either mongoUrl|clientPromise|client in options'); + (0, console_1.assert)(options.createAutoRemoveIdx === null || options.createAutoRemoveIdx === undefined, 'options.createAutoRemoveIdx has been reverted to autoRemove and autoRemoveInterval'); + (0, console_1.assert)(!options.autoRemoveInterval || options.autoRemoveInterval <= 71582, /* (Math.pow(2, 32) - 1) / (1000 * 60) */ 'autoRemoveInterval is too large. options.autoRemoveInterval is in minutes but not seconds nor mills'); + this.transformFunctions = computeTransformFunctions(options); + let _clientP; + if (options.mongoUrl) { + _clientP = mongodb_1.MongoClient.connect(options.mongoUrl, options.mongoOptions); + } else if (options.clientPromise) { + _clientP = options.clientPromise; + } else if (options.client) { + _clientP = Promise.resolve(options.client); + } else { + throw new Error('Cannot init client. 
Please provide correct options'); + } + (0, console_1.assert)(!!_clientP, 'Client is null|undefined'); + this.clientP = _clientP; + this.options = options; + this.collectionP = _clientP.then(async con => { + const collection = con.db(options.dbName).collection(options.collectionName); + await this.setAutoRemove(collection); + return collection; + }); + if (options.crypto.secret) { + this.crypto = require('kruptein')(options.crypto); + } + } + static create(options) { + return new MongoStore(options); + } + setAutoRemove(collection) { + const removeQuery = () => ({ + expires: { + $lt: new Date(), + }, + }); + switch (this.options.autoRemove) { + case 'native': + debug('Creating MongoDB TTL index'); + return collection.createIndex( + { expires: 1 }, + { + background: true, + expireAfterSeconds: 0, + } + ); + case 'interval': + debug('create Timer to remove expired sessions'); + this.timer = setInterval( + () => + collection.deleteMany(removeQuery(), { + writeConcern: { + w: 0, + j: false, + }, + }), + this.options.autoRemoveInterval * 1000 * 60 + ); + this.timer.unref(); + return Promise.resolve(); + case 'disabled': + default: + return Promise.resolve(); + } + } + computeStorageId(sessionId) { + if (this.options.transformId && typeof this.options.transformId === 'function') { + return this.options.transformId(sessionId); + } + return sessionId; + } + /** + * promisify and bind the `this.crypto.get` function. + * Please check !!this.crypto === true before using this getter! + */ + get cryptoGet() { + if (!this.crypto) { + throw new Error('Check this.crypto before calling this.cryptoGet!'); + } + return util_1.default.promisify(this.crypto.get).bind(this.crypto); + } + /** + * Decrypt given session data + * @param session session data to be decrypt. Mutate the input session. + */ + async decryptSession(session) { + if (this.crypto && session) { + const plaintext = await this.cryptoGet(this.options.crypto.secret, session.session).catch(err => { + throw new Error(err); + }); + // @ts-ignore + session.session = JSON.parse(plaintext); + } + } + /** + * Get a session from the store given a session ID (sid) + * @param sid session ID + */ + get(sid, callback) { + (async () => { + try { + debug(`MongoStore#get=${sid}`); + const collection = await this.collectionP; + const session = await collection.findOne({ + _id: this.computeStorageId(sid), + $or: [{ expires: { $exists: false } }, { expires: { $gt: new Date() } }], + }); + if (this.crypto && session) { + await this.decryptSession(session).catch(err => callback(err)); + } + const s = session && this.transformFunctions.unserialize(session.session); + if (this.options.touchAfter > 0 && (session === null || session === void 0 ? void 0 : session.lastModified)) { + s.lastModified = session.lastModified; + } + this.emit('get', sid); + callback(null, s === undefined ? null : s); + } catch (error) { + callback(error); + } + })(); + } + /** + * Upsert a session into the store given a session ID (sid) and session (session) object. + * @param sid session ID + * @param session session object + */ + set(sid, session, callback = noop) { + (async () => { + var _a; + try { + debug(`MongoStore#set=${sid}`); + // Removing the lastModified prop from the session object before update + // @ts-ignore + if (this.options.touchAfter > 0 && (session === null || session === void 0 ? 
void 0 : session.lastModified)) { + // @ts-ignore + delete session.lastModified; + } + const s = { + _id: this.computeStorageId(sid), + session: this.transformFunctions.serialize(session), + }; + // Expire handling + if ((_a = session === null || session === void 0 ? void 0 : session.cookie) === null || _a === void 0 ? void 0 : _a.expires) { + s.expires = new Date(session.cookie.expires); + } else { + // If there's no expiration date specified, it is + // browser-session cookie or there is no cookie at all, + // as per the connect docs. + // + // So we set the expiration to two-weeks from now + // - as is common practice in the industry (e.g Django) - + // or the default specified in the options. + s.expires = new Date(Date.now() + this.options.ttl * 1000); + } + // Last modify handling + if (this.options.touchAfter > 0) { + s.lastModified = new Date(); + } + if (this.crypto) { + const cryptoSet = util_1.default.promisify(this.crypto.set).bind(this.crypto); + const data = await cryptoSet(this.options.crypto.secret, s.session).catch(err => { + throw new Error(err); + }); + s.session = data; + } + const collection = await this.collectionP; + const rawResp = await collection.updateOne( + { _id: s._id }, + { $set: s }, + { + upsert: true, + writeConcern: this.options.writeOperationOptions, + } + ); + if (rawResp.upsertedCount > 0) { + this.emit('create', sid); + } else { + this.emit('update', sid); + } + this.emit('set', sid); + } catch (error) { + return callback(error); + } + return callback(null); + })(); + } + touch(sid, session, callback = noop) { + (async () => { + var _a; + try { + debug(`MongoStore#touch=${sid}`); + const updateFields = {}; + const touchAfter = this.options.touchAfter * 1000; + const lastModified = session.lastModified ? session.lastModified.getTime() : 0; + const currentDate = new Date(); + // If the given options has a touchAfter property, check if the + // current timestamp - lastModified timestamp is bigger than + // the specified, if it's not, don't touch the session + if (touchAfter > 0 && lastModified > 0) { + const timeElapsed = currentDate.getTime() - lastModified; + if (timeElapsed < touchAfter) { + debug(`Skip touching session=${sid}`); + return callback(null); + } + updateFields.lastModified = currentDate; + } + if ((_a = session === null || session === void 0 ? void 0 : session.cookie) === null || _a === void 0 ? 
void 0 : _a.expires) { + updateFields.expires = new Date(session.cookie.expires); + } else { + updateFields.expires = new Date(Date.now() + this.options.ttl * 1000); + } + const collection = await this.collectionP; + const rawResp = await collection.updateOne({ _id: this.computeStorageId(sid) }, { $set: updateFields }, { writeConcern: this.options.writeOperationOptions }); + if (rawResp.matchedCount === 0) { + return callback(new Error('Unable to find the session to touch')); + } else { + this.emit('touch', sid, session); + return callback(null); + } + } catch (error) { + return callback(error); + } + })(); + } + /** + * Get all sessions in the store as an array + */ + all(callback) { + (async () => { + try { + debug('MongoStore#all()'); + const collection = await this.collectionP; + const sessions = collection.find({ + $or: [{ expires: { $exists: false } }, { expires: { $gt: new Date() } }], + }); + const results = []; + for await (const session of sessions) { + if (this.crypto && session) { + await this.decryptSession(session); + } + results.push(this.transformFunctions.unserialize(session.session)); + } + this.emit('all', results); + callback(null, results); + } catch (error) { + callback(error); + } + })(); + } + /** + * Destroy/delete a session from the store given a session ID (sid) + * @param sid session ID + */ + destroy(sid, callback = noop) { + debug(`MongoStore#destroy=${sid}`); + this.collectionP + .then(colleciton => colleciton.deleteOne({ _id: this.computeStorageId(sid) }, { writeConcern: this.options.writeOperationOptions })) + .then(() => { + this.emit('destroy', sid); + callback(null); + }) + .catch(err => callback(err)); + } + /** + * Get the count of all sessions in the store + */ + length(callback) { + debug('MongoStore#length()'); + this.collectionP + .then(collection => collection.countDocuments()) + .then(c => callback(null, c)) + // @ts-ignore + .catch(err => callback(err)); + } + /** + * Delete all sessions from the store. 
+ */ + clear(callback = noop) { + debug('MongoStore#clear()'); + this.collectionP + .then(collection => collection.drop()) + .then(() => callback(null)) + .catch(err => callback(err)); + } + /** + * Close database connection + */ + close() { + debug('MongoStore#close()'); + return this.clientP.then(c => c.close()); + } +} +exports.default = MongoStore; +//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"MongoStore.js","sourceRoot":"","sources":["../../../src/lib/MongoStore.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,qCAAgC;AAChC,gDAAuB;AACvB,yDAA0C;AAC1C,qCAKgB;AAChB,kDAAyB;AAGzB,MAAM,KAAK,GAAG,IAAA,eAAK,EAAC,eAAe,CAAC,CAAA;AAgEpC,gEAAgE;AAChE,MAAM,IAAI,GAAG,GAAG,EAAE,GAAE,CAAC,CAAA;AACrB,MAAM,IAAI,GAAmB,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;AAErC,SAAS,wBAAwB,CAC/B,OAA4B;IAE5B,oDAAoD;IACpD,MAAM,GAAG,GAAG,EAAE,CAAA;IACd,IAAI,IAAI,CAAA;IACR,KAAK,IAAI,IAAI,OAAO,EAAE;QACpB,IAAI,IAAI,KAAK,QAAQ,EAAE;YACrB,wDAAwD;YACxD,2EAA2E;YAC3E,oBAAoB;YACpB,GAAG,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM;gBAChC,CAAC,CAAC,oBAAoB;oBACpB,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE;gBACzB,CAAC,CAAC,OAAO,CAAC,MAAM,CAAA;SACnB;aAAM;YACL,oBAAoB;YACpB,GAAG,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;SAC1B;KACF;IAED,OAAO,GAA0B,CAAA;AACnC,CAAC;AAED,SAAS,yBAAyB,CAAC,OAAmC;IACpE,IAAI,OAAO,CAAC,SAAS,IAAI,OAAO,CAAC,WAAW,EAAE;QAC5C,OAAO;YACL,SAAS,EAAE,OAAO,CAAC,SAAS,IAAI,wBAAwB;YACxD,WAAW,EAAE,OAAO,CAAC,WAAW,IAAI,IAAI;SACzC,CAAA;KACF;IAED,IAAI,OAAO,CAAC,SAAS,KAAK,KAAK,EAAE;QAC/B,OAAO;YACL,SAAS,EAAE,wBAAwB;YACnC,WAAW,EAAE,IAAI;SAClB,CAAA;KACF;IACD,eAAe;IACf,OAAO;QACL,SAAS,EAAE,IAAI,CAAC,SAAS;QACzB,WAAW,EAAE,IAAI,CAAC,KAAK;KACxB,CAAA;AACH,CAAC;AAED,MAAqB,UAAW,SAAQ,OAAO,CAAC,KAAK;IAYnD,YAAY,EACV,cAAc,GAAG,UAAU,EAC3B,GAAG,GAAG,OAAO,EACb,YAAY,GAAG,EAAE,EACjB,UAAU,GAAG,QAAQ,EACrB,kBAAkB,GAAG,EAAE,EACvB,UAAU,GAAG,CAAC,EACd,SAAS,GAAG,IAAI,EAChB,MAAM,EACN,GAAG,QAAQ,EACS;QACpB,KAAK,EAAE,CAAA;QArBD,WAAM,GAAoB,IAAI,CAAA;QAsBpC,KAAK,CAAC,4BAA4B,CAAC,CAAA;QACnC,MAAM,OAAO,GAA+B;YAC1C,cAAc;YACd,GAAG;YACH,YAAY;YACZ,UAAU;YACV,kBAAkB;YAClB,UAAU;YACV,SAAS;YACT,MAAM,EAAE;gBACN,GAAG;oBACD,MAAM,EAAE,KAAK;oBACb,SAAS,EAAE,aAAa;oBACxB,OAAO,EAAE,QAAQ;oBACjB,QAAQ,EAAE,QAAQ;oBAClB,QAAQ,EAAE,EAAE;oBACZ,OAAO,EAAE,EAAE;oBACX,OAAO,EAAE,EAAE;iBACZ;gBACD,GAAG,MAAM;aACV;YACD,GAAG,QAAQ;SACZ,CAAA;QACD,eAAe;QACf,IAAA,gBAAM,EACJ,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC,aAAa,IAAI,OAAO,CAAC,MAAM,EAC3D,kEAAkE,CACnE,CAAA;QACD,IAAA,gBAAM,EACJ,OAAO,CAAC,mBAAmB,KAAK,IAAI;YAClC,OAAO,CAAC,mBAAmB,KAAK,SAAS,EAC3C,oFAAoF,CACrF,CAAA;QACD,IAAA,gBAAM,EACJ,CAAC,OAAO,CAAC,kBAAkB,IAAI,OAAO,CAAC,kBAAkB,IAAI,KAAK;QAClE,yCAAyC,CAAC,qGAAqG,CAChJ,CAAA;QACD,IAAI,CAAC,kBAAkB,GAAG,yBAAyB,CAAC,OAAO,CAAC,CAAA;QAC5D,IAAI,QAA8B,CAAA;QAClC,IAAI,OAAO,CAAC,QAAQ,EAAE;YACpB,QAAQ,GAAG,qBAAW,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,EAAE,OAAO,CAAC,YAAY,CAAC,CAAA;SACvE;aAAM,IAAI,OAAO,CAAC,aAAa,EAAE;YAChC,QAAQ,GAAG,OAAO,CAAC,aAAa,CAAA;SACjC;aAAM,IAAI,OAAO,CAAC,MAAM,EAAE;YACzB,QAAQ,GAAG,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAA;SAC3C;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAA;SACtE;QACD,IAAA,gBAAM,EAAC,CAAC,CAAC,QAAQ,EAAE,0BAA0B,CAAC,CAAA;QAC9C,IAAI,CAAC,OAAO,GAAG,QAAQ,CAAA;QACvB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,WAAW,GAAG,QAAQ,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE;YAC7C,MAAM,UAAU,GAAG,GAAG;iBACnB,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC;iBAClB,UAAU,CAAsB,OAAO,CAAC,cAAc,CAAC,CAAA;YAC1D,MAAM,IAAI,CAAC,aAAa,CAAC,UAAU,CAAC,CAAA;YACpC,OAAO,UAAU,CAAA;QACnB,CAAC,CAAC,CAAA;QACF,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE;YACzB,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAA;SAClD;IACH,CAAC;IAE
D,MAAM,CAAC,MAAM,CAAC,OAA4B;QACxC,OAAO,IAAI,UAAU,CAAC,OAAO,CAAC,CAAA;IAChC,CAAC;IAEO,aAAa,CACnB,UAA2C;QAE3C,MAAM,WAAW,GAAG,GAAG,EAAE,CAAC,CAAC;YACzB,OAAO,EAAE;gBACP,GAAG,EAAE,IAAI,IAAI,EAAE;aAChB;SACF,CAAC,CAAA;QACF,QAAQ,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YAC/B,KAAK,QAAQ;gBACX,KAAK,CAAC,4BAA4B,CAAC,CAAA;gBACnC,OAAO,UAAU,CAAC,WAAW,CAC3B,EAAE,OAAO,EAAE,CAAC,EAAE,EACd;oBACE,UAAU,EAAE,IAAI;oBAChB,kBAAkB,EAAE,CAAC;iBACtB,CACF,CAAA;YACH,KAAK,UAAU;gBACb,KAAK,CAAC,yCAAyC,CAAC,CAAA;gBAChD,IAAI,CAAC,KAAK,GAAG,WAAW,CACtB,GAAG,EAAE,CACH,UAAU,CAAC,UAAU,CAAC,WAAW,EAAE,EAAE;oBACnC,YAAY,EAAE;wBACZ,CAAC,EAAE,CAAC;wBACJ,CAAC,EAAE,KAAK;qBACT;iBACF,CAAC,EACJ,IAAI,CAAC,OAAO,CAAC,kBAAkB,GAAG,IAAI,GAAG,EAAE,CAC5C,CAAA;gBACD,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,CAAA;gBAClB,OAAO,OAAO,CAAC,OAAO,EAAE,CAAA;YAC1B,KAAK,UAAU,CAAC;YAChB;gBACE,OAAO,OAAO,CAAC,OAAO,EAAE,CAAA;SAC3B;IACH,CAAC;IAEO,gBAAgB,CAAC,SAAiB;QACxC,IACE,IAAI,CAAC,OAAO,CAAC,WAAW;YACxB,OAAO,IAAI,CAAC,OAAO,CAAC,WAAW,KAAK,UAAU,EAC9C;YACA,OAAO,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;SAC3C;QACD,OAAO,SAAS,CAAA;IAClB,CAAC;IAED;;;OAGG;IACH,IAAY,SAAS;QACnB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;SACpE;QACD,OAAO,cAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IAC1D,CAAC;IAED;;;OAGG;IACK,KAAK,CAAC,cAAc,CAC1B,OAA+C;QAE/C,IAAI,IAAI,CAAC,MAAM,IAAI,OAAO,EAAE;YAC1B,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CACpC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,MAAgB,EACpC,OAAO,CAAC,OAAO,CAChB,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,GAAG,CAAC,CAAA;YACtB,CAAC,CAAC,CAAA;YACF,aAAa;YACb,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAA;SACxC;IACH,CAAC;IAED;;;OAGG;IACH,GAAG,CACD,GAAW,EACX,QAAkE;QAElE,CAAC;QAAA,CAAC,KAAK,IAAI,EAAE;YACX,IAAI;gBACF,KAAK,CAAC,kBAAkB,GAAG,EAAE,CAAC,CAAA;gBAC9B,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,WAAW,CAAA;gBACzC,MAAM,OAAO,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC;oBACvC,GAAG,EAAE,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC;oBAC/B,GAAG,EAAE;wBACH,EAAE,OAAO,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,EAAE;wBAC/B,EAAE,OAAO,EAAE,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE,EAAE,EAAE;qBACjC;iBACF,CAAC,CAAA;gBACF,IAAI,IAAI,CAAC,MAAM,IAAI,OAAO,EAAE;oBAC1B,MAAM,IAAI,CAAC,cAAc,CACvB,OAAyC,CAC1C,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAA;iBAChC;gBACD,MAAM,CAAC,GACL,OAAO,IAAI,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;gBACjE,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,GAAG,CAAC,KAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,YAAY,CAAA,EAAE;oBACxD,CAAC,CAAC,YAAY,GAAG,OAAO,CAAC,YAAY,CAAA;iBACtC;gBACD,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,CAAC,CAAA;gBACrB,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;aAC3C;YAAC,OAAO,KAAK,EAAE;gBACd,QAAQ,CAAC,KAAK,CAAC,CAAA;aAChB;QACH,CAAC,CAAC,EAAE,CAAA;IACN,CAAC;IAED;;;;OAIG;IACH,GAAG,CACD,GAAW,EACX,OAA4B,EAC5B,WAA+B,IAAI;QAEnC,CAAC;QAAA,CAAC,KAAK,IAAI,EAAE;;YACX,IAAI;gBACF,KAAK,CAAC,kBAAkB,GAAG,EAAE,CAAC,CAAA;gBAC9B,uEAAuE;gBACvE,aAAa;gBACb,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,GAAG,CAAC,KAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,YAAY,CAAA,EAAE;oBACxD,aAAa;oBACb,OAAO,OAAO,CAAC,YAAY,CAAA;iBAC5B;gBACD,MAAM,CAAC,GAAwB;oBAC7B,GAAG,EAAE,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC;oBAC/B,OAAO,EAAE,IAAI,CAAC,kBAAkB,CAAC,SAAS,CAAC,OAAO,CAAC;iBACpD,CAAA;gBACD,kBAAkB;gBAClB,IAAI,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,0CAAE,OAAO,EAAE;oBAC5B,CAAC,CAAC,OAAO,GAAG,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;iBAC7C;qBAAM;oBACL,iDAAiD;oBACjD,uDAAuD;oBACvD,2BAA2B;oBAC3B,EAAE;oBACF,iDAAiD;oBACjD,yDAAyD;oBACzD,2CAA2C;oBAC3C,CAAC,CAAC,OAAO,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC,GAA
G,GAAG,IAAI,CAAC,CAAA;iBAC3D;gBACD,uBAAuB;gBACvB,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,GAAG,CAAC,EAAE;oBAC/B,CAAC,CAAC,YAAY,GAAG,IAAI,IAAI,EAAE,CAAA;iBAC5B;gBACD,IAAI,IAAI,CAAC,MAAM,EAAE;oBACf,MAAM,SAAS,GAAG,cAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;oBACnE,MAAM,IAAI,GAAG,MAAM,SAAS,CAC1B,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,MAAgB,EACpC,CAAC,CAAC,OAAO,CACV,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;wBACd,MAAM,IAAI,KAAK,CAAC,GAAG,CAAC,CAAA;oBACtB,CAAC,CAAC,CAAA;oBACF,CAAC,CAAC,OAAO,GAAG,IAAsC,CAAA;iBACnD;gBACD,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,WAAW,CAAA;gBACzC,MAAM,OAAO,GAAG,MAAM,UAAU,CAAC,SAAS,CACxC,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,EAAE,EACd,EAAE,IAAI,EAAE,CAAC,EAAE,EACX;oBACE,MAAM,EAAE,IAAI;oBACZ,YAAY,EAAE,IAAI,CAAC,OAAO,CAAC,qBAAqB;iBACjD,CACF,CAAA;gBACD,IAAI,OAAO,CAAC,aAAa,GAAG,CAAC,EAAE;oBAC7B,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;iBACzB;qBAAM;oBACL,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;iBACzB;gBACD,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,CAAC,CAAA;aACtB;YAAC,OAAO,KAAK,EAAE;gBACd,OAAO,QAAQ,CAAC,KAAK,CAAC,CAAA;aACvB;YACD,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAA;QACvB,CAAC,CAAC,EAAE,CAAA;IACN,CAAC;IAED,KAAK,CACH,GAAW,EACX,OAAsD,EACtD,WAA+B,IAAI;QAEnC,CAAC;QAAA,CAAC,KAAK,IAAI,EAAE;;YACX,IAAI;gBACF,KAAK,CAAC,oBAAoB,GAAG,EAAE,CAAC,CAAA;gBAChC,MAAM,YAAY,GAId,EAAE,CAAA;gBACN,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,GAAG,IAAI,CAAA;gBACjD,MAAM,YAAY,GAAG,OAAO,CAAC,YAAY;oBACvC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE;oBAChC,CAAC,CAAC,CAAC,CAAA;gBACL,MAAM,WAAW,GAAG,IAAI,IAAI,EAAE,CAAA;gBAE9B,+DAA+D;gBAC/D,4DAA4D;gBAC5D,sDAAsD;gBACtD,IAAI,UAAU,GAAG,CAAC,IAAI,YAAY,GAAG,CAAC,EAAE;oBACtC,MAAM,WAAW,GAAG,WAAW,CAAC,OAAO,EAAE,GAAG,YAAY,CAAA;oBACxD,IAAI,WAAW,GAAG,UAAU,EAAE;wBAC5B,KAAK,CAAC,yBAAyB,GAAG,EAAE,CAAC,CAAA;wBACrC,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAA;qBACtB;oBACD,YAAY,CAAC,YAAY,GAAG,WAAW,CAAA;iBACxC;gBAED,IAAI,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,0CAAE,OAAO,EAAE;oBAC5B,YAAY,CAAC,OAAO,GAAG,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;iBACxD;qBAAM;oBACL,YAAY,CAAC,OAAO,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,GAAG,IAAI,CAAC,CAAA;iBACtE;gBACD,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,WAAW,CAAA;gBACzC,MAAM,OAAO,GAAG,MAAM,UAAU,CAAC,SAAS,CACxC,EAAE,GAAG,EAAE,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,EAAE,EACnC,EAAE,IAAI,EAAE,YAAY,EAAE,EACtB,EAAE,YAAY,EAAE,IAAI,CAAC,OAAO,CAAC,qBAAqB,EAAE,CACrD,CAAA;gBACD,IAAI,OAAO,CAAC,YAAY,KAAK,CAAC,EAAE;oBAC9B,OAAO,QAAQ,CAAC,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC,CAAA;iBAClE;qBAAM;oBACL,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,CAAA;oBAChC,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAA;iBACtB;aACF;YAAC,OAAO,KAAK,EAAE;gBACd,OAAO,QAAQ,CAAC,KAAK,CAAC,CAAA;aACvB;QACH,CAAC,CAAC,EAAE,CAAA;IACN,CAAC;IAED;;OAEG;IACH,GAAG,CACD,QAMS;QAET,CAAC;QAAA,CAAC,KAAK,IAAI,EAAE;YACX,IAAI;gBACF,KAAK,CAAC,kBAAkB,CAAC,CAAA;gBACzB,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,WAAW,CAAA;gBACzC,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC;oBAC/B,GAAG,EAAE;wBACH,EAAE,OAAO,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,EAAE;wBAC/B,EAAE,OAAO,EAAE,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE,EAAE,EAAE;qBACjC;iBACF,CAAC,CAAA;gBACF,MAAM,OAAO,GAA0B,EAAE,CAAA;gBACzC,IAAI,KAAK,EAAE,MAAM,OAAO,IAAI,QAAQ,EAAE;oBACpC,IAAI,IAAI,CAAC,MAAM,IAAI,OAAO,EAAE;wBAC1B,MAAM,IAAI,CAAC,cAAc,CAAC,OAAyC,CAAC,CAAA;qBACrE;oBACD,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAA;iBACnE;gBACD,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAA;gBACzB,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;aACxB;YAAC,OAAO,KAAK,EAAE;gBACd,QAAQ,CAAC,KAAK,CAAC,CAAA;aAChB;QACH,CAAC,CAAC,EAAE,CAAA;IACN,CAAC;IAED;;;OAGG;IACH,OAAO,CAAC,GAAW,EAAE,WAA+B,IAAI;QACtD,KAAK,CAAC,sBAAsB,GAAG,E
AAE,CAAC,CAAA;QAClC,IAAI,CAAC,WAAW;aACb,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE,CACnB,UAAU,CAAC,SAAS,CAClB,EAAE,GAAG,EAAE,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,EAAE,EACnC,EAAE,YAAY,EAAE,IAAI,CAAC,OAAO,CAAC,qBAAqB,EAAE,CACrD,CACF;aACA,IAAI,CAAC,GAAG,EAAE;YACT,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;YACzB,QAAQ,CAAC,IAAI,CAAC,CAAA;QAChB,CAAC,CAAC;aACD,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAA;IAClC,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,QAA4C;QACjD,KAAK,CAAC,qBAAqB,CAAC,CAAA;QAC5B,IAAI,CAAC,WAAW;aACb,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC,UAAU,CAAC,cAAc,EAAE,CAAC;aACjD,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;YAC/B,aAAa;aACZ,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAA;IAClC,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,WAA+B,IAAI;QACvC,KAAK,CAAC,oBAAoB,CAAC,CAAA;QAC3B,IAAI,CAAC,WAAW;aACb,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,CAAC;aACvC,IAAI,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;aAC1B,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAA;IAClC,CAAC;IAED;;OAEG;IACH,KAAK;QACH,KAAK,CAAC,oBAAoB,CAAC,CAAA;QAC3B,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IAC5C,CAAC;CACF;AAnaD,6BAmaC"} diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index 92c10975f..1b1db5809 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -1,6 +1,6 @@ import { exec } from 'child_process'; import { cyan, green, red, yellow } from 'colors'; -import { log_execution } from '../ActionUtilities'; +import { logExecution } from '../ActionUtilities'; import { Method } from '../RouteManager'; import RouteSubscriber from '../RouteSubscriber'; import { Search } from '../Search'; @@ -66,13 +66,13 @@ export namespace SolrManager { export async function update() { console.log(green('Beginning update...')); - await log_execution({ + await logExecution({ startMessage: 'Clearing existing Solr information...', endMessage: 'Solr information successfully cleared', action: Search.clear, color: cyan, }); - const cursor = await log_execution({ + const cursor = await logExecution({ startMessage: 'Connecting to and querying for all documents from database...', endMessage: ({ result, error }) => { const success = error === null && result !== undefined; @@ -118,7 +118,7 @@ export namespace SolrManager { } } await cursor?.forEach(updateDoc); - const result = await log_execution({ + const result = await logExecution({ startMessage: `Dispatching updates for ${updates.length} documents`, endMessage: 'Dispatched updates complete', action: () => Search.updateDocuments(updates), diff --git a/src/server/ApiManagers/SessionManager.ts b/src/server/ApiManagers/SessionManager.ts index e37f8c6db..c3139896f 100644 --- a/src/server/ApiManagers/SessionManager.ts +++ b/src/server/ApiManagers/SessionManager.ts @@ -1,67 +1,66 @@ -import ApiManager, { Registration } from "./ApiManager"; -import { Method, _permission_denied, AuthorizedCore, SecureHandler } from "../RouteManager"; -import RouteSubscriber from "../RouteSubscriber"; -import { sessionAgent } from ".."; -import { DashSessionAgent } from "../DashSession/DashSessionAgent"; +import ApiManager, { Registration } from './ApiManager'; +import { Method, _permissionDenied, AuthorizedCore, SecureHandler } from '../RouteManager'; +import RouteSubscriber from '../RouteSubscriber'; +import { sessionAgent } from '..'; +import { DashSessionAgent } from '../DashSession/DashSessionAgent'; -const permissionError = "You are 
not authorized!"; +const permissionError = 'You are not authorized!'; export default class SessionManager extends ApiManager { - - private secureSubscriber = (root: string, ...params: string[]) => new RouteSubscriber(root).add("session_key", ...params); + private secureSubscriber = (root: string, ...params: string[]) => new RouteSubscriber(root).add('session_key', ...params); private authorizedAction = (handler: SecureHandler) => { return (core: AuthorizedCore) => { - const { req: { params }, res } = core; + const { + req: { params }, + res, + } = core; if (!process.env.MONITORED) { - return res.send("This command only makes sense in the context of a monitored session."); + return res.send('This command only makes sense in the context of a monitored session.'); } if (params.session_key !== process.env.session_key) { - return _permission_denied(res, permissionError); + return _permissionDenied(res, permissionError); } return handler(core); }; - } + }; protected initialize(register: Registration): void { - register({ method: Method.GET, - subscription: this.secureSubscriber("debug", "to?"), + subscription: this.secureSubscriber('debug', 'to?'), secureHandler: this.authorizedAction(async ({ req: { params }, res }) => { const to = params.to || DashSessionAgent.notificationRecipient; - const { error } = await sessionAgent.serverWorker.emit("debug", { to }); + const { error } = await sessionAgent.serverWorker.emit('debug', { to }); res.send(error ? error.message : `Your request was successful: the server captured and compressed (but did not save) a new back up. It was sent to ${to}.`); - }) + }), }); register({ method: Method.GET, - subscription: this.secureSubscriber("backup"), + subscription: this.secureSubscriber('backup'), secureHandler: this.authorizedAction(async ({ res }) => { - const { error } = await sessionAgent.serverWorker.emit("backup"); - res.send(error ? error.message : "Your request was successful: the server successfully created a new back up."); - }) + const { error } = await sessionAgent.serverWorker.emit('backup'); + res.send(error ? error.message : 'Your request was successful: the server successfully created a new back up.'); + }), }); register({ method: Method.GET, - subscription: this.secureSubscriber("kill"), + subscription: this.secureSubscriber('kill'), secureHandler: this.authorizedAction(({ res }) => { - res.send("Your request was successful: the server and its session have been killed."); - sessionAgent.killSession("an authorized user has manually ended the server session via the /kill route"); - }) + res.send('Your request was successful: the server and its session have been killed.'); + sessionAgent.killSession('an authorized user has manually ended the server session via the /kill route'); + }), }); register({ method: Method.GET, - subscription: this.secureSubscriber("deleteSession"), + subscription: this.secureSubscriber('deleteSession'), secureHandler: this.authorizedAction(async ({ res }) => { - const { error } = await sessionAgent.serverWorker.emit("delete"); - res.send(error ? error.message : "Your request was successful: the server successfully deleted the database. Return to /home."); - }) + const { error } = await sessionAgent.serverWorker.emit('delete'); + res.send(error ? error.message : 'Your request was successful: the server successfully deleted the database. 
Return to /home.'); + }), }); - } - -} \ No newline at end of file +} diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 2306b6589..1a759f04d 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -1,50 +1,27 @@ +import * as AdmZip from 'adm-zip'; import * as formidable from 'formidable'; -import { createReadStream, createWriteStream, unlink, writeFile } from 'fs'; -import * as path from 'path'; +import * as fs from 'fs'; +import { createReadStream, createWriteStream, unlink } from 'fs'; +import * as imageDataUri from 'image-data-uri'; import Jimp from 'jimp'; -import { filesDirectory, publicDirectory } from '..'; +import * as path from 'path'; +import * as uuid from 'uuid'; import { retrocycle } from '../../decycler/decycler'; +import { DashVersion } from '../../fields/DocSymbols'; import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils'; -import { Database } from '../database'; import { Method, _success } from '../RouteManager'; -import RouteSubscriber from '../RouteSubscriber'; import { AcceptableMedia, Upload } from '../SharedMediaTypes'; +import { clientPathToFile, Directory, pathToDirectory, publicDirectory, serverPathToFile } from '../SocketData'; +import { Database } from '../database'; import ApiManager, { Registration } from './ApiManager'; import { SolrManager } from './SearchManager'; -import * as uuid from 'uuid'; -import { DashVersion } from '../../fields/DocSymbols'; -import * as AdmZip from 'adm-zip'; -import * as imageDataUri from 'image-data-uri'; -import * as fs from 'fs'; - -export enum Directory { - parsed_files = 'parsed_files', - images = 'images', - videos = 'videos', - pdfs = 'pdfs', - text = 'text', - audio = 'audio', - csv = 'csv', -} - -export function serverPathToFile(directory: Directory, filename: string) { - return path.normalize(`${filesDirectory}/${directory}/${filename}`); -} - -export function pathToDirectory(directory: Directory) { - return path.normalize(`${filesDirectory}/${directory}`); -} - -export function clientPathToFile(directory: Directory, filename: string) { - return `/files/${directory}/${filename}`; -} export default class UploadManager extends ApiManager { protected initialize(register: Registration): void { register({ method: Method.POST, subscription: '/ping', - secureHandler: async ({ req, res }) => { + secureHandler: async ({ /* req, */ res }) => { _success(res, { message: DashVersion, date: new Date() }); }, }); @@ -78,31 +55,33 @@ export default class UploadManager extends ApiManager { form.on('progress', e => fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `read:(${Math.round((100 * +e) / +filesize)}%) ${e} of ${filesize}`))); return new Promise(resolve => { form.parse(req, async (_err, _fields, files) => { - const results: Upload.FileResponse[] = []; if (_err?.message) { - results.push({ - source: { - filepath: '', - originalFilename: 'none', - newFilename: 'none', - mimetype: 'text', - size: 0, - hashAlgorithm: 'md5', - toJSON: () => ({ name: 'none', size: 0, length: 0, mtime: new Date(), filepath: '', originalFilename: 'none', newFilename: 'none', mimetype: 'text' }), + _success(res, [ + { + source: { + filepath: '', + originalFilename: 'none', + newFilename: 'none', + mimetype: 'text', + size: 0, + hashAlgorithm: 'md5', + toJSON: () => ({ name: 'none', size: 0, length: 0, mtime: new Date(), filepath: '', originalFilename: 'none', newFilename: 'none', mimetype: 'text' }), + }, + result: { 
name: 'failed upload', message: `${_err.message}` }, }, - result: { name: 'failed upload', message: `${_err.message}` }, - }); - } - fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `resampling images`)); + ]); + } else { + fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `resampling images`)); + const results = ( + await Promise.all( + Array.from(Object.keys(files)).map( + async key => (!files[key] ? undefined : DashUploadUtils.upload(files[key]![0] /* , key */)) // key is the guid used by the client to track upload progress. + ) + ) + ).filter(result => result && !(result.result instanceof Error)); - for (const key in files) { - const f = files[key]; - if (f) { - const result = await DashUploadUtils.upload(f[0], key); // key is the guid used by the client to track upload progress. - result && !(result.result instanceof Error) && results.push(result); - } + _success(res, results); } - _success(res, results); resolve(); }); }); @@ -113,17 +92,14 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: '/uploadYoutubeVideo', secureHandler: async ({ req, res }) => { - //req.readableBuffer.head.data - return new Promise(async resolve => { - req.addListener('data', async args => { - const payload = String.fromCharCode.apply(String, args); - const { videoId, overwriteId } = JSON.parse(payload); - const results: Upload.FileResponse[] = []; - const result = await DashUploadUtils.uploadYoutube(videoId, overwriteId ?? videoId); - result && results.push(result); - _success(res, results); - resolve(); - }); + // req.readableBuffer.head.data + req.addListener('data', async args => { + const payload = String.fromCharCode(...args); // .apply(String, args); + const { videoId, overwriteId } = JSON.parse(payload); + const results: Upload.FileResponse[] = []; + const result = await DashUploadUtils.uploadYoutube(videoId, overwriteId ?? videoId); + result && results.push(result); + _success(res, results); }); }, }); @@ -132,49 +108,10 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: '/queryYoutubeProgress', secureHandler: async ({ req, res }) => { - return new Promise(async resolve => { - req.addListener('data', args => { - const payload = String.fromCharCode.apply(String, args); - const videoId = JSON.parse(payload).videoId; - _success(res, { progress: DashUploadUtils.QueryYoutubeProgress(videoId, req.user) }); - resolve(); - }); - }); - }, - }); - - register({ - method: Method.POST, - subscription: new RouteSubscriber('youtubeScreenshot'), - secureHandler: async ({ req, res }) => { - const { id, timecode } = req.body; - const convert = (raw: string) => { - const number = Math.floor(Number(raw)); - const seconds = number % 60; - const minutes = (number - seconds) / 60; - return `${minutes}m${seconds}s`; - }; - const suffix = timecode ? 
`&t=${convert(timecode)}` : ``; - const targetUrl = `https://www.youtube.com/watch?v=${id}${suffix}`; - const buffer = await captureYoutubeScreenshot(targetUrl); - if (!buffer) { - return res.send(); - } - const resolvedName = `youtube_capture_${id}_${suffix}.png`; - const resolvedPath = serverPathToFile(Directory.images, resolvedName); - return new Promise(resolve => { - writeFile(resolvedPath, buffer, async error => { - if (error) { - return res.send(); - } - await DashUploadUtils.outputResizedImages(resolvedPath, resolvedName, pathToDirectory(Directory.images)); - res.send({ - accessPaths: { - agnostic: DashUploadUtils.getAccessPaths(Directory.images, resolvedName), - }, - } as Upload.FileInformation); - resolve(); - }); + req.addListener('data', args => { + const payload = String.fromCharCode(...args); // .apply(String, args); + const { videoId } = JSON.parse(payload); + _success(res, { progress: DashUploadUtils.QueryYoutubeProgress(videoId) }); }); }, }); @@ -186,7 +123,8 @@ export default class UploadManager extends ApiManager { const { sources } = req.body; if (Array.isArray(sources)) { const results = await Promise.all(sources.map(source => DashUploadUtils.UploadImage(source))); - return res.send(results); + res.send(results); + return; } res.send(); }, @@ -203,20 +141,22 @@ export default class UploadManager extends ApiManager { const getId = (id: string): string => { if (!remap || id.endsWith('Proto')) return id; if (id in ids) return ids[id]; - return (ids[id] = uuid.v4()); + ids[id] = uuid.v4(); + return ids[id]; }; - const mapFn = (doc: any) => { + const mapFn = (docIn: any) => { + const doc = docIn; if (doc.id) { doc.id = getId(doc.id); } + // eslint-disable-next-line no-restricted-syntax for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } + // eslint-disable-next-line no-continue + if (!Object.prototype.hasOwnProperty.call(doc.fields, key)) continue; + const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; - } + // eslint-disable-next-line no-continue + if (field === undefined || field === null) continue; if (field.__type === 'Doc') { mapFn(field); @@ -229,78 +169,80 @@ export default class UploadManager extends ApiManager { } else if (field.__type === 'list') { mapFn(field); } else if (typeof field === 'string') { - const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; - doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); + const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w-]*)"/g; + doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => `${p1}${getId(p2)}"`); } else if (field.__type === 'RichTextField') { const re = /("href"\s*:\s*")(.*?)"/g; - field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); + field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => `${p1}${getId(p2)}"`); } } }; return new Promise(resolve => { form.parse(req, async (_err, fields, files) => { - remap = Object.keys(fields).some(key => key === 'remap' && !fields.remap?.includes('false')); //.remap !== 'false'; // bcz: looking to see if the field 'remap' is set to 'false' + remap = Object.keys(fields).some(key => key === 'remap' && !fields.remap?.includes('false')); // .remap !== 'false'; // bcz: looking to see if the field 'remap' is set to 'false' let id: string = ''; let docids: string[] = []; let linkids: string[] = []; try { - for (const name in 
files) { - const f = files[name]; - if (!f) continue; - const path_2 = f[0]; // what about the rest of the array? are we guaranteed only one value is set? - const zip = new AdmZip(path_2.filepath); - zip.getEntries().forEach((entry: any) => { - let entryName = entry.entryName.replace(/%%%/g, '/'); - if (!entryName.startsWith('files/')) { - return; - } - const extension = path.extname(entryName); - const pathname = publicDirectory + '/' + entry.entryName; - const targetname = publicDirectory + '/' + entryName; - try { - zip.extractEntryTo(entry.entryName, publicDirectory, true, false); - createReadStream(pathname).pipe(createWriteStream(targetname)); - Jimp.read(pathname).then(img => { - DashUploadUtils.imageResampleSizes(extension).forEach(({ width, suffix }) => { - const outputPath = InjectSize(targetname, suffix); - if (!width) createReadStream(pathname).pipe(createWriteStream(outputPath)); - else img = img.resize(width, Jimp.AUTO).write(outputPath); + // eslint-disable-next-line no-restricted-syntax + for (const name in Object.keys(files)) { + if (Object.prototype.hasOwnProperty.call(files, name)) { + const f = files[name]; + // eslint-disable-next-line no-continue + if (!f) continue; + const path2 = f[0]; // what about the rest of the array? are we guaranteed only one value is set? + const zip = new AdmZip(path2.filepath); + zip.getEntries().forEach((entry: any) => { + const entryName = entry.entryName.replace(/%%%/g, '/'); + if (!entryName.startsWith('files/')) { + return; + } + const extension = path.extname(entryName); + const pathname = publicDirectory + '/' + entry.entryName; + const targetname = publicDirectory + '/' + entryName; + try { + zip.extractEntryTo(entry.entryName, publicDirectory, true, false); + createReadStream(pathname).pipe(createWriteStream(targetname)); + Jimp.read(pathname).then(imgIn => { + let img = imgIn; + DashUploadUtils.imageResampleSizes(extension).forEach(({ width, suffix }) => { + const outputPath = InjectSize(targetname, suffix); + if (!width) createReadStream(pathname).pipe(createWriteStream(outputPath)); + else img = img.resize(width, Jimp.AUTO).write(outputPath); + }); + unlink(pathname, () => {}); }); - unlink(pathname, () => {}); - }); - } catch (e) { - console.log(e); - } - }); - const json = zip.getEntry('docs.json'); - if (json) { - try { - const data = JSON.parse(json.getData().toString('utf8'), retrocycle()); - const { docs, links } = data; - id = getId(data.id); - const rdocs = Object.keys(docs).map(key => docs[key]); - const ldocs = Object.keys(links).map(key => links[key]); - [...rdocs, ...ldocs].forEach(mapFn); - docids = rdocs.map(doc => doc.id); - linkids = ldocs.map(link => link.id); - await Promise.all( - [...rdocs, ...ldocs].map( - doc => - new Promise(res => { - // overwrite mongo doc with json doc contents - Database.Instance.replace(doc.id, doc, (err, r) => res(err && console.log(err)), true); - }) - ) - ); - } catch (e) { - console.log(e); + } catch (e) { + console.log(e); + } + }); + const json = zip.getEntry('docs.json'); + if (json) { + try { + const data = JSON.parse(json.getData().toString('utf8'), retrocycle()); + const { docs, links } = data; + id = getId(data.id); + const rdocs = Object.keys(docs).map(key => docs[key]); + const ldocs = Object.keys(links).map(key => links[key]); + [...rdocs, ...ldocs].forEach(mapFn); + docids = rdocs.map(doc => doc.id); + linkids = ldocs.map(link => link.id); + // eslint-disable-next-line no-await-in-loop + await Promise.all( + [...rdocs, ...ldocs].map( + doc => + new Promise(res => { + // 
overwrite mongo doc with json doc contents + Database.Instance.replace(doc.id, doc, err => res(err && console.log(err)), true); + }) + ) + ); + } catch (e) { + console.log(e); + } } + unlink(path2.filepath, () => {}); } - unlink(path_2.filepath, () => {}); } SolrManager.update(); res.send(JSON.stringify({ id, docids, linkids } || 'error')); @@ -319,9 +261,8 @@ export default class UploadManager extends ApiManager { secureHandler: async ({ req, res }) => { const { source } = req.body; if (typeof source === 'string') { - return res.send(await DashUploadUtils.InspectImage(source)); - } - res.send({}); + res.send(await DashUploadUtils.InspectImage(source)); + } else res.send({}); }, }); @@ -329,7 +270,7 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: '/uploadURI', secureHandler: ({ req, res }) => { - const uri: any = req.body.uri; + const { uri } = req.body; const filename = req.body.name; const origSuffix = req.body.nosuffix ? SizeSuffix.None : SizeSuffix.Original; const deleteFiles = req.body.replaceRootFilename; @@ -338,23 +279,24 @@ export default class UploadManager extends ApiManager { return; } if (deleteFiles) { - const path = serverPathToFile(Directory.images, ''); + const serverPath = serverPathToFile(Directory.images, ''); const regex = new RegExp(`${deleteFiles}.*`); - fs.readdirSync(path) + fs.readdirSync(serverPath) .filter((f: any) => regex.test(f)) - .map((f: any) => fs.unlinkSync(path + f)); + .map((f: any) => fs.unlinkSync(serverPath + f)); } - return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, origSuffix))).then((savedName: string) => { + imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, origSuffix))).then((savedName: string) => { const ext = path.extname(savedName).toLowerCase(); if (AcceptableMedia.imageFormats.includes(ext)) { - Jimp.read(savedName).then(img => + Jimp.read(savedName).then(imgIn => { + let img = imgIn; (!origSuffix ? [{ width: 400, suffix: SizeSuffix.Medium }] : Object.values(DashUploadUtils.Sizes)) // .forEach(({ width, suffix }) => { const outputPath = serverPathToFile(Directory.images, InjectSize(filename, suffix) + ext); if (!width) createReadStream(savedName).pipe(createWriteStream(outputPath)); else img = img.resize(width, Jimp.AUTO).write(outputPath); - }) - ); + }); + }); } res.send(clientPathToFile(Directory.images, filename + ext)); }); @@ -362,35 +304,3 @@ export default class UploadManager extends ApiManager { }); } } -function delay(ms: number) { - return new Promise(resolve => setTimeout(resolve, ms)); -} -/** - * On success, returns a buffer containing the bytes of a screenshot - * of the video (optionally, at a timecode) specified by @param targetUrl. - * - * On failure, returns undefined. - */ -async function captureYoutubeScreenshot(targetUrl: string) { - // const browser = await launch({ args: ['--no-sandbox', '--disable-setuid-sandbox'] }); - // const page = await browser.newPage(); - // // await page.setViewport({ width: 1920, height: 1080 }); - - // // await page.goto(targetUrl, { waitUntil: 'domcontentloaded' as any }); - - // const videoPlayer = await page.$('.html5-video-player'); - // videoPlayer && await page.focus("video"); - // await delay(7000); - // const ad = await page.$('.ytp-ad-skip-button-text'); - // await ad?.click(); - // await videoPlayer?.click(); - // await delay(1000); - // // hide youtube player controls. 
- // await page.evaluate(() => (document.querySelector('.ytp-chrome-bottom') as HTMLElement).style.display = 'none'); - - // const buffer = await videoPlayer?.screenshot({ encoding: "binary" }); - // await browser.close(); - - // return buffer; - return null; -} diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index 0431b9bcf..b587340e2 100644 --- a/src/server/ApiManagers/UserManager.ts +++ b/src/server/ApiManagers/UserManager.ts @@ -1,16 +1,14 @@ -import ApiManager, { Registration } from './ApiManager'; -import { Method } from '../RouteManager'; -import { Database } from '../database'; -import { msToTime } from '../ActionUtilities'; import * as bcrypt from 'bcrypt-nodejs'; +import { check, validationResult } from 'express-validator'; +import { Utils } from '../../Utils'; import { Opt } from '../../fields/Doc'; -import { WebSocket } from '../websocket'; -import { resolvedPorts } from '../server_Initialization'; import { DashVersion } from '../../fields/DocSymbols'; -import { Utils } from '../../Utils'; -import { check, validationResult } from 'express-validator'; +import { msToTime } from '../ActionUtilities'; +import { Method } from '../RouteManager'; +import { resolvedPorts, socketMap, timeMap } from '../SocketData'; +import { Database } from '../database'; +import ApiManager, { Registration } from './ApiManager'; -export const timeMap: { [id: string]: number } = {}; interface ActivityUnit { user: string; duration: number; @@ -32,9 +30,10 @@ export default class UserManager extends ApiManager { method: Method.POST, subscription: '/setCacheDocumentIds', secureHandler: async ({ user, req, res }) => { + const userModel = user; const result: any = {}; - user.cacheDocumentIds = req.body.cacheDocumentIds; - user.save().then(undefined, err => { + userModel.cacheDocumentIds = req.body.cacheDocumentIds; + userModel.save().then(undefined, (err: any) => { if (err) { result.error = [{ msg: 'Error while caching documents' }]; } @@ -90,17 +89,19 @@ export default class UserManager extends ApiManager { method: Method.POST, subscription: '/internalResetPassword', secureHandler: async ({ user, req, res }) => { + const userModel = user; const result: any = {}; - const { curr_pass, new_pass, new_confirm } = req.body; + // eslint-disable-next-line camelcase + const { curr_pass, new_pass } = req.body; // perhaps should assert whether curr password is entered correctly const validated = await new Promise>(resolve => { - bcrypt.compare(curr_pass, user.password, (err, passwords_match) => { - if (err || !passwords_match) { + bcrypt.compare(curr_pass, userModel.password, (err, passwordsMatch) => { + if (err || !passwordsMatch) { result.error = [{ msg: 'Incorrect current password' }]; res.send(result); resolve(undefined); } else { - resolve(passwords_match); + resolve(passwordsMatch); } }); }); @@ -111,10 +112,11 @@ export default class UserManager extends ApiManager { check('new_pass', 'Password must be at least 4 characters long') .run(req) - .then(chcekcres => console.log(chcekcres)); //.len({ min: 4 }); + .then(chcekcres => console.log(chcekcres)); // .len({ min: 4 }); check('new_confirm', 'Passwords do not match') .run(req) - .then(theres => console.log(theres)); //.equals(new_pass); + .then(theres => console.log(theres)); // .equals(new_pass); + // eslint-disable-next-line camelcase if (curr_pass === new_pass) { result.error = [{ msg: 'Current and new password are the same' }]; } @@ -125,12 +127,13 @@ export default class UserManager extends ApiManager { // will only 
change password if there are no errors. if (!result.error) { - user.password = new_pass; - user.passwordResetToken = undefined; - user.passwordResetExpires = undefined; + // eslint-disable-next-line camelcase + userModel.password = new_pass; + userModel.passwordResetToken = undefined; + userModel.passwordResetExpires = undefined; } - user.save().then(undefined, err => { + userModel.save().then(undefined, err => { if (err) { result.error = [{ msg: 'Error while saving new password' }]; } @@ -149,13 +152,16 @@ export default class UserManager extends ApiManager { const activeTimes: ActivityUnit[] = []; const inactiveTimes: ActivityUnit[] = []; + // eslint-disable-next-line no-restricted-syntax for (const user in timeMap) { - const time = timeMap[user]; - const socketPair = Array.from(WebSocket.socketMap).find(pair => pair[1] === user); - if (socketPair && !socketPair[0].disconnected) { - const duration = now - time; - const target = duration / 1000 < 60 * 5 ? activeTimes : inactiveTimes; - target.push({ user, duration }); + if (Object.prototype.hasOwnProperty.call(timeMap, user)) { + const time = timeMap[user]; + const socketPair = Array.from(socketMap).find(pair => pair[1] === user); + if (socketPair && !socketPair[0].disconnected) { + const duration = now - time; + const target = duration / 1000 < 60 * 5 ? activeTimes : inactiveTimes; + target.push({ user, duration }); + } } } diff --git a/src/server/Client.ts b/src/server/Client.ts index e6f953712..f67999c5b 100644 --- a/src/server/Client.ts +++ b/src/server/Client.ts @@ -1,4 +1,4 @@ -import { computed } from "mobx"; +import { computed } from 'mobx'; export class Client { private _guid: string; @@ -7,5 +7,7 @@ export class Client { this._guid = guid; } - @computed public get GUID(): string { return this._guid; } -} \ No newline at end of file + @computed public get GUID(): string { + return this._guid; + } +} diff --git a/src/server/DashSession/DashSessionAgent.ts b/src/server/DashSession/DashSessionAgent.ts index 1ef7a131d..f937c17ad 100644 --- a/src/server/DashSession/DashSessionAgent.ts +++ b/src/server/DashSession/DashSessionAgent.ts @@ -1,18 +1,19 @@ -import { Email, pathFromRoot } from '../ActionUtilities'; -import { red, yellow, green, cyan } from 'colors'; -import { get } from 'request-promise'; -import { Utils } from '../../Utils'; -import { WebSocket } from '../websocket'; -import { MessageStore } from '../Message'; -import { launchServer, onWindows } from '..'; -import { readdirSync, statSync, createWriteStream, readFileSync, unlinkSync } from 'fs'; import * as Archiver from 'archiver'; +import { cyan, green, red, yellow } from 'colors'; +import { createWriteStream, readFileSync, readdirSync, statSync, unlinkSync } from 'fs'; import { resolve } from 'path'; +import { get } from 'request-promise'; import { rimraf } from 'rimraf'; +import { launchServer, onWindows } from '..'; +import { Utils } from '../../Utils'; +import { ServerUtils } from '../../ServerUtils'; +import { Email, pathFromRoot } from '../ActionUtilities'; +import { MessageStore } from '../Message'; +import { WebSocket } from '../websocket'; import { AppliedSessionAgent, ExitHandler } from './Session/agents/applied_session_agent'; -import { ServerWorker } from './Session/agents/server_worker'; import { Monitor } from './Session/agents/monitor'; -import { MessageHandler, ErrorLike } from './Session/agents/promisified_ipc_manager'; +import { ErrorLike, MessageHandler } from './Session/agents/promisified_ipc_manager'; +import { ServerWorker } from 
'./Session/agents/server_worker'; /** * If we're the monitor (master) thread, we should launch the monitor logic for the session. @@ -22,6 +23,7 @@ import { MessageHandler, ErrorLike } from './Session/agents/promisified_ipc_mana export class DashSessionAgent extends AppliedSessionAgent { private readonly signature = '-Dash Server Session Manager'; private readonly releaseDesktop = pathFromRoot('../../Desktop'); + public static notificationRecipient = 'browndashptc@gmail.com'; /** * The core method invoked when the single master thread is initialized. @@ -149,7 +151,7 @@ export class DashSessionAgent extends AppliedSessionAgent { const { _socket } = WebSocket; if (_socket) { const message = typeof reason === 'boolean' ? (reason ? 'exit' : 'temporary') : 'crash'; - Utils.Emit(_socket, MessageStore.ConnectionTerminated, message); + ServerUtils.Emit(_socket, MessageStore.ConnectionTerminated, message); } }; @@ -217,7 +219,3 @@ export class DashSessionAgent extends AppliedSessionAgent { } } } - -export namespace DashSessionAgent { - export const notificationRecipient = 'browndashptc@gmail.com'; -} diff --git a/src/server/DashStats.ts b/src/server/DashStats.ts index a9e6af67c..485ab9f99 100644 --- a/src/server/DashStats.ts +++ b/src/server/DashStats.ts @@ -1,9 +1,8 @@ import { cyan, magenta } from 'colors'; import { Response } from 'express'; -import SocketIO from 'socket.io'; -import { timeMap } from './ApiManagers/UserManager'; -import { WebSocket } from './websocket'; import * as fs from 'fs'; +import SocketIO from 'socket.io'; +import { socketMap, timeMap, userOperations } from './SocketData'; /** * DashStats focuses on tracking user data for each session. @@ -17,7 +16,6 @@ export namespace DashStats { const statsCSVDirectory = './src/server/stats/'; const statsCSVFilename = statsCSVDirectory + 'userLoginStats.csv'; - const columns = ['USERNAME', 'ACTION', 'TIME']; /** * UserStats holds the stats associated with a particular user. @@ -77,6 +75,111 @@ export namespace DashStats { // structure export const lastUserOperations = new Map(); + /** + * convertToCSV() is a helper method that stringifies a CSVStore object + * that can be written to the CSV file later. + * @param dataObject the object to stringify + * @returns the object as a string. + */ + function convertToCSV(dataObject: CSVStore): string { + return `${dataObject.USERNAME},${dataObject.ACTION},${dataObject.TIME}\n`; + } + /** + * getLastOperationsOrDefault() is a helper method that will attempt + * to query the lastUserOperations map for a specified username. If the + * username is not in the map, an empty UserLastOperations object is returned. + * @param username + * @returns the user's UserLastOperations structure or an empty + * UserLastOperations object (All values set to 0) if the username is not found. + */ + function getLastOperationsOrDefault(username: string): UserLastOperations { + if (lastUserOperations.get(username) === undefined) { + const initializeOperationsQueue = []; + for (let i = 0; i < RATE_INTERVAL; i++) { + initializeOperationsQueue.push(0); + } + return { + sampleOperations: 0, + lastSampleOperations: 0, + previousOperationsQueue: initializeOperationsQueue, + }; + } + return lastUserOperations.get(username)!; + } + + /** + * updateLastOperations updates a specific user's UserLastOperations information + * for the current sampling cycle. 
The method removes old/outdated counts for
+     * operations from the queue and adds new data for the current sampling
+     * cycle to the queue, updating the total count as it goes.
+     * @param lastOperationData the old UserLastOperations data that must be updated
+     * @param currentOperations the total number of operations measured for this sampling cycle.
+     * @returns the updated UserLastOperations structure.
+     */
+    function updateLastOperations(lastOperationData: UserLastOperations, currentOperations: number): UserLastOperations {
+        // create a copy of the UserLastOperations to modify
+        const newLastOperationData: UserLastOperations = {
+            sampleOperations: lastOperationData.sampleOperations,
+            lastSampleOperations: lastOperationData.lastSampleOperations,
+            previousOperationsQueue: lastOperationData.previousOperationsQueue.slice(),
+        };
+
+        let newSampleOperations = newLastOperationData.sampleOperations;
+        newSampleOperations -= newLastOperationData.previousOperationsQueue.shift()!; // removes and returns the first element of the queue
+        const operationsThisCycle = currentOperations - lastOperationData.lastSampleOperations;
+        newSampleOperations += operationsThisCycle; // add the operations this cycle to find out what our count for the interval should be (e.g. operations in the last 10 seconds)
+
+        // update values for the copy object
+        newLastOperationData.sampleOperations = newSampleOperations;
+
+        newLastOperationData.previousOperationsQueue.push(operationsThisCycle);
+        newLastOperationData.lastSampleOperations = currentOperations;
+
+        return newLastOperationData;
+    }
+
+    /**
+     * getUserOperationsOrDefault() is a helper method to get the user's total
+     * operations for the CURRENT sampling interval. The method will return 0
+     * if the username is not in the userOperations map.
+     * @param username the username to search the map for
+     * @returns the total number of operations recorded up to this sampling cycle.
+     */
+    function getUserOperationsOrDefault(username: string): number {
+        return userOperations.get(username) === undefined ? 0 : userOperations.get(username)!;
+    }
+
+    /**
+     * getCurrentStats() calculates the total stats for this cycle. In this case,
+     * getCurrentStats() returns an Array of UserStats[] objects describing
+     * the stats for each user.
+     * @returns an array of UserStats storing data for each user at the current moment.
+     */
+    function getCurrentStats(): UserStats[] {
+        const socketPairs: UserStats[] = [];
+        Array.from(socketMap.entries()).forEach(([key, value]) => {
+            const username = value.split(' ')[0];
+            const connectionTime = new Date(timeMap[username]);
+
+            const connectionTimeString = connectionTime.toLocaleDateString() + ' ' + connectionTime.toLocaleTimeString();
+
+            if (!key.disconnected) {
+                const lastRecordedOperations = getLastOperationsOrDefault(username);
+                const currentUserOperationCount = getUserOperationsOrDefault(username);
+
+                socketPairs.push({
+                    socketId: key.id,
+                    username: username,
+                    time: connectionTimeString.includes('Invalid Date') ? '' : connectionTimeString,
+                    operations: userOperations.get(username) ? userOperations.get(username)! : 0,
+                    rate: lastRecordedOperations.sampleOperations,
+                });
+                lastUserOperations.set(username, updateLastOperations(lastRecordedOperations, currentUserOperationCount));
+            }
+        });
+        return socketPairs;
+    }
+
     /**
      * handleStats is called when the /stats route is called, providing a JSON
      * object with relevant stats. 
In this case, we return the number of @@ -84,8 +187,7 @@ export namespace DashStats { * @param res Response object from Express */ export function handleStats(res: Response) { - let current = getCurrentStats(); - const results: CSVStore[] = []; + const current = getCurrentStats(); res.json({ currentConnections: current.length, socketMap: current, @@ -99,7 +201,7 @@ export namespace DashStats { * @returns a StatsDataBundle that is sent to the frontend view on each websocket update */ export function getUpdatedStatsBundle(): StatsDataBundle { - let current = getCurrentStats(); + const current = getCurrentStats(); return { connectedUsers: current, @@ -113,11 +215,8 @@ export namespace DashStats { * @param res */ export function handleStatsView(res: Response) { - let current = getCurrentStats(); - - let connectedUsers = current.map(socketPair => { - return socketPair.time + ' - ' + socketPair.username + ' Operations: ' + socketPair.operations; - }); + const current = getCurrentStats(); + const connectedUsers = current.map(({ time, username, operations }) => time + ' - ' + username + ' Operations: ' + operations); let serverTraffic = ServerTraffic.NOT_BUSY; if (current.length < BUSY_SERVER_BOUND) { @@ -145,17 +244,17 @@ export namespace DashStats { */ export function logUserLogin(username: string | undefined, socket: SocketIO.Socket) { if (!(username === undefined)) { - let currentDate = new Date(); + const currentDate = new Date(); console.log(magenta(`User ${username.split(' ')[0]} logged in at: ${currentDate.toISOString()}`)); - let toWrite: CSVStore = { + const toWrite: CSVStore = { USERNAME: username, ACTION: 'loggedIn', TIME: currentDate.toISOString(), }; if (!fs.existsSync(statsCSVDirectory)) fs.mkdirSync(statsCSVDirectory); - let statsFile = fs.createWriteStream(statsCSVFilename, { flags: 'a' }); + const statsFile = fs.createWriteStream(statsCSVFilename, { flags: 'a' }); statsFile.write(convertToCSV(toWrite)); statsFile.end(); console.log(cyan(convertToCSV(toWrite))); @@ -170,10 +269,10 @@ export namespace DashStats { */ export function logUserLogout(username: string | undefined, socket: SocketIO.Socket) { if (!(username === undefined)) { - let currentDate = new Date(); + const currentDate = new Date(); - let statsFile = fs.createWriteStream(statsCSVFilename, { flags: 'a' }); - let toWrite: CSVStore = { + const statsFile = fs.createWriteStream(statsCSVFilename, { flags: 'a' }); + const toWrite: CSVStore = { USERNAME: username, ACTION: 'loggedOut', TIME: currentDate.toISOString(), @@ -182,110 +281,4 @@ export namespace DashStats { statsFile.end(); } } - - /** - * getLastOperationsOrDefault() is a helper method that will attempt - * to query the lastUserOperations map for a specified username. If the - * username is not in the map, an empty UserLastOperations object is returned. - * @param username - * @returns the user's UserLastOperations structure or an empty - * UserLastOperations object (All values set to 0) if the username is not found. 
- */ - function getLastOperationsOrDefault(username: string): UserLastOperations { - if (lastUserOperations.get(username) === undefined) { - let initializeOperationsQueue = []; - for (let i = 0; i < RATE_INTERVAL; i++) { - initializeOperationsQueue.push(0); - } - return { - sampleOperations: 0, - lastSampleOperations: 0, - previousOperationsQueue: initializeOperationsQueue, - }; - } - return lastUserOperations.get(username)!; - } - - /** - * updateLastOperations updates a specific user's UserLastOperations information - * for the current sampling cycle. The method removes old/outdated counts for - * operations from the queue and adds new data for the current sampling - * cycle to the queue, updating the total count as it goes. - * @param lastOperationData the old UserLastOperations data that must be updated - * @param currentOperations the total number of operations measured for this sampling cycle. - * @returns the udpated UserLastOperations structure. - */ - function updateLastOperations(lastOperationData: UserLastOperations, currentOperations: number): UserLastOperations { - // create a copy of the UserLastOperations to modify - let newLastOperationData: UserLastOperations = { - sampleOperations: lastOperationData.sampleOperations, - lastSampleOperations: lastOperationData.lastSampleOperations, - previousOperationsQueue: lastOperationData.previousOperationsQueue.slice(), - }; - - let newSampleOperations = newLastOperationData.sampleOperations; - newSampleOperations -= newLastOperationData.previousOperationsQueue.shift()!; // removes and returns the first element of the queue - let operationsThisCycle = currentOperations - lastOperationData.lastSampleOperations; - newSampleOperations += operationsThisCycle; // add the operations this cycle to find out what our count for the interval should be (e.g operations in the last 10 seconds) - - // update values for the copy object - newLastOperationData.sampleOperations = newSampleOperations; - - newLastOperationData.previousOperationsQueue.push(operationsThisCycle); - newLastOperationData.lastSampleOperations = currentOperations; - - return newLastOperationData; - } - - /** - * getUserOperationsOrDefault() is a helper method to get the user's total - * operations for the CURRENT sampling interval. The method will return 0 - * if the username is not in the userOperations map. - * @param username the username to search the map for - * @returns the total number of operations recorded up to this sampling cycle. - */ - function getUserOperationsOrDefault(username: string): number { - return WebSocket.userOperations.get(username) === undefined ? 0 : WebSocket.userOperations.get(username)!; - } - - /** - * getCurrentStats() calculates the total stats for this cycle. In this case, - * getCurrentStats() returns an Array of UserStats[] objects describing - * the stats for each user - * @returns an array of UserStats storing data for each user at the current moment. 
- */ - function getCurrentStats(): UserStats[] { - let socketPairs: UserStats[] = []; - for (let [key, value] of WebSocket.socketMap) { - let username = value.split(' ')[0]; - let connectionTime = new Date(timeMap[username]); - - let connectionTimeString = connectionTime.toLocaleDateString() + ' ' + connectionTime.toLocaleTimeString(); - - if (!key.disconnected) { - let lastRecordedOperations = getLastOperationsOrDefault(username); - let currentUserOperationCount = getUserOperationsOrDefault(username); - - socketPairs.push({ - socketId: key.id, - username: username, - time: connectionTimeString.includes('Invalid Date') ? '' : connectionTimeString, - operations: WebSocket.userOperations.get(username) ? WebSocket.userOperations.get(username)! : 0, - rate: lastRecordedOperations.sampleOperations, - }); - lastUserOperations.set(username, updateLastOperations(lastRecordedOperations, currentUserOperationCount)); - } - } - return socketPairs; - } - - /** - * convertToCSV() is a helper method that stringifies a CSVStore object - * that can be written to the CSV file later. - * @param dataObject the object to stringify - * @returns the object as a string. - */ - function convertToCSV(dataObject: CSVStore): string { - return `${dataObject.USERNAME},${dataObject.ACTION},${dataObject.TIME}\n`; - } } diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 307aec6fc..3d8325da9 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -15,16 +15,15 @@ import { basename } from 'path'; import * as parse from 'pdf-parse'; import * as request from 'request-promise'; import { Duplex, Stream } from 'stream'; -import { filesDirectory, publicDirectory } from '.'; import { Utils } from '../Utils'; -import { Opt } from '../fields/Doc'; -import { ParsedPDF } from '../server/PdfTypes'; import { createIfNotExists } from './ActionUtilities'; import { AzureManager } from './ApiManagers/AzureManager'; -import { Directory, clientPathToFile, pathToDirectory, serverPathToFile } from './ApiManagers/UploadManager'; +import { ParsedPDF } from './PdfTypes'; import { AcceptableMedia, Upload } from './SharedMediaTypes'; +import { Directory, clientPathToFile, filesDirectory, pathToDirectory, publicDirectory, serverPathToFile } from './SocketData'; import { resolvedServerUrl } from './server_Initialization'; -const spawn = require('child_process').spawn; + +const { spawn } = require('child_process'); const { exec } = require('child_process'); const requestImageSize = require('../client/util/request-image-size'); @@ -42,7 +41,7 @@ export function InjectSize(filename: string, size: SizeSuffix) { } function isLocal() { - return /Dash-Web[0-9]*[\\\/]src[\\\/]server[\\\/]public[\\\/](.*)/; + return /Dash-Web[0-9]*[\\/]src[\\/]server[\\/]public[\\/](.*)/; } function usingAzure() { @@ -68,11 +67,21 @@ export namespace DashUploadUtils { const size = 'content-length'; const type = 'content-type'; - const BLOBSTORE_URL = process.env.BLOBSTORE_URL; - const RESIZE_FUNCTION_URL = process.env.RESIZE_FUNCTION_URL; + const { BLOBSTORE_URL, RESIZE_FUNCTION_URL } = process.env; + + const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; // TODO:glr - const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; //TODO:glr + export function fExists(name: string, destination: Directory) { + const destinationPath = serverPathToFile(destination, name); + return existsSync(destinationPath); + } + export function getAccessPaths(directory: 
Directory, fileName: string) { + return { + client: clientPathToFile(directory, fileName), + server: serverPathToFile(directory, fileName), + }; + } export async function concatVideos(filePaths: string[]): Promise { // make a list of paths to create the ordered text file for ffmpeg const inputListName = 'concat.txt'; @@ -80,14 +89,14 @@ export namespace DashUploadUtils { // make a list of paths to create the ordered text file for ffmpeg const filePathsText = filePaths.map(filePath => `file '${filePath}'`).join('\n'); // write the text file to the file system - await new Promise((res, reject) => + await new Promise((res, reject) => { writeFile(textFilePath, filePathsText, err => { if (err) { reject(); console.log(err); } else res(); - }) - ); + }); + }); // make output file name based on timestamp const outputFileName = `output-${Utils.GenerateGuid()}.mp4`; @@ -95,19 +104,19 @@ export namespace DashUploadUtils { const outputFilePath = path.join(pathToDirectory(Directory.videos), outputFileName); // concatenate the videos - await new Promise((resolve, reject) => + await new Promise((resolve, reject) => { ffmpeg() .input(textFilePath) .inputOptions(['-f concat', '-safe 0']) // .outputOptions('-c copy') - //.videoCodec("copy") + // .videoCodec("copy") .save(outputFilePath) .on('error', (err: any) => { console.log(err); reject(); }) - .on('end', resolve) - ); + .on('end', resolve); + }); // delete concat.txt from the file system unlinkSync(textFilePath); @@ -135,270 +144,76 @@ export namespace DashUploadUtils { }; } - export function QueryYoutubeProgress(videoId: string, user?: Express.User) { + export const uploadProgress = new Map(); + + export function QueryYoutubeProgress(videoId: string) { // console.log(`PROGRESS:${videoId}`, (user as any)?.email); return uploadProgress.get(videoId) ?? 'pending data upload'; } - export let uploadProgress = new Map(); - - export function uploadYoutube(videoId: string, overwriteId: string): Promise { - return new Promise>((res, rej) => { - const name = videoId; - const filepath = name.replace(/^-/, '__') + '.mp4'; - const finalPath = serverPathToFile(Directory.videos, filepath); - if (existsSync(finalPath)) { - uploadProgress.set(overwriteId, 'computing duration'); - exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => { - const time = Array.from(stdout.trim().split(':')).reverse(); - const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? 
Number(time[0]) : 0); - res(resolveExistingFile(name, filepath, Directory.videos, 'video/mp4', duration, undefined)); - }); - } else { - uploadProgress.set(overwriteId, 'starting download'); - const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']); - - ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString())); - - let errors = ''; - ytdlp.stderr.on('data', (data: any) => { - uploadProgress.set(overwriteId, 'error:' + data.toString()); - errors = data.toString(); - }); - - ytdlp.on('exit', function (code: any) { - if (code) { - res({ - source: { - size: 0, - filepath: name, - originalFilename: name, - newFilename: name, - mimetype: 'video', - hashAlgorithm: 'md5', - toJSON: () => ({ newFilename: name, filepath, mimetype: 'video', mtime: new Date(), size: 0, length: 0, originalFilename: name }), - }, - result: { name: 'failed youtube query', message: `Could not archive video. ${code ? errors : uploadProgress.get(videoId)}` }, - }); - } else { - uploadProgress.set(overwriteId, 'computing duration'); - exec(`yt-dlp-o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => { - const time = Array.from(stdout.trim().split(':')).reverse(); - const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0); - const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as 'md5', type: 'video/mp4' }; - const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), toJson: () => undefined as any }) }; - MoveParsedFile(file, Directory.videos).then(output => { - console.log('OUTPUT = ' + output); - res(output); - }); - }); - } + /** + * Basically just a wrapper around rename, which 'deletes' + * the file at the old path and 'moves' it to the new one. For simplicity, the + * caller just has to pass in the name of the target directory, and this function + * will resolve the actual target path from that. + * @param file The file to move + * @param destination One of the specific media asset directories into which to move it + * @param suffix If the file doesn't have a suffix and you want to provide it one + * to appear in the new location + */ + export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix?: string, text?: string, duration?: number, targetName?: string): Promise { + const { filepath } = file; + let name = targetName ?? path.basename(filepath); + suffix && (name += suffix); + return new Promise(resolve => { + const destinationPath = serverPathToFile(destination, name); + rename(filepath, destinationPath, error => { + resolve({ + source: file, + result: error ?? { + accessPaths: { + agnostic: getAccessPaths(destination, name), + }, + rawText: text, + duration, + }, }); - } + }); }); } - export async function upload(file: File, overwriteGuid?: string): Promise { - const isAzureOn = usingAzure(); - const { mimetype: type, filepath, originalFilename } = file; - const types = type?.split('/') ?? []; - // uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name. 
- - const category = types[0]; - let format = `.${types[1]}`; - console.log(green(`Processing upload of file (${originalFilename}) and format (${format}) with upload type (${type}) in category (${category}).`)); - - switch (category) { - case 'image': - if (imageFormats.includes(format)) { - const result = await UploadImage(filepath, basename(filepath)); - return { source: file, result }; - } - fs.unlink(filepath, () => {}); - return { source: file, result: { name: 'Unsupported image format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .jpg` } }; - case 'video': - if (format.includes('x-matroska')) { - console.log('case video'); - await new Promise(res => - ffmpeg(file.filepath) - .videoCodec('copy') // this will copy the data instead of reencode it - .save(file.filepath.replace('.mkv', '.mp4')) - .on('end', res) - .on('error', (e: any) => console.log(e)) - ); - file.filepath = file.filepath.replace('.mkv', '.mp4'); - format = '.mp4'; - } - if (format.includes('quicktime')) { - let abort = false; - await new Promise(res => - ffmpeg.ffprobe(file.filepath, (err: any, metadata: any) => { - if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) { - abort = true; - } - res(); - }) - ); - if (abort) { - // bcz: instead of aborting, we could convert the file using the code below to an mp4. Problem is that this takes a long time and will clog up the server. - // await new Promise(res => - // ffmpeg(file.path) - // .videoCodec('libx264') // this will copy the data instead of reencode it - // .audioCodec('mp2') - // .save(file.path.replace('.MOV', '.mp4').replace('.mov', '.mp4')) - // .on('end', res) - // ); - // file.path = file.path.replace('.mov', '.mp4').replace('.MOV', '.mp4'); - // format = '.mp4'; - fs.unlink(filepath, () => {}); - return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } }; - } - } - if (videoFormats.includes(format) || format.includes('.webm')) { - return MoveParsedFile(file, Directory.videos); - } - fs.unlink(filepath, () => {}); - return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } }; - case 'application': - if (applicationFormats.includes(format)) { - const val = UploadPdf(file); - if (val) return val; - } - case 'audio': - const components = format.split(';'); - if (components.length > 1) { - format = components[0]; - } - if (audioFormats.includes(format)) { - return UploadAudio(file, format); - } - fs.unlink(filepath, () => {}); - return { source: file, result: { name: 'Unsupported audio format', message: `Could not upload unsupported file (${originalFilename}). 
Please convert to an .mp3` } }; - case 'text': - if (types[1] == 'csv') { - return UploadCsv(file); - } - } - - console.log(red(`Ignoring unsupported file (${originalFilename}) with upload type (${type}).`)); - fs.unlink(filepath, () => {}); - return { source: file, result: new Error(`Could not upload unsupported file (${originalFilename}) with upload type (${type}).`) }; - } - - async function UploadPdf(file: File) { - const fileKey = (await md5File(file.filepath)) + '.pdf'; - const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`; - if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) { - fs.unlink(file.filepath, () => {}); - return new Promise(res => { - const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`; - const readStream = createReadStream(serverPathToFile(Directory.text, textFilename)); - var rawText = ''; - readStream - .on('data', chunk => (rawText += chunk.toString())) // - .on('end', () => res(resolveExistingFile(file.originalFilename ?? '', fileKey, Directory.pdfs, file.mimetype, undefined, rawText))); - }); - } - const dataBuffer = readFileSync(file.filepath); - const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e); - if (!result.code) { - await new Promise((resolve, reject) => { - const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename)); - writeStream.write(result?.text, error => (error ? reject(error) : resolve())); + const parseExifData = async (source: string) => { + const image = await request.get(source, { encoding: null }); + const { /* data, */ error } = await new Promise<{ data: any; error: any }>(resolve => { + // eslint-disable-next-line no-new + new ExifImage({ image }, (error, data) => { + const reason = (error as any)?.code; + resolve({ data, error: reason }); }); - return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey); - } - return { source: file, result: { name: 'faile pdf pupload', message: `Could not upload (${file.originalFilename}).${result.message}` } }; - } - - async function UploadCsv(file: File) { - const { filepath: sourcePath } = file; - // read the file as a string - const data = readFileSync(sourcePath, 'utf8'); - // split the string into an array of lines - return MoveParsedFile(file, Directory.csv, undefined, data); - // console.log(csvParser(data)); - } - - const manualSuffixes = ['.webm']; - - async function UploadAudio(file: File, format: string) { - const suffix = manualSuffixes.includes(format) ? format : undefined; - return MoveParsedFile(file, Directory.audio, suffix); - } - - /** - * Uploads an image specified by the @param source to Dash's /public/files/ - * directory, and returns information generated during that upload - * - * @param {string} source is either the absolute path of an already uploaded image or - * the url of a remote image - * @param {string} filename dictates what to call the image. If not specified, - * the name {@param prefix}_upload_{GUID} - * @param {string} prefix is a string prepended to the generated image name in the - * event that @param filename is not specified - * - * @returns {ImageUploadInformation | Error} This method returns - * 1) the paths to the uploaded images (plural due to resizing) - * 2) the exif data embedded in the image, or the error explaining why exif couldn't be parsed - * 3) the size of the image, in bytes (4432130) - * 4) the content type of the image, i.e. image/(jpeg | png | ...) 
- */ - export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise => { - const metadata = await InspectImage(source); - if (metadata instanceof Error) { - return { name: metadata.name, message: metadata.message }; - } - const outputFile = filename || metadata.filename || ''; - - return UploadInspectedImage(metadata, outputFile, prefix); + }); + return error ? { data: undefined, error } : { data: await exifr.parse(image), error }; }; - - export async function buildFileDirectories() { - if (!existsSync(publicDirectory)) { - console.error('\nPlease ensure that the following directory exists...\n'); - console.log(publicDirectory); - process.exit(0); - } - if (!existsSync(filesDirectory)) { - console.error('\nPlease ensure that the following directory exists...\n'); - console.log(filesDirectory); - process.exit(0); - } - const pending = Object.keys(Directory).map(sub => createIfNotExists(`${filesDirectory}/${sub}`)); - return Promise.all(pending); - } - - export interface RequestedImageSize { - width: number; - height: number; - type: string; - } - - export interface ImageResizer { - width: number; - suffix: SizeSuffix; - } - /** * Based on the url's classification as local or remote, gleans * as much information as possible about the specified image * * @param source is the path or url to the image in question */ - export const InspectImage = async (source: string): Promise => { - let rawMatches: RegExpExecArray | null; + export const InspectImage = async (sourceIn: string): Promise => { + let source = sourceIn; + const rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source); let filename: string | undefined; /** * Just more edge case handling: this if clause handles the case where an image onto the canvas that * is represented by a base64 encoded data uri, rather than a proper file. We manually write it out * to the server and then carry on as if it had been put there by the Formidable form / file parser. */ - if ((rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source)) !== null) { + if (rawMatches !== null) { const [ext, data] = rawMatches.slice(1, 3); - const resolved = (filename = `upload_${Utils.GenerateGuid()}.${ext}`); + filename = `upload_${Utils.GenerateGuid()}.${ext}`; + const resolved = filename; if (usingAzure()) { - const response = await AzureManager.UploadBase64ImageBlob(resolved, data); + await AzureManager.UploadBase64ImageBlob(resolved, data); source = `${AzureManager.BASE_STRING}/${resolved}`; } else { source = `${resolvedServerUrl}${clientPathToFile(Directory.images, resolved)}`; @@ -438,7 +253,7 @@ export namespace DashUploadUtils { // Use the request library to parse out file level image information in the headers const { headers } = await new Promise((resolve, reject) => { - return request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res))); + request.head(resolvedUrl, (error, res) => (error ? 
reject(error) : resolve(res))); }).catch(e => { console.log('Error processing headers: ', e); }); @@ -449,6 +264,7 @@ export namespace DashUploadUtils { // Bundle up the information into an object return { source, + // eslint-disable-next-line radix contentSize: parseInt(headers[size]), contentType: headers[type], nativeWidth, @@ -462,49 +278,71 @@ export namespace DashUploadUtils { } }; + async function correctRotation(imgSourcePath: string) { + const buffer = fs.readFileSync(imgSourcePath); + try { + return (await autorotate.rotate(buffer, { quality: 30 })).buffer; + } catch (e) { + return buffer; + } + } + /** - * Basically just a wrapper around rename, which 'deletes' - * the file at the old path and 'moves' it to the new one. For simplicity, the - * caller just has to pass in the name of the target directory, and this function - * will resolve the actual target path from that. - * @param file The file to move - * @param destination One of the specific media asset directories into which to move it - * @param suffix If the file doesn't have a suffix and you want to provide it one - * to appear in the new location - */ - export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix: string | undefined = undefined, text?: string, duration?: number, targetName?: string): Promise { - const { filepath } = file; - let name = targetName ?? path.basename(filepath); - suffix && (name += suffix); - return new Promise(resolve => { - const destinationPath = serverPathToFile(destination, name); - rename(filepath, destinationPath, error => { - resolve({ - source: file, - result: error - ? error - : { - accessPaths: { - agnostic: getAccessPaths(destination, name), - }, - rawText: text, - duration, - }, - }); - }); - }); + * define the resizers to use + * @param ext the extension + * @returns an array of resize descriptions + */ + export function imageResampleSizes(ext: string): DashUploadUtils.ImageResizer[] { + return [ + { suffix: SizeSuffix.Original, width: 0 }, + ...[...(AcceptableMedia.imageFormats.includes(ext.toLowerCase()) ? Object.values(DashUploadUtils.Sizes) : [])].map(({ suffix, width }) => ({ + width, + suffix, + })), + ]; } - export function fExists(name: string, destination: Directory) { - const destinationPath = serverPathToFile(destination, name); - return existsSync(destinationPath); - } + /** + * outputResizedImages takes in a readable stream and resizes the images according to the sizes defined at the top of this file. + * + * The new images will be saved to the server with the corresponding prefixes. + * @param imgSourcePath file path for image being resized + * @param outputFileName the basename (No suffix) of the outputted file. + * @param outputDirectory the directory to output to, usually Directory.Images + * @returns a map with suffixes as keys and resized filenames as values. 
+ */ + export async function outputResizedImages(imgSourcePath: string, outputFileName: string, outputDirectory: string) { + const writtenFiles: { [suffix: string]: string } = {}; + const sizes = imageResampleSizes(path.extname(outputFileName)); - export function getAccessPaths(directory: Directory, fileName: string) { - return { - client: clientPathToFile(directory, fileName), - server: serverPathToFile(directory, fileName), + const imgBuffer = await correctRotation(imgSourcePath); + const imgReadStream = new Duplex(); + imgReadStream.push(imgBuffer); + imgReadStream.push(null); + const outputPath = (suffix: SizeSuffix) => { + writtenFiles[suffix] = InjectSize(outputFileName, suffix); + return path.resolve(outputDirectory, writtenFiles[suffix]); }; + await Promise.all( + sizes.filter(({ width }) => !width).map(({ suffix }) => + new Promise(res => { + imgReadStream.pipe(createWriteStream(outputPath(suffix))).on('close', res); + }) + )); // prettier-ignore + + return Jimp.read(imgBuffer) + .then(async (imgIn: any) => { + let img = imgIn; + await Promise.all( sizes.filter(({ width }) => width).map(({ width, suffix }) => { + img = img.resize(width, Jimp.AUTO).write(outputPath(suffix)); + return img; + } )); // prettier-ignore + return writtenFiles; + }) + .catch((e: any) => { + console.log('ERROR' + e); + return writtenFiles; + }); } /** @@ -555,119 +393,265 @@ export namespace DashUploadUtils { } catch (e) { // input is a blob or other, try reading it to create a metadata source file. const reqSource = request(metadata.source); - let readStream: Stream = reqSource instanceof Promise ? await reqSource : reqSource; + const readStream: Stream = reqSource instanceof Promise ? await reqSource : reqSource; const readSource = `${prefix}upload_${Utils.GenerateGuid()}.${metadata.contentType.split('/')[1].toLowerCase()}`; - await new Promise((res, rej) => + await new Promise((res, rej) => { readStream .pipe(createWriteStream(readSource)) .on('close', () => res()) - .on('error', () => rej()) - ); + .on('error', () => rej()); + }); writtenFiles = await outputResizedImages(readSource, resolved, pathToDirectory(Directory.images)); - fs.unlink(readSource, err => console.log("Couldn't unlink temporary image file:" + readSource)); + fs.unlink(readSource, err => console.log("Couldn't unlink temporary image file:" + readSource, err)); } } - for (const suffix of Object.keys(writtenFiles)) { + Array.from(Object.keys(writtenFiles)).forEach(suffix => { information.accessPaths[suffix] = getAccessPaths(images, writtenFiles[suffix]); - } + }); if (isLocal().test(source) && cleanUp) { unlinkSync(source); } return information; }; - const bufferConverterRec = (layer: any) => { - for (const key of Object.keys(layer)) { - const val: any = layer[key]; - if (val instanceof Buffer) { - layer[key] = val.toString(); - } else if (Array.isArray(val) && typeof val[0] === 'number') { - layer[key] = Buffer.from(val).toString(); - } else if (typeof val === 'object') { - bufferConverterRec(val); - } + /** + * Uploads an image specified by the @param source to Dash's /public/files/ + * directory, and returns information generated during that upload + * + * @param {string} source is either the absolute path of an already uploaded image or + * the url of a remote image + * @param {string} filename dictates what to call the image. 
If not specified, + * the name {@param prefix}_upload_{GUID} + * @param {string} prefix is a string prepended to the generated image name in the + * event that @param filename is not specified + * + * @returns {ImageUploadInformation | Error} This method returns + * 1) the paths to the uploaded images (plural due to resizing) + * 2) the exif data embedded in the image, or the error explaining why exif couldn't be parsed + * 3) the size of the image, in bytes (4432130) + * 4) the content type of the image, i.e. image/(jpeg | png | ...) + */ + export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise => { + const metadata = await InspectImage(source); + if (metadata instanceof Error) { + return { name: metadata.name, message: metadata.message }; } + const outputFile = filename || metadata.filename || ''; + + return UploadInspectedImage(metadata, outputFile, prefix); }; - const parseExifData = async (source: string) => { - const image = await request.get(source, { encoding: null }); - const { data, error } = await new Promise<{ data: any; error: any }>(resolve => { - new ExifImage({ image }, (error, data) => { - let reason: Opt = undefined; - if (error) { - reason = (error as any).code; - } - resolve({ data, error: reason }); - }); + export function uploadYoutube(videoId: string, overwriteId: string): Promise { + return new Promise>(res => { + const name = videoId; + const filepath = name.replace(/^-/, '__') + '.mp4'; + const finalPath = serverPathToFile(Directory.videos, filepath); + if (existsSync(finalPath)) { + uploadProgress.set(overwriteId, 'computing duration'); + exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any /* , stderr: any */) => { + const time = Array.from(stdout.trim().split(':')).reverse(); + const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0); + res(resolveExistingFile(name, filepath, Directory.videos, 'video/mp4', duration, undefined)); + }); + } else { + uploadProgress.set(overwriteId, 'starting download'); + const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']); + + ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString())); + + let errors = ''; + ytdlp.stderr.on('data', (data: any) => { + uploadProgress.set(overwriteId, 'error:' + data.toString()); + errors = data.toString(); + }); + + ytdlp.on('exit', (code: any) => { + if (code) { + res({ + source: { + size: 0, + filepath: name, + originalFilename: name, + newFilename: name, + mimetype: 'video', + hashAlgorithm: 'md5', + toJSON: () => ({ newFilename: name, filepath, mimetype: 'video', mtime: new Date(), size: 0, length: 0, originalFilename: name }), + }, + result: { name: 'failed youtube query', message: `Could not archive video. ${code ? errors : uploadProgress.get(videoId)}` }, + }); + } else { + uploadProgress.set(overwriteId, 'computing duration'); + exec(`yt-dlp-o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (/* error: any, stdout: any, stderr: any */) => { + // const time = Array.from(stdout.trim().split(':')).reverse(); + // const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? 
Number(time[0]) : 0); + const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as 'md5', type: 'video/mp4' }; + const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), toJson: () => undefined as any }) }; + MoveParsedFile(file, Directory.videos).then(output => res(output)); + }); + } + }); + } }); - //data && bufferConverterRec(data); - return error ? { data: undefined, error } : { data: await exifr.parse(image), error }; - }; + } + const manualSuffixes = ['.webm']; - const { pngs, jpgs, webps, tiffs } = AcceptableMedia; - const pngOptions = { - compressionLevel: 9, - adaptiveFiltering: true, - force: true, - }; + async function UploadAudio(file: File, format: string) { + const suffix = manualSuffixes.includes(format) ? format : undefined; + return MoveParsedFile(file, Directory.audio, suffix); + } - async function correctRotation(imgSourcePath: string) { - const buffer = fs.readFileSync(imgSourcePath); - try { - return (await autorotate.rotate(buffer, { quality: 30 })).buffer; - } catch (e) { - return buffer; + async function UploadPdf(file: File) { + const fileKey = (await md5File(file.filepath)) + '.pdf'; + const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`; + if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) { + fs.unlink(file.filepath, () => {}); + return new Promise(res => { + const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`; + const readStream = createReadStream(serverPathToFile(Directory.text, textFilename)); + let rawText = ''; + readStream + .on('data', chunk => { + rawText += chunk.toString(); + }) + .on('end', () => res(resolveExistingFile(file.originalFilename ?? '', fileKey, Directory.pdfs, file.mimetype, undefined, rawText))); + }); } + const dataBuffer = readFileSync(file.filepath); + const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e); + if (!result.code) { + await new Promise((resolve, reject) => { + const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename)); + writeStream.write(result?.text, error => (error ? reject(error) : resolve())); + }); + return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey); + } + return { source: file, result: { name: 'faile pdf pupload', message: `Could not upload (${file.originalFilename}).${result.message}` } }; } - /** - * outputResizedImages takes in a readable stream and resizes the images according to the sizes defined at the top of this file. - * - * The new images will be saved to the server with the corresponding prefixes. - * @param imgSourcePath file path for image being resized - * @param outputFileName the basename (No suffix) of the outputted file. - * @param outputDirectory the directory to output to, usually Directory.Images - * @returns a map with suffixes as keys and resized filenames as values. 
- */ - export async function outputResizedImages(imgSourcePath: string, outputFileName: string, outputDirectory: string) { - const writtenFiles: { [suffix: string]: string } = {}; - const sizes = imageResampleSizes(path.extname(outputFileName)); + async function UploadCsv(file: File) { + const { filepath: sourcePath } = file; + // read the file as a string + const data = readFileSync(sourcePath, 'utf8'); + // split the string into an array of lines + return MoveParsedFile(file, Directory.csv, undefined, data); + // console.log(csvParser(data)); + } - const imgBuffer = await correctRotation(imgSourcePath); - const imgReadStream = new Duplex(); - imgReadStream.push(imgBuffer); - imgReadStream.push(null); - const outputPath = (suffix: SizeSuffix) => path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix))); - await Promise.all( - sizes.filter(({ width }) => !width).map(({ suffix }) => - new Promise(res => imgReadStream.pipe(createWriteStream(outputPath(suffix))).on('close', res)) - )); // prettier-ignore + export async function upload(file: File /* , overwriteGuid?: string */): Promise { + // const isAzureOn = usingAzure(); + const { mimetype, filepath, originalFilename } = file; + const types = mimetype?.split('/') ?? []; + // uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name. - return Jimp.read(imgBuffer) - .then(async (img: any) => { - await Promise.all( sizes.filter(({ width }) => width).map(({ width, suffix }) => - img = img.resize(width, Jimp.AUTO).write(outputPath(suffix)) - )); // prettier-ignore - return writtenFiles; - }) - .catch((e: any) => { - console.log('ERROR' + e); - return writtenFiles; - }); + const category = types[0]; + let format = `.${types[1]}`; + console.log(green(`Processing upload of file (${originalFilename}) and format (${format}) with upload type (${mimetype}) in category (${category}).`)); + + switch (category) { + case 'image': + if (imageFormats.includes(format)) { + const result = await UploadImage(filepath, basename(filepath)); + return { source: file, result }; + } + fs.unlink(filepath, () => {}); + return { source: file, result: { name: 'Unsupported image format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .jpg` } }; + case 'video': { + const vidFile = file; + if (format.includes('x-matroska')) { + await new Promise(res => { + ffmpeg(vidFile.filepath) + .videoCodec('copy') // this will copy the data instead of reencode it + .save(vidFile.filepath.replace('.mkv', '.mp4')) + .on('end', res) + .on('error', (e: any) => console.log(e)); + }); + vidFile.filepath = vidFile.filepath.replace('.mkv', '.mp4'); + format = '.mp4'; + } + if (format.includes('quicktime')) { + let abort = false; + await new Promise(res => { + ffmpeg.ffprobe(vidFile.filepath, (err: any, metadata: any) => { + if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) { + abort = true; + } + res(); + }); + }); + if (abort) { + // bcz: instead of aborting, we could convert the file using the code below to an mp4. Problem is that this takes a long time and will clog up the server. 
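+                        // (Most browsers cannot decode HEVC/H.265 in a <video> element, so keeping the raw
+                        // QuickTime file would give the user a video document that never plays; hence the
+                        // unsupported-format response below.)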
+ // await new Promise(res => + // ffmpeg(file.path) + // .videoCodec('libx264') // this will copy the data instead of reencode it + // .audioCodec('mp2') + // .save(vidFile.path.replace('.MOV', '.mp4').replace('.mov', '.mp4')) + // .on('end', res) + // ); + // vidFile.path = vidFile.path.replace('.mov', '.mp4').replace('.MOV', '.mp4'); + // format = '.mp4'; + fs.unlink(filepath, () => {}); + return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } }; + } + } + if (videoFormats.includes(format) || format.includes('.webm')) { + return MoveParsedFile(vidFile, Directory.videos); + } + fs.unlink(filepath, () => {}); + return { source: vidFile, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } }; + } + case 'application': + if (applicationFormats.includes(format)) { + const val = UploadPdf(file); + if (val) return val; + } + break; + case 'audio': { + const components = format.split(';'); + if (components.length > 1) { + [format] = components; + } + if (audioFormats.includes(format)) { + return UploadAudio(file, format); + } + fs.unlink(filepath, () => {}); + return { source: file, result: { name: 'Unsupported audio format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp3` } }; + } + case 'text': + if (types[1] === 'csv') { + return UploadCsv(file); + } + break; + default: + } + + console.log(red(`Ignoring unsupported file (${originalFilename}) with upload type (${mimetype}).`)); + fs.unlink(filepath, () => {}); + return { source: file, result: new Error(`Could not upload unsupported file (${originalFilename}) with upload type (${mimetype}).`) }; } - /** - * define the resizers to use - * @param ext the extension - * @returns an array of resize descriptions - */ - export function imageResampleSizes(ext: string): DashUploadUtils.ImageResizer[] { - return [ - { suffix: SizeSuffix.Original, width: 0 }, - ...[...(AcceptableMedia.imageFormats.includes(ext.toLowerCase()) ? 
Object.values(DashUploadUtils.Sizes) : [])].map(({ suffix, width }) => ({ - width, - suffix, - })), - ]; + export async function buildFileDirectories() { + if (!existsSync(publicDirectory)) { + console.error('\nPlease ensure that the following directory exists...\n'); + console.log(publicDirectory); + process.exit(0); + } + if (!existsSync(filesDirectory)) { + console.error('\nPlease ensure that the following directory exists...\n'); + console.log(filesDirectory); + process.exit(0); + } + const pending = Object.keys(Directory).map(sub => createIfNotExists(`${filesDirectory}/${sub}`)); + return Promise.all(pending); + } + + export interface RequestedImageSize { + width: number; + height: number; + type: string; + } + + export interface ImageResizer { + width: number; + suffix: SizeSuffix; } } diff --git a/src/server/Message.ts b/src/server/Message.ts index 8f0af08bc..03150c841 100644 --- a/src/server/Message.ts +++ b/src/server/Message.ts @@ -1,22 +1,47 @@ -import { Point } from "../pen-gestures/ndollar"; -import { Utils } from "../Utils"; +import * as uuid from 'uuid'; +import { Point } from '../pen-gestures/ndollar'; +function GenerateDeterministicGuid(seed: string): string { + return uuid.v5(seed, uuid.v5.URL); +} +// eslint-disable-next-line @typescript-eslint/no-unused-vars export class Message { private _name: string; private _guid: string; constructor(name: string) { this._name = name; - this._guid = Utils.GenerateDeterministicGuid(name); + this._guid = GenerateDeterministicGuid(name); } - get Name(): string { return this._name; } - get Message(): string { return this._guid; } + get Name(): string { + return this._name; + } + get Message(): string { + return this._guid; + } } export enum Types { - Number, List, Key, Image, Web, Document, Text, Icon, RichText, DocumentReference, - Html, Video, Audio, Ink, PDF, Tuple, Boolean, Script, Templates + Number, + List, + Key, + Image, + Web, + Document, + Text, + Icon, + RichText, + DocumentReference, + Html, + Video, + Audio, + Ink, + PDF, + Tuple, + Boolean, + Script, + Templates, } export interface Transferable { @@ -26,7 +51,9 @@ export interface Transferable { } export enum YoutubeQueryTypes { - Channels, SearchVideo, VideoDetails + Channels, + SearchVideo, + VideoDetails, } export interface YoutubeQueryInput { @@ -45,7 +72,7 @@ export interface Diff extends Reference { export interface GestureContent { readonly points: Array; - readonly bounds: { right: number, left: number, bottom: number, top: number, width: number, height: number }; + readonly bounds: { right: number; left: number; bottom: number; top: number; width: number; height: number }; readonly width?: string; readonly color?: string; } @@ -73,27 +100,27 @@ export interface RoomMessage { } export namespace MessageStore { - export const Foo = new Message("Foo"); - export const Bar = new Message("Bar"); - export const SetField = new Message("Set Field"); // send Transferable (no reply) - export const GetField = new Message("Get Field"); // send string 'id' get Transferable back - export const GetFields = new Message("Get Fields"); // send string[] of 'id' get Transferable[] back - export const GetDocument = new Message("Get Document"); - export const DeleteAll = new Message("Delete All"); - export const ConnectionTerminated = new Message("Connection Terminated"); - - export const GesturePoints = new Message("Gesture Points"); - export const MobileInkOverlayTrigger = new Message("Trigger Mobile Ink Overlay"); - export const UpdateMobileInkOverlayPosition = new Message("Update 
Mobile Ink Overlay Position"); - export const MobileDocumentUpload = new Message("Upload Document From Mobile"); - - export const GetRefField = new Message("Get Ref Field"); - export const GetRefFields = new Message("Get Ref Fields"); - export const UpdateField = new Message("Update Ref Field"); - export const CreateField = new Message("Create Ref Field"); - export const YoutubeApiQuery = new Message("Youtube Api Query"); - export const DeleteField = new Message("Delete field"); - export const DeleteFields = new Message("Delete fields"); - - export const UpdateStats = new Message("updatestats"); + export const Foo = new Message('Foo'); + export const Bar = new Message('Bar'); + export const SetField = new Message('Set Field'); // send Transferable (no reply) + export const GetField = new Message('Get Field'); // send string 'id' get Transferable back + export const GetFields = new Message('Get Fields'); // send string[] of 'id' get Transferable[] back + export const GetDocument = new Message('Get Document'); + export const DeleteAll = new Message('Delete All'); + export const ConnectionTerminated = new Message('Connection Terminated'); + + export const GesturePoints = new Message('Gesture Points'); + export const MobileInkOverlayTrigger = new Message('Trigger Mobile Ink Overlay'); + export const UpdateMobileInkOverlayPosition = new Message('Update Mobile Ink Overlay Position'); + export const MobileDocumentUpload = new Message('Upload Document From Mobile'); + + export const GetRefField = new Message('Get Ref Field'); + export const GetRefFields = new Message('Get Ref Fields'); + export const UpdateField = new Message('Update Ref Field'); + export const CreateField = new Message('Create Ref Field'); + export const YoutubeApiQuery = new Message('Youtube Api Query'); + export const DeleteField = new Message('Delete field'); + export const DeleteFields = new Message('Delete fields'); + + export const UpdateStats = new Message('updatestats'); } diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 540bca776..d8e0455f6 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -1,9 +1,9 @@ import { cyan, green, red } from 'colors'; import { Express, Request, Response } from 'express'; -import { AdminPriviliges } from '.'; import { Utils } from '../Utils'; -import { DashUserModel } from './authentication/DashUserModel'; import RouteSubscriber from './RouteSubscriber'; +import { AdminPrivileges } from './SocketData'; +import { DashUserModel } from './authentication/DashUserModel'; export enum Method { GET, @@ -21,6 +21,34 @@ export type SecureHandler = (core: AuthorizedCore) => any | Promise; export type PublicHandler = (core: CoreArguments) => any | Promise; export type ErrorHandler = (core: CoreArguments & { error: any }) => any | Promise; +export const STATUS = { + OK: 200, + BAD_REQUEST: 400, + EXECUTION_ERROR: 500, + PERMISSION_DENIED: 403, +}; + +export function _error(res: Response, message: string, error?: any) { + console.error(message, error); + res.statusMessage = message; + res.status(STATUS.EXECUTION_ERROR).send(error); +} + +export function _success(res: Response, body: any) { + res.status(STATUS.OK).send(body); +} + +export function _invalid(res: Response, message: string) { + res.statusMessage = message; + res.status(STATUS.BAD_REQUEST).send(); +} + +export function _permissionDenied(res: Response, message?: string) { + if (message) { + res.statusMessage = message; + } + res.status(STATUS.PERMISSION_DENIED).send(`Permission Denied! 
${message}`); +} export interface RouteInitializer { method: Method; subscription: string | RouteSubscriber | (string | RouteSubscriber)[]; @@ -71,7 +99,7 @@ export default class RouteManager { console.log('please remove all duplicate routes before continuing'); } if (malformedCount) { - console.log(`please ensure all routes adhere to ^\/$|^\/[A-Za-z]+(\/\:[A-Za-z?_]+)*$`); + console.log(`please ensure all routes adhere to ^/$|^/[A-Za-z]+(/:[A-Za-z?_]+)*$`); } process.exit(1); } else { @@ -94,7 +122,7 @@ export default class RouteManager { typeof initializer.subscription === 'string' && RouteManager.routes.push(initializer.subscription); initializer.subscription instanceof RouteSubscriber && RouteManager.routes.push(initializer.subscription.root); initializer.subscription instanceof Array && - initializer.subscription.map(sub => { + initializer.subscription.forEach(sub => { typeof sub === 'string' && RouteManager.routes.push(sub); sub instanceof RouteSubscriber && RouteManager.routes.push(sub.root); }); @@ -120,23 +148,23 @@ export default class RouteManager { }; if (user) { if (requireAdmin && isRelease && process.env.PASSWORD) { - if (AdminPriviliges.get(user.id)) { - AdminPriviliges.delete(user.id); + if (AdminPrivileges.get(user.id)) { + AdminPrivileges.delete(user.id); } else { - return res.redirect(`/admin/${req.originalUrl.substring(1).replace('/', ':')}`); + res.redirect(`/admin/${req.originalUrl.substring(1).replace('/', ':')}`); + return; } } await tryExecute(secureHandler, { ...core, user }); - } else { - //req.session!.target = target; - if (publicHandler) { - await tryExecute(publicHandler, core); - if (!res.headersSent) { - // res.redirect("/login"); - } - } else { - res.redirect('/login'); + } + // req.session!.target = target; + else if (publicHandler) { + await tryExecute(publicHandler, core); + if (!res.headersSent) { + // res.redirect("/login"); } + } else { + res.redirect('/login'); } setTimeout(() => { if (!res.headersSent) { @@ -153,7 +181,7 @@ export default class RouteManager { } else { route = subscriber.build; } - if (!/^\/$|^\/[A-Za-z\*]+(\/\:[A-Za-z?_\*]+)*$/g.test(route)) { + if (!/^\/$|^\/[A-Za-z*]+(\/:[A-Za-z?_*]+)*$/g.test(route)) { this.failedRegistrations.push({ reason: RegistrationError.Malformed, route, @@ -180,6 +208,7 @@ export default class RouteManager { case Method.POST: this.server.post(route, supervised); break; + default: } } }; @@ -190,32 +219,3 @@ export default class RouteManager { } }; } - -export const STATUS = { - OK: 200, - BAD_REQUEST: 400, - EXECUTION_ERROR: 500, - PERMISSION_DENIED: 403, -}; - -export function _error(res: Response, message: string, error?: any) { - console.error(message, error); - res.statusMessage = message; - res.status(STATUS.EXECUTION_ERROR).send(error); -} - -export function _success(res: Response, body: any) { - res.status(STATUS.OK).send(body); -} - -export function _invalid(res: Response, message: string) { - res.statusMessage = message; - res.status(STATUS.BAD_REQUEST).send(); -} - -export function _permission_denied(res: Response, message?: string) { - if (message) { - res.statusMessage = message; - } - res.status(STATUS.PERMISSION_DENIED).send(`Permission Denied! 
${message}`); -} diff --git a/src/server/SocketData.ts b/src/server/SocketData.ts new file mode 100644 index 000000000..e857996e5 --- /dev/null +++ b/src/server/SocketData.ts @@ -0,0 +1,35 @@ +import { Socket } from 'socket.io'; +import * as path from 'path'; + +export const timeMap: { [id: string]: number } = {}; +export const userOperations = new Map(); +export const socketMap = new Map(); + +export const publicDirectory = path.resolve(__dirname, 'public'); +export const filesDirectory = path.resolve(publicDirectory, 'files'); + +export const AdminPrivileges: Map = new Map(); + +export const resolvedPorts: { server: number; socket: number } = { server: 1050, socket: 4321 }; + +export enum Directory { + parsed_files = 'parsed_files', + images = 'images', + videos = 'videos', + pdfs = 'pdfs', + text = 'text', + audio = 'audio', + csv = 'csv', +} + +export function serverPathToFile(directory: Directory, filename: string) { + return path.normalize(`${filesDirectory}/${directory}/${filename}`); +} + +export function pathToDirectory(directory: Directory) { + return path.normalize(`${filesDirectory}/${directory}`); +} + +export function clientPathToFile(directory: Directory, filename: string) { + return `/files/${directory}/${filename}`; +} diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 3940b7a3d..55e5fd7c0 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -1,9 +1,9 @@ -import { google } from 'googleapis'; -import { OAuth2Client, Credentials, OAuth2ClientOptions } from 'google-auth-library'; -import { Opt } from '../../../fields/Doc'; import { GaxiosResponse } from 'gaxios'; -import * as request from 'request-promise'; +import { Credentials, OAuth2Client, OAuth2ClientOptions } from 'google-auth-library'; +import { google } from 'googleapis'; import * as qs from 'query-string'; +import * as request from 'request-promise'; +import { Opt } from '../../../fields/Doc'; import { Database } from '../../database'; import { GoogleCredentialsLoader } from './CredentialsLoader'; diff --git a/src/server/authentication/AuthenticationManager.ts b/src/server/authentication/AuthenticationManager.ts index 9c1525df0..b5d1dba28 100644 --- a/src/server/authentication/AuthenticationManager.ts +++ b/src/server/authentication/AuthenticationManager.ts @@ -6,7 +6,7 @@ import './Passport'; import * as async from 'async'; import * as nodemailer from 'nodemailer'; import * as c from 'crypto'; -import { emptyFunction, Utils } from '../../Utils'; +import { emptyFunction, Utils } from '../../ClientUtils'; import { MailOptions } from 'nodemailer/lib/stream-transport'; import { check, validationResult } from 'express-validator'; diff --git a/src/server/authentication/DashUserModel.ts b/src/server/authentication/DashUserModel.ts index dbb7a79ed..3bc21ecb6 100644 --- a/src/server/authentication/DashUserModel.ts +++ b/src/server/authentication/DashUserModel.ts @@ -1,9 +1,8 @@ -//@ts-ignore import * as bcrypt from 'bcrypt-nodejs'; -//@ts-ignore import * as mongoose from 'mongoose'; import { Utils } from '../../Utils'; +type comparePasswordFunction = (candidatePassword: string, cb: (err: any, isMatch: any) => void) => void; export type DashUserModel = mongoose.Document & { email: String; password: string; @@ -26,8 +25,6 @@ export type DashUserModel = mongoose.Document & { comparePassword: comparePasswordFunction; }; -type comparePasswordFunction = (candidatePassword: string, cb: (err: any, isMatch: 
any) => void) => void; - export type AuthToken = { accessToken: string; kind: string; @@ -75,16 +72,19 @@ userSchema.pre('save', function save(next) { bcrypt.hash( user.password, salt, - () => void {}, + () => {}, (err: mongoose.Error, hash: string) => { if (err) { return next(err); } user.password = hash; next(); + return undefined; } ); + return undefined; }); + return undefined; }); const comparePassword: comparePasswordFunction = function (this: DashUserModel, candidatePassword, cb) { diff --git a/src/server/database.ts b/src/server/database.ts index 3a087ce38..3a28dc87e 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -9,6 +9,7 @@ import { Transferable } from './Message'; import { Upload } from './SharedMediaTypes'; export namespace Database { + // eslint-disable-next-line import/no-mutable-exports export let disconnect: Function; class DocSchema implements mongodb.BSON.Document { @@ -30,7 +31,10 @@ export namespace Database { export async function tryInitializeConnection() { try { const { connection } = mongoose; - disconnect = async () => new Promise(resolve => connection.close().then(resolve)); + disconnect = async () => + new Promise(resolve => { + connection.close().then(resolve); + }); if (connection.readyState === ConnectionStates.disconnected) { await new Promise((resolve, reject) => { connection.on('error', reject); @@ -39,7 +43,7 @@ export namespace Database { resolve(); }); mongoose.connect(url, { - //useNewUrlParser: true, + // useNewUrlParser: true, dbName: schema, // reconnectTries: Number.MAX_VALUE, // reconnectInterval: 1000, @@ -81,8 +85,8 @@ export namespace Database { const collection = this.db.collection(collectionName); const prom = this.currentWrites[id]; let newProm: Promise; - const run = (): Promise => { - return new Promise(resolve => { + const run = (): Promise => + new Promise(resolve => { collection .updateOne({ _id: id }, value, { upsert }) .then(res => { @@ -96,13 +100,12 @@ export namespace Database { console.log('MOngo UPDATE ONE ERROR:', error); }); }); - }; newProm = prom ? prom.then(run) : run(); this.currentWrites[id] = newProm; return newProm; - } else { - this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName)); } + this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName)); + return undefined; } public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, upsert = true, collectionName = DocumentsCollection) { @@ -110,8 +113,8 @@ export namespace Database { const collection = this.db.collection(collectionName); const prom = this.currentWrites[id]; let newProm: Promise; - const run = (): Promise => { - return new Promise(resolve => { + const run = (): Promise => + new Promise(resolve => { collection.replaceOne({ _id: id }, value, { upsert }).then(res => { if (this.currentWrites[id] === newProm) { delete this.currentWrites[id]; @@ -120,7 +123,6 @@ export namespace Database { callback(undefined as any, res as any); }); }); - }; newProm = prom ? 
prom.then(run) : run(); this.currentWrites[id] = newProm; } else { @@ -132,8 +134,10 @@ export namespace Database { const cursor = this.db?.listCollections(); const collectionNames: string[] = []; if (cursor) { + // eslint-disable-next-line no-await-in-loop while (await cursor.hasNext()) { - const collection: any = await cursor.next(); + // eslint-disable-next-line no-await-in-loop + const collection = await cursor.next(); collection && collectionNames.push(collection.name); } } @@ -141,26 +145,30 @@ export namespace Database { } public delete(query: any, collectionName?: string): Promise; + // eslint-disable-next-line no-dupe-class-members public delete(id: string, collectionName?: string): Promise; - public delete(id: any, collectionName = DocumentsCollection) { + // eslint-disable-next-line no-dupe-class-members + public delete(idIn: any, collectionName = DocumentsCollection) { + let id = idIn; if (typeof id === 'string') { id = { _id: id }; } if (this.db) { - const db = this.db; - return new Promise(res => - db - .collection(collectionName) + const { db } = this; + return new Promise(res => { + db.collection(collectionName) .deleteMany(id) - .then(result => res(result)) - ); - } else { - return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName)))); + .then(result => res(result)); + }); } + return new Promise(res => { + this.onConnect.push(() => res(this.delete(id, collectionName))); + }); } public async dropSchema(...targetSchemas: string[]): Promise { const executor = async (database: mongodb.Db) => { + // eslint-disable-next-line no-use-before-define const existing = await Instance.getCollectionNames(); let valid: string[]; if (targetSchemas.length) { @@ -173,12 +181,13 @@ export namespace Database { }; if (this.db) { return executor(this.db); - } else { - this.onConnect.push(() => this.db && executor(this.db)); } + this.onConnect.push(() => this.db && executor(this.db)); + return undefined; } - public async insert(value: any, collectionName = DocumentsCollection) { + public async insert(valueIn: any, collectionName = DocumentsCollection) { + const value = valueIn; if (this.db && value !== null) { if ('id' in value) { value._id = value.id; @@ -188,36 +197,36 @@ export namespace Database { const collection = this.db.collection(collectionName); const prom = this.currentWrites[id]; let newProm: Promise; - const run = (): Promise => { - return new Promise(resolve => { + const run = (): Promise => + new Promise(resolve => { collection .insertOne(value) - .then(res => { + .then(() => { if (this.currentWrites[id] === newProm) { delete this.currentWrites[id]; } resolve(); }) - .catch(err => { - console.log('Mongo INSERT ERROR: ', err); - }); + .catch(err => console.log('Mongo INSERT ERROR: ', err)); }); - }; newProm = prom ? 
prom.then(run) : run(); this.currentWrites[id] = newProm; return newProm; - } else if (value !== null) { + } + if (value !== null) { this.onConnect.push(() => this.insert(value, collectionName)); } + return undefined; } public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = DocumentsCollection) { if (this.db) { const collection = this.db.collection(collectionName); - collection.findOne({ _id: id }).then(result => { + collection.findOne({ _id: id }).then(resultIn => { + const result = resultIn; if (result) { result.id = result._id; - //delete result._id; + // delete result._id; fn(result as any); } else { fn(undefined); @@ -235,7 +244,8 @@ export namespace Database { .find({ _id: { $in: ids } }) .toArray(); fn( - found.map((doc: any) => { + found.map((docIn: any) => { + const doc = docIn; doc.id = doc._id; delete doc._id; return doc; @@ -253,24 +263,26 @@ export namespace Database { const count = Math.min(ids.length, 1000); const index = ids.length - count; const fetchIds = ids.splice(index, count).filter(id => !visited.has(id)); - if (!fetchIds.length) { - continue; - } - const docs = await new Promise<{ [key: string]: any }[]>(res => this.getDocuments(fetchIds, res, collectionName)); - for (const doc of docs) { - const id = doc.id; - visited.add(id); - ids.push(...(await fn(doc))); + if (fetchIds.length) { + // eslint-disable-next-line no-await-in-loop + const docs = await new Promise<{ [key: string]: any }[]>(res => { + this.getDocuments(fetchIds, res, collectionName); + }); + docs.forEach(async doc => { + const { id } = doc; + visited.add(id); + ids.push(...(await fn(doc))); + }); } } - } else { - return new Promise(res => { - this.onConnect.push(() => { - this.visit(ids, fn, collectionName); - res(); - }); - }); + return undefined; } + return new Promise(res => { + this.onConnect.push(() => { + this.visit(ids, fn, collectionName); + res(); + }); + }); } public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = DocumentsCollection): Promise { @@ -280,36 +292,31 @@ export namespace Database { cursor = cursor.project(projection); } return Promise.resolve(cursor); - } else { - return new Promise(res => { - this.onConnect.push(() => res(this.query(query, projection, collectionName))); - }); } + return new Promise(res => { + this.onConnect.push(() => { + res(this.query(query, projection, collectionName)); + }); + }); } public updateMany(query: any, update: any, collectionName = DocumentsCollection) { if (this.db) { - const db = this.db; - return new Promise(res => - db - .collection(collectionName) + const { db } = this; + return new Promise(res => { + db.collection(collectionName) .updateMany(query, update) .then(result => res(result)) - .catch(error => { - console.log('Mongo INSERT MANY ERROR:', error); - }) - ); - } else { - return new Promise(res => { - this.onConnect.push(() => - this.updateMany(query, update, collectionName) - .then(res) - .catch(error => { - console.log('Mongo UPDATAE MANY ERROR: ', error); - }) - ); + .catch(error => console.log('Mongo INSERT MANY ERROR:', error)); }); } + return new Promise(res => { + this.onConnect.push(() => + this.updateMany(query, update, collectionName) + .then(res) + .catch(error => console.log('Mongo UPDATAE MANY ERROR: ', error)) + ); + }); } public print() { @@ -375,9 +382,7 @@ export namespace Database { * Checks to see if an image with the given @param contentSize * already exists in the aux database, i.e. has already been downloaded from Google Photos. 
*/ - export const QueryUploadHistory = async (contentSize: number) => { - return SanitizedSingletonQuery({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); - }; + export const QueryUploadHistory = async (contentSize: number) => SanitizedSingletonQuery({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); /** * Records the uploading of the image with the given @param information, @@ -405,28 +410,25 @@ export namespace Database { * Retrieves the credentials associaed with @param userId * and optionally removes their database id according to @param removeId. */ - export const Fetch = async (userId: string, removeId = true): Promise> => { - return SanitizedSingletonQuery({ userId }, AuxiliaryCollections.GoogleAccess, removeId); - }; + export const Fetch = async (userId: string, removeId = true): Promise> => SanitizedSingletonQuery({ userId }, AuxiliaryCollections.GoogleAccess, removeId); /** * Writes the @param enrichedCredentials to the database, associated * with @param userId for later retrieval and updating. */ - export const Write = async (userId: string, enrichedCredentials: GoogleApiServerUtils.EnrichedCredentials) => { - return Instance.insert({ userId, canAccess: [], ...enrichedCredentials }, AuxiliaryCollections.GoogleAccess); - }; + export const Write = async (userId: string, enrichedCredentials: GoogleApiServerUtils.EnrichedCredentials) => Instance.insert({ userId, canAccess: [], ...enrichedCredentials }, AuxiliaryCollections.GoogleAccess); /** - * Updates the @param access_token and @param expiry_date fields + * Updates the @param accessToken and @param expiryDate fields * in the stored credentials associated with @param userId. */ - export const Update = async (userId: string, access_token: string, expiry_date: number) => { + export const Update = async (userId: string, accessToken: string, expiryDate: number) => { const entry = await Fetch(userId, false); if (entry) { - const parameters = { $set: { access_token, expiry_date } }; + const parameters = { $set: { access_token: accessToken, expiry_date: expiryDate } }; return Instance.update(entry._id, parameters, emptyFunction, true, AuxiliaryCollections.GoogleAccess); } + return undefined; }; /** diff --git a/src/server/index.ts b/src/server/index.ts index 47c37c9f0..5a86f36d9 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,9 +1,10 @@ -import * as dotenv from 'dotenv'; import { yellow } from 'colors'; +import * as dotenv from 'dotenv'; import * as mobileDetect from 'mobile-detect'; import * as path from 'path'; import * as qs from 'query-string'; -import { log_execution } from './ActionUtilities'; +import { logExecution } from './ActionUtilities'; +import { AdminPrivileges, resolvedPorts } from './SocketData'; import DataVizManager from './ApiManagers/DataVizManager'; import DeleteManager from './ApiManagers/DeleteManager'; import DownloadManager from './ApiManagers/DownloadManager'; @@ -24,13 +25,10 @@ import { Database } from './database'; import { Logger } from './ProcessFactory'; import RouteManager, { Method, PublicHandler } from './RouteManager'; import RouteSubscriber from './RouteSubscriber'; -import initializeServer, { resolvedPorts } from './server_Initialization'; +import initializeServer from './server_Initialization'; dotenv.config(); -export const AdminPriviliges: Map = new Map(); export const onWindows = process.platform === 'win32'; export let sessionAgent: AppliedSessionAgent; -export const publicDirectory = path.resolve(__dirname, 'public'); -export const filesDirectory 
= path.resolve(publicDirectory, 'files'); /** * These are the functions run before the server starts @@ -45,7 +43,7 @@ async function preliminaryFunctions() { SSL.loadCredentials(); GoogleApiServerUtils.processProjectCredentials(); if (process.env.DB !== 'MEM') { - await log_execution({ + await logExecution({ startMessage: 'attempting to initialize mongodb connection', endMessage: 'connection outcome determined', action: Database.tryInitializeConnection, @@ -136,7 +134,7 @@ function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }: const { previous_target } = req.params; let redirect: string; if (password === PASSWORD) { - AdminPriviliges.set(id, true); + AdminPrivileges.set(id, true); redirect = `/${previous_target.replace(':', '/')}`; } else { redirect = `/admin/${previous_target}`; @@ -174,7 +172,7 @@ function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }: * the main monitor (master) thread. */ export async function launchServer() { - await log_execution({ + await logExecution({ startMessage: '\nstarting execution of preliminary functions', endMessage: 'completed preliminary functions\n', action: preliminaryFunctions, diff --git a/src/server/remapUrl.ts b/src/server/remapUrl.ts index b8e17ec66..ca7ca241f 100644 --- a/src/server/remapUrl.ts +++ b/src/server/remapUrl.ts @@ -1,58 +1,69 @@ -import { Database } from "./database"; -import { resolvedPorts } from "./server_Initialization"; +import { URL } from 'url'; +import { Database } from './database'; +import { resolvedPorts } from './SocketData'; -//npx ts-node src/server/remapUrl.ts +// npx ts-node src/server/remapUrl.ts const suffixMap: { [type: string]: true } = { - "video": true, - "pdf": true, - "audio": true, - "web": true, - "image": true, - "map": true, + video: true, + pdf: true, + audio: true, + web: true, + image: true, + map: true, }; async function update() { - await new Promise(res => setTimeout(res, 10)); - console.log("update"); + await new Promise(res => { + setTimeout(res, 10); + }); + console.log('update'); const cursor = await Database.Instance.query({}); - console.log("Cleared"); + console.log('Cleared'); const updates: [string, any][] = []; function updateDoc(doc: any) { - if (doc.__type !== "Doc") { + if (doc.__type !== 'Doc') { return; } - const fields = doc.fields; + const { fields } = doc; if (!fields) { return; } - const update: any = { - }; + const updated: any = {}; let dynfield = false; - for (const key in fields) { + Array.from(Object.keys(fields)).forEach(key => { const value = fields[key]; if (value && value.__type && suffixMap[value.__type]) { const url = new URL(value.url); - if (url.href.includes("localhost") && url.href.includes("Bill")) { + if (url.href.includes('localhost') && url.href.includes('Bill')) { dynfield = true; - update.$set = { ["fields." + key + ".url"]: `${url.protocol}//dash-web.eastus2.cloudapp.azure.com:${resolvedPorts.server}${url.pathname}` }; + updated.$set = { ['fields.' 
+ key + '.url']: `${url.protocol}//dash-web.eastus2.cloudapp.azure.com:${resolvedPorts.server}${url.pathname}` }; } } - } + }); if (dynfield) { - updates.push([doc._id, update]); + updates.push([doc._id, updated]); } } await cursor.forEach(updateDoc); - await Promise.all(updates.map(doc => { - console.log(doc[0], doc[1]); - return new Promise(res => Database.Instance.update(doc[0], doc[1], () => { - console.log("wrote " + JSON.stringify(doc[1])); - res(); - }, false)); - })); - console.log("Done"); + await Promise.all( + updates.map(doc => { + console.log(doc[0], doc[1]); + return new Promise(res => { + Database.Instance.update( + doc[0], + doc[1], + () => { + console.log('wrote ' + JSON.stringify(doc[1])); + res(); + }, + false + ); + }); + }) + ); + console.log('Done'); // await Promise.all(updates.map(update => { // return limit(() => Search.updateDocument(update)); // })); diff --git a/src/server/server_Initialization.ts b/src/server/server_Initialization.ts index 2d52ea906..8db7e9933 100644 --- a/src/server/server_Initialization.ts +++ b/src/server/server_Initialization.ts @@ -1,73 +1,59 @@ import * as bodyParser from 'body-parser'; +import * as brotli from 'brotli'; import { blue, yellow } from 'colors'; +import * as flash from 'connect-flash'; +import * as MongoStoreConnect from 'connect-mongo'; import * as cors from 'cors'; import * as express from 'express'; +import * as expressFlash from 'express-flash'; import * as session from 'express-session'; import { createServer } from 'https'; import * as passport from 'passport'; import * as request from 'request'; import * as webpack from 'webpack'; import * as wdm from 'webpack-dev-middleware'; +// eslint-disable-next-line import/no-extraneous-dependencies import * as whm from 'webpack-hot-middleware'; import * as zlib from 'zlib'; -import { publicDirectory } from '.'; +import * as config from '../../webpack.config.js'; import { logPort } from './ActionUtilities'; +import RouteManager from './RouteManager'; +import RouteSubscriber from './RouteSubscriber'; +import { publicDirectory, resolvedPorts } from './SocketData'; import { SSL } from './apis/google/CredentialsLoader'; import { getForgot, getLogin, getLogout, getReset, getSignup, postForgot, postLogin, postReset, postSignup } from './authentication/AuthenticationManager'; import { Database } from './database'; -import RouteManager from './RouteManager'; -import RouteSubscriber from './RouteSubscriber'; import { WebSocket } from './websocket'; -import * as expressFlash from 'express-flash'; -import * as flash from 'connect-flash'; -import * as brotli from 'brotli'; -import * as MongoStoreConnect from 'connect-mongo'; -import * as config from '../../webpack.config'; /* RouteSetter is a wrapper around the server that prevents the server from being exposed. */ export type RouteSetter = (server: RouteManager) => void; -//export let disconnect: Function; +// export let disconnect: Function; -export let resolvedPorts: { server: number; socket: number } = { server: 1050, socket: 4321 }; +// eslint-disable-next-line import/no-mutable-exports export let resolvedServerUrl: string; -export default async function InitializeServer(routeSetter: RouteSetter) { - const isRelease = determineEnvironment(); - const app = buildWithMiddleware(express()); - const compiler = webpack(config as any); - - // route table managed by express. routes are tested sequentially against each of these map rules. 
when a match is found, the handler is called to process the request - app.use(wdm(compiler, { publicPath: config.output.publicPath })); - app.use(whm(compiler)); - app.get(new RegExp(/^\/+$/), (req, res) => res.redirect(req.user ? '/home' : '/login')); // target urls that consist of one or more '/'s with nothing in between - app.use(express.static(publicDirectory, { setHeaders: res => res.setHeader('Access-Control-Allow-Origin', '*') })); //all urls that start with dash's public directory: /files/ (e.g., /files/images, /files/audio, etc) - app.use(cors({ origin: (_origin: any, callback: any) => callback(null, true) })); - registerAuthenticationRoutes(app); // this adds routes to authenticate a user (login, etc) - registerCorsProxy(app); // this adds a /corsProxy/ route to allow clients to get to urls that would otherwise be blocked by cors policies - isRelease && !SSL.Loaded && SSL.exit(); - routeSetter(new RouteManager(app, isRelease)); // this sets up all the regular supervised routes (things like /home, download/upload api's, pdf, search, session, etc) - registerEmbeddedBrowseRelativePathHandler(app); // this allows renered web pages which internally have relative paths to find their content +const week = 7 * 24 * 60 * 60 * 1000; +const secret = '64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc'; +const store = process.env.DB === 'MEM' ? new session.MemoryStore() : MongoStoreConnect.create({ mongoUrl: Database.url }); - isRelease && process.env.serverPort && (resolvedPorts.server = Number(process.env.serverPort)); - const server = isRelease ? createServer(SSL.Credentials, app) : app; - await new Promise(resolve => server.listen(resolvedPorts.server, resolve)); - logPort('server', resolvedPorts.server); +/* Determine if the enviroment is dev mode or release mode. */ +function determineEnvironment() { + const isRelease = process.env.RELEASE === 'true'; - resolvedServerUrl = `${isRelease && process.env.serverName ? `https://${process.env.serverName}.com` : 'http://localhost'}:${resolvedPorts.server}`; + const color = isRelease ? blue : yellow; + const label = isRelease ? 'release' : 'development'; + console.log(`\nrunning server in ${color(label)} mode`); - // initialize the web socket (bidirectional communication: if a user changes - // a field on one client, that change must be broadcast to all other clients) - await WebSocket.initialize(isRelease, SSL.Credentials); + // // swilkins: I don't think we need to read from ClientUtils.RELEASE anymore. Should be able to invoke process.env.RELEASE + // // on the client side, thanks to dotenv in webpack.config.js + // let clientUtils = fs.readFileSync('./src/client/util/ClientUtils.ts.temp', 'utf8'); + // clientUtils = `//AUTO-GENERATED FILE: DO NOT EDIT\n${clientUtils.replace('"mode"', String(isRelease))}`; + // fs.writeFileSync('./src/client/util/ClientUtils.ts', clientUtils, 'utf8'); - //disconnect = async () => new Promise(resolve => server.close(resolve)); return isRelease; } -const week = 7 * 24 * 60 * 60 * 1000; -const secret = '64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc'; -const store = process.env.DB === 'MEM' ? new session.MemoryStore() : MongoStoreConnect.create({ mongoUrl: Database.url }); - function buildWithMiddleware(server: express.Express) { [ session({ @@ -100,72 +86,43 @@ function buildWithMiddleware(server: express.Express) { return server; } -/* Determine if the enviroment is dev mode or release mode. 
*/ -function determineEnvironment() { - const isRelease = process.env.RELEASE === 'true'; - - const color = isRelease ? blue : yellow; - const label = isRelease ? 'release' : 'development'; - console.log(`\nrunning server in ${color(label)} mode`); - - // // swilkins: I don't think we need to read from ClientUtils.RELEASE anymore. Should be able to invoke process.env.RELEASE - // // on the client side, thanks to dotenv in webpack.config.js - // let clientUtils = fs.readFileSync('./src/client/util/ClientUtils.ts.temp', 'utf8'); - // clientUtils = `//AUTO-GENERATED FILE: DO NOT EDIT\n${clientUtils.replace('"mode"', String(isRelease))}`; - // fs.writeFileSync('./src/client/util/ClientUtils.ts', clientUtils, 'utf8'); - - return isRelease; -} - -function registerAuthenticationRoutes(server: express.Express) { - server.get('/signup', getSignup); - server.post('/signup', postSignup); - - server.get('/login', getLogin); - server.post('/login', postLogin); - - server.get('/logout', getLogout); - - server.get('/forgotPassword', getForgot); - server.post('/forgotPassword', postForgot); - - const reset = new RouteSubscriber('resetPassword').add('token').build; - server.get(reset, getReset); - server.post(reset, postReset); -} - -function registerCorsProxy(server: express.Express) { - server.use('/corsProxy', async (req, res) => { - res.setHeader('Access-Control-Allow-Origin', '*'); - res.header('Access-Control-Allow-Methods', 'GET, PUT, PATCH, POST, DELETE'); - res.header('Access-Control-Allow-Headers', req.header('access-control-request-headers')); - const referer = req.headers.referer ? decodeURIComponent(req.headers.referer) : ''; - let requrlraw = decodeURIComponent(req.url.substring(1)); - const qsplit = requrlraw.split('?q='); - const newqsplit = requrlraw.split('&q='); - if (qsplit.length > 1 && newqsplit.length > 1) { - const lastq = newqsplit[newqsplit.length - 1]; - requrlraw = qsplit[0] + '?q=' + lastq.split('&')[0] + '&' + qsplit[1].split('&')[1]; - } - const requrl = requrlraw.startsWith('/') ? referer + requrlraw : requrlraw; - // cors weirdness here... - // if the referer is a cors page and the cors() route (I think) redirected to /corsProxy/ and the requested url path was relative, - // then we redirect again to the cors referer and just add the relative path. - if (!requrl.startsWith('http') && req.originalUrl.startsWith('/corsProxy') && referer?.includes('corsProxy')) { - res.redirect(referer + (referer.endsWith('/') ? '' : '/') + requrl); +function registerEmbeddedBrowseRelativePathHandler(server: express.Express) { + server.use('*', (req, res) => { + // res.setHeader('Access-Control-Allow-Origin', '*'); + // res.header('Access-Control-Allow-Methods', 'GET, PUT, PATCH, POST, DELETE'); + // res.header('Access-Control-Allow-Headers', req.header('access-control-request-headers')); + const relativeUrl = req.originalUrl; + if (!res.headersSent && req.headers.referer?.includes('corsProxy')) { + if (!req.user) res.redirect('/home'); // When no user is logged in, we interpret a relative URL as being a reference to something they don't have access to and redirect to /home + // a request for something by a proxied referrer means it must be a relative reference. So construct a proxied absolute reference here. 
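+            // Worked example (URLs illustrative): a page proxied as http://localhost:<port>/corsProxy/https://en.wikipedia.org/wiki/Engelbart
+            // that requests "/static/styles.css" ends up redirected to
+            // http://localhost:<port>/corsProxy/ + encodeURIComponent('https://en.wikipedia.org/static/styles.css')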
+ try { + const proxiedRefererUrl = decodeURIComponent(req.headers.referer); // (e.g., http://localhost:/corsProxy/https://en.wikipedia.org/wiki/Engelbart) + const dashServerUrl = proxiedRefererUrl.match(/.*corsProxy\//)![0]; // the dash server url (e.g.: http://localhost:/corsProxy/ ) + const actualReferUrl = proxiedRefererUrl.replace(dashServerUrl, ''); // the url of the referer without the proxy (e.g., : https://en.wikipedia.org/wiki/Engelbart) + const absoluteTargetBaseUrl = actualReferUrl.match(/https?:\/\/[^/]*/)![0]; // the base of the original url (e.g., https://en.wikipedia.org) + const redirectedProxiedUrl = dashServerUrl + encodeURIComponent(absoluteTargetBaseUrl + relativeUrl); // the new proxied full url (e.g., http://localhost:/corsProxy/https://en.wikipedia.org/) + const redirectUrl = relativeUrl.startsWith('//') ? 'http:' + relativeUrl : redirectedProxiedUrl; + res.redirect(redirectUrl); + } catch (e) { + console.log('Error embed: ', e); + } + } else if (relativeUrl.startsWith('/search') && !req.headers.referer?.includes('corsProxy')) { + // detect search query and use default search engine + res.redirect(req.headers.referer + 'corsProxy/' + encodeURIComponent('http://www.google.com' + relativeUrl)); } else { - proxyServe(req, requrl, res); + res.end(); } }); } function proxyServe(req: any, requrl: string, response: any) { + // eslint-disable-next-line global-require const htmlBodyMemoryStream = new (require('memorystream'))(); - var wasinBrFormat = false; + let wasinBrFormat = false; const sendModifiedBody = () => { const header = response.headers['content-encoding']; - const refToCors = (match: any, tag: string, sym: string, href: string, offset: any, string: any) => `${tag}=${sym + resolvedServerUrl}/corsProxy/${href + sym}`; - const relpathToCors = (match: any, href: string, offset: any, string: any) => `="${resolvedServerUrl + '/corsProxy/' + decodeURIComponent(req.originalUrl.split('/corsProxy/')[1].match(/https?:\/\/[^\/]*/)?.[0] ?? '') + '/' + href}"`; + const refToCors = (match: any, tag: string, sym: string, href: string) => `${tag}=${sym + resolvedServerUrl}/corsProxy/${href + sym}`; + // const relpathToCors = (match: any, href: string, offset: any, string: any) => `="${resolvedServerUrl + '/corsProxy/' + decodeURIComponent(req.originalUrl.split('/corsProxy/')[1].match(/https?:\/\/[^\/]*/)?.[0] ?? '') + '/' + href}"`; if (header) { try { const bodyStream = htmlBodyMemoryStream.read(); @@ -174,8 +131,8 @@ function proxyServe(req: any, requrl: string, response: any) { const htmlText = htmlInputText .toString('utf8') .replace('', ' ') - .replace(/(src|href)=([\'\"])(https?[^\2\n]*)\1/g, refToCors) // replace src or href='http(s)://...' or href="http(s)://.." - //.replace(/= *"\/([^"]*)"/g, relpathToCors) + .replace(/(src|href)=(['"])(https?[^\2\n]*)\1/g, refToCors) // replace src or href='http(s)://...' or href="http(s)://.." 
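+                    // e.g. (illustrative) src="https://example.com/a.js" is rewritten to src="<resolvedServerUrl>/corsProxy/https://example.com/a.js"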
+ // .replace(/= *"\/([^"]*)"/g, relpathToCors) .replace(/data-srcset="[^"]*"/g, '') .replace(/srcset="[^"]*"/g, '') .replace(/target="_blank"/g, ''); @@ -198,7 +155,7 @@ function proxyServe(req: any, requrl: string, response: any) { } }; const retrieveHTTPBody = () => { - //req.headers.cookie = ''; + // req.headers.cookie = ''; req.pipe(request(requrl)) .on('error', (e: any) => { console.log(`CORS url error: ${requrl}`, e); @@ -227,6 +184,7 @@ function proxyServe(req: any, requrl: string, response: any) { res.headers['x-permitted-cross-domain-policies'] = 'all'; res.headers['x-frame-options'] = ''; res.headers['content-security-policy'] = ''; + // eslint-disable-next-line no-multi-assign response.headers = response._headers = res.headers; }) .on('end', sendModifiedBody) @@ -236,31 +194,78 @@ function proxyServe(req: any, requrl: string, response: any) { retrieveHTTPBody(); } -function registerEmbeddedBrowseRelativePathHandler(server: express.Express) { - server.use('*', (req, res) => { - // res.setHeader('Access-Control-Allow-Origin', '*'); - // res.header('Access-Control-Allow-Methods', 'GET, PUT, PATCH, POST, DELETE'); - // res.header('Access-Control-Allow-Headers', req.header('access-control-request-headers')); - const relativeUrl = req.originalUrl; - if (!res.headersSent && req.headers.referer?.includes('corsProxy')) { - if (!req.user) res.redirect('/home'); // When no user is logged in, we interpret a relative URL as being a reference to something they don't have access to and redirect to /home - // a request for something by a proxied referrer means it must be a relative reference. So construct a proxied absolute reference here. - try { - const proxiedRefererUrl = decodeURIComponent(req.headers.referer); // (e.g., http://localhost:/corsProxy/https://en.wikipedia.org/wiki/Engelbart) - const dashServerUrl = proxiedRefererUrl.match(/.*corsProxy\//)![0]; // the dash server url (e.g.: http://localhost:/corsProxy/ ) - const actualReferUrl = proxiedRefererUrl.replace(dashServerUrl, ''); // the url of the referer without the proxy (e.g., : https://en.wikipedia.org/wiki/Engelbart) - const absoluteTargetBaseUrl = actualReferUrl.match(/https?:\/\/[^\/]*/)![0]; // the base of the original url (e.g., https://en.wikipedia.org) - const redirectedProxiedUrl = dashServerUrl + encodeURIComponent(absoluteTargetBaseUrl + relativeUrl); // the new proxied full url (e.g., http://localhost:/corsProxy/https://en.wikipedia.org/) - const redirectUrl = relativeUrl.startsWith('//') ? 'http:' + relativeUrl : redirectedProxiedUrl; - res.redirect(redirectUrl); - } catch (e) { - console.log('Error embed: ', e); - } - } else if (relativeUrl.startsWith('/search') && !req.headers.referer?.includes('corsProxy')) { - // detect search query and use default search engine - res.redirect(req.headers.referer + 'corsProxy/' + encodeURIComponent('http://www.google.com' + relativeUrl)); +function registerCorsProxy(server: express.Express) { + server.use('/corsProxy', async (req, res) => { + res.setHeader('Access-Control-Allow-Origin', '*'); + res.header('Access-Control-Allow-Methods', 'GET, PUT, PATCH, POST, DELETE'); + res.header('Access-Control-Allow-Headers', req.header('access-control-request-headers')); + const referer = req.headers.referer ? 
decodeURIComponent(req.headers.referer) : ''; + let requrlraw = decodeURIComponent(req.url.substring(1)); + const qsplit = requrlraw.split('?q='); + const newqsplit = requrlraw.split('&q='); + if (qsplit.length > 1 && newqsplit.length > 1) { + const lastq = newqsplit[newqsplit.length - 1]; + requrlraw = qsplit[0] + '?q=' + lastq.split('&')[0] + '&' + qsplit[1].split('&')[1]; + } + const requrl = requrlraw.startsWith('/') ? referer + requrlraw : requrlraw; + // cors weirdness here... + // if the referer is a cors page and the cors() route (I think) redirected to /corsProxy/ and the requested url path was relative, + // then we redirect again to the cors referer and just add the relative path. + if (!requrl.startsWith('http') && req.originalUrl.startsWith('/corsProxy') && referer?.includes('corsProxy')) { + res.redirect(referer + (referer.endsWith('/') ? '' : '/') + requrl); } else { - res.end(); + proxyServe(req, requrl, res); } }); } + +function registerAuthenticationRoutes(server: express.Express) { + server.get('/signup', getSignup); + server.post('/signup', postSignup); + + server.get('/login', getLogin); + server.post('/login', postLogin); + + server.get('/logout', getLogout); + + server.get('/forgotPassword', getForgot); + server.post('/forgotPassword', postForgot); + + const reset = new RouteSubscriber('resetPassword').add('token').build; + server.get(reset, getReset); + server.post(reset, postReset); +} + +export default async function InitializeServer(routeSetter: RouteSetter) { + const isRelease = determineEnvironment(); + const app = buildWithMiddleware(express()); + const compiler = webpack(config as any); + + // route table managed by express. routes are tested sequentially against each of these map rules. when a match is found, the handler is called to process the request + app.use(wdm(compiler, { publicPath: config.output.publicPath })); + app.use(whm(compiler)); + app.get(/^\/+$/, (req, res) => res.redirect(req.user ? '/home' : '/login')); // target urls that consist of one or more '/'s with nothing in between + app.use(express.static(publicDirectory, { setHeaders: res => res.setHeader('Access-Control-Allow-Origin', '*') })); // all urls that start with dash's public directory: /files/ (e.g., /files/images, /files/audio, etc) + app.use(cors({ origin: (_origin: any, callback: any) => callback(null, true) })); + registerAuthenticationRoutes(app); // this adds routes to authenticate a user (login, etc) + registerCorsProxy(app); // this adds a /corsProxy/ route to allow clients to get to urls that would otherwise be blocked by cors policies + isRelease && !SSL.Loaded && SSL.exit(); + routeSetter(new RouteManager(app, isRelease)); // this sets up all the regular supervised routes (things like /home, download/upload APIs, pdf, search, session, etc) + registerEmbeddedBrowseRelativePathHandler(app); // this allows rendered web pages which internally have relative paths to find their content + + isRelease && process.env.serverPort && (resolvedPorts.server = Number(process.env.serverPort)); + const server = isRelease ? createServer(SSL.Credentials, app) : app; + await new Promise(resolve => { + server.listen(resolvedPorts.server, resolve); + }); + logPort('server', resolvedPorts.server); + + resolvedServerUrl = `${isRelease && process.env.serverName ?
`https://${process.env.serverName}.com` : 'http://localhost'}:${resolvedPorts.server}`; + + // initialize the web socket (bidirectional communication: if a user changes + // a field on one client, that change must be broadcast to all other clients) + await WebSocket.initialize(isRelease, SSL.Credentials); + + // disconnect = async () => new Promise(resolve => server.close(resolve)); + return isRelease; +} diff --git a/src/server/websocket.ts b/src/server/websocket.ts index 38134f2c1..cb16bce72 100644 --- a/src/server/websocket.ts +++ b/src/server/websocket.ts @@ -3,152 +3,26 @@ import { createServer } from 'https'; import * as _ from 'lodash'; import { networkInterfaces } from 'os'; import { Server, Socket } from 'socket.io'; -import { Utils } from '../Utils'; +import { ServerUtils } from '../ServerUtils'; import { logPort } from './ActionUtilities'; -import { timeMap } from './ApiManagers/UserManager'; import { Client } from './Client'; import { DashStats } from './DashStats'; import { DocumentsCollection } from './IDatabase'; import { Diff, GestureContent, MessageStore, MobileDocumentUploadContent, MobileInkOverlayContent, Transferable, Types, UpdateMobileInkOverlayPositionContent, YoutubeQueryInput, YoutubeQueryTypes } from './Message'; import { Search } from './Search'; +import { resolvedPorts, socketMap, timeMap, userOperations } from './SocketData'; import { GoogleCredentialsLoader } from './apis/google/CredentialsLoader'; import YoutubeApi from './apis/youtube/youtubeApiSample'; import { initializeGuest } from './authentication/DashUserModel'; import { Database } from './database'; -import { resolvedPorts } from './server_Initialization'; export namespace WebSocket { + let CurUser: string | undefined; + // eslint-disable-next-line import/no-mutable-exports export let _socket: Socket; + // eslint-disable-next-line import/no-mutable-exports + export let _disconnect: Function; export const clients: { [key: string]: Client } = {}; - export const socketMap = new Map(); - export const userOperations = new Map(); - export let disconnect: Function; - - export async function initialize(isRelease: boolean, credentials:any) { - let io: Server; - if (isRelease) { - const { socketPort } = process.env; - if (socketPort) { - resolvedPorts.socket = Number(socketPort); - } - const httpsServer = createServer(credentials); - io = new Server(httpsServer, {}) - httpsServer.listen(resolvedPorts.socket); - } else { - io = new Server(); - io.listen(resolvedPorts.socket); - } - logPort('websocket', resolvedPorts.socket); - - io.on('connection', socket => { - _socket = socket; - socket.use((_packet, next) => { - const userEmail = socketMap.get(socket); - if (userEmail) { - timeMap[userEmail] = Date.now(); - } - next(); - }); - - socket.emit(MessageStore.UpdateStats.Message, DashStats.getUpdatedStatsBundle()); - - // convenience function to log server messages on the client - function log(message?: any, ...optionalParams: any[]) { - socket.emit('log', ['Message from server:', message, ...optionalParams]); - } - - socket.on('message', function (message, room) { - console.log('Client said: ', message); - socket.in(room).emit('message', message); - }); - - socket.on('create or join', function (room) { - console.log('Received request to create or join room ' + room); - - const clientsInRoom = socket.rooms.has(room); - const numClients = clientsInRoom ? 
Object.keys(room.sockets).length : 0; - console.log('Room ' + room + ' now has ' + numClients + ' client(s)'); - - if (numClients === 0) { - socket.join(room); - console.log('Client ID ' + socket.id + ' created room ' + room); - socket.emit('created', room, socket.id); - } else if (numClients === 1) { - console.log('Client ID ' + socket.id + ' joined room ' + room); - socket.in(room).emit('join', room); - socket.join(room); - socket.emit('joined', room, socket.id); - socket.in(room).emit('ready'); - } else { - // max two clients - socket.emit('full', room); - } - }); - - socket.on('ipaddr', function () { - const ifaces = networkInterfaces(); - for (const dev in ifaces) { - ifaces[dev]?.forEach(function (details) { - if (details.family === 'IPv4' && details.address !== '127.0.0.1') { - socket.emit('ipaddr', details.address); - } - }); - } - }); - - socket.on('bye', function () { - console.log('received bye'); - }); - - socket.on('disconnect', function () { - let currentUser = socketMap.get(socket); - if (!(currentUser === undefined)) { - let currentUsername = currentUser.split(' ')[0]; - DashStats.logUserLogout(currentUsername, socket); - delete timeMap[currentUsername]; - } - }); - - Utils.Emit(socket, MessageStore.Foo, 'handshooken'); - - Utils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid)); - Utils.AddServerHandler(socket, MessageStore.SetField, args => setField(socket, args)); - Utils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); - Utils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); - if (isRelease) { - Utils.AddServerHandler(socket, MessageStore.DeleteAll, () => doDelete(false)); - } - - Utils.AddServerHandler(socket, MessageStore.CreateField, CreateField); - Utils.AddServerHandlerCallback(socket, MessageStore.YoutubeApiQuery, HandleYoutubeQuery); - Utils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); - Utils.AddServerHandler(socket, MessageStore.DeleteField, id => DeleteField(socket, id)); - Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); - Utils.AddServerHandler(socket, MessageStore.GesturePoints, content => processGesturePoints(socket, content)); - Utils.AddServerHandler(socket, MessageStore.MobileInkOverlayTrigger, content => processOverlayTrigger(socket, content)); - Utils.AddServerHandler(socket, MessageStore.UpdateMobileInkOverlayPosition, content => processUpdateOverlayPosition(socket, content)); - Utils.AddServerHandler(socket, MessageStore.MobileDocumentUpload, content => processMobileDocumentUpload(socket, content)); - Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); - Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); - - /** - * Whenever we receive the go-ahead, invoke the import script and pass in - * as an emitter and a terminator the functions that simply broadcast a result - * or indicate termination to the client via the web socket - */ - - disconnect = () => { - socket.broadcast.emit('connection_terminated', Date.now()); - socket.disconnect(true); - }; - }); - - setInterval(function () { - // Utils.Emit(socket, MessageStore.UpdateStats, DashStats.getUpdatedStatsBundle()); - - io.emit(MessageStore.UpdateStats.Message, DashStats.getUpdatedStatsBundle()); - }, DashStats.SAMPLING_INTERVAL); - } function processGesturePoints(socket: Socket, content: GestureContent) { socket.broadcast.emit('receiveGesturePoints', content); @@ -174,8 +48,11 @@ export 
namespace WebSocket { break; case YoutubeQueryTypes.SearchVideo: YoutubeApi.authorizedGetVideos(ProjectCredentials, query.userInput, callback); + break; case YoutubeQueryTypes.VideoDetails: YoutubeApi.authorizedGetVideoDetails(ProjectCredentials, query.videoIds, callback); + break; + default: } } @@ -189,6 +66,9 @@ export namespace WebSocket { initializeGuest(); } + function printActiveUsers() { + socketMap.forEach((user, socket) => !socket.disconnected && console.log(user)); + } function barReceived(socket: Socket, userEmail: string) { clients[userEmail] = new Client(userEmail.toString()); const currentdate = new Date(); @@ -203,7 +83,7 @@ export namespace WebSocket { } function getField([id, callback]: [string, (result?: Transferable) => void]) { - Database.Instance.getDocument(id, (result?: Transferable) => callback(result ? result : undefined)); + Database.Instance.getDocument(id, (result?: Transferable) => callback(result)); } function getFields([ids, callback]: [string[], (result: Transferable[]) => void]) { @@ -248,27 +128,24 @@ export namespace WebSocket { list: [ '_l', list => { - const results = []; - for (const value of list.fields) { - const term = ToSearchTerm(value); - if (term) { - results.push(term.value); - } - } + const results: any[] = []; + // eslint-disable-next-line no-use-before-define + list.fields.forEach((value: any) => ToSearchTerm(value) && results.push(ToSearchTerm(value)!.value)); return results.length ? results : null; }, ], }; - function ToSearchTerm(val: any): { suffix: string; value: any } | undefined { + function ToSearchTerm(valIn: any): { suffix: string; value: any } | undefined { + let val = valIn; if (val === null || val === undefined) { - return; + return undefined; } const type = val.__type || typeof val; let suffix = suffixMap[type]; if (!suffix) { - return; + return undefined; } if (Array.isArray(suffix)) { const accessor = suffix[1]; @@ -277,7 +154,7 @@ export namespace WebSocket { } else { val = val[accessor]; } - suffix = suffix[0]; + [suffix] = suffix; } return { suffix, value: val }; } @@ -285,8 +162,28 @@ export namespace WebSocket { function getSuffix(value: string | [string, any]): string { return typeof value === 'string' ? 
value : value[0]; } + const pendingOps = new Map(); - function addToListField(socket: Socket, diff: Diff, curListItems?: Transferable): void { + function dispatchNextOp(id: string) { + const next = pendingOps.get(id)!.shift(); + if (next) { + const { diff, socket } = next; + if (diff.diff.$addToSet) { + // eslint-disable-next-line no-use-before-define + return GetRefFieldLocal([diff.id, (result?: Transferable) => addToListField(socket, diff, result)]); // would prefer to have Mongo handle list additions directly, but for now handle it on our own + } + if (diff.diff.$remFromSet) { + // eslint-disable-next-line no-use-before-define + return GetRefFieldLocal([diff.id, (result?: Transferable) => remFromListField(socket, diff, result)]); // would prefer to have Mongo handle list additions directly, but for now handle it on our own + } + // eslint-disable-next-line no-use-before-define + return SetField(socket, diff); + } + return !pendingOps.get(id)!.length && pendingOps.delete(id); + } + + function addToListField(socket: Socket, diffIn: Diff, curListItems?: Transferable): void { + const diff = diffIn; diff.diff.$set = diff.diff.$addToSet; delete diff.diff.$addToSet; // convert add to set to a query of the current fields, and then a set of the composition of the new fields with the old ones const updatefield = Array.from(Object.keys(diff.diff.$set))[0]; @@ -296,7 +193,7 @@ return; } const curList = (curListItems as any)?.fields?.[updatefield.replace('fields.', '')]?.fields.filter((item: any) => item !== undefined) || []; - diff.diff.$set[updatefield].fields = [...curList, ...newListItems]; //, ...newListItems.filter((newItem: any) => newItem === null || !curList.some((curItem: any) => curItem.fieldId ? curItem.fieldId === newItem.fieldId : curItem.heading ? curItem.heading === newItem.heading : curItem === newItem))]; + diff.diff.$set[updatefield].fields = [...curList, ...newListItems]; // , ...newListItems.filter((newItem: any) => newItem === null || !curList.some((curItem: any) => curItem.fieldId ? curItem.fieldId === newItem.fieldId : curItem.heading ? curItem.heading === newItem.heading : curItem === newItem))]; const sendBack = diff.diff.length !== diff.diff.$set[updatefield].fields.length; delete diff.diff.length; Database.Instance.update( @@ -305,11 +202,13 @@ () => { if (sendBack) { console.log('Warning: list modified during update. Composite list is being returned.'); - const id = socket.id; - (socket as any).id = ''; + const { id } = socket; + (socket as any).id = ''; // bcz: HACK. this prevents the update message from going back to the client that made the change.
socket.broadcast.emit(MessageStore.UpdateField.Message, diff); (socket as any).id = id; - } else socket.broadcast.emit(MessageStore.UpdateField.Message, diff); + } else { + socket.broadcast.emit(MessageStore.UpdateField.Message, diff); + } dispatchNextOp(diff.id); }, false @@ -352,28 +251,28 @@ export namespace WebSocket { * items to delete) * @param curListItems the server's current copy of the data */ - function remFromListField(socket: Socket, diff: Diff, curListItems?: Transferable): void { + function remFromListField(socket: Socket, diffIn: Diff, curListItems?: Transferable): void { + const diff = diffIn; diff.diff.$set = diff.diff.$remFromSet; delete diff.diff.$remFromSet; const updatefield = Array.from(Object.keys(diff.diff.$set))[0]; const remListItems = diff.diff.$set[updatefield].fields; const curList = (curListItems as any)?.fields?.[updatefield.replace('fields.', '')]?.fields.filter((f: any) => f !== null) || []; - const hint = diff.diff.$set.hint; + const { hint } = diff.diff.$set; if (hint) { // indexesToRemove stores the indexes that we mark for deletion, which is later used to filter the list (delete the elements) - let indexesToRemove: number[] = []; + const indexesToRemove: number[] = []; for (let i = 0; i < hint.deleteCount; i++) { if (curList.length > i + hint.start && _.isEqual(curList[i + hint.start], remListItems[i])) { indexesToRemove.push(i + hint.start); - continue; - } - - let closestIndex = findClosestIndex(curList, indexesToRemove, remListItems[i], i + hint.start); - if (closestIndex !== -1) { - indexesToRemove.push(closestIndex); } else { - console.log('Item to delete was not found - index = -1'); + const closestIndex = findClosestIndex(curList, indexesToRemove, remListItems[i], i + hint.start); + if (closestIndex !== -1) { + indexesToRemove.push(closestIndex); + } else { + console.log('Item to delete was not found - index = -1'); + } } } @@ -398,45 +297,23 @@ export namespace WebSocket { if (sendBack) { // the two copies are different, so the server sends its copy. console.log('SEND BACK'); - const id = socket.id; - (socket as any).id = ''; + const { id } = socket; + (socket as any).id = ''; // bcz: HACK. this prevents the update message from going back to the client that made the change. 
socket.broadcast.emit(MessageStore.UpdateField.Message, diff); (socket as any).id = id; - } else socket.broadcast.emit(MessageStore.UpdateField.Message, diff); + } else { + socket.broadcast.emit(MessageStore.UpdateField.Message, diff); + } dispatchNextOp(diff.id); }, false ); } - const pendingOps = new Map(); - - function dispatchNextOp(id: string) { - const next = pendingOps.get(id)!.shift(); - if (next) { - const { diff, socket } = next; - if (diff.diff.$addToSet) { - return GetRefFieldLocal([diff.id, (result?: Transferable) => addToListField(socket, diff, result)]); // would prefer to have Mongo handle list additions direclty, but for now handle it on our own - } - if (diff.diff.$remFromSet) { - return GetRefFieldLocal([diff.id, (result?: Transferable) => remFromListField(socket, diff, result)]); // would prefer to have Mongo handle list additions direclty, but for now handle it on our own - } - return GetRefFieldLocal([diff.id, (result?: Transferable) => SetField(socket, diff, result)]); - } - if (!pendingOps.get(id)!.length) pendingOps.delete(id); - } - - function printActiveUsers() { - socketMap.forEach((user, socket) => { - !socket.disconnected && console.log(user); - }); - } - var CurUser: string | undefined = undefined; - function UpdateField(socket: Socket, diff: Diff) { const curUser = socketMap.get(socket); - if (!curUser) return; - let currentUsername = curUser.split(' ')[0]; + if (!curUser) return false; + const currentUsername = curUser.split(' ')[0]; userOperations.set(currentUsername, userOperations.get(currentUsername) !== undefined ? userOperations.get(currentUsername)! + 1 : 0); if (CurUser !== socketMap.get(socket)) { @@ -454,15 +331,18 @@ export namespace WebSocket { if (diff.diff.$remFromSet) { return GetRefFieldLocal([diff.id, (result?: Transferable) => remFromListField(socket, diff, result)]); // would prefer to have Mongo handle list additions direclty, but for now handle it on our own } - return GetRefFieldLocal([diff.id, (result?: Transferable) => SetField(socket, diff, result)]); + // eslint-disable-next-line no-use-before-define + return SetField(socket, diff); } - function SetField(socket: Socket, diff: Diff, curListItems?: Transferable) { + function SetField(socket: Socket, diff: Diff /* , curListItems?: Transferable */) { Database.Instance.update(diff.id, diff.diff, () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false); const docfield = diff.diff.$set || diff.diff.$unset; if (docfield) { const update: any = { id: diff.id }; let dynfield = false; + // eslint-disable-next-line no-restricted-syntax for (let key in docfield) { + // eslint-disable-next-line no-continue if (!key.startsWith('fields.')) continue; dynfield = true; const val = docfield[key]; @@ -504,4 +384,124 @@ export namespace WebSocket { function CreateField(newValue: any) { Database.Instance.insert(newValue); } + export async function initialize(isRelease: boolean, credentials: any) { + let io: Server; + if (isRelease) { + const { socketPort } = process.env; + if (socketPort) { + resolvedPorts.socket = Number(socketPort); + } + const httpsServer = createServer(credentials); + io = new Server(httpsServer, {}); + httpsServer.listen(resolvedPorts.socket); + } else { + io = new Server(); + io.listen(resolvedPorts.socket); + } + logPort('websocket', resolvedPorts.socket); + + io.on('connection', socket => { + _socket = socket; + socket.use((_packet, next) => { + const userEmail = socketMap.get(socket); + if (userEmail) { + timeMap[userEmail] = Date.now(); + } + next(); + }); + + 
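// Note: the packet middleware registered just above updates timeMap[userEmail] with Date.now() on every incoming packet, recording each connected user's most recent socket activity.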
socket.emit(MessageStore.UpdateStats.Message, DashStats.getUpdatedStatsBundle()); + + socket.on('message', (message, room) => { + console.log('Client said: ', message); + socket.in(room).emit('message', message); + }); + + socket.on('create or join', room => { + console.log('Received request to create or join room ' + room); + + const clientsInRoom = socket.rooms.has(room); + const numClients = clientsInRoom ? Object.keys(room.sockets).length : 0; + console.log('Room ' + room + ' now has ' + numClients + ' client(s)'); + + if (numClients === 0) { + socket.join(room); + console.log('Client ID ' + socket.id + ' created room ' + room); + socket.emit('created', room, socket.id); + } else if (numClients === 1) { + console.log('Client ID ' + socket.id + ' joined room ' + room); + socket.in(room).emit('join', room); + socket.join(room); + socket.emit('joined', room, socket.id); + socket.in(room).emit('ready'); + } else { + // max two clients + socket.emit('full', room); + } + }); + + socket.on('ipaddr', () => { + const ifaces = networkInterfaces(); + for (const dev in ifaces) { + ifaces[dev]?.forEach(details => { + if (details.family === 'IPv4' && details.address !== '127.0.0.1') { + socket.emit('ipaddr', details.address); + } + }); + } + }); + + socket.on('bye', () => { + console.log('received bye'); + }); + + socket.on('disconnect', () => { + const currentUser = socketMap.get(socket); + if (!(currentUser === undefined)) { + const currentUsername = currentUser.split(' ')[0]; + DashStats.logUserLogout(currentUsername, socket); + delete timeMap[currentUsername]; + } + }); + + ServerUtils.Emit(socket, MessageStore.Foo, 'handshooken'); + + ServerUtils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid)); + ServerUtils.AddServerHandler(socket, MessageStore.SetField, args => setField(socket, args)); + ServerUtils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); + ServerUtils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); + if (isRelease) { + ServerUtils.AddServerHandler(socket, MessageStore.DeleteAll, () => doDelete(false)); + } + + ServerUtils.AddServerHandler(socket, MessageStore.CreateField, CreateField); + ServerUtils.AddServerHandlerCallback(socket, MessageStore.YoutubeApiQuery, HandleYoutubeQuery); + ServerUtils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); + ServerUtils.AddServerHandler(socket, MessageStore.DeleteField, id => DeleteField(socket, id)); + ServerUtils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); + ServerUtils.AddServerHandler(socket, MessageStore.GesturePoints, content => processGesturePoints(socket, content)); + ServerUtils.AddServerHandler(socket, MessageStore.MobileInkOverlayTrigger, content => processOverlayTrigger(socket, content)); + ServerUtils.AddServerHandler(socket, MessageStore.UpdateMobileInkOverlayPosition, content => processUpdateOverlayPosition(socket, content)); + ServerUtils.AddServerHandler(socket, MessageStore.MobileDocumentUpload, content => processMobileDocumentUpload(socket, content)); + ServerUtils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); + ServerUtils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); + + /** + * Whenever we receive the go-ahead, invoke the import script and pass in + * as an emitter and a terminator the functions that simply broadcast a result + * or indicate termination to the client via the web socket + */ + + _disconnect = () => { + 
socket.broadcast.emit('connection_terminated', Date.now()); + socket.disconnect(true); + }; + }); + + setInterval(() => { + // Utils.Emit(socket, MessageStore.UpdateStats, DashStats.getUpdatedStatsBundle()); + + io.emit(MessageStore.UpdateStats.Message, DashStats.getUpdatedStatsBundle()); + }, DashStats.SAMPLING_INTERVAL); + } } -- cgit v1.2.3-70-g09d2 From ec859c33f69d586f287aecdceeca38c4e77cb0ab Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 19 Apr 2024 12:11:16 -0400 Subject: lint errors --- src/Utils.ts | 6 +- src/client/views/MainView.tsx | 3 +- src/server/authentication/AuthenticationManager.ts | 109 +++++++++++---------- 3 files changed, 61 insertions(+), 57 deletions(-) (limited to 'src/server') diff --git a/src/Utils.ts b/src/Utils.ts index 0455fd19a..c87ef052c 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -43,15 +43,15 @@ export namespace Utils { export function TraceConsoleLog() { ['log', 'warn'].forEach(method => { const old = (console as any)[method]; - (console as any)[method] = function () { + (console as any)[method] = function (...args: any[]) { let stack = new Error('').stack?.split(/\n/); // Chrome includes a single "Error" line, FF doesn't. if (stack && stack[0].indexOf('Error') === 0) { stack = stack.slice(1); } const message = (stack?.[1] || 'Stack undefined!').trim(); - const args = ([] as any[]).slice.apply(arguments).concat([message]); - return old.apply(console, args); + const newArgs = args.slice().concat([message]); + return old.apply(console, newArgs); }; }); } diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index 13945cacf..b0156846f 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -8,7 +8,7 @@ import { observer } from 'mobx-react'; import * as React from 'react'; import '../../../node_modules/browndash-components/dist/styles/global.min.css'; import { ClientUtils, lightOrDark, returnEmptyDoclist, returnEmptyFilter, returnFalse, returnTrue, returnZero, setupMoveUpEvents } from '../../ClientUtils'; -import { emptyFunction } from '../../Utils'; +import { Utils, emptyFunction } from '../../Utils'; import { Doc, DocListCast, Opt } from '../../fields/Doc'; import { DocData } from '../../fields/DocSymbols'; import { DocCast, StrCast } from '../../fields/Types'; @@ -163,6 +163,7 @@ export class MainView extends ObservableReactComponent<{}> { mainDocViewHeight = () => this._dashUIHeight - this.headerBarDocHeight(); componentDidMount() { + // Utils.TraceConsoleLog(); reaction( // when a multi-selection occurs, remove focus from all active elements to allow keyboad input to go only to global key manager to act upon selection () => SelectionManager.Views.slice(), diff --git a/src/server/authentication/AuthenticationManager.ts b/src/server/authentication/AuthenticationManager.ts index b5d1dba28..0cc1553c0 100644 --- a/src/server/authentication/AuthenticationManager.ts +++ b/src/server/authentication/AuthenticationManager.ts @@ -1,21 +1,21 @@ -import { default as User, DashUserModel, initializeGuest } from './DashUserModel'; -import { Request, Response, NextFunction } from 'express'; -import * as passport from 'passport'; -import { IVerifyOptions } from 'passport-local'; -import './Passport'; import * as async from 'async'; -import * as nodemailer from 'nodemailer'; import * as c from 'crypto'; -import { emptyFunction, Utils } from '../../ClientUtils'; -import { MailOptions } from 'nodemailer/lib/stream-transport'; +import { NextFunction, Request, Response } from 'express'; import { check, validationResult } 
from 'express-validator'; +import * as nodemailer from 'nodemailer'; +import { MailOptions } from 'nodemailer/lib/stream-transport'; +import * as passport from 'passport'; +import { Utils } from '../../Utils'; +import User, { DashUserModel, initializeGuest } from './DashUserModel'; +import './Passport'; +// import { IVerifyOptions } from 'passport-local'; /** * GET /signup * Directs user to the signup page * modeled by signup.pug in views */ -export let getSignup = (req: Request, res: Response) => { +export const getSignup = (req: Request, res: Response) => { if (req.user) { return res.redirect('/home'); } @@ -23,13 +23,23 @@ export let getSignup = (req: Request, res: Response) => { title: 'Sign Up', user: req.user, }); + return undefined; +}; + +const tryRedirectToTarget = (req: Request, res: Response) => { + const target = (req.session as any)?.target; + if (req.session && target) { + res.redirect(target); + } else { + res.redirect('/home'); + } }; /** * POST /signup * Create a new local account. */ -export let postSignup = (req: Request, res: Response, next: NextFunction) => { +export const postSignup = (req: Request, res: Response, next: NextFunction) => { const email = req.body.email as String; check('email', 'Email is not valid').isEmail().run(req); check('password', 'Password must be at least 4 characters long').isLength({ min: 4 }).run(req); @@ -42,7 +52,7 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { return res.redirect('/signup'); } - const password = req.body.password; + const { password } = req.body; const model = { email, @@ -65,35 +75,29 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { req.logIn(user, err => { if (err) return next(err); tryRedirectToTarget(req, res); + return undefined; }); }) .catch((err: any) => next(err)); + return undefined; }) .catch((err: any) => next(err)); + return undefined; }; - -const tryRedirectToTarget = (req: Request, res: Response) => { - const target = (req.session as any)?.target; - if (req.session && target) { - res.redirect(target); - } else { - res.redirect('/home'); - } -}; - /** * GET /login * Login page. */ -export let getLogin = (req: Request, res: Response) => { +export const getLogin = (req: Request, res: Response) => { if (req.user) { - //req.session.target = undefined; + // req.session.target = undefined; return res.redirect('/home'); } res.render('login.pug', { title: 'Log In', user: req.user, }); + return undefined; }; /** @@ -101,7 +105,7 @@ export let getLogin = (req: Request, res: Response) => { * Sign in using email and password. 
* On failure, redirect to signup page */ -export let postLogin = (req: Request, res: Response, next: NextFunction) => { +export const postLogin = (req: Request, res: Response, next: NextFunction) => { if (req.body.email === '') { User.findOne({ email: 'guest' }) .then((user: any) => !user && initializeGuest()) @@ -119,23 +123,21 @@ export let postLogin = (req: Request, res: Response, next: NextFunction) => { return res.redirect('/signup'); } - const callback = (err: Error, user: DashUserModel, _info: IVerifyOptions) => { + const callback = (err: Error, user: DashUserModel /* , _info: IVerifyOptions */) => { if (err) { next(err); - return; - } - if (!user) { + } else if (!user) { return res.redirect('/signup'); - } - req.logIn(user, err => { - if (err) { - next(err); - return; - } - tryRedirectToTarget(req, res); - }); + } else + req.logIn(user, loginErr => { + if (loginErr) { + next(loginErr); + } else tryRedirectToTarget(req, res); + }); + return undefined; }; setTimeout(() => passport.authenticate('local', callback)(req, res, next), 500); + return undefined; }; /** @@ -143,31 +145,29 @@ export let postLogin = (req: Request, res: Response, next: NextFunction) => { * Invokes the logout function on the request * and destroys the user's current session. */ -export let getLogout = (req: Request, res: Response) => { +export const getLogout = (req: Request, res: Response) => { req.logout(err => { if (err) console.log(err); else res.redirect('/login'); }); }; -export let getForgot = function (req: Request, res: Response) { +export const getForgot = function (req: Request, res: Response) { res.render('forgot.pug', { title: 'Recover Password', user: req.user, }); }; -export let postForgot = function (req: Request, res: Response, next: NextFunction) { - const email = req.body.email; +export const postForgot = function (req: Request, res: Response, next: NextFunction) { + const { email } = req.body; async.waterfall( [ function (done: any) { - c.randomBytes(20, function (err: any, buffer: Buffer) { + c.randomBytes(20, (err: any, buffer: Buffer) => { if (err) { done(null); - return; - } - done(null, buffer.toString('hex')); + } else done(null, buffer.toString('hex')); }); }, function (token: string, done: any) { @@ -204,20 +204,21 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio '\n\n' + 'If you did not request this, please ignore this email and your password will remain unchanged.\n', } as MailOptions; - smtpTransport.sendMail(mailOptions, function (err: Error | null) { + smtpTransport.sendMail(mailOptions, (err: Error | null) => { // req.flash('info', 'An e-mail has been sent to ' + user.email + ' with further instructions.'); done(null, err, 'done'); }); }, ], - function (err) { + err => { if (err) return next(err); res.redirect('/forgotPassword'); + return undefined; } ); }; -export let getReset = function (req: Request, res: Response) { +export const getReset = function (req: Request, res: Response) { User.findOne({ passwordResetToken: req.params.token, passwordResetExpires: { $gt: Date.now() } }) .then((user: any) => { if (!user) return res.redirect('/forgotPassword'); @@ -225,11 +226,12 @@ export let getReset = function (req: Request, res: Response) { title: 'Reset Password', user: req.user, }); + return undefined; }) - .catch((err: any) => res.redirect('/forgotPassword')); + .catch(() => res.redirect('/forgotPassword')); }; -export let postReset = function (req: Request, res: Response) { +export const postReset = function (req: Request, res: Response) { 
async.waterfall( [ function (done: any) { @@ -251,10 +253,11 @@ export let postReset = function (req: Request, res: Response) { () => (req as any).logIn(user), (err: any) => err ) - .catch((err: any) => res.redirect('/login')); + .catch(() => res.redirect('/login')); done(null, user); + return undefined; }) - .catch((err: any) => res.redirect('back')); + .catch(() => res.redirect('back')); }, function (user: DashUserModel, done: any) { const smtpTransport = nodemailer.createTransport({ @@ -268,13 +271,13 @@ export let postReset = function (req: Request, res: Response) { to: user.email, from: 'browndashptc@gmail.com', subject: 'Your password has been changed', - text: 'Hello,\n\n' + 'This is a confirmation that the password for your account ' + user.email + ' has just been changed.\n', + text: 'Hello,\n\nThis is a confirmation that the password for your account ' + user.email + ' has just been changed.\n', } as MailOptions; smtpTransport.sendMail(mailOptions, err => done(null, err)); }, ], - function (err) { + () => { res.redirect('/login'); } ); -- cgit v1.2.3-70-g09d2 From 939e18624af4252551f38c43335ee8ef0acd144c Mon Sep 17 00:00:00 2001 From: bobzel Date: Sun, 21 Apr 2024 19:03:49 -0400 Subject: more lint cleanup --- .eslintrc.json | 1 + package-lock.json | 3 +- package.json | 2 +- .../apis/google_docs/GooglePhotosClientUtils.ts | 82 ++- src/client/util/BranchingTrailManager.tsx | 41 +- src/client/util/CalendarManager.tsx | 39 +- src/client/util/CurrentUserUtils.ts | 2 +- src/client/util/DragManager.ts | 18 +- src/client/util/GroupManager.tsx | 85 ++- src/client/util/HypothesisUtils.ts | 2 - src/client/util/Import & Export/ImageUtils.ts | 2 +- src/client/util/LinkFollower.ts | 7 +- src/client/util/LinkManager.ts | 28 +- src/client/util/ScriptingGlobals.ts | 1 + src/client/util/SearchUtil.ts | 20 +- src/client/util/SettingsManager.tsx | 323 +++++---- src/client/util/SharingManager.tsx | 753 +++++++++++---------- src/client/util/SnappingManager.ts | 3 + src/client/util/UndoManager.ts | 90 +-- src/client/util/reportManager/ReportManager.tsx | 38 +- src/client/views/ContextMenuItem.tsx | 15 +- src/client/views/DashboardView.tsx | 41 +- src/client/views/DictationOverlay.tsx | 55 +- src/client/views/DocComponent.tsx | 5 +- src/client/views/DocumentButtonBar.tsx | 114 ++-- src/client/views/EditableView.tsx | 23 +- src/client/views/FieldsDropdown.tsx | 16 +- src/client/views/FilterPanel.tsx | 1 + src/client/views/GlobalKeyHandler.ts | 58 +- src/client/views/InkingStroke.tsx | 109 +-- src/client/views/MainView.tsx | 42 +- src/client/views/MetadataEntryMenu.tsx | 196 ------ src/client/views/OverlayView.tsx | 7 +- src/client/views/PropertiesButtons.tsx | 182 +++-- src/client/views/PropertiesView.tsx | 44 +- src/client/views/SidebarAnnos.tsx | 13 +- src/client/views/StyleProvider.tsx | 75 +- src/client/views/UndoStack.tsx | 19 +- src/client/views/animationtimeline/Timeline.tsx | 1 + src/client/views/animationtimeline/Track.tsx | 2 +- .../collections/CollectionMasonryViewFieldRow.tsx | 96 +-- src/client/views/collections/CollectionMenu.tsx | 19 +- .../views/collections/CollectionNoteTakingView.tsx | 85 ++- .../views/collections/CollectionPileView.tsx | 13 +- .../collections/CollectionStackedTimeline.tsx | 152 +++-- src/client/views/collections/CollectionSubView.tsx | 31 +- .../views/collections/CollectionTimeView.tsx | 32 +- .../views/collections/CollectionTreeView.tsx | 79 ++- src/client/views/collections/TabDocView.tsx | 347 +++++----- src/client/views/collections/TreeView.tsx | 46 +- 
.../CollectionFreeFormInfoUI.tsx | 33 +- .../CollectionFreeFormLayoutEngines.tsx | 38 +- .../collectionFreeForm/CollectionFreeFormView.tsx | 295 +++++--- .../collections/collectionFreeForm/MarqueeView.tsx | 10 +- .../collectionSchema/SchemaTableCell.tsx | 33 +- src/client/views/global/globalScripts.ts | 13 +- src/client/views/linking/LinkMenuItem.tsx | 32 +- src/client/views/linking/LinkPopup.tsx | 21 +- .../views/nodes/CollectionFreeFormDocumentView.tsx | 44 +- src/client/views/nodes/ComparisonBox.tsx | 17 +- src/client/views/nodes/DataVizBox/DataVizBox.tsx | 115 ++-- .../nodes/DataVizBox/components/Histogram.tsx | 230 +++---- .../nodes/DataVizBox/components/LineChart.tsx | 107 ++- .../views/nodes/DataVizBox/components/TableBox.tsx | 80 ++- src/client/views/nodes/DocumentContentsView.tsx | 33 +- src/client/views/nodes/DocumentLinksButton.tsx | 34 +- src/client/views/nodes/DocumentView.tsx | 2 +- src/client/views/nodes/FieldView.tsx | 10 +- src/client/views/nodes/FontIconBox/FontIconBox.tsx | 60 +- src/client/views/nodes/ImageBox.tsx | 7 +- src/client/views/nodes/KeyValuePair.tsx | 13 +- src/client/views/nodes/LabelBox.tsx | 25 +- src/client/views/nodes/LinkAnchorBox.tsx | 6 +- src/client/views/nodes/MapBox/MapBox.tsx | 25 +- .../views/nodes/MapboxMapBox/MapboxContainer.tsx | 31 +- src/client/views/nodes/PDFBox.tsx | 100 ++- .../views/nodes/RecordingBox/RecordingBox.tsx | 37 +- src/client/views/nodes/ScreenshotBox.tsx | 29 +- src/client/views/nodes/VideoBox.tsx | 103 ++- src/client/views/nodes/WebBox.tsx | 153 +++-- .../views/nodes/formattedText/DashFieldView.tsx | 310 +++++---- .../nodes/formattedText/FormattedTextBox.scss | 3 +- .../views/nodes/formattedText/FormattedTextBox.tsx | 2 +- .../formattedText/ProsemirrorExampleTransfer.ts | 51 +- .../views/nodes/formattedText/RichTextMenu.tsx | 226 +++---- .../views/nodes/formattedText/RichTextRules.ts | 2 +- src/client/views/nodes/formattedText/marks_rts.ts | 30 +- src/client/views/nodes/formattedText/nodes_rts.ts | 14 +- src/client/views/nodes/trails/PresBox.tsx | 635 ++++++++++------- src/client/views/nodes/trails/PresElementBox.tsx | 96 +-- src/client/views/pdf/Annotation.tsx | 4 +- src/client/views/pdf/GPTPopup/GPTPopup.tsx | 3 +- src/client/views/search/SearchBox.tsx | 68 +- src/client/views/topbar/TopBar.tsx | 25 +- src/fields/Doc.ts | 56 +- src/fields/List.ts | 34 +- src/fields/ObjectField.ts | 1 + src/fields/Proxy.ts | 17 +- src/fields/Schema.ts | 2 +- src/fields/Types.ts | 15 +- src/fields/documentSchemas.ts | 3 +- src/mobile/ImageUpload.tsx | 77 +-- src/mobile/MobileInterface.tsx | 2 +- src/server/ApiManagers/UploadManager.ts | 4 +- 104 files changed, 3740 insertions(+), 3134 deletions(-) delete mode 100644 src/client/views/MetadataEntryMenu.tsx (limited to 'src/server') diff --git a/.eslintrc.json b/.eslintrc.json index 0c4e375a9..780626412 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -53,6 +53,7 @@ "react/destructuring-assignment": 0, "no-restricted-globals": ["error", "event"], "no-param-reassign": ["error", { "props": false }], + "import/no-cycle": 0, "no-alert": 0, "radix": "off" }, diff --git a/package-lock.json b/package-lock.json index cc1a0ea64..c616bb6ea 100644 --- a/package-lock.json +++ b/package-lock.json @@ -183,6 +183,7 @@ "react-measure": "^2.5.2", "react-resizable": "^3.0.5", "react-select": "^5.8.0", + "react-type-animation": "^3.2.0", "react-xarrows": "^2.0.2", "readline": "^1.3.0", "recharts": "^2.10.3", @@ -287,7 +288,6 @@ "jsdom": "^24.0.0", "mocha": "^10.2.0", "prettier": "^3.1.0", - 
"react-type-animation": "^3.2.0", "scss-loader": "0.0.1", "style-loader": "^4.0.0", "ts-loader": "^9.5.1", @@ -30110,7 +30110,6 @@ "version": "3.2.0", "resolved": "https://registry.npmjs.org/react-type-animation/-/react-type-animation-3.2.0.tgz", "integrity": "sha512-WXTe0i3rRNKjmggPvT5ntye1QBt0ATGbijeW6V3cQe2W0jaMABXXlPPEdtofnS9tM7wSRHchEvI9SUw+0kUohw==", - "dev": true, "peerDependencies": { "prop-types": "^15.5.4", "react": ">= 15.0.0", diff --git a/package.json b/package.json index 3f2f5a70f..a3ba6bdd9 100644 --- a/package.json +++ b/package.json @@ -83,7 +83,6 @@ "jsdom": "^24.0.0", "mocha": "^10.2.0", "prettier": "^3.1.0", - "react-type-animation": "^3.2.0", "scss-loader": "0.0.1", "style-loader": "^4.0.0", "ts-loader": "^9.5.1", @@ -267,6 +266,7 @@ "react-measure": "^2.5.2", "react-resizable": "^3.0.5", "react-select": "^5.8.0", + "react-type-animation": "^3.2.0", "react-xarrows": "^2.0.2", "readline": "^1.3.0", "recharts": "^2.10.3", diff --git a/src/client/apis/google_docs/GooglePhotosClientUtils.ts b/src/client/apis/google_docs/GooglePhotosClientUtils.ts index 757031fec..07a2708ec 100644 --- a/src/client/apis/google_docs/GooglePhotosClientUtils.ts +++ b/src/client/apis/google_docs/GooglePhotosClientUtils.ts @@ -1,3 +1,5 @@ +/* eslint-disable no-use-before-define */ +import Photos = require('googlephotos'); import { AssertionError } from 'assert'; import { EditorState } from 'prosemirror-state'; import { ClientUtils } from '../../../ClientUtils'; @@ -5,14 +7,12 @@ import { Doc, DocListCastAsync, Opt } from '../../../fields/Doc'; import { Id } from '../../../fields/FieldSymbols'; import { RichTextField } from '../../../fields/RichTextField'; import { RichTextUtils } from '../../../fields/RichTextUtils'; -import { Cast, StrCast } from '../../../fields/Types'; -import { ImageField } from '../../../fields/URLField'; +import { Cast, ImageCast, StrCast } from '../../../fields/Types'; import { MediaItem, NewMediaItemResult } from '../../../server/apis/google/SharedTypes'; import { Networking } from '../../Network'; import { DocUtils, Docs, DocumentOptions } from '../../documents/Documents'; import { FormattedTextBox } from '../../views/nodes/formattedText/FormattedTextBox'; import { GoogleAuthenticationManager } from '../GoogleAuthenticationManager'; -import Photos = require('googlephotos'); export namespace GooglePhotos { const endpoint = async () => new Photos(await GoogleAuthenticationManager.Instance.fetchOrGenerateAccessToken()); @@ -76,17 +76,16 @@ export namespace GooglePhotos { export const CollectionToAlbum = async (options: AlbumCreationOptions): Promise> => { const { collection, title, descriptionKey, tag } = options; const dataDocument = Doc.GetProto(collection); - const images = ((await DocListCastAsync(dataDocument.data)) || []).filter(doc => Cast(doc.data, ImageField)); + const images = ((await DocListCastAsync(dataDocument.data)) || []).filter(doc => ImageCast(doc.data)); if (!images || !images.length) { return undefined; } - const resolved = title ? 
title : StrCast(collection.title) || `Dash Collection (${collection[Id]}`; + const resolved = title || StrCast(collection.title) || `Dash Collection (${collection[Id]}`; const { id, productUrl } = await Create.Album(resolved); const response = await Transactions.UploadImages(images, { id }, descriptionKey); if (response) { const { results, failed } = response; - let index: Opt; - while ((index = failed.pop()) !== undefined) { + for (let index = failed.pop(); index !== undefined; index = failed.pop()) { Doc.RemoveDocFromList(dataDocument, 'data', images.splice(index, 1)[0]); } const mediaItems: MediaItem[] = results.map(item => item.mediaItem); @@ -97,13 +96,12 @@ export namespace GooglePhotos { for (let i = 0; i < images.length; i++) { const image = Doc.GetProto(images[i]); const mediaItem = mediaItems[i]; - if (!mediaItem) { - continue; + if (mediaItem) { + image.googlePhotosId = mediaItem.id; + image.googlePhotosAlbumUrl = productUrl; + image.googlePhotosUrl = mediaItem.productUrl || mediaItem.baseUrl; + idMapping[mediaItem.id] = image; } - image.googlePhotosId = mediaItem.id; - image.googlePhotosAlbumUrl = productUrl; - image.googlePhotosUrl = mediaItem.productUrl || mediaItem.baseUrl; - idMapping[mediaItem.id] = image; } collection.googlePhotosAlbumUrl = productUrl; collection.googlePhotosIdMapping = idMapping; @@ -114,6 +112,7 @@ export namespace GooglePhotos { Transactions.AddTextEnrichment(collection, `Find me at ${ClientUtils.prepend(`/doc/${collection[Id]}?sharing=true`)}`); return { albumId: id, mediaItems }; } + return undefined; }; } @@ -124,7 +123,7 @@ export namespace GooglePhotos { await GoogleAuthenticationManager.Instance.fetchOrGenerateAccessToken(); const response = await Query.ContentSearch(requested); const uploads = await Transactions.WriteMediaItemsToServer(response); - const children = uploads.map((upload: Transactions.UploadInformation) => Docs.Create.ImageDocument(ClientUtils.fileUrl(upload.fileNames.clean) /*, {"data_contentSize":upload.contentSize}*/)); + const children = uploads.map((upload: Transactions.UploadInformation) => Docs.Create.ImageDocument(ClientUtils.fileUrl(upload.fileNames.clean) /* , {"data_contentSize":upload.contentSize} */)); const options = { _width: 500, _height: 500 }; return constructor(children, options); }; @@ -144,7 +143,7 @@ export namespace GooglePhotos { const images = (await DocListCastAsync(collection.data))!.map(Doc.GetProto); images?.forEach(image => tagMapping.set(image[Id], ContentCategories.NONE)); const values = Object.values(ContentCategories).filter(value => value !== ContentCategories.NONE); - for (const value of values) { + values.forEach(async value => { const searched = (await ContentSearch({ included: [value] }))?.mediaItems?.map(({ id }) => id); searched?.forEach(async id => { const image = await Cast(idMapping[id], Doc); @@ -154,7 +153,7 @@ export namespace GooglePhotos { !tags?.includes(value) && tagMapping.set(key, tags + delimiter + value); } }); - } + }); images?.forEach(image => { const concatenated = tagMapping.get(image[Id])!; const tags = concatenated.split(delimiter); @@ -200,9 +199,10 @@ export namespace GooglePhotos { export const AlbumSearch = async (albumId: string, pageSize = 100): Promise => { const photos = await endpoint(); const mediaItems: MediaItem[] = []; - let nextPageTokenStored: Opt = undefined; + let nextPageTokenStored: Opt; const found = 0; do { + // eslint-disable-next-line no-await-in-loop const response: any = await photos.mediaItems.search(albumId, pageSize, nextPageTokenStored); 
mediaItems.push(...response.mediaItems); nextPageTokenStored = response.nextPageToken; @@ -222,7 +222,7 @@ export namespace GooglePhotos { excluded.length && excluded.forEach(category => contentFilter.addExcludedContentCategories(category)); filters.setContentFilter(contentFilter); - const date = options.date; + const { date } = options; if (date) { const dateFilter = new photos.DateFilter(); if (date instanceof Date) { @@ -240,15 +240,11 @@ export namespace GooglePhotos { }); }; - export const GetImage = async (mediaItemId: string): Promise => { - return (await endpoint()).mediaItems.get(mediaItemId); - }; + export const GetImage = async (mediaItemId: string): Promise => (await endpoint()).mediaItems.get(mediaItemId); } namespace Create { - export const Album = async (title: string) => { - return (await endpoint()).albums.create(title); - }; + export const Album = async (title: string) => (await endpoint()).albums.create(title); } export namespace Transactions { @@ -278,6 +274,7 @@ export namespace GooglePhotos { return enrichmentItem.id; } } + return undefined; }; export const WriteMediaItemsToServer = async (body: { mediaItems: any[] }): Promise => { @@ -291,9 +288,12 @@ export namespace GooglePhotos { return undefined; } const baseUrls: string[] = await Promise.all( - response.results.map(item => { - return new Promise(resolve => Query.GetImage(item.mediaItem.id).then(item => resolve(item.baseUrl))); - }) + response.results.map( + item => + new Promise(resolve => { + Query.GetImage(item.mediaItem.id).then(item => resolve(item.baseUrl)); + }) + ) ); return baseUrls; }; @@ -303,27 +303,25 @@ export namespace GooglePhotos { failed: number[]; } - export const UploadImages = async (sources: Doc[], album?: AlbumReference, descriptionKey = 'caption'): Promise> => { + export const UploadImages = async (sources: Doc[], albumIn?: AlbumReference, descriptionKey = 'caption'): Promise> => { await GoogleAuthenticationManager.Instance.fetchOrGenerateAccessToken(); - if (album && 'title' in album) { - album = await Create.Album(album.title); - } + const album = albumIn && 'title' in albumIn ? 
await Create.Album(albumIn.title) : albumIn; const media: MediaInput[] = []; - for (const source of sources) { - const data = Cast(Doc.GetProto(source).data, ImageField); - if (!data) { - return; - } - const url = data.url.href; - const target = Doc.MakeEmbedding(source); - const description = parseDescription(target, descriptionKey); - await DocUtils.makeCustomViewClicked(target, Docs.Create.FreeformDocument); - media.push({ url, description }); - } + sources + .filter(source => ImageCast(Doc.GetProto(source).data)) + .forEach(async source => { + const data = ImageCast(Doc.GetProto(source).data); + const url = data.url.href; + const target = Doc.MakeEmbedding(source); + const description = parseDescription(target, descriptionKey); + await DocUtils.makeCustomViewClicked(target, Docs.Create.FreeformDocument); + media.push({ url, description }); + }); if (media.length) { const results = await Networking.PostToServer('/googlePhotosMediaPost', { media, album }); return results; } + return undefined; }; const parseDescription = (document: Doc, descriptionKey: string) => { diff --git a/src/client/util/BranchingTrailManager.tsx b/src/client/util/BranchingTrailManager.tsx index 02879e3c4..28c00644f 100644 --- a/src/client/util/BranchingTrailManager.tsx +++ b/src/client/util/BranchingTrailManager.tsx @@ -1,18 +1,31 @@ +/* eslint-disable react/no-unused-class-component-methods */ +/* eslint-disable react/no-array-index-key */ import { action, computed, makeObservable, observable } from 'mobx'; import { observer } from 'mobx-react'; import * as React from 'react'; import { Doc } from '../../fields/Doc'; import { Id } from '../../fields/FieldSymbols'; -import { PresBox } from '../views/nodes/trails'; import { OverlayView } from '../views/OverlayView'; +import { PresBox } from '../views/nodes/trails'; import { DocumentManager } from './DocumentManager'; -import { Docs } from '../documents/Documents'; -import { nullAudio } from '../../fields/URLField'; @observer export class BranchingTrailManager extends React.Component { + // eslint-disable-next-line no-use-before-define public static Instance: BranchingTrailManager; + // stack of the history + @observable private slideHistoryStack: String[] = []; + @observable private containsSet: Set = new Set(); + // docId to Doc map + @observable private docIdToDocMap: Map = new Map(); + + // prev pres to copmare with + @observable private prevPresId: String | null = null; + @action setPrevPres = action((newId: String | null) => { + this.prevPresId = newId; + }); + constructor(props: any) { super(props); makeObservable(this); @@ -22,7 +35,7 @@ export class BranchingTrailManager extends React.Component { } setupUi = () => { - OverlayView.Instance.addWindow(, { x: 100, y: 150, width: 1000, title: 'Branching Trail' }); + OverlayView.Instance.addWindow(, { x: 100, y: 150, width: 1000, title: 'Branching Trail' }); // OverlayView.Instance.forceUpdate(); console.log(OverlayView.Instance); // let hi = Docs.Create.TextDocument("beee", { @@ -36,23 +49,11 @@ export class BranchingTrailManager extends React.Component { console.log(DocumentManager._overlayViews); }; - // stack of the history - @observable private slideHistoryStack: String[] = []; @action setSlideHistoryStack = action((newArr: String[]) => { this.slideHistoryStack = newArr; }); - @observable private containsSet: Set = new Set(); - - // prev pres to copmare with - @observable private prevPresId: String | null = null; - @action setPrevPres = action((newId: String | null) => { - this.prevPresId = newId; - }); - - 
// docId to Doc map - @observable private docIdToDocMap: Map = new Map(); - + // eslint-disable-next-line react/sort-comp observeDocumentChange = (targetDoc: Doc, pres: PresBox) => { const presId = pres.Document[Id]; if (this.prevPresId === presId) { @@ -106,7 +107,7 @@ export class BranchingTrailManager extends React.Component { if (this.slideHistoryStack.length === 0) { Doc.UserDoc().isBranchingMode = false; } - //PresBox.NavigateToTarget(targetDoc, targetDoc); + // PresBox.NavigateToTarget(targetDoc, targetDoc); }; @computed get trailBreadcrumbs() { @@ -116,11 +117,11 @@ export class BranchingTrailManager extends React.Component { const [presId, targetDocId] = info.split(','); const doc = this.docIdToDocMap.get(targetDocId); if (!doc) { - return <>; + return null; } return ( - -{'>'} diff --git a/src/client/util/CalendarManager.tsx b/src/client/util/CalendarManager.tsx index 6e9094b3a..46aa4d238 100644 --- a/src/client/util/CalendarManager.tsx +++ b/src/client/util/CalendarManager.tsx @@ -1,4 +1,10 @@ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +/* eslint-disable jsx-a11y/click-events-have-key-events */ +import { DateRangePicker, Provider, defaultTheme } from '@adobe/react-spectrum'; +import { IconLookup, faPlus } from '@fortawesome/free-solid-svg-icons'; +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { TextField } from '@mui/material'; +import { Button } from 'browndash-components'; import { action, computed, makeObservable, observable, runInAction } from 'mobx'; import { observer } from 'mobx-react'; import * as React from 'react'; @@ -6,21 +12,16 @@ import Select from 'react-select'; import { Doc, DocListCast } from '../../fields/Doc'; import { DocData } from '../../fields/DocSymbols'; import { StrCast } from '../../fields/Types'; +import { Docs } from '../documents/Documents'; import { DictationOverlay } from '../views/DictationOverlay'; import { MainViewModal } from '../views/MainViewModal'; +import { ObservableReactComponent } from '../views/ObservableReactComponent'; import { DocumentView } from '../views/nodes/DocumentView'; import { TaskCompletionBox } from '../views/nodes/TaskCompletedBox'; import './CalendarManager.scss'; import { DocumentManager } from './DocumentManager'; import { SelectionManager } from './SelectionManager'; import { SettingsManager } from './SettingsManager'; -// import { DateRange, Range, RangeKeyDict } from 'react-date-range'; -import { DateRangePicker, Provider, defaultTheme } from '@adobe/react-spectrum'; -import { IconLookup, faPlus } from '@fortawesome/free-solid-svg-icons'; -import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; -import { Button } from 'browndash-components'; -import { Docs } from '../documents/Documents'; -import { ObservableReactComponent } from '../views/ObservableReactComponent'; // import 'react-date-range/dist/styles.css'; // import 'react-date-range/dist/theme/default.css'; @@ -47,6 +48,7 @@ const formatCalendarDateToString = (calendarDate: any) => { @observer export class CalendarManager extends ObservableReactComponent<{}> { + // eslint-disable-next-line no-use-before-define public static Instance: CalendarManager; @observable private isOpen = false; @observable private targetDoc: Doc | undefined = undefined; // the target document @@ -83,10 +85,10 @@ export class CalendarManager extends ObservableReactComponent<{}> { this.creationType = type; }; - public open = (target?: DocumentView, target_doc?: Doc) => { + public open = (target?: DocumentView, targetDoc?: Doc) => 
{ console.log('hi'); runInAction(() => { - this.targetDoc = target_doc || target?.Document; + this.targetDoc = targetDoc || target?.Document; this.targetDocView = target; DictationOverlay.Instance.hasActiveModal = true; this.isOpen = this.targetDoc !== undefined; @@ -117,7 +119,7 @@ export class CalendarManager extends ObservableReactComponent<{}> { @action handleSelectChange = (option: any) => { if (option) { - let selectOpt = option as CalendarSelectOptions; + const selectOpt = option as CalendarSelectOptions; this.selectedExistingCalendarOption = selectOpt; this.calendarName = selectOpt.value; // or label } @@ -136,7 +138,7 @@ export class CalendarManager extends ObservableReactComponent<{}> { // TODO: Make undoable private addToCalendar = () => { - let docs = SelectionManager.Views.length < 2 ? [this.targetDoc] : SelectionManager.Views.map(docView => docView.Document); + const docs = SelectionManager.Views.length < 2 ? [this.targetDoc] : SelectionManager.Views.map(docView => docView.Document); const targetDoc = this.layoutDocAcls ? docs[0] : docs[0]?.[DocData]; // doc to add to calendar console.log(targetDoc); @@ -159,7 +161,7 @@ export class CalendarManager extends ObservableReactComponent<{}> { } } else { // find existing calendar based on selected name (should technically always find one) - const existingCalendar = this.existingCalendars.find(calendar => StrCast(calendar.title) === this.calendarName); + const existingCalendar = this.existingCalendars.find(findCal => StrCast(findCal.title) === this.calendarName); if (existingCalendar) calendar = existingCalendar; else { this.errorMessage = 'Must select an existing calendar'; @@ -252,11 +254,9 @@ export class CalendarManager extends ObservableReactComponent<{}> { @computed get calendarInterface() { - let docs = SelectionManager.Views.length < 2 ? [this.targetDoc] : SelectionManager.Views.map(docView => docView.Document); + const docs = SelectionManager.Views.length < 2 ? [this.targetDoc] : SelectionManager.Views.map(docView => docView.Document); const targetDoc = this.layoutDocAcls ? docs[0] : docs[0]?.[DocData]; - const currentDate = new Date(); - return (
{ {this.focusOn(docs.length < 2 ? StrCast(targetDoc?.title, 'this document') : '-multiple-')}

-
this.setInterationType('new-calendar')}> +
this.setInterationType('new-calendar')}> Add to New Calendar
-
this.setInterationType('existing-calendar')}> +
this.setInterationType('existing-calendar')}> Add to Existing Calendar
@@ -317,7 +317,8 @@ export class CalendarManager extends ObservableReactComponent<{}> { color: StrCast(Doc.UserDoc().userColor), width: '100%', }), - }}> + }} + /> )}
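Aside on the "// TODO: Make undoable" note in the addToCalendar hunk above: the undoable(fn, batchName) wrapper that this same commit reworks in src/client/util/UndoManager.ts already covers this case. The following is a minimal, hypothetical sketch only — the import path and the `calendar` field are illustrative and not taken from this diff:

    import { undoable } from './UndoManager';

    // Sketch: run the whole calendar mutation inside one named undo batch so a single
    // undo reverts every field write the handler performs.
    const addSelectionToCalendar = undoable((calendarTitle: string, docs: { calendar?: string }[]) => {
        docs.forEach(doc => {
            doc.calendar = calendarTitle; // each assignment is recorded into the 'add to calendar' batch
        });
    }, 'add to calendar');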
@@ -351,6 +352,6 @@ export class CalendarManager extends ObservableReactComponent<{}> { } render() { - return ; + return ; } } diff --git a/src/client/util/CurrentUserUtils.ts b/src/client/util/CurrentUserUtils.ts index 27ae5c9a0..6dba8027d 100644 --- a/src/client/util/CurrentUserUtils.ts +++ b/src/client/util/CurrentUserUtils.ts @@ -709,7 +709,7 @@ pie title Minerals in my tap water return [ { title: "Back", toolTip: "Go back", btnType: ButtonType.ClickButton, icon: "arrow-left", scripts: { onClick: '{ return webBack(); }' }}, { title: "Forward", toolTip: "Go forward", btnType: ButtonType.ClickButton, icon: "arrow-right", scripts: { onClick: '{ return webForward(); }'}}, - { title: "URL", toolTip: "URL", width: 250, btnType: ButtonType.EditableText, icon: "lock", ignoreClick: true, scripts: { script: '{ return webSetURL(value, _readOnly_); }'} }, + { title: "URL", toolTip: "URL", width: 250, btnType: ButtonType.EditText, icon: "lock", ignoreClick: true, scripts: { script: '{ return webSetURL(value, _readOnly_); }'} }, ]; } static videoTools() { diff --git a/src/client/util/DragManager.ts b/src/client/util/DragManager.ts index 62f055f1a..3890b7845 100644 --- a/src/client/util/DragManager.ts +++ b/src/client/util/DragManager.ts @@ -495,18 +495,20 @@ export namespace DragManager { .filter(pb => pb.width && pb.height) .map((pb, i) => pb.getContext('2d')!.drawImage(pdfBoxSrc[i], 0, 0)); } - [dragElement, ...Array.from(dragElement.getElementsByTagName('*'))].forEach(ele => { - (ele as any).style && ((ele as any).style.pointerEvents = 'none'); - }); + [dragElement, ...Array.from(dragElement.getElementsByTagName('*'))] + .map(dele => (dele as any).style) + .forEach(style => { + style && (style.pointerEvents = 'none'); + }); dragDiv.appendChild(dragElement); if (dragElement !== ele) { - const children = [Array.from(ele.children), Array.from(dragElement.children)]; - while (children[0].length) { - const childs = [children[0].pop(), children[1].pop()]; + const dragChildren = [Array.from(ele.children), Array.from(dragElement.children)]; + while (dragChildren[0].length) { + const childs = [dragChildren[0].pop(), dragChildren[1].pop()]; if (childs[0]?.children) { - children[0].push(...Array.from(childs[0].children)); - children[1].push(...Array.from(childs[1]!.children)); + dragChildren[0].push(...Array.from(childs[0].children)); + dragChildren[1].push(...Array.from(childs[1]!.children)); } if (childs[0]?.scrollTop) childs[1]!.scrollTop = childs[0].scrollTop; } diff --git a/src/client/util/GroupManager.tsx b/src/client/util/GroupManager.tsx index c261c0f1e..8d84dbad8 100644 --- a/src/client/util/GroupManager.tsx +++ b/src/client/util/GroupManager.tsx @@ -1,3 +1,5 @@ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +/* eslint-disable jsx-a11y/click-events-have-key-events */ import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { Button, IconButton, Size, Type } from 'browndash-components'; import { action, computed, makeObservable, observable } from 'mobx'; @@ -30,6 +32,7 @@ export interface UserOptions { @observer export class GroupManager extends ObservableReactComponent<{}> { + // eslint-disable-next-line no-use-before-define static Instance: GroupManager; @observable isOpen: boolean = false; // whether the GroupManager is to be displayed or not. @observable private users: string[] = []; // list of users populated from the database. 
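Aside on the recurring shape of the hunks that follow: expression-bodied callbacks that return an assignment (e.g. action(() => (TaskCompletionBox.taskCompleted = false))) are rewritten with block bodies, and bracket access to literal keys (GroupManagerDoc['data_modificationDate']) becomes dot notation. Below is a minimal standalone sketch of that before/after shape, assuming only mobx — the class and field names are illustrative and not from this codebase:

    import { action, makeObservable, observable } from 'mobx';

    class ToggleSketch {
        @observable taskCompleted = false;

        constructor() {
            makeObservable(this);
        }

        // before (returns the assignment): setDone = action(() => (this.taskCompleted = true));
        // after: a block body performs the assignment without returning it, which is what
        // the restored lint rules expect
        setDone = action(() => {
            this.taskCompleted = true;
        });
    }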
@@ -160,7 +163,7 @@ export class GroupManager extends ObservableReactComponent<{}> { addGroup(groupDoc: Doc): boolean { if (this.GroupManagerDoc) { Doc.AddDocToList(this.GroupManagerDoc, 'data', groupDoc); - this.GroupManagerDoc['data_modificationDate'] = new DateField(); + this.GroupManagerDoc.data_modificationDate = new DateField(); return true; } return false; @@ -202,7 +205,7 @@ export class GroupManager extends ObservableReactComponent<{}> { !memberList.includes(email) && memberList.push(email); groupDoc.members = JSON.stringify(memberList); SharingManager.Instance.shareWithAddedMember(groupDoc, email); - this.GroupManagerDoc && (this.GroupManagerDoc['data_modificationDate'] = new DateField()); + this.GroupManagerDoc && (this.GroupManagerDoc.data_modificationDate = new DateField()); } } @@ -216,10 +219,9 @@ export class GroupManager extends ObservableReactComponent<{}> { const memberList = JSON.parse(StrCast(groupDoc.members)); const index = memberList.indexOf(email); if (index !== -1) { - const user = memberList.splice(index, 1)[0]; groupDoc.members = JSON.stringify(memberList); SharingManager.Instance.removeMember(groupDoc, email); - this.GroupManagerDoc && (this.GroupManagerDoc['data_modificationDate'] = new DateField()); + this.GroupManagerDoc && (this.GroupManagerDoc.data_modificationDate = new DateField()); } } } @@ -275,7 +277,9 @@ export class GroupManager extends ObservableReactComponent<{}> { TaskCompletionBox.textDisplayed = 'Group created!'; TaskCompletionBox.taskCompleted = true; setTimeout( - action(() => (TaskCompletionBox.taskCompleted = false)), + action(() => { + TaskCompletionBox.taskCompleted = false; + }), 2000 ); }; @@ -292,7 +296,7 @@ export class GroupManager extends ObservableReactComponent<{}> {

- (this.buttonColour = this.inputRef.current?.value ? 'black' : '#979797'))} /> + { + this.buttonColour = this.inputRef.current?.value ? 'black' : '#979797'; + })} + />
this.setInternalSharing({ user, linkDatabase, sharingDoc, userColor }, e.currentTarget.value, undefined)}> + {this.sharingOptions(uniform)} + + ) : ( +
+ {concat(ReverseHierarchyMap.get(permissions)?.image, ' ', permissions)} +   +
+ )} +
+
+ ); }); - const docs = await DocServer.GetRefFields(raw.reduce((list, user) => [...list, user.sharingDocumentId, user.linkDatabaseId], [] as string[])); - raw.map( - action((newUser: User) => { - const sharingDoc = docs[newUser.sharingDocumentId]; - const linkDatabase = docs[newUser.linkDatabaseId]; - if (sharingDoc instanceof Doc && linkDatabase instanceof Doc) { - if (!this.users.find(user => user.user.email === newUser.email)) { - this.users.push({ user: newUser, sharingDoc, linkDatabase, userColor: StrCast(sharingDoc.userColor) }); - //LinkManager.addLinkDB(linkDatabase); - } - } - }) + + // checks if every doc has the same author + const sameAuthor = docs.every(doc => doc?.author === docs[0]?.author); + + // the owner of the doc and the current user are placed at the top of the user list. + const userKey = `acl-${normalizeEmail(ClientUtils.CurrentUserEmail())}`; + const curUserPermission = StrCast(targetDoc[userKey]); + // const curUserPermission = HierarchyMapping.get(effectiveAcls[0])!.name + userListContents.unshift( + sameAuthor ? ( +
+ {targetDoc?.author === ClientUtils.CurrentUserEmail() ? 'Me' : StrCast(targetDoc?.author)} +
+
Owner
+
+
+ ) : null, + sameAuthor && targetDoc?.author !== ClientUtils.CurrentUserEmail() ? ( +
+ Me +
+
+ {effectiveAcls.every(acl => acl === effectiveAcls[0]) ? concat(ReverseHierarchyMap.get(curUserPermission!)?.image, ' ', curUserPermission) : '-multiple-'} +   +
+
+
+ ) : null + ); + + // the list of groups shared with + const groupListMap: (Doc | { title: string })[] = groups.filter(({ title }) => (docs.length > 1 ? commonKeys.includes(`acl-${normalizeEmail(StrCast(title))}`) : true)); + groupListMap.unshift({ title: 'Guest' }); // , { title: "ALL" }); + const groupListContents = groupListMap.map(group => { + const groupKey = `acl-${StrCast(group.title)}`; + const uniform = docs.every(doc => doc?.[DocAcl]?.[groupKey] === docs[0]?.[DocAcl]?.[groupKey]); + const permissions = uniform ? StrCast(targetDoc?.[groupKey]) : '-multiple-'; + + return !permissions ? null : ( +
+
{StrCast(group.title)}
+   + {group instanceof Doc ? ( + } + size={Size.XSMALL} + color={SettingsManager.userColor} + onClick={action(() => { + GroupManager.Instance.currentGroup = group; + })} + /> + ) : null} +
+ {admin || this.myDocAcls ? ( + + ) : ( +
+ {concat(ReverseHierarchyMap.get(permissions)?.image, ' ', permissions)} +   +
+ )} +
+
); - this.populating = false; - } - }; + }); + return ( +
+ {GroupManager.Instance?.currentGroup ? ( + { + GroupManager.Instance.currentGroup = undefined; + })} + /> + ) : null} +
+

+

window.open('https://brown-dash.github.io/Dash-Documentation/features/collaboration/', '_blank')}> + window.open('https://brown-dash.github.io/Dash-Documentation/features/collaboration/', '_blank')} /> +
+ Share + {this.focusOn(docs.length < 2 ? StrCast(targetDoc?.title, 'this document') : '-multiple-')} +

+
+
+
+
+ {admin ? ( +
+
+ + {this.sharingOptions(true)} + +
+
+
+
+
+ { + this.showUserOptions = !this.showUserOptions; + })} + />{' '} + + { + this.showGroupOptions = !this.showGroupOptions; + })} + />{' '} + +
+ +
+ {Doc.noviceMode ? null : ( +
+ { + this.upgradeNested = !this.upgradeNested; + })} + checked={this.upgradeNested} + />{' '} + + { + this.layoutDocAcls = !this.layoutDocAcls; + })} + checked={this.layoutDocAcls} + />{' '} + +
+ )} +
+
+ ) : ( +
+
+
+ { + this.layoutDocAcls = !this.layoutDocAcls; + })} + checked={this.layoutDocAcls} + />{' '} + +
+
+
+ )} +
+
+
{ + this.individualSort = this.individualSort === 'ascending' ? 'descending' : this.individualSort === 'descending' ? 'none' : 'ascending'; + })}> +
+ Individuals + } + size={Size.XSMALL} + color={StrCast(Doc.UserDoc().userColor)} + /> +
+
+
{userListContents}
+
+
+
{ + this.groupSort = this.groupSort === 'ascending' ? 'descending' : this.groupSort === 'descending' ? 'none' : 'ascending'; + })}> +
+ Groups + } size={Size.XSMALL} color={StrCast(Doc.UserDoc().userColor)} onClick={action(() => GroupManager.Instance.open())} /> + } + size={Size.XSMALL} + color={StrCast(Doc.UserDoc().userColor)} + /> +
+
+
{groupListContents}
+
+
+
+
+ ); + } /** * Shares the document with a user. @@ -200,12 +479,69 @@ export class SharingManager extends React.Component<{}> { } }); }, 'set group permissions'); + /** + * Populates the list of validated users (this.users) by adding registered users which have a sharingDocument. + */ + populateUsers = async () => { + if (!this.populating && Doc.UserDoc()[Id] !== Utils.GuestID()) { + this.populating = true; + const userList = await RequestPromise.get(ClientUtils.prepend('/getUsers')); + const raw = (JSON.parse(userList) as User[]).filter(user => user.email !== 'guest' && user.email !== ClientUtils.CurrentUserEmail()); + runInAction(() => { + FieldLoader.ServerLoadStatus.message = 'users'; + }); + const docs = await DocServer.GetRefFields(raw.reduce((list, user) => [...list, user.sharingDocumentId, user.linkDatabaseId], [] as string[])); + raw.map( + action((newUser: User) => { + const sharingDoc = docs[newUser.sharingDocumentId]; + const linkDatabase = docs[newUser.linkDatabaseId]; + if (sharingDoc instanceof Doc && linkDatabase instanceof Doc) { + if (!this.users.find(user => user.user.email === newUser.email)) { + this.users.push({ user: newUser, sharingDoc, linkDatabase, userColor: StrCast(sharingDoc.userColor) }); + // LinkManager.addLinkDB(linkDatabase); + } + } + }) + ); + this.populating = false; + } + }; + + // eslint-disable-next-line react/sort-comp + public close = action(() => { + this.isOpen = false; + this.selectedUsers = null; // resets the list of users and selected users (in the react-select component) + TaskCompletionBox.taskCompleted = false; + setTimeout( + action(() => { + // this.copied = false; + DictationOverlay.Instance.hasActiveModal = false; + this.targetDoc = undefined; + }), + 500 + ); + this.layoutDocAcls = false; + }); + + // eslint-disable-next-line react/no-unused-class-component-methods + public open = (target?: DocumentView, targetDoc?: Doc) => { + this.populateUsers(); + runInAction(() => { + this.targetDocView = target; + this.targetDoc = targetDoc || target?.Document; + DictationOverlay.Instance.hasActiveModal = true; + this.isOpen = this.targetDoc !== undefined; + this.permissions = SharingPermissions.Augment; + this.upgradeNested = true; + }); + }; /** * Shares the documents shared with a group with a new user who has been added to that group. * @param group * @param emailId */ + // eslint-disable-next-line react/no-unused-class-component-methods shareWithAddedMember = (group: Doc, emailId: string, retry: boolean = true) => { const user = this.users.find(({ user: { email } }) => email === emailId)!; const self = this; @@ -231,6 +567,7 @@ export class SharingManager extends React.Component<{}> { /** * Called from the properties sidebar to change permissions of a user. */ + // eslint-disable-next-line react/no-unused-class-component-methods shareFromPropertiesSidebar = undoable((shareWith: string, permission: SharingPermissions, docs: Doc[], layout: boolean) => { if (layout) this.layoutDocAcls = true; if (shareWith !== 'Guest') { @@ -254,6 +591,7 @@ export class SharingManager extends React.Component<{}> { * @param group * @param emailId */ + // eslint-disable-next-line react/no-unused-class-component-methods removeMember = (group: Doc, emailId: string) => { const user: ValidatedUser = this.users.find(({ user: { email } }) => email === emailId)!; @@ -277,6 +615,7 @@ export class SharingManager extends React.Component<{}> { * Removes a group's permissions from documents that have been shared with it. 
* @param group */ + // eslint-disable-next-line react/no-unused-class-component-methods removeGroup = (group: Doc) => { if (group.docsShared) { DocListCast(group.docsShared).forEach(doc => { @@ -299,25 +638,12 @@ export class SharingManager extends React.Component<{}> { // targetDoc["acl-" + PublicKey] = permission; // }s - /** - * Copies the Public sharing url to the user's clipboard. - */ - private copyURL = (e: any) => { - ClientUtils.CopyText(ClientUtils.shareUrl(this.targetDoc![Id])); - }; - - /** - * Returns the SharingPermissions (Admin, Can Edit etc) access that's used to share - */ - private sharingOptions(uniform: boolean, showGuestOptions?: boolean) { - const dropdownValues: string[] = showGuestOptions ? [SharingPermissions.None, SharingPermissions.View] : Object.values(SharingPermissions); - if (!uniform) dropdownValues.unshift('-multiple-'); - return dropdownValues.map(permission => ( - - )); - } + /** + * Copies the Public sharing url to the user's clipboard. + */ + private copyURL = () => { + ClientUtils.CopyText(ClientUtils.shareUrl(this.targetDoc![Id])); + }; private focusOn = (contents: string) => { const title = this.targetDoc ? StrCast(this.targetDoc.title) : ''; @@ -350,24 +676,6 @@ export class SharingManager extends React.Component<{}> { ); }; - /** - * Handles changes in the users selected in react-select - */ - @action - handleUsersChange = (selectedOptions: any) => { - this.selectedUsers = selectedOptions as UserOptions[]; - }; - - /** - * Handles changes in the permission chosen to share with someone with - */ - handlePermissionsChange = undoable( - action((event: React.ChangeEvent) => { - this.permissions = event.currentTarget.value as SharingPermissions; - }), - 'permission change' - ); - /** * Calls the relevant method for sharing, displays the popup, and resets the relevant variables. */ @@ -389,7 +697,9 @@ export class SharingManager extends React.Component<{}> { TaskCompletionBox.textDisplayed = 'Document shared!'; TaskCompletionBox.taskCompleted = true; setTimeout( - action(() => (TaskCompletionBox.taskCompleted = false)), + action(() => { + TaskCompletionBox.taskCompleted = false; + }), 2000 ); } @@ -418,263 +728,20 @@ export class SharingManager extends React.Component<{}> { const g2 = StrCast(group2.title); return g1 < g2 ? -1 : g1 === g2 ? 0 : 1; }; - /** - * @returns the main interface of the SharingManager. + * Returns the SharingPermissions (Admin, Can Edit etc) access that's used to share */ - @computed get sharingInterface() { - if (!this.targetDoc) return null; - TraceMobx(); - const groupList = GroupManager.Instance?.allGroups || []; - - const sortedUsers = this.users - .slice() - .sort(this.sortUsers) - .map(({ user: { email } }) => ({ label: email, value: indType + email })); - const sortedGroups = groupList - .slice() - .sort(this.sortGroups) - .map(({ title }) => ({ label: StrCast(title), value: groupType + StrCast(title) })); - - // the next block handles the users shown (individuals/groups/both) - const options: GroupedOptions[] = []; - if (GroupManager.Instance) { - if ((this.showUserOptions && this.showGroupOptions) || (!this.showUserOptions && !this.showGroupOptions)) { - options.push({ label: 'Individuals', options: sortedUsers }, { label: 'Groups', options: sortedGroups }); - } else if (this.showUserOptions) options.push({ label: 'Individuals', options: sortedUsers }); - else options.push({ label: 'Groups', options: sortedGroups }); - } - - const users = this.individualSort === 'ascending' ? 
this.users.slice().sort(this.sortUsers) : this.individualSort === 'descending' ? this.users.slice().sort(this.sortUsers).reverse() : this.users; - const groups = this.groupSort === 'ascending' ? groupList.slice().sort(this.sortGroups) : this.groupSort === 'descending' ? groupList.slice().sort(this.sortGroups).reverse() : groupList; - - let docs = SelectionManager.Views.length < 2 ? [this.targetDoc] : SelectionManager.Views.map(docView => docView.Document); - - if (this.myDocAcls) { - const newDocs: Doc[] = []; - SearchUtil.foreachRecursiveDoc(docs, (depth, doc) => newDocs.push(doc)); - docs = newDocs.filter(doc => GetEffectiveAcl(doc) === AclAdmin); - } - - const targetDoc = this.layoutDocAcls ? docs[0] : docs[0]?.[DocData]; - - // tslint:disable-next-line: no-unnecessary-callback-wrapper - const effectiveAcls = docs.map(doc => GetEffectiveAcl(doc)); - const admin = this.myDocAcls ? Boolean(docs.length) : effectiveAcls.every(acl => acl === AclAdmin); - - // users in common between all docs - const commonKeys = intersection(docs).reduce((list, doc) => (doc?.[DocAcl] ? [...list, ...Object.keys(doc[DocAcl])] : list), [] as string[]); - - // the list of users shared with - const userListContents = users - // .filter(({ user }) => (docs.length > 1 ? commonKeys.includes(`acl-${normalizeEmail(user.email)}`) : docs[0]?.author !== user.email)) - .filter(({ user }) => docs[0]?.author !== user.email) - .map(({ user, linkDatabase, sharingDoc, userColor }) => { - const userKey = `acl-${normalizeEmail(user.email)}`; - const uniform = docs.every(doc => doc?.[DocAcl]?.[userKey] === docs[0]?.[DocAcl]?.[userKey]); - // const permissions = uniform ? StrCast(targetDoc?.[userKey]) : '-multiple-'; - let permissions = targetDoc[DocAcl][userKey] ? HierarchyMapping.get(targetDoc[DocAcl][userKey])?.name : StrCast(targetDoc[userKey]); - permissions = uniform ? StrCast(targetDoc?.[userKey]) : '-multiple-'; - - return !permissions ? null : ( -
- {user.email} -
- {admin || this.myDocAcls ? ( - - ) : ( -
- {concat(ReverseHierarchyMap.get(permissions)?.image, ' ', permissions)} -   -
- )} -
-
- ); - }); - - // checks if every doc has the same author - const sameAuthor = docs.every(doc => doc?.author === docs[0]?.author); - - // the owner of the doc and the current user are placed at the top of the user list. - const userKey = `acl-${normalizeEmail(ClientUtils.CurrentUserEmail())}`; - const curUserPermission = StrCast(targetDoc[userKey]); - // const curUserPermission = HierarchyMapping.get(effectiveAcls[0])!.name - userListContents.unshift( - sameAuthor ? ( -
- {targetDoc?.author === ClientUtils.CurrentUserEmail() ? 'Me' : StrCast(targetDoc?.author)} -
-
Owner
-
-
- ) : null, - sameAuthor && targetDoc?.author !== ClientUtils.CurrentUserEmail() ? ( -
- Me -
-
- {effectiveAcls.every(acl => acl === effectiveAcls[0]) ? concat(ReverseHierarchyMap.get(curUserPermission!)?.image, ' ', curUserPermission) : '-multiple-'} -   -
-
-
- ) : null - ); - - // the list of groups shared with - const groupListMap: (Doc | { title: string })[] = groups.filter(({ title }) => (docs.length > 1 ? commonKeys.includes(`acl-${normalizeEmail(StrCast(title))}`) : true)); - groupListMap.unshift({ title: 'Guest' }); //, { title: "ALL" }); - const groupListContents = groupListMap.map(group => { - let groupKey = `acl-${StrCast(group.title)}`; - const uniform = docs.every(doc => doc?.[DocAcl]?.[groupKey] === docs[0]?.[DocAcl]?.[groupKey]); - const permissions = uniform ? StrCast(targetDoc?.[groupKey]) : '-multiple-'; - - return !permissions ? null : ( -
-
{StrCast(group.title)}
-   - {group instanceof Doc ? } size={Size.XSMALL} color={SettingsManager.userColor} onClick={action(() => (GroupManager.Instance.currentGroup = group))} /> : null} -
- {admin || this.myDocAcls ? ( - - ) : ( -
- {concat(ReverseHierarchyMap.get(permissions)?.image, ' ', permissions)} -   -
- )} -
-
- ); - }); - return ( -
- {GroupManager.Instance?.currentGroup ? (GroupManager.Instance.currentGroup = undefined))} /> : null} -
-

-

window.open('https://brown-dash.github.io/Dash-Documentation/features/collaboration/', '_blank')}> - window.open('https://brown-dash.github.io/Dash-Documentation/features/collaboration/', '_blank')} /> -
- Share - {this.focusOn(docs.length < 2 ? StrCast(targetDoc?.title, 'this document') : '-multiple-')} -

-
-
-
-
- {admin ? ( -
-
- - {this.sharingOptions(true)} - -
-
-
-
-
- (this.showUserOptions = !this.showUserOptions))} /> - (this.showGroupOptions = !this.showGroupOptions))} /> -
- -
- {Doc.noviceMode ? null : ( -
- (this.upgradeNested = !this.upgradeNested))} checked={this.upgradeNested} /> - (this.layoutDocAcls = !this.layoutDocAcls))} checked={this.layoutDocAcls} /> -
- )} -
-
- ) : ( -
-
-
- (this.layoutDocAcls = !this.layoutDocAcls))} checked={this.layoutDocAcls} /> -
-
-
- )} -
-
-
(this.individualSort = this.individualSort === 'ascending' ? 'descending' : this.individualSort === 'descending' ? 'none' : 'ascending'))}> -
- Individuals - } - size={Size.XSMALL} - color={StrCast(Doc.UserDoc().userColor)} - /> -
-
-
{userListContents}
-
-
-
(this.groupSort = this.groupSort === 'ascending' ? 'descending' : this.groupSort === 'descending' ? 'none' : 'ascending'))}> -
- Groups - } size={Size.XSMALL} color={StrCast(Doc.UserDoc().userColor)} onClick={action(() => GroupManager.Instance.open())} /> - } - size={Size.XSMALL} - color={StrCast(Doc.UserDoc().userColor)} - /> -
-
-
{groupListContents}
-
-
-
- - ); + private sharingOptions(uniform: boolean, showGuestOptions?: boolean) { + const dropdownValues: string[] = showGuestOptions ? [SharingPermissions.None, SharingPermissions.View] : Object.values(SharingPermissions); + if (!uniform) dropdownValues.unshift('-multiple-'); + return dropdownValues.map(permission => ( + + )); } render() { - return ; + return ; } } diff --git a/src/client/util/SnappingManager.ts b/src/client/util/SnappingManager.ts index eb47bbe88..3da85191f 100644 --- a/src/client/util/SnappingManager.ts +++ b/src/client/util/SnappingManager.ts @@ -10,6 +10,7 @@ export class SnappingManager { @observable _shiftKey = false; @observable _ctrlKey = false; @observable _metaKey = false; + @observable _showPresPaths = false; @observable _isLinkFollowing = false; @observable _isDragging: boolean = false; @observable _isResizing: string | undefined = undefined; // the string is the Id of the document being resized @@ -36,6 +37,7 @@ export class SnappingManager { public static get ShiftKey() { return this.Instance._shiftKey; } // prettier-ignore public static get CtrlKey() { return this.Instance._ctrlKey; } // prettier-ignore public static get MetaKey() { return this.Instance._metaKey; } // prettier-ignore + public static get ShowPresPaths() { return this.Instance._showPresPaths; } // prettier-ignore public static get IsLinkFollowing(){ return this.Instance._isLinkFollowing; } // prettier-ignore public static get IsDragging() { return this.Instance._isDragging; } // prettier-ignore public static get IsResizing() { return this.Instance._isResizing; } // prettier-ignore @@ -44,6 +46,7 @@ export class SnappingManager { public static SetShiftKey = (down: boolean) => runInAction(() => {this.Instance._shiftKey = down}); // prettier-ignore public static SetCtrlKey = (down: boolean) => runInAction(() => {this.Instance._ctrlKey = down}); // prettier-ignore public static SetMetaKey = (down: boolean) => runInAction(() => {this.Instance._metaKey = down}); // prettier-ignore + public static SetShowPresPaths = (paths:boolean) => runInAction(() => {this.Instance._showPresPaths = paths}); // prettier-ignore public static SetIsLinkFollowing= (follow:boolean)=> runInAction(() => {this.Instance._isLinkFollowing = follow}); // prettier-ignore public static SetIsDragging = (drag: boolean) => runInAction(() => {this.Instance._isDragging = drag}); // prettier-ignore public static SetIsResizing = (docid?:string) => runInAction(() => {this.Instance._isResizing = docid}); // prettier-ignore diff --git a/src/client/util/UndoManager.ts b/src/client/util/UndoManager.ts index 4e941508d..956c0e674 100644 --- a/src/client/util/UndoManager.ts +++ b/src/client/util/UndoManager.ts @@ -1,8 +1,11 @@ +/* eslint-disable prefer-spread */ +/* eslint-disable no-use-before-define */ import { action, observable, runInAction } from 'mobx'; import { Without } from '../../Utils'; import { RichTextField } from '../../fields/RichTextField'; -export let printToConsole = false; // Doc.MyDockedBtns.linearView_IsOpen +// eslint-disable-next-line prefer-const +let printToConsole = false; // Doc.MyDockedBtns.linearView_IsOpen function getBatchName(target: any, key: string | symbol): string { const keyName = key.toString(); @@ -38,10 +41,11 @@ function propertyDecorator(target: any, key: string | symbol) { } export function undoable(fn: (...args: any[]) => any, batchName: string): (...args: any[]) => any { - return function () { + return function (...fargs) { const batch = UndoManager.StartBatch(batchName); try { - return 
fn.apply(undefined, arguments as any); + // eslint-disable-next-line prefer-rest-params + return fn.apply(undefined, fargs); } finally { batch.end(); } @@ -49,13 +53,15 @@ export function undoable(fn: (...args: any[]) => any, batchName: string): (...ar } export function undoBatch(target: any, key: string | symbol, descriptor?: TypedPropertyDescriptor): any; +// eslint-disable-next-line no-redeclare export function undoBatch(fn: (...args: any[]) => any): (...args: any[]) => any; +// eslint-disable-next-line no-redeclare export function undoBatch(target: any, key?: string | symbol, descriptor?: TypedPropertyDescriptor): any { if (!key) { - return function () { + return function (...fargs: any[]) { const batch = UndoManager.StartBatch(''); try { - return target.apply(undefined, arguments); + return target.apply(undefined, fargs); } finally { batch.end(); } @@ -63,7 +69,7 @@ export function undoBatch(target: any, key?: string | symbol, descriptor?: Typed } if (!descriptor) { propertyDecorator(target, key); - return; + return undefined; } const oldFunction = descriptor.value; @@ -87,14 +93,18 @@ export namespace UndoManager { } type UndoBatch = UndoEvent[]; - export let undoStackNames: string[] = observable([]); - export let redoStackNames: string[] = observable([]); - export let undoStack: UndoBatch[] = observable([]); - export let redoStack: UndoBatch[] = observable([]); let currentBatch: UndoBatch | undefined; - export let batchCounter = observable.box(0); let undoing = false; - export let tempEvents: UndoEvent[] | undefined = undefined; + let tempEvents: UndoEvent[] | undefined; + export const undoStackNames: string[] = observable([]); + export const redoStackNames: string[] = observable([]); + export const undoStack: UndoBatch[] = observable([]); + export const redoStack: UndoBatch[] = observable([]); + export const batchCounter = observable.box(0); + let _fieldPrinter: (val: any) => string = val => val?.toString(); + export function SetFieldPrinter(printer: (val: any) => string) { + _fieldPrinter = printer; + } export function AddEvent(event: UndoEvent, value?: any): void { if (currentBatch && batchCounter.get() && !undoing) { @@ -103,8 +113,8 @@ export namespace UndoManager { ' '.slice(0, batchCounter.get()) + 'UndoEvent : ' + event.prop + - ' = ' + - (value instanceof RichTextField ? value.Text : value instanceof Array ? value.map(val => Field.toJavascriptString(val)).join(',') : Field.toJavascriptString(value)) + ' = ' + // prettier-ignore + (value instanceof RichTextField ? value.Text : value instanceof Array ? 
value.map(_fieldPrinter).join(',') : _fieldPrinter(value)) ); currentBatch.push(event); tempEvents?.push(event); @@ -130,21 +140,22 @@ export namespace UndoManager { } export function FilterBatches(fieldTypes: string[]) { const fieldCounts: { [key: string]: number } = {}; - const lastStack = UndoManager.undoStack.slice(-1)[0]; //.lastElement(); + const lastStack = UndoManager.undoStack.slice(-1)[0]; // .lastElement(); if (lastStack) { - lastStack.forEach(ev => fieldTypes.includes(ev.prop) && (fieldCounts[ev.prop] = (fieldCounts[ev.prop] || 0) + 1)); + lastStack.forEach(ev => { + fieldTypes.includes(ev.prop) && (fieldCounts[ev.prop] = (fieldCounts[ev.prop] || 0) + 1); + }); const fieldCount2: { [key: string]: number } = {}; - runInAction( - () => - (UndoManager.undoStack[UndoManager.undoStack.length - 1] = lastStack.filter(ev => { - if (fieldTypes.includes(ev.prop)) { - fieldCount2[ev.prop] = (fieldCount2[ev.prop] || 0) + 1; - if (fieldCount2[ev.prop] === 1 || fieldCount2[ev.prop] === fieldCounts[ev.prop]) return true; - return false; - } - return true; - })) - ); + runInAction(() => { + UndoManager.undoStack[UndoManager.undoStack.length - 1] = lastStack.filter(ev => { + if (fieldTypes.includes(ev.prop)) { + fieldCount2[ev.prop] = (fieldCount2[ev.prop] || 0) + 1; + if (fieldCount2[ev.prop] === 1 || fieldCount2[ev.prop] === fieldCounts[ev.prop]) return true; + return false; + } + return true; + }); + }); } } export function TraceOpenBatches() { @@ -161,11 +172,10 @@ export namespace UndoManager { if (this.disposed) { console.log('WARNING: undo batch already disposed'); return false; - } else { - this.disposed = true; - openBatches.splice(openBatches.indexOf(this)); - return EndBatch(this.batchName, cancel); } + this.disposed = true; + openBatches.splice(openBatches.indexOf(this)); + return EndBatch(this.batchName, cancel); }; end = () => this.dispose(false); @@ -183,7 +193,7 @@ export namespace UndoManager { const EndBatch = action((batchName: string, cancel: boolean = false) => { runInAction(() => batchCounter.set(batchCounter.get() - 1)); - printToConsole && console.log(' '.slice(0, batchCounter.get()) + 'End ' + batchName + ' (' + currentBatch?.length + ')'); + printToConsole && console.log(' '.slice(0, batchCounter.get()) + 'End ' + batchName + ' (' + (currentBatch?.length ?? 0) + ')'); if (batchCounter.get() === 0 && currentBatch?.length) { if (!cancel) { undoStack.push(currentBatch); @@ -200,10 +210,10 @@ export namespace UndoManager { export function StartTempBatch() { tempEvents = []; } - export function EndTempBatch(success: boolean) { + export function EndTempBatch(success: boolean) { UndoManager.UndoTempBatch(success); } - //TODO Make this return the return value + // TODO Make this return the return value export function RunInBatch(fn: () => T, batchName: string) { const batch = StartBatch(batchName); try { @@ -235,9 +245,11 @@ export namespace UndoManager { } undoing = true; - for (let i = commands.length - 1; i >= 0; i--) { - commands[i].undo(); - } + // eslint-disable-next-line prettier/prettier + commands + .slice() + .reverse() + .forEach(command => command.undo()); undoing = false; redoStackNames.push(names ?? '???'); @@ -256,9 +268,7 @@ export namespace UndoManager { } undoing = true; - for (const command of commands) { - command.redo(); - } + commands.forEach(command => command.redo()); undoing = false; undoStackNames.push(names ?? 
'???'); diff --git a/src/client/util/reportManager/ReportManager.tsx b/src/client/util/reportManager/ReportManager.tsx index 02b3ee32c..2224e642d 100644 --- a/src/client/util/reportManager/ReportManager.tsx +++ b/src/client/util/reportManager/ReportManager.tsx @@ -1,3 +1,6 @@ +/* eslint-disable jsx-a11y/label-has-associated-control */ +/* eslint-disable jsx-a11y/media-has-caption */ +/* eslint-disable react/no-unused-class-component-methods */ import { Octokit } from '@octokit/core'; import { Button, Dropdown, DropdownType, IconButton, Type } from 'browndash-components'; import { action, makeObservable, observable } from 'mobx'; @@ -13,7 +16,7 @@ import { ClientUtils } from '../../../ClientUtils'; import { Doc } from '../../../fields/Doc'; import { StrCast } from '../../../fields/Types'; import { MainViewModal } from '../../views/MainViewModal'; -import '.././SettingsManager.scss'; +import '../SettingsManager.scss'; import { SettingsManager } from '../SettingsManager'; import './ReportManager.scss'; import { Filter, FormInput, FormTextArea, IssueCard, IssueView } from './ReportManagerComponents'; @@ -25,10 +28,12 @@ import { BugType, FileData, Priority, ReportForm, ViewState, bugDropdownItems, d */ @observer export class ReportManager extends React.Component<{}> { + // eslint-disable-next-line no-use-before-define public static Instance: ReportManager; @observable private isOpen = false; @observable private query = ''; + // eslint-disable-next-line react/sort-comp @action private setQuery = (q: string) => { this.query = q; }; @@ -83,7 +88,9 @@ export class ReportManager extends React.Component<{}> { this.formData = newData; }); - public close = action(() => (this.isOpen = false)); + public close = action(() => { + this.isOpen = false; + }); public open = action(async () => { this.isOpen = true; if (this.shownIssues.length === 0) { @@ -165,7 +172,7 @@ export class ReportManager extends React.Component<{}> { * @returns JSX element of a piece of media (image, video, audio) */ private getMediaPreview = (fileData: FileData): JSX.Element => { - const file = fileData.file; + const { file } = fileData; const mimeType = file.type; const preview = URL.createObjectURL(file); @@ -180,7 +187,8 @@ export class ReportManager extends React.Component<{}> { ); - } else if (mimeType.startsWith('video/')) { + } + if (mimeType.startsWith('video/')) { return (
@@ -194,7 +202,8 @@ export class ReportManager extends React.Component<{}> {
); - } else if (mimeType.startsWith('audio/')) { + } + if (mimeType.startsWith('audio/')) { return (
); } - return <>; + return
; }; /** @@ -307,8 +316,8 @@ export class ReportManager extends React.Component<{}> {
{ @@ -320,8 +329,8 @@ export class ReportManager extends React.Component<{}> { /> { @@ -347,7 +356,7 @@ export class ReportManager extends React.Component<{}> { text="Submit" type={Type.TERT} color={StrCast(Doc.UserDoc().userVariantColor)} - icon={} + icon={} iconPlacement="right" onClick={() => { this.reportIssue(); @@ -364,7 +373,7 @@ export class ReportManager extends React.Component<{}> { /> )}
- } onClick={this.close} /> + } onClick={this.close} />
); @@ -376,9 +385,8 @@ export class ReportManager extends React.Component<{}> { private reportComponent = () => { if (this.viewState === ViewState.VIEW) { return this.viewIssuesComponent(); - } else { - return this.reportIssueComponent(); } + return this.reportIssueComponent(); }; render() { @@ -386,7 +394,7 @@ export class ReportManager extends React.Component<{}> { diff --git a/src/client/views/ContextMenuItem.tsx b/src/client/views/ContextMenuItem.tsx index 5760872fb..eb1030eec 100644 --- a/src/client/views/ContextMenuItem.tsx +++ b/src/client/views/ContextMenuItem.tsx @@ -1,3 +1,4 @@ +/* eslint-disable react/jsx-props-no-spreading */ import { IconProp } from '@fortawesome/fontawesome-svg-core'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { action, makeObservable, observable, runInAction } from 'mobx'; @@ -17,6 +18,7 @@ export interface OriginalMenuProps { export interface SubmenuProps { description: string; + // eslint-disable-next-line no-use-before-define subitems: ContextMenuProps[]; noexpand?: boolean; addDivider?: boolean; @@ -37,7 +39,9 @@ export class ContextMenuItem extends ObservableReactComponent (this._items.length = 0)); + runInAction(() => { + this._items.length = 0; + }); if ((this._props as SubmenuProps)?.subitems) { (this._props as SubmenuProps).subitems?.forEach(i => runInAction(() => this._items.push(i))); } @@ -83,7 +87,9 @@ export class ContextMenuItem extends ObservableReactComponent (this.overItem = false)), + action(() => { + this.overItem = false; + }), ContextMenuItem.timeout ); }; @@ -147,10 +153,10 @@ export class ContextMenuItem extends ObservableReactComponent {this._props.description} - +
); } + return null; } } diff --git a/src/client/views/DashboardView.tsx b/src/client/views/DashboardView.tsx index 14abd5f89..25415a4f0 100644 --- a/src/client/views/DashboardView.tsx +++ b/src/client/views/DashboardView.tsx @@ -1,3 +1,5 @@ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +/* eslint-disable jsx-a11y/click-events-have-key-events */ import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { Button, ColorPicker, EditableText, Size, Type } from 'browndash-components'; import { action, computed, makeObservable, observable } from 'mobx'; @@ -48,10 +50,18 @@ export class DashboardView extends ObservableReactComponent<{}> { @observable private selectedDashboardGroup = DashboardGroup.MyDashboards; @observable private newDashboardName = ''; @observable private newDashboardColor = '#AFAFAF'; - @action abortCreateNewDashboard = () => (this.openModal = false); - @action setNewDashboardName = (name: string) => (this.newDashboardName = name); - @action setNewDashboardColor = (color: string) => (this.newDashboardColor = color); - @action selectDashboardGroup = (group: DashboardGroup) => (this.selectedDashboardGroup = group); + @action abortCreateNewDashboard = () => { + this.openModal = false; + }; + @action setNewDashboardName = (name: string) => { + this.newDashboardName = name; + }; + @action setNewDashboardColor = (color: string) => { + this.newDashboardColor = color; + }; + @action selectDashboardGroup = (group: DashboardGroup) => { + this.selectedDashboardGroup = group; + }; clickDashboard = (e: React.MouseEvent, dashboard: Doc) => { if (this.selectedDashboardGroup === DashboardGroup.SharedDashboards) { @@ -138,9 +148,9 @@ export class DashboardView extends ObservableReactComponent<{}> { <>
-
open linked trail
}>
-
- +
); @@ -134,8 +144,12 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( key={icon.toString()} size="sm" icon={icon} - onPointerEnter={action(e => (this.subEndLink = (pinLayout ? 'Layout' : '') + (pinLayout && pinContent ? ' &' : '') + (pinContent ? ' Content' : '')))} - onPointerLeave={action(e => (this.subEndLink = ''))} + onPointerEnter={action(() => { + this.subEndLink = (pinLayout ? 'Layout' : '') + (pinLayout && pinContent ? ' &' : '') + (pinContent ? ' Content' : ''); + })} + onPointerLeave={action(() => { + this.subEndLink = ''; + })} onClick={e => { this.view0 && DocumentLinksButton.finishLinkClick(e.clientX, e.clientY, DocumentLinksButton.StartLink, this.view0.Document, true, this.view0, { @@ -157,7 +171,7 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( {linkBtn(false, true, 'address-card')} {linkBtn(true, true, 'id-card')} - + ); } @@ -177,15 +191,16 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( key={icon.toString()} size="sm" icon={icon} - onPointerEnter={action( - e => - (this.subPin = - (pinLayoutView ? 'Layout' : '') + - (pinLayoutView && pinContentView ? ' &' : '') + - (pinContentView ? ' Content View' : '') + - (pinLayoutView && pinContentView ? '(shift+alt)' : pinLayoutView ? '(shift)' : pinContentView ? '(alt)' : '')) - )} - onPointerLeave={action(e => (this.subPin = ''))} + onPointerEnter={action(() => { + this.subPin = + (pinLayoutView ? 'Layout' : '') + + (pinLayoutView && pinContentView ? ' &' : '') + + (pinContentView ? ' Content View' : '') + + (pinLayoutView && pinContentView ? '(shift+alt)' : pinLayoutView ? '(shift)' : pinContentView ? '(alt)' : ''); + })} + onPointerLeave={action(() => { + this.subPin = ''; + })} onClick={e => { const docs = this._props .views() @@ -232,8 +247,8 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( get shareButton() { const targetDoc = this.view0?.Document; return !targetDoc ? null : ( - {'Open Sharing Manager'}}> -
SharingManager.Instance.open(this.view0, targetDoc)}> + Open Sharing Manager
}> +
SharingManager.Instance.open(this.view0, targetDoc)}>
@@ -244,7 +259,7 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( get menuButton() { const targetDoc = this.view0?.Document; return !targetDoc ? null : ( - {`Open Context Menu`}}> + Open Context Menu}>
setupMoveUpEvents(this, e, returnFalse, emptyFunction, e => this.openContextMenu(e))}>
@@ -260,8 +275,7 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => (
{ - console.log('hi: ', CalendarManager.Instance); + onClick={() => { CalendarManager.Instance.open(this.view0, targetDoc); }}> @@ -282,7 +296,18 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( style={{ backgroundColor: this._isRecording ? Colors.ERROR_RED : Colors.DARK_GRAY, color: Colors.WHITE }} onPointerDown={action((e: React.PointerEvent) => { this._isRecording = true; - this._props.views().map(view => view && DocumentViewInternal.recordAudioAnnotation(view.dataDoc, view.LayoutFieldKey, stopFunc => (this._stopFunc = stopFunc), emptyFunction)); + this._props.views().map( + view => + view && + DocumentViewInternal.recordAudioAnnotation( + view.dataDoc, + view.LayoutFieldKey, + stopFunc => { + this._stopFunc = stopFunc; + }, + emptyFunction + ) + ); const b = UndoManager.StartBatch('Recording'); setupMoveUpEvents( this, @@ -310,10 +335,10 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( if (this._dragRef.current) { const dragDocView = this.view0!; const dragData = new DragManager.DocumentDragData([dragDocView.Document]); - const [left, top] = dragDocView.screenToContentsTransform().inverse().transformPoint(0, 0); + const origin = dragDocView.screenToContentsTransform().inverse().transformPoint(0, 0); dragData.defaultDropAction = dropActionType.embed; dragData.canEmbed = true; - DragManager.StartDocumentDrag([dragDocView.ContentDiv!], dragData, left, top, { hideSource: false }); + DragManager.StartDocumentDrag([dragDocView.ContentDiv!], dragData, origin[0], origin[1], { hideSource: false }); return true; } return false; @@ -336,8 +361,19 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( @computed get templateButton() { return !this.view0 ? null : ( - Tap to Customize Layout. Drag an embedding
} open={this._tooltipOpen} onClose={action(() => (this._tooltipOpen = false))} placement="bottom"> -
!this._ref.current?.getBoundingClientRect().width && (this._tooltipOpen = true))}> + Tap to Customize Layout. Drag an embedding
} + open={this._tooltipOpen} + onClose={action(() => { + this._tooltipOpen = false; + })} + placement="bottom"> +
{ + !this._ref.current?.getBoundingClientRect().width && (this._tooltipOpen = true); + })}> } popup={this.templateMenu} popupContainsPt={returnTrue} />
@@ -365,17 +401,17 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( @observable _captureEndLinkLayout = false; @action - captureEndLinkLayout = (e: React.PointerEvent) => { + captureEndLinkLayout = () => { this._captureEndLinkLayout = !this._captureEndLinkLayout; }; @observable _captureEndLinkContent = false; @action - captureEndLinkContent = (e: React.PointerEvent) => { + captureEndLinkContent = () => { this._captureEndLinkContent = !this._captureEndLinkContent; }; @action - captureEndLinkState = (e: React.PointerEvent) => { + captureEndLinkState = () => { this._captureEndLinkContent = this._captureEndLinkLayout = !this._captureEndLinkLayout; }; @@ -402,13 +438,15 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => ( return (
- +
{this._showLinkPopup ? (
(link.link_displayLine = !IsFollowLinkScript(this._props.views().lastElement()?.Document.onClick))} + linkCreated={link => { + link.link_displayLine = !IsFollowLinkScript(this._props.views().lastElement()?.Document.onClick); + }} linkCreateAnchor={() => this._props.views().lastElement()?.ComponentView?.getAnchor?.(true)} linkFrom={() => this._props.views().lastElement()?.Document} /> @@ -423,7 +461,7 @@ export class DocumentButtonBar extends ObservableReactComponent<{ views: () => (
{this.pinButton}
{this.recordButton}
{this.calendarButton}
- {!Doc.UserDoc()['documentLinksButton-fullMenu'] ? null :
{this.shareButton}
} + {!Doc.UserDoc().documentLinksButton_fullMenu ? null :
{this.shareButton}
}
{this.menuButton}
); diff --git a/src/client/views/EditableView.tsx b/src/client/views/EditableView.tsx index 85e893e19..684b948af 100644 --- a/src/client/views/EditableView.tsx +++ b/src/client/views/EditableView.tsx @@ -1,3 +1,5 @@ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +/* eslint-disable jsx-a11y/click-events-have-key-events */ import { action, IReactionDisposer, makeObservable, observable, reaction, runInAction } from 'mobx'; import { observer } from 'mobx-react'; import * as React from 'react'; @@ -70,7 +72,7 @@ export class EditableView extends ObservableReactComponent { constructor(props: EditableProps) { super(props); makeObservable(this); - this._editing = this._props.editing ? true : false; + this._editing = !!this._props.editing; } componentDidMount(): void { @@ -166,7 +168,7 @@ export class EditableView extends ObservableReactComponent { this._props.menuCallback(e.currentTarget.getBoundingClientRect().x, e.currentTarget.getBoundingClientRect().y); break; } - + // eslint-disable-next-line no-fallthrough default: if (this._props.textCallback?.(e.key)) { e.stopPropagation(); @@ -186,7 +188,6 @@ export class EditableView extends ObservableReactComponent { this._editing = true; this._props.isEditingCallback?.(true); } - // e.stopPropagation(); } }; @@ -223,6 +224,7 @@ export class EditableView extends ObservableReactComponent { renderEditor() { return this._props.autosuggestProps ? ( { ) : this._props.oneLine !== false && this._props.GetValue()?.toString().indexOf('\n') === -1 ? ( (this._inputref = r)} + ref={r => { this._inputref = r; }} // prettier-ignore style={{ display: this._props.display, overflow: 'auto', fontSize: this._props.fontSize, minWidth: 20, background: this._props.background }} placeholder={this._props.placeholder} onBlur={e => this.finalizeEdit(e.currentTarget.value, false, true, false)} defaultValue={this._props.GetValue()} - autoFocus={true} + // eslint-disable-next-line jsx-a11y/no-autofocus + autoFocus onChange={this.onChange} onKeyDown={this.onKeyDown} onPointerDown={this.stopPropagation} @@ -256,12 +259,13 @@ export class EditableView extends ObservableReactComponent { ) : (