path: root/src/server
Diffstat (limited to 'src/server')
-rw-r--r--  src/server/ApiManagers/UploadManager.ts             | 556
-rw-r--r--  src/server/ApiManagers/UserManager.ts                |  77
-rw-r--r--  src/server/DashUploadUtils.ts                        | 255
-rw-r--r--  src/server/RouteManager.ts                           |  58
-rw-r--r--  src/server/SharedMediaTypes.ts                       |  38
-rw-r--r--  src/server/authentication/AuthenticationManager.ts  |  31
-rw-r--r--  src/server/authentication/DashUserModel.ts           | 106
-rw-r--r--  src/server/index.ts                                  | 108
-rw-r--r--  src/server/websocket.ts                              | 194
9 files changed, 751 insertions, 672 deletions
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index 332ba3d35..0b6e18743 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -1,291 +1,308 @@
-import ApiManager, { Registration } from "./ApiManager";
-import { Method, _success } from "../RouteManager";
+import ApiManager, { Registration } from './ApiManager';
+import { Method, _success } from '../RouteManager';
import * as formidable from 'formidable';
import v4 = require('uuid/v4');
const AdmZip = require('adm-zip');
-import { extname, basename, dirname, } from 'path';
-import { createReadStream, createWriteStream, unlink, writeFile } from "fs";
-import { publicDirectory, filesDirectory } from "..";
-import { Database } from "../database";
-import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils";
+import { extname, basename, dirname } from 'path';
+import { createReadStream, createWriteStream, unlink, writeFile } from 'fs';
+import { publicDirectory, filesDirectory } from '..';
+import { Database } from '../database';
+import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils';
import * as sharp from 'sharp';
-import { AcceptableMedia, Upload } from "../SharedMediaTypes";
-import { normalize } from "path";
-import RouteSubscriber from "../RouteSubscriber";
+import { AcceptableMedia, Upload } from '../SharedMediaTypes';
+import { normalize } from 'path';
+import RouteSubscriber from '../RouteSubscriber';
const imageDataUri = require('image-data-uri');
-import { SolrManager } from "./SearchManager";
+import { SolrManager } from './SearchManager';
const fs = require('fs');
export enum Directory {
- parsed_files = "parsed_files",
- images = "images",
- videos = "videos",
- pdfs = "pdfs",
- text = "text",
- pdf_thumbnails = "pdf_thumbnails",
- audio = "audio",
- csv = "csv",
+ parsed_files = 'parsed_files',
+ images = 'images',
+ videos = 'videos',
+ pdfs = 'pdfs',
+ text = 'text',
+ pdf_thumbnails = 'pdf_thumbnails',
+ audio = 'audio',
+ csv = 'csv',
}
export function serverPathToFile(directory: Directory, filename: string) {
- return normalize(`${filesDirectory}/${directory}/${filename}`);
+ return normalize(`${filesDirectory}/${directory}/${filename}`);
}
export function pathToDirectory(directory: Directory) {
- return normalize(`${filesDirectory}/${directory}`);
+ return normalize(`${filesDirectory}/${directory}`);
}
export function clientPathToFile(directory: Directory, filename: string) {
- return `/files/${directory}/${filename}`;
+ return `/files/${directory}/${filename}`;
}
export default class UploadManager extends ApiManager {
-
protected initialize(register: Registration): void {
-
- register({
- method: Method.POST,
- subscription: "/concatVideos",
- secureHandler: async ({ req, res }) => {
- // req.body contains the array of server paths to the videos
- _success(res, await DashUploadUtils.concatVideos(req.body));
- }
- });
-
- register({
- method: Method.POST,
- subscription: "/uploadFormData",
- secureHandler: async ({ req, res }) => {
- const form = new formidable.IncomingForm();
- form.keepExtensions = true;
- form.uploadDir = pathToDirectory(Directory.parsed_files);
- return new Promise<void>(resolve => {
- form.parse(req, async (_err, _fields, files) => {
- const results: Upload.FileResponse[] = [];
- for (const key in files) {
- const f = files[key];
- if (!Array.isArray(f)) {
- const result = await DashUploadUtils.upload(f);
- result && !(result.result instanceof Error) && results.push(result);
- }
- }
- _success(res, results);
- resolve();
- });
- });
- }
- });
-
- register({
- method: Method.POST,
- subscription: "/uploadYoutubeVideo",
- secureHandler: async ({ req, res }) => {
- //req.readableBuffer.head.data
- return new Promise<void>(async resolve => {
- req.addListener("data", async (args) => {
- console.log(args);
- const payload = String.fromCharCode.apply(String, args);
- const videoId = JSON.parse(payload).videoId;
- const results: Upload.FileResponse[] = [];
- const result = await DashUploadUtils.uploadYoutube(videoId);
- result && !(result.result instanceof Error) && results.push(result);
- _success(res, results);
- resolve();
- });
- });
- }
- });
+ register({
+ method: Method.POST,
+ subscription: '/concatVideos',
+ secureHandler: async ({ req, res }) => {
+ // req.body contains the array of server paths to the videos
+ _success(res, await DashUploadUtils.concatVideos(req.body));
+ },
+ });
- register({
- method: Method.POST,
- subscription: new RouteSubscriber("youtubeScreenshot"),
- secureHandler: async ({ req, res }) => {
- const { id, timecode } = req.body;
- const convert = (raw: string) => {
- const number = Math.floor(Number(raw));
- const seconds = number % 60;
- const minutes = (number - seconds) / 60;
- return `${minutes}m${seconds}s`;
- };
- const suffix = timecode ? `&t=${convert(timecode)}` : ``;
- const targetUrl = `https://www.youtube.com/watch?v=${id}${suffix}`;
- const buffer = await captureYoutubeScreenshot(targetUrl);
- if (!buffer) {
- return res.send();
+ register({
+ method: Method.POST,
+ subscription: '/uploadFormData',
+ secureHandler: async ({ req, res }) => {
+ const form = new formidable.IncomingForm();
+ form.keepExtensions = true;
+ form.uploadDir = pathToDirectory(Directory.parsed_files);
+ return new Promise<void>(resolve => {
+ form.parse(req, async (_err, _fields, files) => {
+ const results: Upload.FileResponse[] = [];
+ for (const key in files) {
+ const f = files[key];
+ if (!Array.isArray(f)) {
+ const result = await DashUploadUtils.upload(f);
+ result && !(result.result instanceof Error) && results.push(result);
+ }
}
- const resolvedName = `youtube_capture_${id}_${suffix}.png`;
- const resolvedPath = serverPathToFile(Directory.images, resolvedName);
- return new Promise<void>(resolve => {
- writeFile(resolvedPath, buffer, async error => {
- if (error) {
- return res.send();
- }
- await DashUploadUtils.outputResizedImages(() => createReadStream(resolvedPath), resolvedName, pathToDirectory(Directory.images));
- res.send({
- accessPaths: {
- agnostic: DashUploadUtils.getAccessPaths(Directory.images, resolvedName)
- }
- } as Upload.FileInformation);
- resolve();
- });
- });
- }
- });
+ _success(res, results);
+ resolve();
+ });
+ });
+ },
+ });
- register({
- method: Method.POST,
- subscription: "/uploadRemoteImage",
- secureHandler: async ({ req, res }) => {
+ register({
+ method: Method.POST,
+ subscription: '/uploadYoutubeVideo',
+ secureHandler: async ({ req, res }) => {
+ //req.readableBuffer.head.data
+ return new Promise<void>(async resolve => {
+ req.addListener('data', async args => {
+ console.log(args);
+ const payload = String.fromCharCode.apply(String, args);
+ const videoId = JSON.parse(payload).videoId;
+ const results: Upload.FileResponse[] = [];
+ const result = await DashUploadUtils.uploadYoutube(videoId);
+ result && results.push(result);
+ _success(res, results);
+ resolve();
+ });
+ });
+ },
+ });
- const { sources } = req.body;
- if (Array.isArray(sources)) {
- const results = await Promise.all(sources.map(source => DashUploadUtils.UploadImage(source)));
- return res.send(results);
+ register({
+ method: Method.POST,
+ subscription: new RouteSubscriber('youtubeScreenshot'),
+ secureHandler: async ({ req, res }) => {
+ const { id, timecode } = req.body;
+ const convert = (raw: string) => {
+ const number = Math.floor(Number(raw));
+ const seconds = number % 60;
+ const minutes = (number - seconds) / 60;
+ return `${minutes}m${seconds}s`;
+ };
+ const suffix = timecode ? `&t=${convert(timecode)}` : ``;
+ const targetUrl = `https://www.youtube.com/watch?v=${id}${suffix}`;
+ const buffer = await captureYoutubeScreenshot(targetUrl);
+ if (!buffer) {
+ return res.send();
+ }
+ const resolvedName = `youtube_capture_${id}_${suffix}.png`;
+ const resolvedPath = serverPathToFile(Directory.images, resolvedName);
+ return new Promise<void>(resolve => {
+ writeFile(resolvedPath, buffer, async error => {
+ if (error) {
+ return res.send();
}
- res.send();
- }
- });
+ await DashUploadUtils.outputResizedImages(() => createReadStream(resolvedPath), resolvedName, pathToDirectory(Directory.images));
+ res.send({
+ accessPaths: {
+ agnostic: DashUploadUtils.getAccessPaths(Directory.images, resolvedName),
+ },
+ } as Upload.FileInformation);
+ resolve();
+ });
+ });
+ },
+ });
- register({
- method: Method.POST,
- subscription: "/uploadDoc",
- secureHandler: ({ req, res }) => {
+ register({
+ method: Method.POST,
+ subscription: '/uploadRemoteImage',
+ secureHandler: async ({ req, res }) => {
+ const { sources } = req.body;
+ if (Array.isArray(sources)) {
+ const results = await Promise.all(sources.map(source => DashUploadUtils.UploadImage(source)));
+ return res.send(results);
+ }
+ res.send();
+ },
+ });
- const form = new formidable.IncomingForm();
- form.keepExtensions = true;
- // let path = req.body.path;
- const ids: { [id: string]: string } = {};
- let remap = true;
- const getId = (id: string): string => {
- if (!remap) return id;
- if (id.endsWith("Proto")) return id;
- if (id in ids) {
- return ids[id];
- } else {
- return ids[id] = v4();
- }
- };
- const mapFn = (doc: any) => {
- if (doc.id) {
- doc.id = getId(doc.id);
- }
- for (const key in doc.fields) {
- if (!doc.fields.hasOwnProperty(key)) { continue; }
- const field = doc.fields[key];
- if (field === undefined || field === null) { continue; }
+ register({
+ method: Method.POST,
+ subscription: '/uploadDoc',
+ secureHandler: ({ req, res }) => {
+ const form = new formidable.IncomingForm();
+ form.keepExtensions = true;
+ // let path = req.body.path;
+ const ids: { [id: string]: string } = {};
+ let remap = true;
+ const getId = (id: string): string => {
+ if (!remap) return id;
+ if (id.endsWith('Proto')) return id;
+ if (id in ids) {
+ return ids[id];
+ } else {
+ return (ids[id] = v4());
+ }
+ };
+ const mapFn = (doc: any) => {
+ if (doc.id) {
+ doc.id = getId(doc.id);
+ }
+ for (const key in doc.fields) {
+ if (!doc.fields.hasOwnProperty(key)) {
+ continue;
+ }
+ const field = doc.fields[key];
+ if (field === undefined || field === null) {
+ continue;
+ }
- if (field.__type === "Doc") {
- mapFn(field);
- } else if (field.__type === "proxy" || field.__type === "prefetch_proxy") {
- field.fieldId = getId(field.fieldId);
- } else if (field.__type === "script" || field.__type === "computed") {
- if (field.captures) {
- field.captures.fieldId = getId(field.captures.fieldId);
- }
- } else if (field.__type === "list") {
- mapFn(field);
- } else if (typeof field === "string") {
- const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g;
- doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => {
- return `${p1}${getId(p2)}"`;
- });
- } else if (field.__type === "RichTextField") {
- const re = /("href"\s*:\s*")(.*?)"/g;
- field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => {
- return `${p1}${getId(p2)}"`;
- });
- }
- }
- };
- return new Promise<void>(resolve => {
- form.parse(req, async (_err, fields, files) => {
- remap = fields.remap !== "false";
- let id: string = "";
+ if (field.__type === 'Doc') {
+ mapFn(field);
+ } else if (field.__type === 'proxy' || field.__type === 'prefetch_proxy') {
+ field.fieldId = getId(field.fieldId);
+ } else if (field.__type === 'script' || field.__type === 'computed') {
+ if (field.captures) {
+ field.captures.fieldId = getId(field.captures.fieldId);
+ }
+ } else if (field.__type === 'list') {
+ mapFn(field);
+ } else if (typeof field === 'string') {
+ const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g;
+ doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => {
+ return `${p1}${getId(p2)}"`;
+ });
+ } else if (field.__type === 'RichTextField') {
+ const re = /("href"\s*:\s*")(.*?)"/g;
+ field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => {
+ return `${p1}${getId(p2)}"`;
+ });
+ }
+ }
+ };
+ return new Promise<void>(resolve => {
+ form.parse(req, async (_err, fields, files) => {
+ remap = fields.remap !== 'false';
+ let id: string = '';
+ try {
+ for (const name in files) {
+ const f = files[name];
+ const path_2 = Array.isArray(f) ? '' : f.path;
+ const zip = new AdmZip(path_2);
+ zip.getEntries().forEach((entry: any) => {
+ if (!entry.entryName.startsWith('files/')) return;
+ let directory = dirname(entry.entryName) + '/';
+ const extension = extname(entry.entryName);
+ const base = basename(entry.entryName).split('.')[0];
try {
- for (const name in files) {
- const f = files[name];
- const path_2 = Array.isArray(f) ? "" : f.path;
- const zip = new AdmZip(path_2);
- zip.getEntries().forEach((entry: any) => {
- if (!entry.entryName.startsWith("files/")) return;
- let directory = dirname(entry.entryName) + "/";
- const extension = extname(entry.entryName);
- const base = basename(entry.entryName).split(".")[0];
- try {
- zip.extractEntryTo(entry.entryName, publicDirectory, true, false);
- directory = "/" + directory;
-
- createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_o" + extension));
- createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_s" + extension));
- createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_m" + extension));
- createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_l" + extension));
- } catch (e) {
- console.log(e);
- }
- });
- const json = zip.getEntry("doc.json");
- try {
- const data = JSON.parse(json.getData().toString("utf8"));
- const datadocs = data.docs;
- id = getId(data.id);
- const docs = Object.keys(datadocs).map(key => datadocs[key]);
- docs.forEach(mapFn);
- await Promise.all(docs.map((doc: any) => new Promise<void>(res => {
- Database.Instance.replace(doc.id, doc, (err, r) => {
- err && console.log(err);
- res();
- }, true);
- })));
- } catch (e) { console.log(e); }
- unlink(path_2, () => { });
- }
- SolrManager.update();
- res.send(JSON.stringify(id || "error"));
- } catch (e) { console.log(e); }
- resolve();
- });
- });
- }
- });
-
- register({
- method: Method.POST,
- subscription: "/inspectImage",
- secureHandler: async ({ req, res }) => {
+ zip.extractEntryTo(entry.entryName, publicDirectory, true, false);
+ directory = '/' + directory;
- const { source } = req.body;
- if (typeof source === "string") {
- return res.send(await DashUploadUtils.InspectImage(source));
+ createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_o' + extension));
+ createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_s' + extension));
+ createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_m' + extension));
+ createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_l' + extension));
+ } catch (e) {
+ console.log(e);
+ }
+ });
+ const json = zip.getEntry('doc.json');
+ try {
+ const data = JSON.parse(json.getData().toString('utf8'));
+ const datadocs = data.docs;
+ id = getId(data.id);
+ const docs = Object.keys(datadocs).map(key => datadocs[key]);
+ docs.forEach(mapFn);
+ await Promise.all(
+ docs.map(
+ (doc: any) =>
+ new Promise<void>(res => {
+ Database.Instance.replace(
+ doc.id,
+ doc,
+ (err, r) => {
+ err && console.log(err);
+ res();
+ },
+ true
+ );
+ })
+ )
+ );
+ } catch (e) {
+ console.log(e);
+ }
+ unlink(path_2, () => {});
+ }
+ SolrManager.update();
+ res.send(JSON.stringify(id || 'error'));
+ } catch (e) {
+ console.log(e);
}
- res.send({});
- }
- });
+ resolve();
+ });
+ });
+ },
+ });
+
+ register({
+ method: Method.POST,
+ subscription: '/inspectImage',
+ secureHandler: async ({ req, res }) => {
+ const { source } = req.body;
+ if (typeof source === 'string') {
+ return res.send(await DashUploadUtils.InspectImage(source));
+ }
+ res.send({});
+ },
+ });
register({
method: Method.POST,
- subscription: "/uploadURI",
+ subscription: '/uploadURI',
secureHandler: ({ req, res }) => {
const uri = req.body.uri;
const filename = req.body.name;
const origSuffix = req.body.nosuffix ? SizeSuffix.None : SizeSuffix.Original;
const deleteFiles = req.body.replaceRootFilename;
if (!uri || !filename) {
- res.status(401).send("incorrect parameters specified");
+ res.status(401).send('incorrect parameters specified');
return;
}
if (deleteFiles) {
- const path = serverPathToFile(Directory.images, "");
+ const path = serverPathToFile(Directory.images, '');
const regex = new RegExp(`${deleteFiles}.*`);
- fs.readdirSync(path).filter((f: any) => regex.test(f)).map((f: any) => fs.unlinkSync(path + f));
+ fs.readdirSync(path)
+ .filter((f: any) => regex.test(f))
+ .map((f: any) => fs.unlinkSync(path + f));
}
return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, origSuffix))).then((savedName: string) => {
const ext = extname(savedName).toLowerCase();
const { pngs, jpgs } = AcceptableMedia;
- const resizers = !origSuffix ? [{ resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium }] : [
- { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Small },
- { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium },
- { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Large },
- ];
+ const resizers = !origSuffix
+ ? [{ resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium }]
+ : [
+ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Small },
+ { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium },
+ { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Large },
+ ];
let isImage = false;
if (pngs.includes(ext)) {
resizers.forEach(element => {
@@ -301,49 +318,48 @@ export default class UploadManager extends ApiManager {
if (isImage) {
resizers.forEach(resizer => {
const path = serverPathToFile(Directory.images, InjectSize(filename, resizer.suffix) + ext);
- createReadStream(savedName).on("error", e => console.log("Resizing read:" + e))
+ createReadStream(savedName)
+ .on('error', e => console.log('Resizing read:' + e))
.pipe(resizer.resizer)
- .pipe(createWriteStream(path).on("error", e => console.log("Resizing write: " + e)));
+ .on('error', e => console.log('Resizing write: ' + e))
+ .pipe(createWriteStream(path).on('error', e => console.log('Resizing write: ' + e)));
});
-
- }
- res.send(clientPathToFile(Directory.images, filename + ext));
- });
- }
- });
-
- }
-
+ }
+ res.send(clientPathToFile(Directory.images, filename + ext));
+ });
+ },
+ });
+ }
}
function delay(ms: number) {
- return new Promise(resolve => setTimeout(resolve, ms));
+ return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* On success, returns a buffer containing the bytes of a screenshot
* of the video (optionally, at a timecode) specified by @param targetUrl.
- *
+ *
* On failure, returns undefined.
*/
async function captureYoutubeScreenshot(targetUrl: string) {
- // const browser = await launch({ args: ['--no-sandbox', '--disable-setuid-sandbox'] });
- // const page = await browser.newPage();
- // // await page.setViewport({ width: 1920, height: 1080 });
+ // const browser = await launch({ args: ['--no-sandbox', '--disable-setuid-sandbox'] });
+ // const page = await browser.newPage();
+ // // await page.setViewport({ width: 1920, height: 1080 });
- // // await page.goto(targetUrl, { waitUntil: 'domcontentloaded' as any });
+ // // await page.goto(targetUrl, { waitUntil: 'domcontentloaded' as any });
- // const videoPlayer = await page.$('.html5-video-player');
- // videoPlayer && await page.focus("video");
- // await delay(7000);
- // const ad = await page.$('.ytp-ad-skip-button-text');
- // await ad?.click();
- // await videoPlayer?.click();
- // await delay(1000);
- // // hide youtube player controls.
- // await page.evaluate(() => (document.querySelector('.ytp-chrome-bottom') as HTMLElement).style.display = 'none');
+ // const videoPlayer = await page.$('.html5-video-player');
+ // videoPlayer && await page.focus("video");
+ // await delay(7000);
+ // const ad = await page.$('.ytp-ad-skip-button-text');
+ // await ad?.click();
+ // await videoPlayer?.click();
+ // await delay(1000);
+ // // hide youtube player controls.
+ // await page.evaluate(() => (document.querySelector('.ytp-chrome-bottom') as HTMLElement).style.display = 'none');
- // const buffer = await videoPlayer?.screenshot({ encoding: "binary" });
- // await browser.close();
+ // const buffer = await videoPlayer?.screenshot({ encoding: "binary" });
+ // await browser.close();
- // return buffer;
- return null;
-} \ No newline at end of file
+ // return buffer;
+ return null;
+}
diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts
index 7be8a1e9f..53e55c1c3 100644
--- a/src/server/ApiManagers/UserManager.ts
+++ b/src/server/ApiManagers/UserManager.ts
@@ -1,10 +1,10 @@
-import ApiManager, { Registration } from "./ApiManager";
-import { Method } from "../RouteManager";
-import { Database } from "../database";
-import { msToTime } from "../ActionUtilities";
-import * as bcrypt from "bcrypt-nodejs";
-import { Opt } from "../../fields/Doc";
-import { WebSocket } from "../websocket";
+import ApiManager, { Registration } from './ApiManager';
+import { Method } from '../RouteManager';
+import { Database } from '../database';
+import { msToTime } from '../ActionUtilities';
+import * as bcrypt from 'bcrypt-nodejs';
+import { Opt } from '../../fields/Doc';
+import { WebSocket } from '../websocket';
export const timeMap: { [id: string]: number } = {};
interface ActivityUnit {
@@ -13,28 +13,26 @@ interface ActivityUnit {
}
export default class UserManager extends ApiManager {
-
protected initialize(register: Registration): void {
-
register({
method: Method.GET,
- subscription: "/getUsers",
+ subscription: '/getUsers',
secureHandler: async ({ res }) => {
- const cursor = await Database.Instance.query({}, { email: 1, linkDatabaseId: 1, sharingDocumentId: 1 }, "users");
+ const cursor = await Database.Instance.query({}, { email: 1, linkDatabaseId: 1, sharingDocumentId: 1 }, 'users');
const results = await cursor.toArray();
res.send(results.map((user: any) => ({ email: user.email, linkDatabaseId: user.linkDatabaseId, sharingDocumentId: user.sharingDocumentId })));
- }
+ },
});
register({
method: Method.POST,
- subscription: "/setCacheDocumentIds",
+ subscription: '/setCacheDocumentIds',
secureHandler: async ({ user, req, res }) => {
const result: any = {};
user.cacheDocumentIds = req.body.cacheDocumentIds;
user.save(err => {
if (err) {
- result.error = [{ msg: "Error while caching documents" }];
+ result.error = [{ msg: 'Error while caching documents' }];
}
});
@@ -42,32 +40,35 @@ export default class UserManager extends ApiManager {
// console.log(e);
// });
res.send(result);
- }
+ },
});
register({
method: Method.GET,
- subscription: "/getUserDocumentIds",
- secureHandler: ({ res, user }) => res.send({ userDocumentId: user.userDocumentId, linkDatabaseId: user.linkDatabaseId, sharingDocumentId: user.sharingDocumentId })
+ subscription: '/getUserDocumentIds',
+ secureHandler: ({ res, user }) => res.send({ userDocumentId: user.userDocumentId, linkDatabaseId: user.linkDatabaseId, sharingDocumentId: user.sharingDocumentId }),
+ publicHandler: ({ res }) => res.send({ userDocumentId: '__guest__', linkDatabaseId: 3, sharingDocumentId: 2 }),
});
register({
method: Method.GET,
- subscription: "/getSharingDocumentId",
- secureHandler: ({ res, user }) => res.send(user.sharingDocumentId)
+ subscription: '/getSharingDocumentId',
+ secureHandler: ({ res, user }) => res.send(user.sharingDocumentId),
+ publicHandler: ({ res }) => res.send(2),
});
register({
method: Method.GET,
- subscription: "/getLinkDatabaseId",
- secureHandler: ({ res, user }) => res.send(user.linkDatabaseId)
+ subscription: '/getLinkDatabaseId',
+ secureHandler: ({ res, user }) => res.send(user.linkDatabaseId),
+ publicHandler: ({ res }) => res.send(3),
});
register({
method: Method.GET,
- subscription: "/getCurrentUser",
+ subscription: '/getCurrentUser',
secureHandler: ({ res, user: { _id, email, cacheDocumentIds } }) => res.send(JSON.stringify({ id: _id, email, cacheDocumentIds })),
- publicHandler: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" }))
+ publicHandler: ({ res }) => res.send(JSON.stringify({ id: '__guest__', email: 'guest' })),
});
register({
@@ -80,7 +81,7 @@ export default class UserManager extends ApiManager {
const validated = await new Promise<Opt<boolean>>(resolve => {
bcrypt.compare(curr_pass, user.password, (err, passwords_match) => {
if (err || !passwords_match) {
- result.error = [{ msg: "Incorrect current password" }];
+ result.error = [{ msg: 'Incorrect current password' }];
res.send(result);
resolve(undefined);
} else {
@@ -93,10 +94,10 @@ export default class UserManager extends ApiManager {
return;
}
- req.assert("new_pass", "Password must be at least 4 characters long").len({ min: 4 });
- req.assert("new_confirm", "Passwords do not match").equals(new_pass);
+ req.assert('new_pass', 'Password must be at least 4 characters long').len({ min: 4 });
+ req.assert('new_confirm', 'Passwords do not match').equals(new_pass);
if (curr_pass === new_pass) {
- result.error = [{ msg: "Current and new password are the same" }];
+ result.error = [{ msg: 'Current and new password are the same' }];
}
// was there error in validating new passwords?
if (req.validationErrors()) {
@@ -113,17 +114,17 @@ export default class UserManager extends ApiManager {
user.save(err => {
if (err) {
- result.error = [{ msg: "Error while saving new password" }];
+ result.error = [{ msg: 'Error while saving new password' }];
}
});
res.send(result);
- }
+ },
});
register({
method: Method.GET,
- subscription: "/activity",
+ subscription: '/activity',
secureHandler: ({ res }) => {
const now = Date.now();
@@ -135,25 +136,23 @@ export default class UserManager extends ApiManager {
const socketPair = Array.from(WebSocket.socketMap).find(pair => pair[1] === user);
if (socketPair && !socketPair[0].disconnected) {
const duration = now - time;
- const target = (duration / 1000) < (60 * 5) ? activeTimes : inactiveTimes;
+ const target = duration / 1000 < 60 * 5 ? activeTimes : inactiveTimes;
target.push({ user, duration });
}
}
- const process = (target: { user: string, duration: number }[]) => {
+ const process = (target: { user: string; duration: number }[]) => {
const comparator = (first: ActivityUnit, second: ActivityUnit) => first.duration - second.duration;
const sorted = target.sort(comparator);
return sorted.map(({ user, duration }) => `${user} (${msToTime(duration)})`);
};
- res.render("user_activity.pug", {
- title: "User Activity",
+ res.render('user_activity.pug', {
+ title: 'User Activity',
active: process(activeTimes),
- inactive: process(inactiveTimes)
+ inactive: process(inactiveTimes),
});
- }
+ },
});
-
}
-
-} \ No newline at end of file
+}
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index cae35da60..8cf657da4 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -4,33 +4,31 @@ import * as exifr from 'exifr';
import { File } from 'formidable';
import { createWriteStream, existsSync, readFileSync, rename, unlinkSync, writeFile } from 'fs';
import * as path from 'path';
-import { basename } from "path";
+import { basename } from 'path';
import * as sharp from 'sharp';
import { Stream } from 'stream';
import { filesDirectory, publicDirectory } from '.';
import { Opt } from '../fields/Doc';
-import { ParsedPDF } from "../server/PdfTypes";
+import { ParsedPDF } from '../server/PdfTypes';
import { Utils } from '../Utils';
import { createIfNotExists } from './ActionUtilities';
import { clientPathToFile, Directory, pathToDirectory, serverPathToFile } from './ApiManagers/UploadManager';
-import { resolvedServerUrl } from "./server_Initialization";
+import { resolvedServerUrl } from './server_Initialization';
import { AcceptableMedia, Upload } from './SharedMediaTypes';
import request = require('request-promise');
import formidable = require('formidable');
-import { file } from 'jszip';
-import { csvParser } from './DataVizUtils';
-const { exec } = require("child_process");
+const { exec } = require('child_process');
const parse = require('pdf-parse');
-const ffmpeg = require("fluent-ffmpeg");
-const fs = require("fs");
-const requestImageSize = require("../client/util/request-image-size");
+const ffmpeg = require('fluent-ffmpeg');
+const fs = require('fs');
+const requestImageSize = require('../client/util/request-image-size');
export enum SizeSuffix {
- Small = "_s",
- Medium = "_m",
- Large = "_l",
- Original = "_o",
- None = ""
+ Small = '_s',
+ Medium = '_m',
+ Large = '_l',
+ Original = '_o',
+ None = '',
}
export function InjectSize(filename: string, size: SizeSuffix) {
@@ -43,7 +41,6 @@ function isLocal() {
}
export namespace DashUploadUtils {
-
export interface Size {
width: number;
suffix: SizeSuffix;
@@ -59,19 +56,19 @@ export namespace DashUploadUtils {
return AcceptableMedia.imageFormats.includes(path.extname(url).toLowerCase());
}
- const size = "content-length";
- const type = "content-type";
+ const size = 'content-length';
+ const type = 'content-type';
+
+ const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; //TODO:glr
- const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; //TODO:glr
-
export async function concatVideos(filePaths: string[]): Promise<Upload.AccessPathInfo> {
// make a list of paths to create the ordered text file for ffmpeg
const inputListName = 'concat.txt';
const textFilePath = path.join(filesDirectory, inputListName);
// make a list of paths to create the ordered text file for ffmpeg
- const filePathsText = filePaths.map(filePath => `file '${filePath}'`).join('\n');
+ const filePathsText = filePaths.map(filePath => `file '${filePath}'`).join('\n');
// write the text file to the file system
- writeFile(textFilePath, filePathsText, (err) => console.log(err));
+ writeFile(textFilePath, filePathsText, err => console.log(err));
// make output file name based on timestamp
const outputFileName = `output-${Utils.GenerateGuid()}.mp4`;
@@ -81,87 +78,126 @@ export namespace DashUploadUtils {
// concatenate the videos
await new Promise((resolve, reject) => {
var merge = ffmpeg();
- merge.input(textFilePath)
- .inputOptions(['-f concat', '-safe 0'])
+ merge
+ .input(textFilePath)
+ .inputOptions(['-f concat', '-safe 0'])
.outputOptions('-c copy')
//.videoCodec("copy")
.save(outputFilePath)
- .on("error", reject)
- .on("end", resolve);
- })
-
- // delete concat.txt from the file system
- unlinkSync(textFilePath);
- // delete the old segment videos from the server
- filePaths.forEach(filePath => unlinkSync(filePath));
-
- // return the path(s) to the output file
- return {
- accessPaths: getAccessPaths(Directory.videos, outputFileName)
- }
+ .on('error', reject)
+ .on('end', resolve);
+ });
+
+ // delete concat.txt from the file system
+ unlinkSync(textFilePath);
+ // delete the old segment videos from the server
+ filePaths.forEach(filePath => unlinkSync(filePath));
+
+ // return the path(s) to the output file
+ return {
+ accessPaths: getAccessPaths(Directory.videos, outputFileName),
+ };
}
export function uploadYoutube(videoId: string): Promise<Upload.FileResponse> {
- console.log("UPLOAD " + videoId);
return new Promise<Upload.FileResponse<Upload.FileInformation>>((res, rej) => {
- exec('youtube-dl -o ' + (videoId + ".mp4") + ' https://www.youtube.com/watch?v=' + videoId + ' -f "best[filesize<50M]"',
- (error: any, stdout: any, stderr: any) => {
- if (error) console.log(`error: ${error.message}`);
- else if (stderr) console.log(`stderr: ${stderr}`);
- else {
- console.log(`stdout: ${stdout}`);
- const data = { size: 0, path: videoId + ".mp4", name: videoId, type: "video/mp4" };
- const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ""), mtime: null, length: 0, mime: "", toJson: () => undefined as any }) };
+ console.log('Uploading YouTube video: ' + videoId);
+ exec('youtube-dl -o ' + (videoId + '.mp4') + ' ' + videoId + ' -f "mp4[filesize<5M]/bestvideo[filesize<5M]+bestaudio/bestvideo+bestaudio"', (error: any, stdout: any, stderr: any) => {
+ if (error) {
+ console.log(`error: Error: ${error.message}`);
+ res({
+ source: {
+ size: 0,
+ path: videoId,
+ name: videoId,
+ type: '',
+ toJSON: () => ({ name: videoId, path: videoId }),
+ },
+ result: { name: 'failed youtube query', message: `Could not upload YouTube video (${videoId}). Error: ${error.message}` },
+ });
+ } else {
+ exec('youtube-dl -o ' + (videoId + '.mp4') + ' ' + videoId + ' --get-duration', (error: any, stdout: any, stderr: any) => {
+ const time = Array.from(stdout.trim().split(':')).reverse();
+ const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
+ const data = { size: 0, path: videoId + '.mp4', name: videoId, type: 'video/mp4' };
+ const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: duration.toString(), mime: '', toJson: () => undefined as any }) };
res(MoveParsedFile(file, Directory.videos));
- }
- });
+ });
+ }
+ });
});
}
export async function upload(file: File): Promise<Upload.FileResponse> {
const { type, path, name } = file;
- const types = type?.split("/") ?? [];
+ const types = type?.split('/') ?? [];
const category = types[0];
let format = `.${types[1]}`;
console.log(green(`Processing upload of file (${name}) and format (${format}) with upload type (${type}) in category (${category}).`));
-
+
switch (category) {
- case "image":
+ case 'image':
if (imageFormats.includes(format)) {
const result = await UploadImage(path, basename(path));
return { source: file, result };
}
- case "video":
- if (format.includes("x-matroska")) {
- console.log("case video");
- await new Promise(res => ffmpeg(file.path)
- .videoCodec("copy") // this will copy the data instead of reencode it
- .save(file.path.replace(".mkv", ".mp4"))
- .on('end', res));
- file.path = file.path.replace(".mkv", ".mp4");
- format = ".mp4";
+ return { source: file, result: { name: 'Unsupported image format', message: `Could not upload unsupported file (${name}). Please convert to an .jpg` } };
+ case 'video':
+ if (format.includes('x-matroska')) {
+ console.log('case video');
+ await new Promise(res =>
+ ffmpeg(file.path)
+ .videoCodec('copy') // this will copy the data instead of reencode it
+ .save(file.path.replace('.mkv', '.mp4'))
+ .on('end', res)
+ );
+ file.path = file.path.replace('.mkv', '.mp4');
+ format = '.mp4';
+ }
+ if (format.includes('quicktime')) {
+ let abort = false;
+ await new Promise<void>(res =>
+ ffmpeg.ffprobe(file.path, (err: any, metadata: any) => {
+ if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) {
+ abort = true;
+ }
+ res();
+ })
+ );
+ if (abort) return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${name}). Please convert to an .mp4` } };
+ // bcz: instead of aborting, we could convert the file using the code below to an mp4. Problem is that this takes a long time and will clog up the server.
+ // await new Promise(res =>
+ // ffmpeg(file.path)
+ // .videoCodec('libx264') // this will copy the data instead of reencode it
+ // .audioCodec('mp2')
+ // .save(file.path.replace('.MOV', '.mp4').replace('.mov', '.mp4'))
+ // .on('end', res)
+ // );
+ // file.path = file.path.replace('.mov', '.mp4').replace('.MOV', '.mp4');
+ // format = '.mp4';
}
if (videoFormats.includes(format)) {
return MoveParsedFile(file, Directory.videos);
}
- case "application":
+ return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${name}). Please convert to an .mp4` } };
+ case 'application':
if (applicationFormats.includes(format)) {
return UploadPdf(file);
}
- case "audio":
- const components = format.split(";");
+ case 'audio':
+ const components = format.split(';');
if (components.length > 1) {
format = components[0];
}
if (audioFormats.includes(format)) {
return UploadAudio(file, format);
}
- case "text":
- if (types[1] == "csv") {
+ return { source: file, result: { name: 'Unsupported audio format', message: `Could not upload unsupported file (${name}). Please convert to an .mp3` } };
+ case 'text':
+ if (types[1] == 'csv') {
return UploadCsv(file);
}
-
}
console.log(red(`Ignoring unsupported file (${name}) with upload type (${type}).`));
@@ -176,22 +212,21 @@ export namespace DashUploadUtils {
const name = path.basename(sourcePath);
const textFilename = `${name.substring(0, name.length - 4)}.txt`;
const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename));
- writeStream.write(result.text, error => error ? reject(error) : resolve());
+ writeStream.write(result.text, error => (error ? reject(error) : resolve()));
});
return MoveParsedFile(file, Directory.pdfs, undefined, result.text);
}
async function UploadCsv(file: File) {
- const { path: sourcePath } = file;
- // read the file as a string
+ const { path: sourcePath } = file;
+ // read the file as a string
const data = readFileSync(sourcePath, 'utf8');
// split the string into an array of lines
return MoveParsedFile(file, Directory.csv, undefined, data);
// console.log(csvParser(data));
-
}
- const manualSuffixes = [".webm"];
+ const manualSuffixes = ['.webm'];
async function UploadAudio(file: File, format: string) {
const suffix = manualSuffixes.includes(format) ? format : undefined;
@@ -200,22 +235,22 @@ export namespace DashUploadUtils {
/**
* Uploads an image specified by the @param source to Dash's /public/files/
- * directory, and returns information generated during that upload
- *
+ * directory, and returns information generated during that upload
+ *
* @param {string} source is either the absolute path of an already uploaded image or
* the url of a remote image
* @param {string} filename dictates what to call the image. If not specified,
* the name {@param prefix}_upload_{GUID}
* @param {string} prefix is a string prepended to the generated image name in the
* event that @param filename is not specified
- *
+ *
* @returns {ImageUploadInformation | Error} This method returns
* 1) the paths to the uploaded images (plural due to resizing)
* 2) the exif data embedded in the image, or the error explaining why exif couldn't be parsed
* 3) the size of the image, in bytes (4432130)
* 4) the content type of the image, i.e. image/(jpeg | png | ...)
*/
- export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise<Upload.ImageInformation | Error> => {
+ export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => {
const metadata = await InspectImage(source);
if (metadata instanceof Error) {
return metadata;
@@ -225,12 +260,12 @@ export namespace DashUploadUtils {
export async function buildFileDirectories() {
if (!existsSync(publicDirectory)) {
- console.error("\nPlease ensure that the following directory exists...\n");
+ console.error('\nPlease ensure that the following directory exists...\n');
console.log(publicDirectory);
process.exit(0);
}
if (!existsSync(filesDirectory)) {
- console.error("\nPlease ensure that the following directory exists...\n");
+ console.error('\nPlease ensure that the following directory exists...\n');
console.log(filesDirectory);
process.exit(0);
}
@@ -252,7 +287,7 @@ export namespace DashUploadUtils {
/**
* Based on the url's classification as local or remote, gleans
* as much information as possible about the specified image
- *
+ *
* @param source is the path or url to the image in question
*/
export const InspectImage = async (source: string): Promise<Upload.InspectionResults | Error> => {
@@ -265,9 +300,9 @@ export namespace DashUploadUtils {
*/
if ((rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source)) !== null) {
const [ext, data] = rawMatches.slice(1, 3);
- const resolved = filename = `upload_${Utils.GenerateGuid()}.${ext}`;
+ const resolved = (filename = `upload_${Utils.GenerateGuid()}.${ext}`);
const error = await new Promise<Error | null>(resolve => {
- writeFile(serverPathToFile(Directory.images, resolved), data, "base64", resolve);
+ writeFile(serverPathToFile(Directory.images, resolved), data, 'base64', resolve);
});
if (error !== null) {
return error;
@@ -276,12 +311,12 @@ export namespace DashUploadUtils {
}
let resolvedUrl: string;
/**
- *
+ *
* At this point, we want to take whatever url we have and make sure it's requestable.
* Anything that's hosted by some other website already is, but if the url is a local file url
* (locates the file on this server machine), we have to resolve the client side url by cutting out the
* basename subtree (i.e. /images/<some_guid>.<ext>) and put it on the end of the server's url.
- *
+ *
* This can always be localhost, regardless of whether this is on the server or not, since we (the server, not the client)
* will be the ones making the request, and from the perspective of dash-release or dash-web, localhost:<port> refers to the same thing
* as the full dash-release.eastus.cloudapp.azure.com:<port>.
@@ -290,18 +325,18 @@ export namespace DashUploadUtils {
if (matches === null) {
resolvedUrl = source;
} else {
- resolvedUrl = `${resolvedServerUrl}/${matches[1].split("\\").join("/")}`;
+ resolvedUrl = `${resolvedServerUrl}/${matches[1].split('\\').join('/')}`;
}
// See header comments: not all image files have exif data (I believe only JPG is the only format that can have it)
const exifData = await parseExifData(resolvedUrl);
const results = {
exifData,
- requestable: resolvedUrl
+ requestable: resolvedUrl,
};
// Use the request library to parse out file level image information in the headers
- const { headers } = (await new Promise<any>((resolve, reject) => {
- request.head(resolvedUrl, (error, res) => error ? reject(error) : resolve(res));
- }).catch(console.error));
+ const { headers } = await new Promise<any>((resolve, reject) => {
+ request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res)));
+ }).catch(console.error);
try {
// Compute the native width and height ofthe image with an npm module
const { width: nativeWidth, height: nativeHeight } = await requestImageSize(resolvedUrl);
@@ -313,7 +348,7 @@ export namespace DashUploadUtils {
nativeWidth,
nativeHeight,
filename,
- ...results
+ ...results,
};
} catch (e: any) {
console.log(e);
@@ -331,7 +366,7 @@ export namespace DashUploadUtils {
* @param suffix If the file doesn't have a suffix and you want to provide it one
* to appear in the new location
*/
- export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix: string | undefined = undefined, text?: string): Promise<Upload.FileResponse> {
+ export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix: string | undefined = undefined, text?: string, duration?: number): Promise<Upload.FileResponse> {
const { path: sourcePath } = file;
let name = path.basename(sourcePath);
suffix && (name += suffix);
@@ -340,12 +375,15 @@ export namespace DashUploadUtils {
rename(sourcePath, destinationPath, error => {
resolve({
source: file,
- result: error ? error : {
- accessPaths: {
- agnostic: getAccessPaths(destination, name)
- },
- rawText: text
- }
+ result: error
+ ? error
+ : {
+ accessPaths: {
+ agnostic: getAccessPaths(destination, name),
+ },
+ rawText: text,
+ duration,
+ },
});
});
});
@@ -354,19 +392,19 @@ export namespace DashUploadUtils {
export function getAccessPaths(directory: Directory, fileName: string) {
return {
client: clientPathToFile(directory, fileName),
- server: serverPathToFile(directory, fileName)
+ server: serverPathToFile(directory, fileName),
};
}
- export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = "", cleanUp = true): Promise<Upload.ImageInformation> => {
+ export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
const { requestable, source, ...remaining } = metadata;
- const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split("/")[1].toLowerCase()}`;
+ const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split('/')[1].toLowerCase()}`;
const { images } = Directory;
const information: Upload.ImageInformation = {
accessPaths: {
- agnostic: getAccessPaths(images, resolved)
+ agnostic: getAccessPaths(images, resolved),
},
- ...metadata
+ ...metadata,
};
const writtenFiles = await outputResizedImages(() => request(requestable), resolved, pathToDirectory(Directory.images));
for (const suffix of Object.keys(writtenFiles)) {
@@ -383,9 +421,9 @@ export namespace DashUploadUtils {
const val: any = layer[key];
if (val instanceof Buffer) {
layer[key] = val.toString();
- } else if (Array.isArray(val) && typeof val[0] === "number") {
+ } else if (Array.isArray(val) && typeof val[0] === 'number') {
layer[key] = Buffer.from(val).toString();
- } else if (typeof val === "object") {
+ } else if (typeof val === 'object') {
bufferConverterRec(val);
}
}
@@ -410,20 +448,20 @@ export namespace DashUploadUtils {
const pngOptions = {
compressionLevel: 9,
adaptiveFiltering: true,
- force: true
+ force: true,
};
export async function outputResizedImages(streamProvider: () => Stream | Promise<Stream>, outputFileName: string, outputDirectory: string) {
const writtenFiles: { [suffix: string]: string } = {};
for (const { resizer, suffix } of resizers(path.extname(outputFileName))) {
- const outputPath = path.resolve(outputDirectory, writtenFiles[suffix] = InjectSize(outputFileName, suffix));
+ const outputPath = path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
await new Promise<void>(async (resolve, reject) => {
const source = streamProvider();
let readStream: Stream = source instanceof Promise ? await source : source;
if (resizer) {
readStream = readStream.pipe(resizer.withMetadata());
}
- readStream.pipe(createWriteStream(outputPath)).on("close", resolve).on("error", reject);
+ readStream.pipe(createWriteStream(outputPath)).on('close', resolve).on('error', reject);
});
}
return writtenFiles;
@@ -442,15 +480,14 @@ export namespace DashUploadUtils {
initial = initial.webp();
} else if (tiffs.includes(ext)) {
initial = initial.tiff();
- } else if (ext === ".gif") {
+ } else if (ext === '.gif') {
initial = undefined;
}
return {
resizer: initial,
- suffix
+ suffix,
};
- })
+ }),
];
}
-
-} \ No newline at end of file
+}
diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts
index aa9bfcfa7..5683cd539 100644
--- a/src/server/RouteManager.ts
+++ b/src/server/RouteManager.ts
@@ -1,12 +1,12 @@
import { cyan, green, red } from 'colors';
import { Express, Request, Response } from 'express';
-import { AdminPriviliges } from ".";
-import { DashUserModel } from "./authentication/DashUserModel";
-import RouteSubscriber from "./RouteSubscriber";
+import { AdminPriviliges } from '.';
+import { DashUserModel } from './authentication/DashUserModel';
+import RouteSubscriber from './RouteSubscriber';
export enum Method {
GET,
- POST
+ POST,
}
export interface CoreArguments {
@@ -33,13 +33,13 @@ const registered = new Map<string, Set<Method>>();
enum RegistrationError {
Malformed,
- Duplicate
+ Duplicate,
}
export default class RouteManager {
private server: Express;
private _isRelease: boolean;
- private failedRegistrations: { route: string, reason: RegistrationError }[] = [];
+ private failedRegistrations: { route: string; reason: RegistrationError }[] = [];
public get isRelease() {
return this._isRelease;
@@ -74,39 +74,42 @@ export default class RouteManager {
}
process.exit(1);
} else {
- console.log(green("all server routes have been successfully registered:"));
- Array.from(registered.keys()).sort().forEach(route => console.log(cyan(route)));
+ console.log(green('all server routes have been successfully registered:'));
+ Array.from(registered.keys())
+ .sort()
+ .forEach(route => console.log(cyan(route)));
console.log();
}
- }
+ };
static routes: string[] = [];
/**
- *
- * @param initializer
+ *
+ * @param initializer
*/
addSupervisedRoute = (initializer: RouteInitializer): void => {
const { method, subscription, secureHandler, publicHandler, errorHandler, requireAdminInRelease: requireAdmin } = initializer;
- typeof (initializer.subscription) === "string" && RouteManager.routes.push(initializer.subscription);
+ typeof initializer.subscription === 'string' && RouteManager.routes.push(initializer.subscription);
initializer.subscription instanceof RouteSubscriber && RouteManager.routes.push(initializer.subscription.root);
- initializer.subscription instanceof Array && initializer.subscription.map(sub => {
- typeof (sub) === "string" && RouteManager.routes.push(sub);
- sub instanceof RouteSubscriber && RouteManager.routes.push(sub.root);
- });
+ initializer.subscription instanceof Array &&
+ initializer.subscription.map(sub => {
+ typeof sub === 'string' && RouteManager.routes.push(sub);
+ sub instanceof RouteSubscriber && RouteManager.routes.push(sub.root);
+ });
const isRelease = this._isRelease;
const supervised = async (req: Request, res: Response) => {
let user = req.user as Partial<DashUserModel> | undefined;
const { originalUrl: target } = req;
- if (process.env.DB === "MEM" && !user) {
- user = { id: "guest", email: "", userDocumentId: "guestDocId" };
+ if (process.env.DB === 'MEM' && !user) {
+ user = { id: 'guest', email: 'guest', userDocumentId: '__guest__' };
}
const core = { req, res, isRelease };
const tryExecute = async (toExecute: (args: any) => any | Promise<any>, args: any) => {
try {
await toExecute(args);
} catch (e) {
- console.log(red(target), user && ("email" in user) ? "<user logged out>" : undefined);
+ console.log(red(target), user && 'email' in user ? '<user logged out>' : undefined);
if (errorHandler) {
errorHandler({ ...core, error: e });
} else {
@@ -119,7 +122,7 @@ export default class RouteManager {
if (AdminPriviliges.get(user.id)) {
AdminPriviliges.delete(user.id);
} else {
- return res.redirect(`/admin/${req.originalUrl.substring(1).replace("/", ":")}`);
+ return res.redirect(`/admin/${req.originalUrl.substring(1).replace('/', ':')}`);
}
}
await tryExecute(secureHandler, { ...core, user });
@@ -128,10 +131,10 @@ export default class RouteManager {
if (publicHandler) {
await tryExecute(publicHandler, core);
if (!res.headersSent) {
- res.redirect("/login");
+ // res.redirect("/login");
}
} else {
- res.redirect("/login");
+ res.redirect('/login');
}
}
setTimeout(() => {
@@ -144,7 +147,7 @@ export default class RouteManager {
};
const subscribe = (subscriber: RouteSubscriber | string) => {
let route: string;
- if (typeof subscriber === "string") {
+ if (typeof subscriber === 'string') {
route = subscriber;
} else {
route = subscriber.build;
@@ -152,7 +155,7 @@ export default class RouteManager {
if (!/^\/$|^\/[A-Za-z\*]+(\/\:[A-Za-z?_\*]+)*$/g.test(route)) {
this.failedRegistrations.push({
reason: RegistrationError.Malformed,
- route
+ route,
});
} else {
const existing = registered.get(route);
@@ -160,7 +163,7 @@ export default class RouteManager {
if (existing.has(method)) {
this.failedRegistrations.push({
reason: RegistrationError.Duplicate,
- route
+ route,
});
return;
}
@@ -184,15 +187,14 @@ export default class RouteManager {
} else {
subscribe(subscription);
}
- }
-
+ };
}
export const STATUS = {
OK: 200,
BAD_REQUEST: 400,
EXECUTION_ERROR: 500,
- PERMISSION_DENIED: 403
+ PERMISSION_DENIED: 403,
};
export function _error(res: Response, message: string, error?: any) {
diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts
index cde95526f..7db1c2dae 100644
--- a/src/server/SharedMediaTypes.ts
+++ b/src/server/SharedMediaTypes.ts
@@ -2,36 +2,45 @@ import { ExifData } from 'exif';
import { File } from 'formidable';
export namespace AcceptableMedia {
- export const gifs = [".gif"];
- export const pngs = [".png"];
- export const jpgs = [".jpg", ".jpeg"];
- export const webps = [".webp"];
- export const tiffs = [".tiff"];
+ export const gifs = ['.gif'];
+ export const pngs = ['.png'];
+ export const jpgs = ['.jpg', '.jpeg'];
+ export const webps = ['.webp'];
+ export const tiffs = ['.tiff'];
export const imageFormats = [...pngs, ...jpgs, ...gifs, ...webps, ...tiffs];
- export const videoFormats = [".mov", ".mp4", ".quicktime", ".mkv", ".x-matroska;codecs=avc1"];
- export const applicationFormats = [".pdf"];
- export const audioFormats = [".wav", ".mp3", ".mpeg", ".flac", ".au", ".aiff", ".m4a", ".webm"];
+ export const videoFormats = ['.mov', '.mp4', '.quicktime', '.mkv', '.x-matroska;codecs=avc1'];
+ export const applicationFormats = ['.pdf'];
+ export const audioFormats = ['.wav', '.mp3', '.mpeg', '.flac', '.au', '.aiff', '.m4a', '.webm'];
}
export namespace Upload {
-
export function isImageInformation(uploadResponse: Upload.FileInformation): uploadResponse is Upload.ImageInformation {
- return "nativeWidth" in uploadResponse;
+ return 'nativeWidth' in uploadResponse;
+ }
+
+ export function isVideoInformation(uploadResponse: Upload.FileInformation): uploadResponse is Upload.VideoInformation {
+ return 'duration' in uploadResponse;
}
export interface FileInformation {
accessPaths: AccessPathInfo;
rawText?: string;
+ duration?: number;
}
- export type FileResponse<T extends FileInformation = FileInformation> = { source: File, result: T | Error };
+ export type FileResponse<T extends FileInformation = FileInformation> = { source: File; result: T | Error };
export type ImageInformation = FileInformation & InspectionResults;
+ export type VideoInformation = FileInformation & VideoResults;
+
export interface AccessPathInfo {
- [suffix: string]: { client: string, server: string };
+ [suffix: string]: { client: string; server: string };
}
+ export interface VideoResults {
+ duration: number;
+ }
export interface InspectionResults {
source: string;
requestable: string;
@@ -44,8 +53,7 @@ export namespace Upload {
}
export interface EnrichedExifData {
- data: ExifData & ExifData["gps"];
+ data: ExifData & ExifData['gps'];
error?: string;
}
-
-} \ No newline at end of file
+}
diff --git a/src/server/authentication/AuthenticationManager.ts b/src/server/authentication/AuthenticationManager.ts
index 3622be4c5..52d876e95 100644
--- a/src/server/authentication/AuthenticationManager.ts
+++ b/src/server/authentication/AuthenticationManager.ts
@@ -1,4 +1,4 @@
-import { default as User, DashUserModel } from './DashUserModel';
+import { default as User, DashUserModel, initializeGuest } from './DashUserModel';
import { Request, Response, NextFunction } from 'express';
import * as passport from 'passport';
import { IVerifyOptions } from 'passport-local';
@@ -30,6 +30,7 @@ export let getSignup = (req: Request, res: Response) => {
* Create a new local account.
*/
export let postSignup = (req: Request, res: Response, next: NextFunction) => {
+ const email = req.body.email as String;
req.assert('email', 'Email is not valid').isEmail();
req.assert('password', 'Password must be at least 4 characters long').len({ min: 4 });
req.assert('confirmPassword', 'Passwords do not match').equals(req.body.password);
@@ -41,15 +42,14 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => {
return res.redirect('/signup');
}
- const email = req.body.email as String;
const password = req.body.password;
const model = {
email,
password,
- userDocumentId: Utils.GenerateGuid(),
- sharingDocumentId: Utils.GenerateGuid(),
- linkDatabaseId: Utils.GenerateGuid(),
+ userDocumentId: email === 'guest' ? '__guest__' : Utils.GenerateGuid(),
+ sharingDocumentId: email === 'guest' ? 2 : Utils.GenerateGuid(),
+ linkDatabaseId: email === 'guest' ? 3 : Utils.GenerateGuid(),
cacheDocumentIds: '',
} as Partial<DashUserModel>;
@@ -106,18 +106,22 @@ export let getLogin = (req: Request, res: Response) => {
* On failure, redirect to signup page
*/
export let postLogin = (req: Request, res: Response, next: NextFunction) => {
- req.assert('email', 'Email is not valid').isEmail();
- req.assert('password', 'Password cannot be blank').notEmpty();
- req.sanitize('email').normalizeEmail({ gmail_remove_dots: false });
-
- const errors = req.validationErrors();
+ if (req.body.email === '') {
+ User.findOne({ email: 'guest' }, (err: any, user: DashUserModel) => !user && initializeGuest());
+ req.body.email = 'guest';
+ req.body.password = 'guest';
+ } else {
+ req.assert('email', 'Email is not valid').isEmail();
+ req.assert('password', 'Password cannot be blank').notEmpty();
+ req.sanitize('email').normalizeEmail({ gmail_remove_dots: false });
+ }
- if (errors) {
+ if (req.validationErrors()) {
req.flash('errors', 'Unable to login at this time. Please try again.');
return res.redirect('/signup');
}
- passport.authenticate('local', (err: Error, user: DashUserModel, _info: IVerifyOptions) => {
+ const callback = (err: Error, user: DashUserModel, _info: IVerifyOptions) => {
if (err) {
next(err);
return;
@@ -132,7 +136,8 @@ export let postLogin = (req: Request, res: Response, next: NextFunction) => {
}
tryRedirectToTarget(req, res);
});
- })(req, res, next);
+ };
+ setTimeout(() => passport.authenticate('local', callback)(req, res, next), 500);
};
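// Illustrative sketch, not part of the commit: the credential fallback postLogin now applies.
// A blank email is treated as the shared guest account, which initializeGuest() seeds on first use.
function resolveCredentials(email: string, password: string) {
    return email === ''
        ? { email: 'guest', password: 'guest' } // account created by initializeGuest() in DashUserModel.ts
        : { email, password };
}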
/**
diff --git a/src/server/authentication/DashUserModel.ts b/src/server/authentication/DashUserModel.ts
index bee28b96d..a1883beab 100644
--- a/src/server/authentication/DashUserModel.ts
+++ b/src/server/authentication/DashUserModel.ts
@@ -1,13 +1,13 @@
//@ts-ignore
-import * as bcrypt from "bcrypt-nodejs";
+import * as bcrypt from 'bcrypt-nodejs';
//@ts-ignore
import * as mongoose from 'mongoose';
export type DashUserModel = mongoose.Document & {
- email: String,
- password: string,
- passwordResetToken?: string,
- passwordResetExpires?: Date,
+ email: String;
+ password: string;
+ passwordResetToken?: string;
+ passwordResetExpires?: Date;
userDocumentId: string;
sharingDocumentId: string;
@@ -15,66 +15,74 @@ export type DashUserModel = mongoose.Document & {
cacheDocumentIds: string;
profile: {
- name: string,
- gender: string,
- location: string,
- website: string,
- picture: string
- },
+ name: string;
+ gender: string;
+ location: string;
+ website: string;
+ picture: string;
+ };
- comparePassword: comparePasswordFunction,
+ comparePassword: comparePasswordFunction;
};
type comparePasswordFunction = (candidatePassword: string, cb: (err: any, isMatch: any) => {}) => void;
export type AuthToken = {
- accessToken: string,
- kind: string
+ accessToken: string;
+ kind: string;
};
-const userSchema = new mongoose.Schema({
- email: String,
- password: String,
- passwordResetToken: String,
- passwordResetExpires: Date,
+const userSchema = new mongoose.Schema(
+ {
+ email: String,
+ password: String,
+ passwordResetToken: String,
+ passwordResetExpires: Date,
- userDocumentId: String, // id that identifies a document which hosts all of a user's account data
- sharingDocumentId: String, // id that identifies a document that stores documents shared to a user, their user color, and any additional info needed to communicate between users
- linkDatabaseId: String,
- cacheDocumentIds: String, // set of document ids to retrieve on startup
+ userDocumentId: String, // id that identifies a document which hosts all of a user's account data
+ sharingDocumentId: String, // id that identifies a document that stores documents shared to a user, their user color, and any additional info needed to communicate between users
+ linkDatabaseId: String,
+ cacheDocumentIds: String, // set of document ids to retrieve on startup
- facebook: String,
- twitter: String,
- google: String,
+ facebook: String,
+ twitter: String,
+ google: String,
- profile: {
- name: String,
- gender: String,
- location: String,
- website: String,
- picture: String
- }
-}, { timestamps: true });
+ profile: {
+ name: String,
+ gender: String,
+ location: String,
+ website: String,
+ picture: String,
+ },
+ },
+ { timestamps: true }
+);
/**
* Password hash middleware.
*/
-userSchema.pre("save", function save(next) {
+userSchema.pre('save', function save(next) {
const user = this as DashUserModel;
- if (!user.isModified("password")) {
+ if (!user.isModified('password')) {
return next();
}
bcrypt.genSalt(10, (err: any, salt: string) => {
if (err) {
return next(err);
}
- bcrypt.hash(user.password, salt, () => void {}, (err: mongoose.Error, hash: string) => {
- if (err) {
- return next(err);
+ bcrypt.hash(
+ user.password,
+ salt,
+ () => void {},
+ (err: mongoose.Error, hash: string) => {
+ if (err) {
+ return next(err);
+ }
+ user.password = hash;
+ next();
}
- user.password = hash;
- next();
- });
+ );
});
});
@@ -88,5 +96,15 @@ const comparePassword: comparePasswordFunction = function (this: DashUserModel,
userSchema.methods.comparePassword = comparePassword;
-const User = mongoose.model("User", userSchema);
-export default User; \ No newline at end of file
+const User = mongoose.model('User', userSchema);
+export function initializeGuest() {
+ new User({
+ email: 'guest',
+ password: 'guest',
+ userDocumentId: '__guest__',
+ sharingDocumentId: '2',
+ linkDatabaseId: '3',
+ cacheDocumentIds: '',
+ }).save();
+}
+export default User;
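// Illustrative sketch, not part of the commit: how the comparePassword schema method and the new
// initializeGuest() helper are typically consumed by a login check. The verify() wrapper is
// hypothetical; the actual passport local strategy lives elsewhere in the codebase.
import User, { DashUserModel, initializeGuest } from './DashUserModel';

function verify(email: string, candidate: string, done: (err: any, ok: boolean) => void) {
    User.findOne({ email }, (err: any, user: DashUserModel) => {
        if (err || !user) {
            if (email === 'guest') initializeGuest(); // seed the guest account on first use
            return done(err, false);
        }
        user.comparePassword(candidate, (hashErr: any, isMatch: any) => done(hashErr, !!isMatch));
    });
}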
diff --git a/src/server/index.ts b/src/server/index.ts
index f8c32103b..6e6bde3cb 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -1,35 +1,35 @@
require('dotenv').config();
-import { yellow } from "colors";
+import { yellow } from 'colors';
import * as mobileDetect from 'mobile-detect';
import * as path from 'path';
import * as qs from 'query-string';
-import { log_execution } from "./ActionUtilities";
-import DeleteManager from "./ApiManagers/DeleteManager";
+import { log_execution } from './ActionUtilities';
+import DeleteManager from './ApiManagers/DeleteManager';
import DownloadManager from './ApiManagers/DownloadManager';
-import GeneralGoogleManager from "./ApiManagers/GeneralGoogleManager";
-import GooglePhotosManager from "./ApiManagers/GooglePhotosManager";
-import PDFManager from "./ApiManagers/PDFManager";
+import GeneralGoogleManager from './ApiManagers/GeneralGoogleManager';
+import GooglePhotosManager from './ApiManagers/GooglePhotosManager';
+import PDFManager from './ApiManagers/PDFManager';
import { SearchManager } from './ApiManagers/SearchManager';
-import SessionManager from "./ApiManagers/SessionManager";
-import UploadManager from "./ApiManagers/UploadManager";
+import SessionManager from './ApiManagers/SessionManager';
+import UploadManager from './ApiManagers/UploadManager';
import UserManager from './ApiManagers/UserManager';
import UtilManager from './ApiManagers/UtilManager';
import { GoogleCredentialsLoader, SSL } from './apis/google/CredentialsLoader';
-import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils";
-import { DashSessionAgent } from "./DashSession/DashSessionAgent";
-import { AppliedSessionAgent } from "./DashSession/Session/agents/applied_session_agent";
+import { GoogleApiServerUtils } from './apis/google/GoogleApiServerUtils';
+import { DashSessionAgent } from './DashSession/DashSessionAgent';
+import { AppliedSessionAgent } from './DashSession/Session/agents/applied_session_agent';
import { DashUploadUtils } from './DashUploadUtils';
import { Database } from './database';
-import { Logger } from "./ProcessFactory";
+import { Logger } from './ProcessFactory';
import RouteManager, { Method, PublicHandler } from './RouteManager';
import RouteSubscriber from './RouteSubscriber';
import initializeServer, { resolvedPorts } from './server_Initialization';
export const AdminPriviliges: Map<string, boolean> = new Map();
-export const onWindows = process.platform === "win32";
+export const onWindows = process.platform === 'win32';
export let sessionAgent: AppliedSessionAgent;
-export const publicDirectory = path.resolve(__dirname, "public");
-export const filesDirectory = path.resolve(publicDirectory, "files");
+export const publicDirectory = path.resolve(__dirname, 'public');
+export const filesDirectory = path.resolve(publicDirectory, 'files');
/**
* These are the functions run before the server starts
@@ -43,11 +43,11 @@ async function preliminaryFunctions() {
await GoogleCredentialsLoader.loadCredentials();
SSL.loadCredentials();
GoogleApiServerUtils.processProjectCredentials();
- if (process.env.DB !== "MEM") {
+ if (process.env.DB !== 'MEM') {
await log_execution({
- startMessage: "attempting to initialize mongodb connection",
- endMessage: "connection outcome determined",
- action: Database.tryInitializeConnection
+ startMessage: 'attempting to initialize mongodb connection',
+ endMessage: 'connection outcome determined',
+ action: Database.tryInitializeConnection,
});
}
}
@@ -56,27 +56,16 @@ async function preliminaryFunctions() {
* Either clustered together as an API manager
* or individually referenced below, by the completion
* of this function's execution, all routes will
- * be registered on the server
+ * be registered on the server
* @param router the instance of the route manager
* that will manage the registration of new routes
* with the server
*/
function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }: RouteManager) {
- const managers = [
- new SessionManager(),
- new UserManager(),
- new UploadManager(),
- new DownloadManager(),
- new SearchManager(),
- new PDFManager(),
- new DeleteManager(),
- new UtilManager(),
- new GeneralGoogleManager(),
- new GooglePhotosManager(),
- ];
+ const managers = [new SessionManager(), new UserManager(), new UploadManager(), new DownloadManager(), new SearchManager(), new PDFManager(), new DeleteManager(), new UtilManager(), new GeneralGoogleManager(), new GooglePhotosManager()];
// initialize API Managers
- console.log(yellow("\nregistering server routes..."));
+ console.log(yellow('\nregistering server routes...'));
managers.forEach(manager => manager.register(addSupervisedRoute));
/**
@@ -84,88 +73,87 @@ function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }:
*/
addSupervisedRoute({
method: Method.GET,
- subscription: "/",
- secureHandler: ({ res }) => res.redirect("/home")
+ subscription: '/',
+ secureHandler: ({ res }) => res.redirect('/home'),
});
-
addSupervisedRoute({
method: Method.GET,
- subscription: "/serverHeartbeat",
- secureHandler: ({ res }) => res.send(true)
+ subscription: '/serverHeartbeat',
+ secureHandler: ({ res }) => res.send(true),
});
addSupervisedRoute({
method: Method.GET,
- subscription: "/resolvedPorts",
- secureHandler: ({ res }) => res.send(resolvedPorts)
+ subscription: '/resolvedPorts',
+ secureHandler: ({ res }) => res.send(resolvedPorts),
+ publicHandler: ({ res }) => res.send(resolvedPorts),
});
const serve: PublicHandler = ({ req, res }) => {
- const detector = new mobileDetect(req.headers['user-agent'] || "");
+ const detector = new mobileDetect(req.headers['user-agent'] || '');
const filename = detector.mobile() !== null ? 'mobile/image.html' : 'index.html';
res.sendFile(path.join(__dirname, '../../deploy/' + filename));
};
/**
- * Serves a simple password input box for any
+ * Serves a simple password input box for any
*/
addSupervisedRoute({
method: Method.GET,
- subscription: new RouteSubscriber("admin").add("previous_target"),
+ subscription: new RouteSubscriber('admin').add('previous_target'),
secureHandler: ({ res, isRelease }) => {
const { PASSWORD } = process.env;
if (!(isRelease && PASSWORD)) {
- return res.redirect("/home");
+ return res.redirect('/home');
}
- res.render("admin.pug", { title: "Enter Administrator Password" });
- }
+ res.render('admin.pug', { title: 'Enter Administrator Password' });
+ },
});
addSupervisedRoute({
method: Method.POST,
- subscription: new RouteSubscriber("admin").add("previous_target"),
+ subscription: new RouteSubscriber('admin').add('previous_target'),
secureHandler: async ({ req, res, isRelease, user: { id } }) => {
const { PASSWORD } = process.env;
if (!(isRelease && PASSWORD)) {
- return res.redirect("/home");
+ return res.redirect('/home');
}
const { password } = req.body;
const { previous_target } = req.params;
let redirect: string;
if (password === PASSWORD) {
AdminPriviliges.set(id, true);
- redirect = `/${previous_target.replace(":", "/")}`;
+ redirect = `/${previous_target.replace(':', '/')}`;
} else {
redirect = `/admin/${previous_target}`;
}
res.redirect(redirect);
- }
+ },
});
addSupervisedRoute({
method: Method.GET,
- subscription: ["/home", new RouteSubscriber("doc").add("docId")],
+ subscription: ['/home', new RouteSubscriber('doc').add('docId')],
secureHandler: serve,
publicHandler: ({ req, res, ...remaining }) => {
const { originalUrl: target } = req;
- const sharing = qs.parse(qs.extract(req.originalUrl), { sort: false }).sharing === "true";
- const docAccess = target.startsWith("/doc/");
+ const sharing = qs.parse(qs.extract(req.originalUrl), { sort: false }).sharing === 'true';
+ const docAccess = target.startsWith('/doc/');
// since this is the public handler, there's no meaning of '/home' to speak of
// since there's no user logged in, so the only viable operation
// for a guest is to look at a shared document
- if (sharing && docAccess) {
+ if (docAccess) {
serve({ req, res, ...remaining });
} else {
- res.redirect("/login");
+ res.redirect('/login');
}
- }
+ },
});
logRegistrationOutcome();
}
-
/**
* This function can be used in two different ways. If not in release mode,
* this is simply the logic that is invoked to start the server. In release mode,
@@ -174,9 +162,9 @@ function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }:
*/
export async function launchServer() {
await log_execution({
- startMessage: "\nstarting execution of preliminary functions",
- endMessage: "completed preliminary functions\n",
- action: preliminaryFunctions
+ startMessage: '\nstarting execution of preliminary functions',
+ endMessage: 'completed preliminary functions\n',
+ action: preliminaryFunctions,
});
await initializeServer(routeSetter);
}
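// Illustrative sketch, not part of the commit: the pattern the /resolvedPorts change adopts —
// registering a publicHandler next to the secureHandler so unauthenticated clients can reach the
// route as well. The '/status' subscription below is hypothetical and would live inside routeSetter.
addSupervisedRoute({
    method: Method.GET,
    subscription: new RouteSubscriber('status').add('detail'),
    secureHandler: ({ req, res }) => res.send({ detail: req.params.detail, authenticated: true }),
    publicHandler: ({ res }) => res.send({ authenticated: false }),
});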
diff --git a/src/server/websocket.ts b/src/server/websocket.ts
index 1b7f5919f..9b91a35a6 100644
--- a/src/server/websocket.ts
+++ b/src/server/websocket.ts
@@ -1,24 +1,24 @@
-import * as express from "express";
-import { blue, green } from "colors";
-import { createServer, Server } from "https";
-import { networkInterfaces } from "os";
+import { blue } from 'colors';
+import * as express from 'express';
+import { createServer, Server } from 'https';
+import { networkInterfaces } from 'os';
import * as sio from 'socket.io';
-import { Socket } from "socket.io";
-import { Utils } from "../Utils";
+import { Socket } from 'socket.io';
+import { Opt } from '../fields/Doc';
+import { Utils } from '../Utils';
import { logPort } from './ActionUtilities';
-import { timeMap } from "./ApiManagers/UserManager";
-import { GoogleCredentialsLoader, SSL } from "./apis/google/CredentialsLoader";
-import YoutubeApi from "./apis/youtube/youtubeApiSample";
-import { Client } from "./Client";
-import { Database } from "./database";
-import { DocumentsCollection } from "./IDatabase";
-import { Diff, GestureContent, MessageStore, MobileDocumentUploadContent, MobileInkOverlayContent, Transferable, Types, UpdateMobileInkOverlayPositionContent, YoutubeQueryInput, YoutubeQueryTypes } from "./Message";
-import { Search } from "./Search";
+import { timeMap } from './ApiManagers/UserManager';
+import { GoogleCredentialsLoader, SSL } from './apis/google/CredentialsLoader';
+import YoutubeApi from './apis/youtube/youtubeApiSample';
+import { initializeGuest } from './authentication/DashUserModel';
+import { Client } from './Client';
+import { Database } from './database';
+import { DocumentsCollection } from './IDatabase';
+import { Diff, GestureContent, MessageStore, MobileDocumentUploadContent, MobileInkOverlayContent, Transferable, Types, UpdateMobileInkOverlayPositionContent, YoutubeQueryInput, YoutubeQueryTypes } from './Message';
+import { Search } from './Search';
import { resolvedPorts } from './server_Initialization';
-import { Opt } from "../fields/Doc";
export namespace WebSocket {
-
export let _socket: Socket;
const clients: { [key: string]: Client } = {};
export const socketMap = new Map<SocketIO.Socket, string>();
@@ -32,14 +32,14 @@ export namespace WebSocket {
resolvedPorts.socket = Number(socketPort);
}
let socketEndpoint: Opt<Server>;
- await new Promise<void>(resolve => socketEndpoint = createServer(SSL.Credentials, app).listen(resolvedPorts.socket, resolve));
+ await new Promise<void>(resolve => (socketEndpoint = createServer(SSL.Credentials, app).listen(resolvedPorts.socket, resolve)));
io = sio(socketEndpoint!, SSL.Credentials as any);
} else {
io = sio().listen(resolvedPorts.socket);
}
- logPort("websocket", resolvedPorts.socket);
+ logPort('websocket', resolvedPorts.socket);
- io.on("connection", function (socket: Socket) {
+ io.on('connection', function (socket: Socket) {
_socket = socket;
socket.use((_packet, next) => {
const userEmail = socketMap.get(socket);
@@ -70,14 +70,14 @@ export namespace WebSocket {
socket.join(room);
console.log('Client ID ' + socket.id + ' created room ' + room);
socket.emit('created', room, socket.id);
-
} else if (numClients === 1) {
console.log('Client ID ' + socket.id + ' joined room ' + room);
socket.in(room).emit('join', room);
socket.join(room);
socket.emit('joined', room, socket.id);
socket.in(room).emit('ready');
- } else { // max two clients
+ } else {
+ // max two clients
socket.emit('full', room);
}
});
@@ -97,10 +97,10 @@ export namespace WebSocket {
console.log('received bye');
});
- Utils.Emit(socket, MessageStore.Foo, "handshooken");
+ Utils.Emit(socket, MessageStore.Foo, 'handshooken');
Utils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid));
- Utils.AddServerHandler(socket, MessageStore.SetField, (args) => setField(socket, args));
+ Utils.AddServerHandler(socket, MessageStore.SetField, args => setField(socket, args));
Utils.AddServerHandlerCallback(socket, MessageStore.GetField, getField);
Utils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields);
if (isRelease) {
@@ -126,26 +126,26 @@ export namespace WebSocket {
*/
disconnect = () => {
- socket.broadcast.emit("connection_terminated", Date.now());
+ socket.broadcast.emit('connection_terminated', Date.now());
socket.disconnect(true);
};
});
}
function processGesturePoints(socket: Socket, content: GestureContent) {
- socket.broadcast.emit("receiveGesturePoints", content);
+ socket.broadcast.emit('receiveGesturePoints', content);
}
function processOverlayTrigger(socket: Socket, content: MobileInkOverlayContent) {
- socket.broadcast.emit("receiveOverlayTrigger", content);
+ socket.broadcast.emit('receiveOverlayTrigger', content);
}
function processUpdateOverlayPosition(socket: Socket, content: UpdateMobileInkOverlayPositionContent) {
- socket.broadcast.emit("receiveUpdateOverlayPosition", content);
+ socket.broadcast.emit('receiveUpdateOverlayPosition', content);
}
function processMobileDocumentUpload(socket: Socket, content: MobileDocumentUploadContent) {
- socket.broadcast.emit("receiveMobileDocumentUpload", content);
+ socket.broadcast.emit('receiveMobileDocumentUpload', content);
}
function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) {
@@ -165,27 +165,22 @@ export namespace WebSocket {
const target: string[] = [];
onlyFields && target.push(DocumentsCollection);
await Database.Instance.dropSchema(...target);
- if (process.env.DISABLE_SEARCH !== "true") {
+ if (process.env.DISABLE_SEARCH !== 'true') {
await Search.clear();
}
+ initializeGuest();
}
function barReceived(socket: SocketIO.Socket, userEmail: string) {
clients[userEmail] = new Client(userEmail.toString());
const currentdate = new Date();
- const datetime = currentdate.getDate() + "/"
- + (currentdate.getMonth() + 1) + "/"
- + currentdate.getFullYear() + " @ "
- + currentdate.getHours() + ":"
- + currentdate.getMinutes() + ":"
- + currentdate.getSeconds();
+ const datetime = currentdate.getDate() + '/' + (currentdate.getMonth() + 1) + '/' + currentdate.getFullYear() + ' @ ' + currentdate.getHours() + ':' + currentdate.getMinutes() + ':' + currentdate.getSeconds();
console.log(blue(`user ${userEmail} has connected to the web socket at: ${datetime}`));
- socketMap.set(socket, userEmail + " at " + datetime);
+ socketMap.set(socket, userEmail + ' at ' + datetime);
}
function getField([id, callback]: [string, (result?: Transferable) => void]) {
- Database.Instance.getDocument(id, (result?: Transferable) =>
- callback(result ? result : undefined));
+ Database.Instance.getDocument(id, (result?: Transferable) => callback(result ? result : undefined));
}
function getFields([ids, callback]: [string[], (result: Transferable[]) => void]) {
@@ -193,9 +188,9 @@ export namespace WebSocket {
}
function setField(socket: Socket, newValue: Transferable) {
- Database.Instance.update(newValue.id, newValue, () =>
- socket.broadcast.emit(MessageStore.SetField.Message, newValue)); // broadcast set value to all other clients
- if (newValue.type === Types.Text) { // if the newValue has string type, then it's suitable for searching -- pass it to SOLR
+ Database.Instance.update(newValue.id, newValue, () => socket.broadcast.emit(MessageStore.SetField.Message, newValue)); // broadcast set value to all other clients
+ if (newValue.type === Types.Text) {
+ // if the newValue has string type, then it's suitable for searching -- pass it to SOLR
Search.updateDocument({ id: newValue.id, data: { set: (newValue as any).data } });
}
}
@@ -213,34 +208,36 @@ export namespace WebSocket {
Database.Instance.getDocuments(ids, callback);
}
- const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = {
- "number": "_n",
- "string": "_t",
- "boolean": "_b",
- "image": ["_t", "url"],
- "video": ["_t", "url"],
- "pdf": ["_t", "url"],
- "audio": ["_t", "url"],
- "web": ["_t", "url"],
- "map": ["_t", "url"],
- "script": ["_t", value => value.script.originalScript],
- "RichTextField": ["_t", value => value.Text],
- "date": ["_d", value => new Date(value.date).toISOString()],
- "proxy": ["_i", "fieldId"],
- "list": ["_l", list => {
- const results = [];
- for (const value of list.fields) {
- const term = ToSearchTerm(value);
- if (term) {
- results.push(term.value);
+ const suffixMap: { [type: string]: string | [string, string | ((json: any) => any)] } = {
+ number: '_n',
+ string: '_t',
+ boolean: '_b',
+ image: ['_t', 'url'],
+ video: ['_t', 'url'],
+ pdf: ['_t', 'url'],
+ audio: ['_t', 'url'],
+ web: ['_t', 'url'],
+ map: ['_t', 'url'],
+ script: ['_t', value => value.script.originalScript],
+ RichTextField: ['_t', value => value.Text],
+ date: ['_d', value => new Date(value.date).toISOString()],
+ proxy: ['_i', 'fieldId'],
+ list: [
+ '_l',
+ list => {
+ const results = [];
+ for (const value of list.fields) {
+ const term = ToSearchTerm(value);
+ if (term) {
+ results.push(term.value);
+ }
}
- }
- return results.length ? results : null;
- }]
+ return results.length ? results : null;
+ },
+ ],
};
- function ToSearchTerm(val: any): { suffix: string, value: any } | undefined {
-
+ function ToSearchTerm(val: any): { suffix: string; value: any } | undefined {
if (val === null || val === undefined) {
return;
}
@@ -252,69 +249,79 @@ export namespace WebSocket {
}
if (Array.isArray(suffix)) {
const accessor = suffix[1];
- if (typeof accessor === "function") {
+ if (typeof accessor === 'function') {
val = accessor(val);
} else {
val = val[accessor];
-
}
suffix = suffix[0];
-
}
return { suffix, value: val };
}
function getSuffix(value: string | [string, any]): string {
- return typeof value === "string" ? value : value[0];
+ return typeof value === 'string' ? value : value[0];
}
function addToListField(socket: Socket, diff: Diff, curListItems?: Transferable): void {
- diff.diff.$set = diff.diff.$addToSet; delete diff.diff.$addToSet;// convert add to set to a query of the current fields, and then a set of the composition of the new fields with the old ones
+ diff.diff.$set = diff.diff.$addToSet;
+ delete diff.diff.$addToSet; // convert add to set to a query of the current fields, and then a set of the composition of the new fields with the old ones
const updatefield = Array.from(Object.keys(diff.diff.$set))[0];
const newListItems = diff.diff.$set[updatefield]?.fields;
if (!newListItems) {
- console.log("Error: addToListField - no new list items");
+ console.log('Error: addToListField - no new list items');
return;
}
- const curList = (curListItems as any)?.fields?.[updatefield.replace("fields.", "")]?.fields.filter((item: any) => item !== undefined) || [];
- diff.diff.$set[updatefield].fields = [...curList, ...newListItems];//, ...newListItems.filter((newItem: any) => newItem === null || !curList.some((curItem: any) => curItem.fieldId ? curItem.fieldId === newItem.fieldId : curItem.heading ? curItem.heading === newItem.heading : curItem === newItem))];
+ const curList = (curListItems as any)?.fields?.[updatefield.replace('fields.', '')]?.fields.filter((item: any) => item !== undefined) || [];
+ diff.diff.$set[updatefield].fields = [...curList, ...newListItems]; //, ...newListItems.filter((newItem: any) => newItem === null || !curList.some((curItem: any) => curItem.fieldId ? curItem.fieldId === newItem.fieldId : curItem.heading ? curItem.heading === newItem.heading : curItem === newItem))];
const sendBack = diff.diff.length !== diff.diff.$set[updatefield].fields.length;
delete diff.diff.length;
- Database.Instance.update(diff.id, diff.diff,
+ Database.Instance.update(
+ diff.id,
+ diff.diff,
() => {
if (sendBack) {
- console.log("Warning: list modified during update. Composite list is being returned.");
+ console.log('Warning: list modified during update. Composite list is being returned.');
const id = socket.id;
- socket.id = "";
+ socket.id = '';
socket.broadcast.emit(MessageStore.UpdateField.Message, diff);
socket.id = id;
} else socket.broadcast.emit(MessageStore.UpdateField.Message, diff);
dispatchNextOp(diff.id);
- }, false);
+ },
+ false
+ );
}
function remFromListField(socket: Socket, diff: Diff, curListItems?: Transferable): void {
- diff.diff.$set = diff.diff.$remFromSet; delete diff.diff.$remFromSet;
+ diff.diff.$set = diff.diff.$remFromSet;
+ delete diff.diff.$remFromSet;
const updatefield = Array.from(Object.keys(diff.diff.$set))[0];
const remListItems = diff.diff.$set[updatefield].fields;
- const curList = (curListItems as any)?.fields?.[updatefield.replace("fields.", "")]?.fields.filter((f: any) => f !== null) || [];
- diff.diff.$set[updatefield].fields = curList?.filter((curItem: any) => !remListItems.some((remItem: any) => remItem.fieldId ? remItem.fieldId === curItem.fieldId : remItem.heading ? remItem.heading === curItem.heading : remItem === curItem));
+ const curList = (curListItems as any)?.fields?.[updatefield.replace('fields.', '')]?.fields.filter((f: any) => f !== null) || [];
+ diff.diff.$set[updatefield].fields = curList?.filter(
+ (curItem: any) => !remListItems.some((remItem: any) => (remItem.fieldId ? remItem.fieldId === curItem.fieldId : remItem.heading ? remItem.heading === curItem.heading : remItem === curItem))
+ );
const sendBack = diff.diff.length !== diff.diff.$set[updatefield].fields.length;
delete diff.diff.length;
- Database.Instance.update(diff.id, diff.diff,
+ Database.Instance.update(
+ diff.id,
+ diff.diff,
() => {
if (sendBack) {
- console.log("SEND BACK");
+ console.log('SEND BACK');
const id = socket.id;
- socket.id = "";
+ socket.id = '';
socket.broadcast.emit(MessageStore.UpdateField.Message, diff);
socket.id = id;
} else socket.broadcast.emit(MessageStore.UpdateField.Message, diff);
dispatchNextOp(diff.id);
- }, false);
+ },
+ false
+ );
}
- const pendingOps = new Map<string, { diff: Diff, socket: Socket }[]>();
+ const pendingOps = new Map<string, { diff: Diff; socket: Socket }[]>();
function dispatchNextOp(id: string) {
const next = pendingOps.get(id)!.shift();
@@ -341,7 +348,7 @@ export namespace WebSocket {
function UpdateField(socket: Socket, diff: Diff) {
if (CurUser !== socketMap.get(socket)) {
CurUser = socketMap.get(socket);
- console.log("Switch User: " + CurUser);
+ console.log('Switch User: ' + CurUser);
}
if (pendingOps.has(diff.id)) {
pendingOps.get(diff.id)!.push({ diff, socket });
@@ -357,24 +364,25 @@ export namespace WebSocket {
return GetRefFieldLocal([diff.id, (result?: Transferable) => SetField(socket, diff, result)]);
}
function SetField(socket: Socket, diff: Diff, curListItems?: Transferable) {
- Database.Instance.update(diff.id, diff.diff,
- () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false);
+ Database.Instance.update(diff.id, diff.diff, () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false);
const docfield = diff.diff.$set || diff.diff.$unset;
if (docfield) {
const update: any = { id: diff.id };
let dynfield = false;
for (let key in docfield) {
- if (!key.startsWith("fields.")) continue;
+ if (!key.startsWith('fields.')) continue;
dynfield = true;
const val = docfield[key];
key = key.substring(7);
- Object.values(suffixMap).forEach(suf => { update[key + getSuffix(suf)] = { set: null }; });
+ Object.values(suffixMap).forEach(suf => {
+ update[key + getSuffix(suf)] = { set: null };
+ });
const term = ToSearchTerm(val);
if (term !== undefined) {
const { suffix, value } = term;
update[key + suffix] = { set: value };
if (key.endsWith('lastModified')) {
- update["lastModified" + suffix] = value;
+ update['lastModified' + suffix] = value;
}
}
}
@@ -403,6 +411,4 @@ export namespace WebSocket {
function CreateField(newValue: any) {
Database.Instance.insert(newValue);
}
-
}
-
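// Illustrative sketch, not part of the commit: what the reformatted suffixMap/ToSearchTerm pair
// inside the WebSocket namespace produces. Field names and the document id are hypothetical; the
// update shape mirrors the `update[key + suffix] = { set: value }` entries SetField builds.
const numberTerm = ToSearchTerm(7);            // { suffix: '_n', value: 7 }
const textTerm = ToSearchTerm('hello world');  // { suffix: '_t', value: 'hello world' }
const solrUpdate = {
    id: 'doc123',
    ['count' + numberTerm!.suffix]: { set: numberTerm!.value },  // count_n
    ['title' + textTerm!.suffix]: { set: textTerm!.value },      // title_t
};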