aboutsummaryrefslogtreecommitdiff
path: root/src/server
diff options
context:
space:
mode:
Diffstat (limited to 'src/server')
-rw-r--r--src/server/ApiManagers/UploadManager.ts81
-rw-r--r--src/server/DashUploadUtils.ts353
-rw-r--r--src/server/SharedMediaTypes.ts38
-rw-r--r--src/server/server_Initialization.ts142
-rw-r--r--src/server/websocket.ts2
5 files changed, 387 insertions, 229 deletions
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index 787e331c5..6e28268a9 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -1,19 +1,19 @@
-import ApiManager, { Registration } from './ApiManager';
-import { Method, _success } from '../RouteManager';
import * as formidable from 'formidable';
-import v4 = require('uuid/v4');
-const AdmZip = require('adm-zip');
-import { extname, basename, dirname } from 'path';
import { createReadStream, createWriteStream, unlink, writeFile } from 'fs';
-import { publicDirectory, filesDirectory } from '..';
-import { Database } from '../database';
-import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils';
+import { basename, dirname, extname, normalize } from 'path';
import * as sharp from 'sharp';
-import { AcceptableMedia, Upload } from '../SharedMediaTypes';
-import { normalize } from 'path';
+import { filesDirectory, publicDirectory } from '..';
+import { retrocycle } from '../../decycler/decycler';
+import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils';
+import { Database } from '../database';
+import { Method, _success } from '../RouteManager';
import RouteSubscriber from '../RouteSubscriber';
-const imageDataUri = require('image-data-uri');
+import { AcceptableMedia, Upload } from '../SharedMediaTypes';
+import ApiManager, { Registration } from './ApiManager';
import { SolrManager } from './SearchManager';
+import v4 = require('uuid/v4');
+const AdmZip = require('adm-zip');
+const imageDataUri = require('image-data-uri');
const fs = require('fs');
export enum Directory {
@@ -60,6 +60,18 @@ export default class UploadManager extends ApiManager {
return new Promise<void>(resolve => {
form.parse(req, async (_err, _fields, files) => {
const results: Upload.FileResponse[] = [];
+ if (_err?.message) {
+ results.push({
+ source: {
+ size: 0,
+ path: 'none',
+ name: 'none',
+ type: 'none',
+ toJSON: () => ({ name: 'none', path: '' }),
+ },
+ result: { name: 'failed upload', message: `${_err.message}` },
+ });
+ }
for (const key in files) {
const f = files[key];
if (!Array.isArray(f)) {
@@ -86,7 +98,7 @@ export default class UploadManager extends ApiManager {
const videoId = JSON.parse(payload).videoId;
const results: Upload.FileResponse[] = [];
const result = await DashUploadUtils.uploadYoutube(videoId);
- result && !(result.result instanceof Error) && results.push(result);
+ result && results.push(result);
_success(res, results);
resolve();
});
@@ -96,6 +108,21 @@ export default class UploadManager extends ApiManager {
register({
method: Method.POST,
+ subscription: '/queryYoutubeProgress',
+ secureHandler: async ({ req, res }) => {
+ return new Promise<void>(async resolve => {
+ req.addListener('data', args => {
+ const payload = String.fromCharCode.apply(String, args);
+ const videoId = JSON.parse(payload).videoId;
+ _success(res, { progress: DashUploadUtils.QueryYoutubeProgress(videoId) });
+ resolve();
+ });
+ });
+ },
+ });
+
+ register({
+ method: Method.POST,
subscription: new RouteSubscriber('youtubeScreenshot'),
secureHandler: async ({ req, res }) => {
const { id, timecode } = req.body;
@@ -201,35 +228,41 @@ export default class UploadManager extends ApiManager {
form.parse(req, async (_err, fields, files) => {
remap = fields.remap !== 'false';
let id: string = '';
+ let docids: string[] = [];
try {
for (const name in files) {
const f = files[name];
const path_2 = Array.isArray(f) ? '' : f.path;
const zip = new AdmZip(path_2);
zip.getEntries().forEach((entry: any) => {
- if (!entry.entryName.startsWith('files/')) return;
- let directory = dirname(entry.entryName) + '/';
- const extension = extname(entry.entryName);
- const base = basename(entry.entryName).split('.')[0];
+ let entryName = entry.entryName.replace(/%%%/g, '/');
+ if (!entryName.startsWith('files/')) {
+ return;
+ }
+ const extension = extname(entryName);
+ const pathname = publicDirectory + '/' + entry.entryName;
+ const targetname = publicDirectory + '/' + entryName;
try {
zip.extractEntryTo(entry.entryName, publicDirectory, true, false);
- directory = '/' + directory;
-
- createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_o' + extension));
- createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_s' + extension));
- createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_m' + extension));
- createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_l' + extension));
+ createReadStream(pathname).pipe(createWriteStream(targetname));
+ if (extension !== '.pdf') {
+ createReadStream(pathname).pipe(createWriteStream(targetname.replace('_o' + extension, '_s' + extension)));
+ createReadStream(pathname).pipe(createWriteStream(targetname.replace('_o' + extension, '_m' + extension)));
+ createReadStream(pathname).pipe(createWriteStream(targetname.replace('_o' + extension, '_l' + extension)));
+ }
+ unlink(pathname, () => {});
} catch (e) {
console.log(e);
}
});
const json = zip.getEntry('doc.json');
try {
- const data = JSON.parse(json.getData().toString('utf8'));
+ const data = JSON.parse(json.getData().toString('utf8'), retrocycle());
const datadocs = data.docs;
id = getId(data.id);
const docs = Object.keys(datadocs).map(key => datadocs[key]);
docs.forEach(mapFn);
+ docids = docs.map(doc => doc.id);
await Promise.all(
docs.map(
(doc: any) =>
@@ -252,7 +285,7 @@ export default class UploadManager extends ApiManager {
unlink(path_2, () => {});
}
SolrManager.update();
- res.send(JSON.stringify(id || 'error'));
+ res.send(JSON.stringify({ id, docids } || 'error'));
} catch (e) {
console.log(e);
}
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index cae35da60..f461cf3fa 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -2,35 +2,35 @@ import { green, red } from 'colors';
import { ExifImage } from 'exif';
import * as exifr from 'exifr';
import { File } from 'formidable';
-import { createWriteStream, existsSync, readFileSync, rename, unlinkSync, writeFile } from 'fs';
+import { createReadStream, createWriteStream, existsSync, readFileSync, rename, unlinkSync, writeFile } from 'fs';
import * as path from 'path';
-import { basename } from "path";
+import { basename } from 'path';
import * as sharp from 'sharp';
import { Stream } from 'stream';
import { filesDirectory, publicDirectory } from '.';
import { Opt } from '../fields/Doc';
-import { ParsedPDF } from "../server/PdfTypes";
+import { ParsedPDF } from '../server/PdfTypes';
import { Utils } from '../Utils';
import { createIfNotExists } from './ActionUtilities';
import { clientPathToFile, Directory, pathToDirectory, serverPathToFile } from './ApiManagers/UploadManager';
-import { resolvedServerUrl } from "./server_Initialization";
+import { resolvedServerUrl } from './server_Initialization';
import { AcceptableMedia, Upload } from './SharedMediaTypes';
import request = require('request-promise');
import formidable = require('formidable');
-import { file } from 'jszip';
-import { csvParser } from './DataVizUtils';
-const { exec } = require("child_process");
+const spawn = require('child_process').spawn;
+const { exec } = require('child_process');
const parse = require('pdf-parse');
-const ffmpeg = require("fluent-ffmpeg");
-const fs = require("fs");
-const requestImageSize = require("../client/util/request-image-size");
+const ffmpeg = require('fluent-ffmpeg');
+const fs = require('fs');
+const requestImageSize = require('../client/util/request-image-size');
+const md5File = require('md5-file');
export enum SizeSuffix {
- Small = "_s",
- Medium = "_m",
- Large = "_l",
- Original = "_o",
- None = ""
+ Small = '_s',
+ Medium = '_m',
+ Large = '_l',
+ Original = '_o',
+ None = '',
}
export function InjectSize(filename: string, size: SizeSuffix) {
@@ -43,7 +43,6 @@ function isLocal() {
}
export namespace DashUploadUtils {
-
export interface Size {
width: number;
suffix: SizeSuffix;
@@ -59,19 +58,19 @@ export namespace DashUploadUtils {
return AcceptableMedia.imageFormats.includes(path.extname(url).toLowerCase());
}
- const size = "content-length";
- const type = "content-type";
+ const size = 'content-length';
+ const type = 'content-type';
+
+ const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; //TODO:glr
- const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; //TODO:glr
-
export async function concatVideos(filePaths: string[]): Promise<Upload.AccessPathInfo> {
// make a list of paths to create the ordered text file for ffmpeg
const inputListName = 'concat.txt';
const textFilePath = path.join(filesDirectory, inputListName);
// make a list of paths to create the ordered text file for ffmpeg
- const filePathsText = filePaths.map(filePath => `file '${filePath}'`).join('\n');
+ const filePathsText = filePaths.map(filePath => `file '${filePath}'`).join('\n');
// write the text file to the file system
- writeFile(textFilePath, filePathsText, (err) => console.log(err));
+ writeFile(textFilePath, filePathsText, err => console.log(err));
// make output file name based on timestamp
const outputFileName = `output-${Utils.GenerateGuid()}.mp4`;
@@ -81,117 +80,214 @@ export namespace DashUploadUtils {
// concatenate the videos
await new Promise((resolve, reject) => {
var merge = ffmpeg();
- merge.input(textFilePath)
- .inputOptions(['-f concat', '-safe 0'])
+ merge
+ .input(textFilePath)
+ .inputOptions(['-f concat', '-safe 0'])
.outputOptions('-c copy')
//.videoCodec("copy")
.save(outputFilePath)
- .on("error", reject)
- .on("end", resolve);
- })
-
- // delete concat.txt from the file system
- unlinkSync(textFilePath);
- // delete the old segment videos from the server
- filePaths.forEach(filePath => unlinkSync(filePath));
-
- // return the path(s) to the output file
- return {
- accessPaths: getAccessPaths(Directory.videos, outputFileName)
- }
+ .on('error', reject)
+ .on('end', resolve);
+ });
+
+ // delete concat.txt from the file system
+ unlinkSync(textFilePath);
+ // delete the old segment videos from the server
+ filePaths.forEach(filePath => unlinkSync(filePath));
+
+ // return the path(s) to the output file
+ return {
+ accessPaths: getAccessPaths(Directory.videos, outputFileName),
+ };
}
+ function resolveExistingFile(name: string, pat: string, directory: Directory, type?: string, duration?: number, rawText?: string) {
+ const data = { size: 0, path: path.basename(pat), name, type: type ?? '' };
+ const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: duration?.toString(), mime: '', toJson: () => undefined as any }) };
+ return {
+ source: file,
+ result: {
+ accessPaths: {
+ agnostic: getAccessPaths(directory, data.path),
+ },
+ rawText,
+ duration,
+ },
+ };
+ }
+
+ export function QueryYoutubeProgress(videoId: string) {
+ return uploadProgress.get(videoId) ?? 'failed';
+ }
+
+ let uploadProgress = new Map<string, string>();
+
export function uploadYoutube(videoId: string): Promise<Upload.FileResponse> {
- console.log("UPLOAD " + videoId);
return new Promise<Upload.FileResponse<Upload.FileInformation>>((res, rej) => {
- exec('youtube-dl -o ' + (videoId + ".mp4") + ' https://www.youtube.com/watch?v=' + videoId + ' -f "best[filesize<50M]"',
- (error: any, stdout: any, stderr: any) => {
- if (error) console.log(`error: ${error.message}`);
- else if (stderr) console.log(`stderr: ${stderr}`);
- else {
- console.log(`stdout: ${stdout}`);
- const data = { size: 0, path: videoId + ".mp4", name: videoId, type: "video/mp4" };
- const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ""), mtime: null, length: 0, mime: "", toJson: () => undefined as any }) };
- res(MoveParsedFile(file, Directory.videos));
+ console.log('Uploading YouTube video: ' + videoId);
+ const name = videoId;
+ const path = name.replace(/^-/, '__') + '.mp4';
+ const finalPath = serverPathToFile(Directory.videos, path);
+ if (existsSync(finalPath)) {
+ uploadProgress.set(videoId, 'computing duration');
+ exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
+ const time = Array.from(stdout.trim().split(':')).reverse();
+                    const duration = (time.length > 2 ? Number(time[2]) * 60 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
+ res(resolveExistingFile(name, finalPath, Directory.videos, 'video/mp4', duration, undefined));
+ });
+ } else {
+ uploadProgress.set(videoId, 'starting download');
+ const ytdlp = spawn(`yt-dlp`, ['-o', path, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);
+
+ ytdlp.stdout.on('data', (data: any) => !uploadProgress.get(videoId)?.includes('Aborting.') && uploadProgress.set(videoId, data.toString()));
+
+ let errors = '';
+ ytdlp.stderr.on('data', (data: any) => (errors = data.toString()));
+
+ ytdlp.on('exit', function (code: any) {
+ if (code || uploadProgress.get(videoId)?.includes('Aborting.')) {
+ res({
+ source: {
+ size: 0,
+ path,
+ name,
+ type: '',
+ toJSON: () => ({ name, path }),
+ },
+ result: { name: 'failed youtube query', message: `Could not archive video. ${code ? errors : uploadProgress.get(videoId)}` },
+ });
+ } else {
+ uploadProgress.set(videoId, 'computing duration');
+                        exec(`yt-dlp -o ${path} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
+ const time = Array.from(stdout.trim().split(':')).reverse();
+                        const duration = (time.length > 2 ? Number(time[2]) * 60 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
+ const data = { size: 0, path, name, type: 'video/mp4' };
+ const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: duration.toString(), mime: '', toJson: () => undefined as any }) };
+ res(MoveParsedFile(file, Directory.videos));
+ });
}
});
+ }
});
}
export async function upload(file: File): Promise<Upload.FileResponse> {
const { type, path, name } = file;
- const types = type?.split("/") ?? [];
+ const types = type?.split('/') ?? [];
const category = types[0];
let format = `.${types[1]}`;
console.log(green(`Processing upload of file (${name}) and format (${format}) with upload type (${type}) in category (${category}).`));
-
+
switch (category) {
- case "image":
+ case 'image':
if (imageFormats.includes(format)) {
const result = await UploadImage(path, basename(path));
return { source: file, result };
}
- case "video":
- if (format.includes("x-matroska")) {
- console.log("case video");
- await new Promise(res => ffmpeg(file.path)
- .videoCodec("copy") // this will copy the data instead of reencode it
- .save(file.path.replace(".mkv", ".mp4"))
- .on('end', res));
- file.path = file.path.replace(".mkv", ".mp4");
- format = ".mp4";
+ fs.unlink(path, () => {});
+ return { source: file, result: { name: 'Unsupported image format', message: `Could not upload unsupported file (${name}). Please convert to an .jpg` } };
+ case 'video':
+ if (format.includes('x-matroska')) {
+ console.log('case video');
+ await new Promise(res =>
+ ffmpeg(file.path)
+ .videoCodec('copy') // this will copy the data instead of reencode it
+ .save(file.path.replace('.mkv', '.mp4'))
+ .on('end', res)
+ );
+ file.path = file.path.replace('.mkv', '.mp4');
+ format = '.mp4';
+ }
+ if (format.includes('quicktime')) {
+ let abort = false;
+ await new Promise<void>(res =>
+ ffmpeg.ffprobe(file.path, (err: any, metadata: any) => {
+ if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) {
+ abort = true;
+ }
+ res();
+ })
+ );
+ if (abort) {
+ // bcz: instead of aborting, we could convert the file using the code below to an mp4. Problem is that this takes a long time and will clog up the server.
+ // await new Promise(res =>
+ // ffmpeg(file.path)
+ // .videoCodec('libx264') // this will copy the data instead of reencode it
+ // .audioCodec('mp2')
+ // .save(file.path.replace('.MOV', '.mp4').replace('.mov', '.mp4'))
+ // .on('end', res)
+ // );
+ // file.path = file.path.replace('.mov', '.mp4').replace('.MOV', '.mp4');
+ // format = '.mp4';
+ fs.unlink(path, () => {});
+ return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${name}). Please convert to an .mp4` } };
+ }
}
if (videoFormats.includes(format)) {
return MoveParsedFile(file, Directory.videos);
}
- case "application":
+ fs.unlink(path, () => {});
+ return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${name}). Please convert to an .mp4` } };
+ case 'application':
if (applicationFormats.includes(format)) {
- return UploadPdf(file);
+ const val = UploadPdf(file);
+ if (val) return val;
}
- case "audio":
- const components = format.split(";");
+ case 'audio':
+ const components = format.split(';');
if (components.length > 1) {
format = components[0];
}
if (audioFormats.includes(format)) {
return UploadAudio(file, format);
}
- case "text":
- if (types[1] == "csv") {
+ fs.unlink(path, () => {});
+ return { source: file, result: { name: 'Unsupported audio format', message: `Could not upload unsupported file (${name}). Please convert to an .mp3` } };
+ case 'text':
+ if (types[1] == 'csv') {
return UploadCsv(file);
}
-
}
console.log(red(`Ignoring unsupported file (${name}) with upload type (${type}).`));
+ fs.unlink(path, () => {});
return { source: file, result: new Error(`Could not upload unsupported file (${name}) with upload type (${type}).`) };
}
async function UploadPdf(file: File) {
- const { path: sourcePath } = file;
- const dataBuffer = readFileSync(sourcePath);
- const result: ParsedPDF = await parse(dataBuffer);
- await new Promise<void>((resolve, reject) => {
- const name = path.basename(sourcePath);
- const textFilename = `${name.substring(0, name.length - 4)}.txt`;
- const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename));
- writeStream.write(result.text, error => error ? reject(error) : resolve());
- });
- return MoveParsedFile(file, Directory.pdfs, undefined, result.text);
+ const fileKey = (await md5File(file.path)) + '.pdf';
+ const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
+ if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) {
+ return new Promise<Upload.FileResponse>(res => {
+ const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
+ const readStream = createReadStream(serverPathToFile(Directory.text, textFilename));
+ var rawText = '';
+ readStream.on('data', chunk => (rawText += chunk.toString())).on('end', () => res(resolveExistingFile(file.name, fileKey, Directory.pdfs, file.type, undefined, rawText)));
+ });
+ }
+ const dataBuffer = readFileSync(file.path);
+ const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e);
+ if (!result.code) {
+ await new Promise<void>((resolve, reject) => {
+ const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename));
+ writeStream.write(result?.text, error => (error ? reject(error) : resolve()));
+ });
+ return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey);
+ }
+        return { source: file, result: { name: 'failed pdf upload', message: `Could not upload (${file.name}).${result.message}` } };
}
async function UploadCsv(file: File) {
- const { path: sourcePath } = file;
- // read the file as a string
+ const { path: sourcePath } = file;
+ // read the file as a string
const data = readFileSync(sourcePath, 'utf8');
// split the string into an array of lines
return MoveParsedFile(file, Directory.csv, undefined, data);
// console.log(csvParser(data));
-
}
- const manualSuffixes = [".webm"];
+ const manualSuffixes = ['.webm'];
async function UploadAudio(file: File, format: string) {
const suffix = manualSuffixes.includes(format) ? format : undefined;
@@ -200,37 +296,37 @@ export namespace DashUploadUtils {
/**
* Uploads an image specified by the @param source to Dash's /public/files/
- * directory, and returns information generated during that upload
- *
+ * directory, and returns information generated during that upload
+ *
* @param {string} source is either the absolute path of an already uploaded image or
* the url of a remote image
* @param {string} filename dictates what to call the image. If not specified,
* the name {@param prefix}_upload_{GUID}
* @param {string} prefix is a string prepended to the generated image name in the
* event that @param filename is not specified
- *
+ *
* @returns {ImageUploadInformation | Error} This method returns
* 1) the paths to the uploaded images (plural due to resizing)
* 2) the exif data embedded in the image, or the error explaining why exif couldn't be parsed
* 3) the size of the image, in bytes (4432130)
* 4) the content type of the image, i.e. image/(jpeg | png | ...)
*/
- export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise<Upload.ImageInformation | Error> => {
+ export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => {
const metadata = await InspectImage(source);
if (metadata instanceof Error) {
- return metadata;
+ return { name: metadata.name, message: metadata.message };
}
return UploadInspectedImage(metadata, filename || metadata.filename, prefix);
};
export async function buildFileDirectories() {
if (!existsSync(publicDirectory)) {
- console.error("\nPlease ensure that the following directory exists...\n");
+ console.error('\nPlease ensure that the following directory exists...\n');
console.log(publicDirectory);
process.exit(0);
}
if (!existsSync(filesDirectory)) {
- console.error("\nPlease ensure that the following directory exists...\n");
+ console.error('\nPlease ensure that the following directory exists...\n');
console.log(filesDirectory);
process.exit(0);
}
@@ -252,7 +348,7 @@ export namespace DashUploadUtils {
/**
* Based on the url's classification as local or remote, gleans
* as much information as possible about the specified image
- *
+ *
* @param source is the path or url to the image in question
*/
export const InspectImage = async (source: string): Promise<Upload.InspectionResults | Error> => {
@@ -265,9 +361,9 @@ export namespace DashUploadUtils {
*/
if ((rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source)) !== null) {
const [ext, data] = rawMatches.slice(1, 3);
- const resolved = filename = `upload_${Utils.GenerateGuid()}.${ext}`;
+ const resolved = (filename = `upload_${Utils.GenerateGuid()}.${ext}`);
const error = await new Promise<Error | null>(resolve => {
- writeFile(serverPathToFile(Directory.images, resolved), data, "base64", resolve);
+ writeFile(serverPathToFile(Directory.images, resolved), data, 'base64', resolve);
});
if (error !== null) {
return error;
@@ -276,12 +372,12 @@ export namespace DashUploadUtils {
}
let resolvedUrl: string;
/**
- *
+ *
* At this point, we want to take whatever url we have and make sure it's requestable.
* Anything that's hosted by some other website already is, but if the url is a local file url
* (locates the file on this server machine), we have to resolve the client side url by cutting out the
* basename subtree (i.e. /images/<some_guid>.<ext>) and put it on the end of the server's url.
- *
+ *
* This can always be localhost, regardless of whether this is on the server or not, since we (the server, not the client)
* will be the ones making the request, and from the perspective of dash-release or dash-web, localhost:<port> refers to the same thing
* as the full dash-release.eastus.cloudapp.azure.com:<port>.
@@ -290,18 +386,22 @@ export namespace DashUploadUtils {
if (matches === null) {
resolvedUrl = source;
} else {
- resolvedUrl = `${resolvedServerUrl}/${matches[1].split("\\").join("/")}`;
+ resolvedUrl = `${resolvedServerUrl}/${matches[1].split('\\').join('/')}`;
}
// See header comments: not all image files have exif data (I believe only JPG is the only format that can have it)
const exifData = await parseExifData(resolvedUrl);
const results = {
exifData,
- requestable: resolvedUrl
+ requestable: resolvedUrl,
};
+
// Use the request library to parse out file level image information in the headers
- const { headers } = (await new Promise<any>((resolve, reject) => {
- request.head(resolvedUrl, (error, res) => error ? reject(error) : resolve(res));
- }).catch(console.error));
+ const { headers } = await new Promise<any>((resolve, reject) => {
+ return request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res)));
+ }).catch(e => {
+ console.log('Error processing headers: ', e);
+ });
+
try {
// Compute the native width and height ofthe image with an npm module
const { width: nativeWidth, height: nativeHeight } = await requestImageSize(resolvedUrl);
@@ -313,7 +413,7 @@ export namespace DashUploadUtils {
nativeWidth,
nativeHeight,
filename,
- ...results
+ ...results,
};
} catch (e: any) {
console.log(e);
@@ -331,42 +431,50 @@ export namespace DashUploadUtils {
* @param suffix If the file doesn't have a suffix and you want to provide it one
* to appear in the new location
*/
- export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix: string | undefined = undefined, text?: string): Promise<Upload.FileResponse> {
+ export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix: string | undefined = undefined, text?: string, duration?: number, targetName?: string): Promise<Upload.FileResponse> {
const { path: sourcePath } = file;
- let name = path.basename(sourcePath);
+ let name = targetName ?? path.basename(sourcePath);
suffix && (name += suffix);
return new Promise(resolve => {
const destinationPath = serverPathToFile(destination, name);
rename(sourcePath, destinationPath, error => {
resolve({
source: file,
- result: error ? error : {
- accessPaths: {
- agnostic: getAccessPaths(destination, name)
- },
- rawText: text
- }
+ result: error
+ ? error
+ : {
+ accessPaths: {
+ agnostic: getAccessPaths(destination, name),
+ },
+ rawText: text,
+ duration,
+ },
});
});
});
}
+ export function fExists(name: string, destination: Directory) {
+ const destinationPath = serverPathToFile(destination, name);
+ return existsSync(destinationPath);
+ }
+
export function getAccessPaths(directory: Directory, fileName: string) {
return {
client: clientPathToFile(directory, fileName),
- server: serverPathToFile(directory, fileName)
+ server: serverPathToFile(directory, fileName),
};
}
- export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = "", cleanUp = true): Promise<Upload.ImageInformation> => {
+ export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
const { requestable, source, ...remaining } = metadata;
- const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split("/")[1].toLowerCase()}`;
+ const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split('/')[1].toLowerCase()}`;
const { images } = Directory;
const information: Upload.ImageInformation = {
accessPaths: {
- agnostic: getAccessPaths(images, resolved)
+ agnostic: getAccessPaths(images, resolved),
},
- ...metadata
+ ...metadata,
};
const writtenFiles = await outputResizedImages(() => request(requestable), resolved, pathToDirectory(Directory.images));
for (const suffix of Object.keys(writtenFiles)) {
@@ -383,9 +491,9 @@ export namespace DashUploadUtils {
const val: any = layer[key];
if (val instanceof Buffer) {
layer[key] = val.toString();
- } else if (Array.isArray(val) && typeof val[0] === "number") {
+ } else if (Array.isArray(val) && typeof val[0] === 'number') {
layer[key] = Buffer.from(val).toString();
- } else if (typeof val === "object") {
+ } else if (typeof val === 'object') {
bufferConverterRec(val);
}
}
@@ -393,7 +501,7 @@ export namespace DashUploadUtils {
const parseExifData = async (source: string) => {
const image = await request.get(source, { encoding: null });
- const { data, error } = await new Promise(resolve => {
+ const { data, error } = await new Promise<{ data: any; error: any }>(resolve => {
new ExifImage({ image }, (error, data) => {
let reason: Opt<string> = undefined;
if (error) {
@@ -403,27 +511,27 @@ export namespace DashUploadUtils {
});
});
//data && bufferConverterRec(data);
- return { data: await exifr.parse(image), error };
+ return error ? { data: undefined, error } : { data: await exifr.parse(image), error };
};
const { pngs, jpgs, webps, tiffs } = AcceptableMedia;
const pngOptions = {
compressionLevel: 9,
adaptiveFiltering: true,
- force: true
+ force: true,
};
export async function outputResizedImages(streamProvider: () => Stream | Promise<Stream>, outputFileName: string, outputDirectory: string) {
const writtenFiles: { [suffix: string]: string } = {};
for (const { resizer, suffix } of resizers(path.extname(outputFileName))) {
- const outputPath = path.resolve(outputDirectory, writtenFiles[suffix] = InjectSize(outputFileName, suffix));
+ const outputPath = path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
await new Promise<void>(async (resolve, reject) => {
const source = streamProvider();
let readStream: Stream = source instanceof Promise ? await source : source;
if (resizer) {
readStream = readStream.pipe(resizer.withMetadata());
}
- readStream.pipe(createWriteStream(outputPath)).on("close", resolve).on("error", reject);
+ readStream.pipe(createWriteStream(outputPath)).on('close', resolve).on('error', reject);
});
}
return writtenFiles;
@@ -442,15 +550,14 @@ export namespace DashUploadUtils {
initial = initial.webp();
} else if (tiffs.includes(ext)) {
initial = initial.tiff();
- } else if (ext === ".gif") {
+ } else if (ext === '.gif') {
initial = undefined;
}
return {
resizer: initial,
- suffix
+ suffix,
};
- })
+ }),
];
}
-
-} \ No newline at end of file
+}
diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts
index cde95526f..7db1c2dae 100644
--- a/src/server/SharedMediaTypes.ts
+++ b/src/server/SharedMediaTypes.ts
@@ -2,36 +2,45 @@ import { ExifData } from 'exif';
import { File } from 'formidable';
export namespace AcceptableMedia {
- export const gifs = [".gif"];
- export const pngs = [".png"];
- export const jpgs = [".jpg", ".jpeg"];
- export const webps = [".webp"];
- export const tiffs = [".tiff"];
+ export const gifs = ['.gif'];
+ export const pngs = ['.png'];
+ export const jpgs = ['.jpg', '.jpeg'];
+ export const webps = ['.webp'];
+ export const tiffs = ['.tiff'];
export const imageFormats = [...pngs, ...jpgs, ...gifs, ...webps, ...tiffs];
- export const videoFormats = [".mov", ".mp4", ".quicktime", ".mkv", ".x-matroska;codecs=avc1"];
- export const applicationFormats = [".pdf"];
- export const audioFormats = [".wav", ".mp3", ".mpeg", ".flac", ".au", ".aiff", ".m4a", ".webm"];
+ export const videoFormats = ['.mov', '.mp4', '.quicktime', '.mkv', '.x-matroska;codecs=avc1'];
+ export const applicationFormats = ['.pdf'];
+ export const audioFormats = ['.wav', '.mp3', '.mpeg', '.flac', '.au', '.aiff', '.m4a', '.webm'];
}
export namespace Upload {
-
export function isImageInformation(uploadResponse: Upload.FileInformation): uploadResponse is Upload.ImageInformation {
- return "nativeWidth" in uploadResponse;
+ return 'nativeWidth' in uploadResponse;
+ }
+
+ export function isVideoInformation(uploadResponse: Upload.FileInformation): uploadResponse is Upload.VideoInformation {
+ return 'duration' in uploadResponse;
}
export interface FileInformation {
accessPaths: AccessPathInfo;
rawText?: string;
+ duration?: number;
}
- export type FileResponse<T extends FileInformation = FileInformation> = { source: File, result: T | Error };
+ export type FileResponse<T extends FileInformation = FileInformation> = { source: File; result: T | Error };
export type ImageInformation = FileInformation & InspectionResults;
+ export type VideoInformation = FileInformation & VideoResults;
+
export interface AccessPathInfo {
- [suffix: string]: { client: string, server: string };
+ [suffix: string]: { client: string; server: string };
}
+ export interface VideoResults {
+ duration: number;
+ }
export interface InspectionResults {
source: string;
requestable: string;
@@ -44,8 +53,7 @@ export namespace Upload {
}
export interface EnrichedExifData {
- data: ExifData & ExifData["gps"];
+ data: ExifData & ExifData['gps'];
error?: string;
}
-
-} \ No newline at end of file
+}
diff --git a/src/server/server_Initialization.ts b/src/server/server_Initialization.ts
index fd000a83c..b0db71f9c 100644
--- a/src/server/server_Initialization.ts
+++ b/src/server/server_Initialization.ts
@@ -1,13 +1,13 @@
import * as bodyParser from 'body-parser';
import { blue, yellow } from 'colors';
import * as cookieParser from 'cookie-parser';
-import * as cors from "cors";
+import * as cors from 'cors';
import * as express from 'express';
import * as session from 'express-session';
import * as expressValidator from 'express-validator';
import * as fs from 'fs';
-import { Server as HttpServer } from "http";
-import { createServer, Server as HttpsServer } from "https";
+import { Server as HttpServer } from 'http';
+import { createServer, Server as HttpsServer } from 'https';
import * as passport from 'passport';
import * as request from 'request';
import * as webpack from 'webpack';
@@ -33,7 +33,7 @@ const compiler = webpack(config);
export type RouteSetter = (server: RouteManager) => void;
//export let disconnect: Function;
-export let resolvedPorts: { server: number, socket: number } = { server: 1050, socket: 4321 };
+export let resolvedPorts: { server: number; socket: number } = { server: 1050, socket: 4321 };
export let resolvedServerUrl: string;
export default async function InitializeServer(routeSetter: RouteSetter) {
@@ -42,33 +42,32 @@ export default async function InitializeServer(routeSetter: RouteSetter) {
const compiler = webpack(config);
- app.use(require("webpack-dev-middleware")(compiler, {
- publicPath: config.output.publicPath
- }));
+ app.use(
+ require('webpack-dev-middleware')(compiler, {
+ publicPath: config.output.publicPath,
+ })
+ );
- app.use(require("webpack-hot-middleware")(compiler));
+ app.use(require('webpack-hot-middleware')(compiler));
// route table managed by express. routes are tested sequentially against each of these map rules. when a match is found, the handler is called to process the request
- app.get(new RegExp(/^\/+$/), (req, res) => res.redirect(req.user ? "/home" : "/login")); // target urls that consist of one or more '/'s with nothing in between
- app.use(express.static(publicDirectory, { setHeaders: res => res.setHeader("Access-Control-Allow-Origin", "*") })); //all urls that start with dash's public directory: /files/ (e.g., /files/images, /files/audio, etc)
+ app.get(new RegExp(/^\/+$/), (req, res) => res.redirect(req.user ? '/home' : '/login')); // target urls that consist of one or more '/'s with nothing in between
+ app.use(express.static(publicDirectory, { setHeaders: res => res.setHeader('Access-Control-Allow-Origin', '*') })); //all urls that start with dash's public directory: /files/ (e.g., /files/images, /files/audio, etc)
app.use(cors({ origin: (_origin: any, callback: any) => callback(null, true) }));
app.use(wdm(compiler, { publicPath: config.output.publicPath }));
app.use(whm(compiler));
- registerAuthenticationRoutes(app); // this adds routes to authenticate a user (login, etc)
- registerCorsProxy(app); // this adds a /corsProxy/ route to allow clients to get to urls that would otherwise be blocked by cors policies
+ registerAuthenticationRoutes(app); // this adds routes to authenticate a user (login, etc)
+ registerCorsProxy(app); // this adds a /corsProxy/ route to allow clients to get to urls that would otherwise be blocked by cors policies
isRelease && !SSL.Loaded && SSL.exit();
routeSetter(new RouteManager(app, isRelease)); // this sets up all the regular supervised routes (things like /home, download/upload api's, pdf, search, session, etc)
    registerEmbeddedBrowseRelativePathHandler(app); // this allows rendered web pages which internally have relative paths to find their content
let server: HttpServer | HttpsServer;
isRelease && process.env.serverPort && (resolvedPorts.server = Number(process.env.serverPort));
- await new Promise<void>(resolve => server = isRelease ?
- createServer(SSL.Credentials, app).listen(resolvedPorts.server, resolve) :
- app.listen(resolvedPorts.server, resolve)
- );
- logPort("server", resolvedPorts.server);
+ await new Promise<void>(resolve => (server = isRelease ? createServer(SSL.Credentials, app).listen(resolvedPorts.server, resolve) : app.listen(resolvedPorts.server, resolve)));
+ logPort('server', resolvedPorts.server);
- resolvedServerUrl = `${isRelease && process.env.serverName ? `https://${process.env.serverName}.com` : "http://localhost"}:${resolvedPorts.server}`;
+ resolvedServerUrl = `${isRelease && process.env.serverName ? `https://${process.env.serverName}.com` : 'http://localhost'}:${resolvedPorts.server}`;
// initialize the web socket (bidirectional communication: if a user changes
// a field on one client, that change must be broadcast to all other clients)
@@ -79,7 +78,7 @@ export default async function InitializeServer(routeSetter: RouteSetter) {
}
const week = 7 * 24 * 60 * 60 * 1000;
-const secret = "64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc";
+const secret = '64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc';
function buildWithMiddleware(server: express.Express) {
[
@@ -89,18 +88,18 @@ function buildWithMiddleware(server: express.Express) {
resave: true,
cookie: { maxAge: week },
saveUninitialized: true,
- store: process.env.DB === "MEM" ? new session.MemoryStore() : new MongoStore({ url: Database.url })
+ store: process.env.DB === 'MEM' ? new session.MemoryStore() : new MongoStore({ url: Database.url }),
}),
flash(),
expressFlash(),
- bodyParser.json({ limit: "10mb" }),
+ bodyParser.json({ limit: '10mb' }),
bodyParser.urlencoded({ extended: true }),
expressValidator(),
passport.initialize(),
passport.session(),
(req: express.Request, res: express.Response, next: express.NextFunction) => {
res.locals.user = req.user;
- if ((req.originalUrl.endsWith(".png") /*|| req.originalUrl.endsWith(".js")*/) && req.method === 'GET' && (res as any)._contentLength) {
+ if (req.originalUrl.endsWith('.png') /*|| req.originalUrl.endsWith(".js")*/ && req.method === 'GET' && (res as any)._contentLength) {
const period = 30000;
res.set('Cache-control', `public, max-age=${period}`);
} else {
@@ -108,61 +107,61 @@ function buildWithMiddleware(server: express.Express) {
res.set('Cache-control', `no-store`);
}
next();
- }
+ },
].forEach(next => server.use(next));
return server;
}
/* Determine if the environment is dev mode or release mode. */
function determineEnvironment() {
- const isRelease = process.env.RELEASE === "true";
+ const isRelease = process.env.RELEASE === 'true';
const color = isRelease ? blue : yellow;
- const label = isRelease ? "release" : "development";
+ const label = isRelease ? 'release' : 'development';
console.log(`\nrunning server in ${color(label)} mode`);
// swilkins: I don't think we need to read from ClientUtils.RELEASE anymore. Should be able to invoke process.env.RELEASE
// on the client side, thanks to dotenv in webpack.config.js
- let clientUtils = fs.readFileSync("./src/client/util/ClientUtils.ts.temp", "utf8");
+ let clientUtils = fs.readFileSync('./src/client/util/ClientUtils.ts.temp', 'utf8');
clientUtils = `//AUTO-GENERATED FILE: DO NOT EDIT\n${clientUtils.replace('"mode"', String(isRelease))}`;
- fs.writeFileSync("./src/client/util/ClientUtils.ts", clientUtils, "utf8");
+ fs.writeFileSync('./src/client/util/ClientUtils.ts', clientUtils, 'utf8');
return isRelease;
}
function registerAuthenticationRoutes(server: express.Express) {
- server.get("/signup", getSignup);
- server.post("/signup", postSignup);
+ server.get('/signup', getSignup);
+ server.post('/signup', postSignup);
- server.get("/login", getLogin);
- server.post("/login", postLogin);
+ server.get('/login', getLogin);
+ server.post('/login', postLogin);
- server.get("/logout", getLogout);
+ server.get('/logout', getLogout);
- server.get("/forgotPassword", getForgot);
- server.post("/forgotPassword", postForgot);
+ server.get('/forgotPassword', getForgot);
+ server.post('/forgotPassword', postForgot);
- const reset = new RouteSubscriber("resetPassword").add("token").build;
+ const reset = new RouteSubscriber('resetPassword').add('token').build;
server.get(reset, getReset);
server.post(reset, postReset);
}
function registerCorsProxy(server: express.Express) {
- server.use("/corsProxy", async (req, res) => {
- const referer = req.headers.referer ? decodeURIComponent(req.headers.referer) : "";
+ server.use('/corsProxy', async (req, res) => {
+ const referer = req.headers.referer ? decodeURIComponent(req.headers.referer) : '';
let requrlraw = decodeURIComponent(req.url.substring(1));
- const qsplit = requrlraw.split("?q=");
- const newqsplit = requrlraw.split("&q=");
+ const qsplit = requrlraw.split('?q=');
+ const newqsplit = requrlraw.split('&q=');
if (qsplit.length > 1 && newqsplit.length > 1) {
const lastq = newqsplit[newqsplit.length - 1];
- requrlraw = qsplit[0] + "?q=" + lastq.split("&")[0] + "&" + qsplit[1].split("&")[1];
+ requrlraw = qsplit[0] + '?q=' + lastq.split('&')[0] + '&' + qsplit[1].split('&')[1];
}
- const requrl = requrlraw.startsWith("/") ? referer + requrlraw : requrlraw;
+ const requrl = requrlraw.startsWith('/') ? referer + requrlraw : requrlraw;
// cors weirdness here...
// if the referer is a cors page and the cors() route (I think) redirected to /corsProxy/<path> and the requested url path was relative,
// then we redirect again to the cors referer and just add the relative path.
- if (!requrl.startsWith("http") && req.originalUrl.startsWith("/corsProxy") && referer?.includes("corsProxy")) {
- res.redirect(referer + (referer.endsWith("/") ? "" : "/") + requrl);
+ if (!requrl.startsWith('http') && req.originalUrl.startsWith('/corsProxy') && referer?.includes('corsProxy')) {
+ res.redirect(referer + (referer.endsWith('/') ? '' : '/') + requrl);
} else {
proxyServe(req, requrl, res);
}
@@ -173,34 +172,40 @@ function proxyServe(req: any, requrl: string, response: any) {
const htmlBodyMemoryStream = new (require('memorystream'))();
var retrieveHTTPBody: any;
const sendModifiedBody = () => {
- const header = response.headers["content-encoding"];
- if (header && header.includes("gzip")) {
+ const header = response.headers['content-encoding'];
+ if (header?.includes('gzip')) {
try {
const replacer = (match: any, href: string, offset: any, string: any) => {
- return `href="${resolvedServerUrl + "/corsProxy/http" + href}"`;
+ return `href="${resolvedServerUrl + '/corsProxy/http' + href}"`;
};
const zipToStringDecoder = new (require('string_decoder').StringDecoder)('utf8');
const bodyStream = htmlBodyMemoryStream.read();
if (bodyStream) {
- const htmlText = zipToStringDecoder.write(zlib.gunzipSync(bodyStream).toString('utf8')
- .replace('<head>', '<head> <style>[id ^= "google"] { display: none; } </style>')
- .replace(/href="https?([^"]*)"/g, replacer)
- .replace(/target="_blank"/g, ""));
+ const htmlText = zipToStringDecoder.write(
+ zlib
+ .gunzipSync(bodyStream)
+ .toString('utf8')
+ .replace('<head>', '<head> <style>[id ^= "google"] { display: none; } </style>')
+ .replace(/href="https?([^"]*)"/g, replacer)
+ .replace(/target="_blank"/g, '')
+ );
response.send(zlib.gzipSync(htmlText));
} else {
req.pipe(request(requrl)).pipe(response);
- console.log("EMPTY body:" + req.url);
+ console.log('EMPTY body:' + req.url);
}
} catch (e) {
- console.log("EROR?: ", e);
+ console.log('EROR?: ', e);
}
- } else req.pipe(request(requrl)).pipe(response);
+ } else {
+ req.pipe(htmlBodyMemoryStream).pipe(response);
+ }
};
retrieveHTTPBody = () => {
- req.headers.cookie = "";
+ req.headers.cookie = '';
req.pipe(request(requrl))
- .on("error", (e: any) => console.log(`Malformed CORS url: ${requrl}`, e))
- .on("response", (res: any) => {
+ .on('error', (e: any) => console.log(`Malformed CORS url: ${requrl}`, e))
+ .on('response', (res: any) => {
res.headers;
const headers = Object.keys(res.headers);
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
@@ -208,36 +213,41 @@ function proxyServe(req: any, requrl: string, response: any) {
const header = res.headers[headerName];
if (Array.isArray(header)) {
res.headers[headerName] = header.filter(h => !headerCharRegex.test(h));
- } else if (headerCharRegex.test(header || "")) {
+ } else if (headerCharRegex.test(header || '')) {
delete res.headers[headerName];
} else res.headers[headerName] = header;
});
+ res.headers['x-permitted-cross-domain-policies'] = 'all';
+ res.headers['x-frame-options'] = '';
+ res.headers['content-security-policy'] = '';
response.headers = response._headers = res.headers;
})
- .on("end", sendModifiedBody)
+ .on('end', sendModifiedBody)
.pipe(htmlBodyMemoryStream);
};
retrieveHTTPBody();
}
function registerEmbeddedBrowseRelativePathHandler(server: express.Express) {
- server.use("*", (req, res) => {
+ server.use('*', (req, res) => {
const relativeUrl = req.originalUrl;
- if (!req.user) res.redirect("/home"); // When no user is logged in, we interpret a relative URL as being a reference to something they don't have access to and redirect to /home
- else if (!res.headersSent && req.headers.referer?.includes("corsProxy")) { // a request for something by a proxied referrer means it must be a relative reference. So construct a proxied absolute reference here.
+ if (!req.user) res.redirect('/home'); // When no user is logged in, we interpret a relative URL as being a reference to something they don't have access to and redirect to /home
+ else if (!res.headersSent && req.headers.referer?.includes('corsProxy')) {
+ // a request for something by a proxied referrer means it must be a relative reference. So construct a proxied absolute reference here.
try {
const proxiedRefererUrl = decodeURIComponent(req.headers.referer); // (e.g., http://localhost:<port>/corsProxy/https://en.wikipedia.org/wiki/Engelbart)
const dashServerUrl = proxiedRefererUrl.match(/.*corsProxy\//)![0]; // the dash server url (e.g.: http://localhost:<port>/corsProxy/ )
- const actualReferUrl = proxiedRefererUrl.replace(dashServerUrl, ""); // the url of the referer without the proxy (e.g., : https://en.wikipedia.org/wiki/Engelbart)
+ const actualReferUrl = proxiedRefererUrl.replace(dashServerUrl, ''); // the url of the referer without the proxy (e.g., : https://en.wikipedia.org/wiki/Engelbart)
const absoluteTargetBaseUrl = actualReferUrl.match(/https?:\/\/[^\/]*/)![0]; // the base of the original url (e.g., https://en.wikipedia.org)
const redirectedProxiedUrl = dashServerUrl + encodeURIComponent(absoluteTargetBaseUrl + relativeUrl); // the new proxied full url (e.g., http://localhost:<port>/corsProxy/https://en.wikipedia.org/<somethingelse>)
- if (relativeUrl.startsWith("//")) res.redirect("http:" + relativeUrl);
+ if (relativeUrl.startsWith('//')) res.redirect('http:' + relativeUrl);
else res.redirect(redirectedProxiedUrl);
} catch (e) {
- console.log("Error embed: ", e);
+ console.log('Error embed: ', e);
}
- } else if (relativeUrl.startsWith("/search") && !req.headers.referer?.includes("corsProxy")) { // detect search query and use default search engine
- res.redirect(req.headers.referer + "corsProxy/" + encodeURIComponent("http://www.google.com" + relativeUrl));
+ } else if (relativeUrl.startsWith('/search') && !req.headers.referer?.includes('corsProxy')) {
+ // detect search query and use default search engine
+ res.redirect(req.headers.referer + 'corsProxy/' + encodeURIComponent('http://www.google.com' + relativeUrl));
} else {
res.end();
}
diff --git a/src/server/websocket.ts b/src/server/websocket.ts
index 9b91a35a6..68b003496 100644
--- a/src/server/websocket.ts
+++ b/src/server/websocket.ts
@@ -199,7 +199,7 @@ export namespace WebSocket {
return Database.Instance.getDocument(id, callback);
}
function GetRefField([id, callback]: [string, (result?: Transferable) => void]) {
- process.stdout.write(`.`);
+ process.stdout.write(`+`);
GetRefFieldLocal([id, callback]);
}