From 42afc0250de658fc3e924864bfae5afb4edec335 Mon Sep 17 00:00:00 2001 From: bobzel Date: Sun, 14 May 2023 12:03:40 -0400 Subject: major overhaul of field naming conventions. --- src/server/ApiManagers/SearchManager.ts | 151 ++++++++++++++++---------------- 1 file changed, 74 insertions(+), 77 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index a74e13a62..186f0bcd3 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -1,94 +1,89 @@ -import { exec } from "child_process"; -import { cyan, green, red, yellow } from "colors"; +import { exec } from 'child_process'; +import { cyan, green, red, yellow } from 'colors'; import * as path from 'path'; -import { log_execution } from "../ActionUtilities"; -import { Database } from "../database"; -import { Method } from "../RouteManager"; -import RouteSubscriber from "../RouteSubscriber"; -import { Search } from "../Search"; -import ApiManager, { Registration } from "./ApiManager"; -import { Directory, pathToDirectory } from "./UploadManager"; +import { log_execution } from '../ActionUtilities'; +import { Database } from '../database'; +import { Method } from '../RouteManager'; +import RouteSubscriber from '../RouteSubscriber'; +import { Search } from '../Search'; +import ApiManager, { Registration } from './ApiManager'; +import { Directory, pathToDirectory } from './UploadManager'; const findInFiles = require('find-in-files'); export class SearchManager extends ApiManager { - protected initialize(register: Registration): void { - register({ method: Method.GET, - subscription: new RouteSubscriber("solr").add("action"), + subscription: new RouteSubscriber('solr').add('action'), secureHandler: async ({ req, res }) => { const { action } = req.params; switch (action) { - case "start": - case "stop": - const status = req.params.action === "start"; + case 'start': + case 'stop': + const 
status = req.params.action === 'start'; SolrManager.SetRunning(status); break; - case "update": + case 'update': await SolrManager.update(); break; default: console.log(yellow(`${action} is an unknown solr operation.`)); } - res.redirect("/home"); - } + res.redirect('/home'); + }, }); register({ method: Method.GET, - subscription: "/textsearch", + subscription: '/textsearch', secureHandler: async ({ req, res }) => { const q = req.query.q; if (q === undefined) { res.send([]); return; } - const resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; + const resObj: { ids: string[]; numFound: number; lines: string[] } = { ids: [], numFound: 0, lines: [] }; let results: any; const dir = pathToDirectory(Directory.text); try { const regex = new RegExp(q.toString()); - results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, dir, ".txt$"); + results = await findInFiles.find({ term: q, flags: 'ig' }, dir, '.txt$'); for (const result in results) { - resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); + resObj.ids.push(path.basename(result, '.txt').replace(/upload_/, '')); resObj.lines.push(results[result].line); resObj.numFound++; } res.send(resObj); } catch (e) { - console.log(red("textsearch:bad RegExp" + q.toString())); + console.log(red('textsearch:bad RegExp' + q.toString())); res.send([]); return; } - } + }, }); register({ method: Method.GET, - subscription: "/dashsearch", + subscription: '/dashsearch', secureHandler: async ({ req, res }) => { const solrQuery: any = {}; - ["q", "fq", "start", "rows", "sort", "hl.maxAnalyzedChars", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); + ['q', 'fq', 'start', 'rows', 'sort', 'hl.maxAnalyzedChars', 'hl', 'hl.fl'].forEach(key => (solrQuery[key] = req.query[key])); if (solrQuery.q === undefined) { res.send([]); return; } const results = await Search.search(solrQuery); res.send(results); - } + }, }); - } - } export namespace SolrManager { - export 
function SetRunning(status: boolean) { - const args = status ? "start" : "stop -p 8983"; + const args = status ? 'start' : 'stop -p 8983'; console.log(`solr management: trying to ${args}`); - exec(`solr ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => { + exec(`solr ${args}`, { cwd: './solr-8.3.1/bin' }, (error, stdout, stderr) => { if (error) { console.log(red(`solr management error: unable to ${args} server`)); console.log(red(error.message)); @@ -97,39 +92,39 @@ export namespace SolrManager { console.log(yellow(stderr)); }); if (status) { - console.log(cyan("Start script is executing: please allow 15 seconds for solr to start on port 8983.")); + console.log(cyan('Start script is executing: please allow 15 seconds for solr to start on port 8983.')); } } export async function update() { - console.log(green("Beginning update...")); + console.log(green('Beginning update...')); await log_execution({ - startMessage: "Clearing existing Solr information...", - endMessage: "Solr information successfully cleared", + startMessage: 'Clearing existing Solr information...', + endMessage: 'Solr information successfully cleared', action: Search.clear, - color: cyan + color: cyan, }); const cursor = await log_execution({ - startMessage: "Connecting to and querying for all documents from database...", + startMessage: 'Connecting to and querying for all documents from database...', endMessage: ({ result, error }) => { const success = error === null && result !== undefined; if (!success) { - console.log(red("Unable to connect to the database.")); + console.log(red('Unable to connect to the database.')); process.exit(0); } - return "Connection successful and query complete"; + return 'Connection successful and query complete'; }, action: () => Database.Instance.query({}), - color: yellow + color: yellow, }); const updates: any[] = []; let numDocs = 0; function updateDoc(doc: any) { numDocs++; - if ((numDocs % 50) === 0) { + if (numDocs % 50 === 0) { 
console.log(`Batch of 50 complete, total of ${numDocs}`); } - if (doc.__type !== "Doc") { + if (doc.__type !== 'Doc') { return; } const fields = doc.fields; @@ -143,8 +138,8 @@ export namespace SolrManager { const term = ToSearchTerm(value); if (term !== undefined) { const { suffix, value } = term; - if (key.endsWith('lastModified')) { - update["lastModified" + suffix] = value; + if (key.endsWith('modificationDate')) { + update['modificationDate' + suffix] = value; } update[key + suffix] = value; dynfield = true; @@ -157,51 +152,54 @@ export namespace SolrManager { await cursor?.forEach(updateDoc); const result = await log_execution({ startMessage: `Dispatching updates for ${updates.length} documents`, - endMessage: "Dispatched updates complete", + endMessage: 'Dispatched updates complete', action: () => Search.updateDocuments(updates), - color: cyan + color: cyan, }); try { if (result) { const { status } = JSON.parse(result).responseHeader; - console.log(status ? red(`Failed with status code (${status})`) : green("Success!")); + console.log(status ? 
red(`Failed with status code (${status})`) : green('Success!')); } else { - console.log(red("Solr is likely not running!")); + console.log(red('Solr is likely not running!')); } } catch (e) { - console.log(red("Error:")); + console.log(red('Error:')); console.log(e); - console.log("\n"); + console.log('\n'); } await cursor?.close(); } - const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { - "number": "_n", - "string": "_t", - "boolean": "_b", - "image": ["_t", "url"], - "video": ["_t", "url"], - "pdf": ["_t", "url"], - "audio": ["_t", "url"], - "web": ["_t", "url"], - "map": ["_t", "url"], - "date": ["_d", value => new Date(value.date).toISOString()], - "proxy": ["_i", "fieldId"], - "prefetch_proxy": ["_i", "fieldId"], - "list": ["_l", list => { - const results = []; - for (const value of list.fields) { - const term = ToSearchTerm(value); - if (term) { - results.push(term.value); + const suffixMap: { [type: string]: string | [string, string | ((json: any) => any)] } = { + number: '_n', + string: '_t', + boolean: '_b', + image: ['_t', 'url'], + video: ['_t', 'url'], + pdf: ['_t', 'url'], + audio: ['_t', 'url'], + web: ['_t', 'url'], + map: ['_t', 'url'], + date: ['_d', value => new Date(value.date).toISOString()], + proxy: ['_i', 'fieldId'], + prefetch_proxy: ['_i', 'fieldId'], + list: [ + '_l', + list => { + const results = []; + for (const value of list.fields) { + const term = ToSearchTerm(value); + if (term) { + results.push(term.value); + } } - } - return results.length ? results : null; - }] + return results.length ? 
results : null; + }, + ], }; - function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { + function ToSearchTerm(val: any): { suffix: string; value: any } | undefined { if (val === null || val === undefined) { return; } @@ -213,7 +211,7 @@ export namespace SolrManager { if (Array.isArray(suffix)) { const accessor = suffix[1]; - if (typeof accessor === "function") { + if (typeof accessor === 'function') { val = accessor(val); } else { val = val[accessor]; @@ -223,5 +221,4 @@ export namespace SolrManager { return { suffix, value: val }; } - -} \ No newline at end of file +} -- cgit v1.2.3-70-g09d2 From 27f518632c24f69fff360bef36eb0e5426167b83 Mon Sep 17 00:00:00 2001 From: James Hu <51237606+jameshu111@users.noreply.github.com> Date: Wed, 7 Jun 2023 12:30:53 -0400 Subject: Update other uses --- src/client/documents/Documents.ts | 4 ++-- src/client/util/Import & Export/DirectoryImportBox.tsx | 2 +- src/client/util/ReportManager.tsx | 2 +- src/client/views/nodes/AudioBox.tsx | 2 +- src/client/views/nodes/DocumentView.tsx | 2 +- src/client/views/nodes/RecordingBox/RecordingView.tsx | 2 +- src/client/views/nodes/ScreenshotBox.tsx | 4 ++-- src/mobile/ImageUpload.tsx | 2 +- src/server/ApiManagers/UploadManager.ts | 2 +- 9 files changed, 11 insertions(+), 11 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 77f0e1e5e..0030af982 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -1844,7 +1844,7 @@ export namespace DocUtils { export async function uploadFilesToDocs(files: File[], options: DocumentOptions) { const generatedDocuments: Doc[] = []; const fileNoGuidPairs: Networking.FileGuidPair[] = []; - files.map(file => fileNoGuidPairs.push({file : file})); + files.map(file => fileNoGuidPairs.push({file})); const upfiles = await Networking.UploadFilesToServer(fileNoGuidPairs); for (const { source: { name, type }, @@ -1857,7 +1857,7 
@@ export namespace DocUtils { export function uploadFileToDoc(file: File, options: DocumentOptions, overwriteDoc: Doc) { const generatedDocuments: Doc[] = []; - Networking.UploadFilesToServer([{file: file, guid: overwriteDoc[Id]}]).then(upfiles => { + Networking.UploadFilesToServer([{file, guid: overwriteDoc[Id]}]).then(upfiles => { const { source: { name, type }, result, diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index b9bb22564..1a4c2450e 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -112,7 +112,7 @@ export class DirectoryImportBox extends React.Component { sizes.push(file.size); modifiedDates.push(file.lastModified); }); - collector.push(...(await Networking.UploadFilesToServer(batch))); + collector.push(...(await Networking.UploadFilesToServer(batch.map(file =>({file}))))); runInAction(() => (this.completed += batch.length)); }); diff --git a/src/client/util/ReportManager.tsx b/src/client/util/ReportManager.tsx index 51742d455..4c1020455 100644 --- a/src/client/util/ReportManager.tsx +++ b/src/client/util/ReportManager.tsx @@ -173,7 +173,7 @@ export class ReportManager extends React.Component<{}> { // upload the files to the server if (input.files && input.files.length !== 0) { const fileArray: File[] = Array.from(input.files); - (Networking.UploadFilesToServer(fileArray)).then(links => { + (Networking.UploadFilesToServer(fileArray.map(file =>({file})))).then(links => { console.log('finshed uploading', links.map(this.getServerPath)); this.setFileLinks((links ?? 
[]).map(this.getServerPath)); }) diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 0cb849923..6558d215a 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -233,7 +233,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - const [{ result }] = await Networking.UploadFilesToServer(e.data); + const [{ result }] = await Networking.UploadFilesToServer({file: e.data}); if (!(result instanceof Error)) { this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client); } diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx index 0769e26d0..687683e6e 100644 --- a/src/client/views/nodes/DocumentView.tsx +++ b/src/client/views/nodes/DocumentView.tsx @@ -1013,7 +1013,7 @@ export class DocumentViewInternal extends DocComponent { - const [{ result }] = await Networking.UploadFilesToServer(e.data); + const [{ result }] = await Networking.UploadFilesToServer({file: e.data}); if (!(result instanceof Error)) { const audioField = new AudioField(result.accessPaths.agnostic.client); const audioAnnos = Cast(dataDoc[field + '-audioAnnotations'], listSpec(AudioField), null); diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx index 424ebc384..51eb774e2 100644 --- a/src/client/views/nodes/RecordingBox/RecordingView.tsx +++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx @@ -67,7 +67,7 @@ export function RecordingView(props: IRecordingViewProps) { const videoFiles = videos.map((vid, i) => new File(vid.videoChunks, `segvideo${i}.mkv`, { type: vid.videoChunks[0].type, lastModified: Date.now() })); // upload the segments to the server and get their server access paths - const serverPaths: string[] = (await Networking.UploadFilesToServer(videoFiles)).map(res => (res.result instanceof Error ? 
'' : res.result.accessPaths.agnostic.server)); + const serverPaths: string[] = (await Networking.UploadFilesToServer(videoFiles.map(file => ({file})))).map(res => (res.result instanceof Error ? '' : res.result.accessPaths.agnostic.server)); // concat the segments together using post call const result: Upload.AccessPathInfo | Error = await Networking.PostToServer('/concatVideos', serverPaths); diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx index 1e178b123..312b3c619 100644 --- a/src/client/views/nodes/ScreenshotBox.tsx +++ b/src/client/views/nodes/ScreenshotBox.tsx @@ -224,7 +224,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent aud_chunks.push(e.data); this._audioRec.onstop = async (e: any) => { - const [{ result }] = await Networking.UploadFilesToServer(aud_chunks); + const [{ result }] = await Networking.UploadFilesToServer(aud_chunks.map((file: any) => ({file}))); if (!(result instanceof Error)) { this.dataDoc[this.props.fieldKey + '-audio'] = new AudioField(result.accessPaths.agnostic.client); } @@ -237,7 +237,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent { console.log('screenshotbox: upload'); const file = new File(vid_chunks, `${this.rootDoc[Id]}.mkv`, { type: vid_chunks[0].type, lastModified: Date.now() }); - const [{ result }] = await Networking.UploadFilesToServer(file); + const [{ result }] = await Networking.UploadFilesToServer({file}); this.dataDoc[this.fieldKey + '_duration'] = (new Date().getTime() - this.recordingStart!) 
/ 1000; if (!(result instanceof Error)) { // convert this screenshotBox into normal videoBox diff --git a/src/mobile/ImageUpload.tsx b/src/mobile/ImageUpload.tsx index f910d765e..da38fcaee 100644 --- a/src/mobile/ImageUpload.tsx +++ b/src/mobile/ImageUpload.tsx @@ -42,7 +42,7 @@ export class Uploader extends React.Component { this.process = "Uploading Files"; for (let index = 0; index < files.length; ++index) { const file = files[index]; - const res = await Networking.UploadFilesToServer(file); + const res = await Networking.UploadFilesToServer({file}); this.setOpacity(3, "1"); // Slab 3 // For each item that the user has selected res.map(async ({ result }) => { diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 74c06b4a6..94f744848 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -75,7 +75,7 @@ export default class UploadManager extends ApiManager { for (const key in files) { const f = files[key]; if (!Array.isArray(f)) { - const result = await DashUploadUtils.upload(f); + const result = await DashUploadUtils.upload(f, key); result && !(result.result instanceof Error) && results.push(result); } } -- cgit v1.2.3-70-g09d2 From b2c0855d6d701bd80666e0693bd193dc69efb4a0 Mon Sep 17 00:00:00 2001 From: James Hu <51237606+jameshu111@users.noreply.github.com> Date: Wed, 7 Jun 2023 12:45:08 -0400 Subject: Comments --- src/client/Network.ts | 16 +++++++++++++--- src/client/documents/Documents.ts | 18 ++++++++++++++++-- src/server/ApiManagers/UploadManager.ts | 2 +- src/server/DashUploadUtils.ts | 2 +- 4 files changed, 31 insertions(+), 7 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/Network.ts b/src/client/Network.ts index 28825823d..9c293f9af 100644 --- a/src/client/Network.ts +++ b/src/client/Network.ts @@ -17,19 +17,26 @@ export namespace Networking { return requestPromise.post(options); } + /** + * FileGuidPair attaches a guid to a file that 
is being uploaded, + * allowing the client to track the upload progress. + * + * When files are dragged to the canvas, the overwriteDoc's ID is + * used as the guid. Otherwise, a new guid is generated. + */ export interface FileGuidPair { file: File; guid?: string; } /** * Handles uploading basic file types to server and makes the API call to "/uploadFormData" endpoint - * with the mapping of GUID to filem as parameters. + * with the mapping of guid to file as parameters. * - * @param fileguidpairs the files to be uploaded to the server + * @param fileguidpairs the files and corresponding guids to be uploaded to the server * @returns the response as a json from the server */ export async function UploadFilesToServer(fileguidpairs: FileGuidPair | FileGuidPair[]): Promise[]> { - const formData = new FormData(); + const formData = new FormData(); if (Array.isArray(fileguidpairs)) { if (!fileguidpairs.length) { return []; @@ -45,8 +52,11 @@ export namespace Networking { ]) ); } + // If the fileguidpair has a guid to use (from the overwriteDoc) use that guid. Otherwise, + // generate a new guid. fileguidpairs.forEach(fileguidpair => formData.append(fileguidpair.guid ?? Utils.GenerateGuid(), fileguidpair.file)); } else { + // Handle the case where fileguidpairs is a single file. formData.append(fileguidpairs.guid ?? Utils.GenerateGuid(), fileguidpairs.file); } const parameters = { diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 0030af982..06b48fe96 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -1841,10 +1841,22 @@ export namespace DocUtils { }); } + /** + * uploadFilesToDocs will take in an array of Files, and create documents for the + * new files. 
+ * + * @param files an array of files that will be uploaded + * @param options options to use while uploading + * @returns + */ export async function uploadFilesToDocs(files: File[], options: DocumentOptions) { const generatedDocuments: Doc[] = []; - const fileNoGuidPairs: Networking.FileGuidPair[] = []; - files.map(file => fileNoGuidPairs.push({file})); + + // UploadFilesToServer takes an array of FileGuidPairs, + // but these files do not have overwriteDocs, so + // we do not set guid, allowing the client to generate one. + const fileNoGuidPairs: Networking.FileGuidPair[] = files.map(file => ({file})); + const upfiles = await Networking.UploadFilesToServer(fileNoGuidPairs); for (const { source: { name, type }, @@ -1857,6 +1869,8 @@ export namespace DocUtils { export function uploadFileToDoc(file: File, options: DocumentOptions, overwriteDoc: Doc) { const generatedDocuments: Doc[] = []; + // Since this file has an overwriteDoc, we can set the client tracking guid + // to the overwriteDoc's guid. Networking.UploadFilesToServer([{file, guid: overwriteDoc[Id]}]).then(upfiles => { const { source: { name, type }, diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 94f744848..ba6d7acfe 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -75,7 +75,7 @@ export default class UploadManager extends ApiManager { for (const key in files) { const f = files[key]; if (!Array.isArray(f)) { - const result = await DashUploadUtils.upload(f, key); + const result = await DashUploadUtils.upload(f, key); // key is the guid used by the client to track upload progress. 
result && !(result.result instanceof Error) && results.push(result); } } diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 971fefb5a..eaaac4e6d 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -184,7 +184,7 @@ export namespace DashUploadUtils { export async function upload(file: File, overwriteGuid?: string): Promise { const { type, path, name } = file; const types = type?.split('/') ?? []; - uploadProgress.set(overwriteGuid ?? name, 'uploading'); + uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name. const category = types[0]; let format = `.${types[1]}`; -- cgit v1.2.3-70-g09d2