Diffstat (limited to 'src/server')
-rw-r--r--   src/server/ApiManagers/UploadManager.ts   96
-rw-r--r--   src/server/DashUploadUtils.ts              77
-rw-r--r--   src/server/downsize.ts                     40
3 files changed, 58 insertions, 155 deletions
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index ea5d8cb33..391f67bbb 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -1,7 +1,7 @@
 import * as formidable from 'formidable';
 import { createReadStream, createWriteStream, unlink, writeFile } from 'fs';
-import { basename, dirname, extname, normalize } from 'path';
-import * as sharp from 'sharp';
+import * as path from 'path';
+import Jimp from 'jimp';
 import { filesDirectory, publicDirectory } from '..';
 import { retrocycle } from '../../decycler/decycler';
 import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils';
@@ -29,11 +29,11 @@
 }

 export function serverPathToFile(directory: Directory, filename: string) {
-    return normalize(`${filesDirectory}/${directory}/${filename}`);
+    return path.normalize(`${filesDirectory}/${directory}/${filename}`);
 }

 export function pathToDirectory(directory: Directory) {
-    return normalize(`${filesDirectory}/${directory}`);
+    return path.normalize(`${filesDirectory}/${directory}`);
 }

 export function clientPathToFile(directory: Directory, filename: string) {
@@ -74,6 +74,8 @@
                         filesize = value;
                     }
                 });
+                fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `upload starting`));
+                form.on('progress', e => fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `read:(${Math.round((100 * +e) / +filesize)}%) ${e} of ${filesize}`)));

                 form.keepExtensions = true;
                 form.uploadDir = pathToDirectory(Directory.parsed_files);
@@ -92,6 +94,8 @@
                         result: { name: 'failed upload', message: `${_err.message}` },
                     });
                 }
+                fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `resampling images`));
+
                 for (const key in files) {
                     const f = files[key];
                     if (!Array.isArray(f)) {
@@ -164,7 +168,7 @@
                     if (error) {
                         return res.send();
                     }
-                    await DashUploadUtils.outputResizedImages(() => createReadStream(resolvedPath), resolvedName, pathToDirectory(Directory.images));
+                    await DashUploadUtils.outputResizedImages(resolvedPath, resolvedName, pathToDirectory(Directory.images));
                     res.send({
                         accessPaths: {
                             agnostic: DashUploadUtils.getAccessPaths(Directory.images, resolvedName),
@@ -255,42 +259,20 @@
                     if (!entryName.startsWith('files/')) {
                         return;
                     }
-                    const extension = extname(entryName);
+                    const extension = path.extname(entryName);
                     const pathname = publicDirectory + '/' + entry.entryName;
                     const targetname = publicDirectory + '/' + entryName;
                     try {
                         zip.extractEntryTo(entry.entryName, publicDirectory, true, false);
                         createReadStream(pathname).pipe(createWriteStream(targetname));
-                        if (extension !== '.pdf') {
-                            const { pngs, jpgs } = AcceptableMedia;
-                            const resizers = [
-                                { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Small },
-                                { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium },
-                                { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Large },
-                            ];
-                            let isImage = false;
-                            if (pngs.includes(extension)) {
-                                resizers.forEach(element => {
-                                    element.resizer = element.resizer.png();
-                                });
-                                isImage = true;
-                            } else if (jpgs.includes(extension)) {
-                                resizers.forEach(element => {
-                                    element.resizer = element.resizer.jpeg();
-                                });
-                                isImage = true;
-                            }
-                            if (isImage) {
-                                resizers.forEach(resizer => {
-                                    createReadStream(pathname)
-                                        .on('error', e => console.log('Resizing read:' + e))
-                                        .pipe(resizer.resizer)
-                                        .on('error', e => console.log('Resizing write: ' + e))
-                                        .pipe(createWriteStream(targetname.replace('_o' + extension, resizer.suffix + extension)).on('error', e => console.log('Resizing write: ' + e)));
-                                });
-                            }
-                        }
-                        unlink(pathname, () => {});
+                        Jimp.read(pathname).then(img => {
+                            DashUploadUtils.imageResampleSizes(extension).forEach(({ width, suffix }) => {
+                                const outputPath = InjectSize(targetname, suffix);
+                                if (!width) createReadStream(pathname).pipe(createWriteStream(outputPath));
+                                else img = img.resize(width, Jimp.AUTO).write(outputPath);
+                            });
+                            unlink(pathname, () => {});
+                        });
                     } catch (e) {
                         console.log(e);
                     }
@@ -346,7 +328,7 @@
             method: Method.POST,
             subscription: '/uploadURI',
             secureHandler: ({ req, res }) => {
-                const uri = req.body.uri;
+                const uri: any = req.body.uri;
                 const filename = req.body.name;
                 const origSuffix = req.body.nosuffix ? SizeSuffix.None : SizeSuffix.Original;
                 const deleteFiles = req.body.replaceRootFilename;
@@ -362,36 +344,16 @@
                         .map((f: any) => fs.unlinkSync(path + f));
                 }
                 return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, origSuffix))).then((savedName: string) => {
-                    const ext = extname(savedName).toLowerCase();
-                    const { pngs, jpgs } = AcceptableMedia;
-                    const resizers = !origSuffix
-                        ? [{ resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium }]
-                        : [
-                              { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Small },
-                              { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium },
-                              { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Large },
-                          ];
-                    let isImage = false;
-                    if (pngs.includes(ext)) {
-                        resizers.forEach(element => {
-                            element.resizer = element.resizer.png();
-                        });
-                        isImage = true;
-                    } else if (jpgs.includes(ext)) {
-                        resizers.forEach(element => {
-                            element.resizer = element.resizer.jpeg();
-                        });
-                        isImage = true;
-                    }
-                    if (isImage) {
-                        resizers.forEach(resizer => {
-                            const path = serverPathToFile(Directory.images, InjectSize(filename, resizer.suffix) + ext);
-                            createReadStream(savedName)
-                                .on('error', e => console.log('Resizing read:' + e))
-                                .pipe(resizer.resizer)
-                                .on('error', e => console.log('Resizing write: ' + e))
-                                .pipe(createWriteStream(path).on('error', e => console.log('Resizing write: ' + e)));
-                        });
+                    const ext = path.extname(savedName).toLowerCase();
+                    if (AcceptableMedia.imageFormats.includes(ext)) {
+                        Jimp.read(savedName).then(img =>
+                            (!origSuffix ? [{ width: 400, suffix: SizeSuffix.Medium }] : Object.values(DashUploadUtils.Sizes)) //
+                                .forEach(({ width, suffix }) => {
+                                    const outputPath = InjectSize(filename, suffix);
+                                    if (!width) createReadStream(savedName).pipe(createWriteStream(outputPath));
+                                    else img = img.resize(width, Jimp.AUTO).write(outputPath);
+                                })
+                        );
                     }
                     res.send(clientPathToFile(Directory.images, filename + ext));
                 });
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index 19cb3f240..2053ae448 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -5,8 +5,7 @@ import { File } from 'formidable';
 import { createReadStream, createWriteStream, existsSync, readFileSync, rename, unlinkSync, writeFile } from 'fs';
 import * as path from 'path';
 import { basename } from 'path';
-import * as sharp from 'sharp';
-import { Readable, Stream } from 'stream';
+import Jimp from 'jimp';
 import { filesDirectory, publicDirectory } from '.';
 import { Opt } from '../fields/Doc';
 import { ParsedPDF } from '../server/PdfTypes';
@@ -55,9 +54,9 @@
     }

     export const Sizes: { [size: string]: Size } = {
-        SMALL: { width: 100, suffix: SizeSuffix.Small },
+        LARGE: { width: 800, suffix: SizeSuffix.Large },
         MEDIUM: { width: 400, suffix: SizeSuffix.Medium },
-        LARGE: { width: 900, suffix: SizeSuffix.Large },
+        SMALL: { width: 100, suffix: SizeSuffix.Small },
     };

     export function validateExtension(url: string) {
@@ -197,7 +196,7 @@
         const isAzureOn = usingAzure();
         const { type, path, name } = file;
         const types = type?.split('/') ?? [];
-        uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
+        // uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
         const category = types[0];
         let format = `.${types[1]}`;

@@ -367,7 +366,7 @@
     }

     export interface ImageResizer {
-        resizer?: sharp.Sharp;
+        width?: any; // sharp.Sharp;
         suffix: SizeSuffix;
     }

@@ -540,7 +539,7 @@
                 writtenFiles = {};
             }
         } else {
-            writtenFiles = await outputResizedImages(() => request(requestable), resolved, pathToDirectory(Directory.images));
+            writtenFiles = await outputResizedImages(metadata.source, resolved, pathToDirectory(Directory.images));
         }
         for (const suffix of Object.keys(writtenFiles)) {
             information.accessPaths[suffix] = getAccessPaths(images, writtenFiles[suffix]);
         }
@@ -595,57 +594,39 @@
      * @param outputDirectory the directory to output to, usually Directory.Images
      * @returns a map with suffixes as keys and resized filenames as values.
      */
-    export async function outputResizedImages(streamProvider: () => Stream | Promise<Stream>, outputFileName: string, outputDirectory: string) {
+    export async function outputResizedImages(sourcePath: string, outputFileName: string, outputDirectory: string) {
         const writtenFiles: { [suffix: string]: string } = {};
-        for (const { resizer, suffix } of resizers(path.extname(outputFileName))) {
-            const outputPath = path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
-            await new Promise<void>(async (resolve, reject) => {
-                const source = streamProvider();
-                let readStream = source instanceof Promise ? await source : source;
-                let error = false;
-                if (resizer) {
-                    readStream = readStream.pipe(resizer.withMetadata()).on('error', async args => {
-                        error = true;
-                        if (error) {
-                            const source2 = streamProvider();
-                            let readStream2: Stream | undefined;
-                            readStream2 = source2 instanceof Promise ? await source2 : source2;
-                            readStream2?.pipe(createWriteStream(outputPath)).on('error', resolve).on('close', resolve);
-                        }
-                    });
-                }
-                !error && readStream?.pipe(createWriteStream(outputPath)).on('error', resolve).on('close', resolve);
+        const sizes = imageResampleSizes(path.extname(outputFileName));
+        const outputPath = (suffix: SizeSuffix) => path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
+        await Promise.all(
+            sizes.filter(({ width }) => !width).map(({ suffix }) =>
+                new Promise<void>(res => createReadStream(sourcePath).pipe(createWriteStream(outputPath(suffix))).on('close', res))
+            )); // prettier-ignore
+        return Jimp.read(sourcePath)
+            .then(async img => {
+                await Promise.all( sizes.filter(({ width }) => width) .map(({ width, suffix }) =>
+                    img = img.resize(width, Jimp.AUTO).write(outputPath(suffix))
+                )); // prettier-ignore
+                return writtenFiles;
+            })
+            .catch(e => {
+                console.log('ERROR' + e);
+                return writtenFiles;
             });
-        }
-        return writtenFiles;
     }

     /**
      * define the resizers to use
      * @param ext the extension
-     * @returns an array of resizer functions from sharp
+     * @returns an array of resize descriptions
      */
-    function resizers(ext: string): DashUploadUtils.ImageResizer[] {
+    export function imageResampleSizes(ext: string): DashUploadUtils.ImageResizer[] {
         return [
             { suffix: SizeSuffix.Original },
-            ...Object.values(DashUploadUtils.Sizes).map(({ suffix, width }) => {
-                let initial: sharp.Sharp | undefined = sharp({ failOnError: false }).resize(width, undefined, { withoutEnlargement: true });
-                if (pngs.includes(ext)) {
-                    initial = initial.png(pngOptions);
-                } else if (jpgs.includes(ext)) {
-                    initial = initial.jpeg();
-                } else if (webps.includes(ext)) {
-                    initial = initial.webp();
-                } else if (tiffs.includes(ext)) {
-                    initial = initial.tiff();
-                } else if (ext === '.gif') {
-                    initial = undefined;
-                }
-                return {
-                    resizer: suffix === '_o' ? undefined : initial,
-                    suffix,
-                };
-            }),
+            ...[...(AcceptableMedia.imageFormats.includes(ext.toLowerCase()) ? Object.values(DashUploadUtils.Sizes) : [])].map(({ suffix, width }) => ({
+                width,
+                suffix,
+            })),
         ];
     }
 }
diff --git a/src/server/downsize.ts b/src/server/downsize.ts
deleted file mode 100644
index 382994e2d..000000000
--- a/src/server/downsize.ts
+++ /dev/null
@@ -1,40 +0,0 @@
-import * as fs from 'fs';
-import * as sharp from 'sharp';
-
-const folder = "./src/server/public/files/";
-const pngTypes = ["png", "PNG"];
-const jpgTypes = ["jpg", "JPG", "jpeg", "JPEG"];
-const smallResizer = sharp().resize(100);
-fs.readdir(folder, async (err, files) => {
-    if (err) {
-        console.log("readdir:" + err);
-        return;
-    }
-    // files.forEach(file => {
-    //     if (file.includes("_s") || file.includes("_m") || file.includes("_l")) {
-    //         fs.unlink(folder + file, () => { });
-    //     }
-    // });
-    for (const file of files) {
-        const filesplit = file.split(".");
-        const resizers = [
-            { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" },
-            { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" },
-            { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" },
-        ];
-        if (pngTypes.some(type => file.endsWith(type))) {
-            resizers.forEach(element => {
-                element.resizer = element.resizer.png();
-            });
-        } else if (jpgTypes.some(type => file.endsWith(type))) {
-            resizers.forEach(element => {
-                element.resizer = element.resizer.jpeg();
-            });
-        } else {
-            continue;
-        }
-        resizers.forEach(resizer => {
-            fs.createReadStream(folder + file).pipe(resizer.resizer).pipe(fs.createWriteStream(folder + filesplit[0] + resizer.suffix + "." + filesplit[1]));
-        });
-    }
-});
\ No newline at end of file
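
Note: the same Jimp resampling pattern now appears three times above (zip extraction in UploadManager, the /uploadURI handler, and DashUploadUtils.outputResizedImages). imageResampleSizes(ext) yields one entry per output; the entry with no width (the original) is copied byte for byte, and every entry with a width is produced by decoding the source once with Jimp and writing a resized copy. A condensed sketch of that flow using the exports this diff touches; resampleSketch itself is hypothetical and only illustrates the pattern, and the import path is assumed:

import { createReadStream, createWriteStream } from 'fs';
import * as path from 'path';
import Jimp from 'jimp';
import { DashUploadUtils, InjectSize, SizeSuffix } from './DashUploadUtils';

// Hypothetical helper for illustration; imageResampleSizes, InjectSize and SizeSuffix
// are the real exports introduced or reused by this diff.
async function resampleSketch(sourcePath: string, outputName: string, outputDir: string): Promise<string[]> {
    const written: string[] = [];
    const target = (suffix: SizeSuffix) => {
        const file = path.resolve(outputDir, InjectSize(outputName, suffix));
        written.push(file);
        return file;
    };
    const sizes = DashUploadUtils.imageResampleSizes(path.extname(outputName));
    // Entries without a width (SizeSuffix.Original) are streamed through untouched.
    await Promise.all(
        sizes
            .filter(({ width }) => !width)
            .map(({ suffix }) => new Promise<void>(resolve => createReadStream(sourcePath).pipe(createWriteStream(target(suffix))).on('close', () => resolve())))
    );
    // Entries with a width share one decoded Jimp image and each get a resized copy.
    const img = await Jimp.read(sourcePath);
    sizes.filter(({ width }) => width).forEach(({ width, suffix }) => img.resize(width, Jimp.AUTO).write(target(suffix)));
    return written;
}

Unlike sharp, which resizes through native libvips streams, Jimp decodes the full bitmap into JS memory before resizing, so large uploads cost more memory per request; the trade-off is dropping the native sharp dependency.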
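
Note: the new progress wiring in UploadManager can be read in isolation. formidable's IncomingForm emits a 'progress' event while the request body is being read, and the handler above writes a percentage string into DashUploadUtils.uploadProgress for every guid the client supplied, which the client can then poll. A minimal sketch of the same pattern; how fileguids reaches the handler is not shown in this diff, so it is a plain parameter here, and the sketch uses formidable's own bytesExpected where the handler above divides by a separately posted filesize field:

import * as formidable from 'formidable';
import { DashUploadUtils } from './DashUploadUtils';

// Illustrative only: req is the incoming multipart request, fileguids the
// semicolon-separated guids the client wants progress reported under.
function parseWithProgress(req: any, fileguids: string) {
    const form = new formidable.IncomingForm();
    fileguids.split(';').forEach(guid => DashUploadUtils.uploadProgress.set(guid, 'upload starting'));
    form.on('progress', (bytesReceived: number, bytesExpected: number) => {
        const pct = Math.round((100 * bytesReceived) / Math.max(1, bytesExpected));
        fileguids.split(';').forEach(guid => DashUploadUtils.uploadProgress.set(guid, `read:(${pct}%) ${bytesReceived} of ${bytesExpected}`));
    });
    form.parse(req);
}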