diff --git a/src/upload.ts b/src/upload.ts
index dba4c45..ad29d09 100644
--- a/src/upload.ts
+++ b/src/upload.ts
@@ -1,14 +1,12 @@
 import fs from 'fs';
 import express from 'express';
 import multer from 'multer';
-import sharp from 'sharp';
 import { capture } from '@snapshot-labs/snapshot-sentry';
-import { rpcError, rpcSuccess } from './utils';
+import { rpcError, rpcSuccess, processImage } from './utils';
 import uploadToProviders from './providers/';
 import { IMAGE_PROVIDERS } from './providers/utils';
 
 const MAX_INPUT_SIZE = 1024 * 1024;
-const MAX_IMAGE_DIMENSION = 1500;
 
 const router = express.Router();
 const upload = multer({
@@ -22,20 +20,17 @@ router.post('/upload', async (req, res) => {
       if (err) return rpcError(res, 400, err.message);
       if (!req.file) return rpcError(res, 400, 'No file submitted');
 
-      const transformer = sharp()
-        .resize({
-          width: MAX_IMAGE_DIMENSION,
-          height: MAX_IMAGE_DIMENSION,
-          fit: 'inside'
-        })
-        .webp({ lossless: true });
-
-      const buffer = await fs
-        .createReadStream(req.file?.path as string)
-        .pipe(transformer)
-        .toBuffer();
+      let processedImage: Buffer;
+      try {
+        processedImage = await processImage(req.file?.path);
+      } catch (e: any) {
+        if (e.message === 'Input buffer contains unsupported image format') {
+          return rpcError(res, 415, 'Unsupported file type');
+        }
+        throw e;
+      }
 
-      const result = await uploadToProviders(IMAGE_PROVIDERS, 'image', buffer);
+      const result = await uploadToProviders(IMAGE_PROVIDERS, 'image', processedImage);
       const file = {
         cid: result.cid,
         provider: result.provider
@@ -43,10 +38,6 @@ router.post('/upload', async (req, res) => {
 
       return rpcSuccess(res, file);
     } catch (e: any) {
-      if (e.message === 'Input buffer contains unsupported image format') {
-        return rpcError(res, 415, 'Unsupported file type');
-      }
-
       capture(e);
       return rpcError(res, 500, e);
     } finally {
diff --git a/src/utils.ts b/src/utils.ts
index 95e7060..e668cb2 100644
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -1,7 +1,10 @@
+import sharp from 'sharp';
+import fs from 'fs';
 import { createHash } from 'crypto';
 import { Response } from 'express';
 
 export const MAX = 10e4;
+const MAX_IMAGE_DIMENSION = 1500;
 
 export function rpcSuccess(res: Response, result: any, id = '') {
   res.json({
@@ -26,3 +29,15 @@ export function rpcError(res: Response, code: number, e: Error | string, id = nu
 export function sha256(input: string | Buffer) {
   return createHash('sha256').update(input).digest('hex');
 }
+
+export async function processImage(path: string) {
+  const transformer = sharp({ failOnError: false })
+    .resize({
+      width: MAX_IMAGE_DIMENSION,
+      height: MAX_IMAGE_DIMENSION,
+      fit: 'inside'
+    })
+    .webp({ lossless: true });
+
+  return await fs.createReadStream(path).pipe(transformer).toBuffer();
+}
diff --git a/test/unit/providers/fixtures/corrupted.jpeg b/test/unit/providers/fixtures/corrupted.jpeg
new file mode 100644
index 0000000..ad8a199
Binary files /dev/null and b/test/unit/providers/fixtures/corrupted.jpeg differ
diff --git a/test/unit/utils.test.ts b/test/unit/utils.test.ts
new file mode 100644
index 0000000..5ed0b34
--- /dev/null
+++ b/test/unit/utils.test.ts
@@ -0,0 +1,28 @@
+import sharp from 'sharp';
+import path from 'path';
+import { processImage } from '../../src/utils';
+
+describe('utils', () => {
+  describe('processImage()', () => {
+    const valid_image_path = path.join(__dirname, './providers/fixtures/sample.webp');
+    const corrupted_image_path = path.join(__dirname, './providers/fixtures/corrupted.jpeg');
+
+    it('returns a buffer', () => {
+      return expect(processImage(valid_image_path)).resolves.toBeInstanceOf(Buffer);
+    });
+
+    it('resizes the image to fit the max dimensions', async () => {
+      const result = await processImage(valid_image_path);
+      const metadata = await sharp(result).metadata();
+
+      expect(metadata.width).toBe(1500);
+      return expect(metadata.height).toBe(1004);
+    });
+
+    describe('on a corrupted image', () => {
+      it('ignores errors and returns the image buffer', async () => {
+        await expect(processImage(corrupted_image_path)).resolves.toBeInstanceOf(Buffer);
+      });
+    });
+  });
+});