Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add image type support to proxy endpoint #196

Open
wants to merge 20 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
# Pineapple

Use [pineapple.js](https://github.com/snapshot-labs/pineapple.js) to interact with this service.

## Error codes

All endpoints will respond with a [JSON-RPC 2.0](https://www.jsonrpc.org/specification) error response on error:
Expand All @@ -17,7 +19,8 @@ All endpoints will respond with a [JSON-RPC 2.0](https://www.jsonrpc.org/specifi

| Description | `CODE` | `MESSAGE` |
| --------------------------------------------------------------- | ------ | ------------------------------------------- |
| Uploaded file exceed 1MB | 400 | File too large |
| Uploaded image file exceeds 1MB | 400 | File too large |
| Uploaded JSON file exceeds 100kb | 400 | File too large |
| Uploaded file is not a valid image | 415 | Unsupported file type |
| Uploaded payload does not contain a fileSize | 400 | No file submitted |
| Uploaded payload does not contain a `fileSize` | 400 | No file submitted |
| Server error | 500 | (Will depend on the error) |
41 changes: 27 additions & 14 deletions src/aws.ts
Original file line number Diff line number Diff line change
@@ -1,52 +1,65 @@
import * as AWS from '@aws-sdk/client-s3';
import { Readable } from 'stream';
import constants from './constants.json';

let client;
const region = process.env.AWS_REGION;
const endpoint = process.env.AWS_ENDPOINT || undefined;
if (region) client = new AWS.S3({ region, endpoint });
const dir = 'pineapple';

async function streamToString(stream: Readable): Promise<string> {
async function streamToBuffer(stream: Readable): Promise<Buffer> {
return await new Promise((resolve, reject) => {
const chunks: Uint8Array[] = [];
stream.on('data', chunk => chunks.push(chunk));
stream.on('error', reject);
stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf-8')));
stream.on('end', () => resolve(Buffer.concat(chunks)));
});
}

export async function set(key, value) {
if (!client) return;
export async function set(key: string, value: string | Buffer) {
if (!client) {
return Promise.reject('Cache not setup');
}

try {
return await client.putObject({
Bucket: process.env.AWS_BUCKET_NAME,
Key: `public/${dir}/${key}`,
Body: JSON.stringify(value),
ContentType: 'application/json; charset=utf-8'
Body: value,
ContentType: constants[value instanceof Buffer ? 'image' : 'json'].contentType
});
} catch (e) {
console.log('Store cache failed', e);
throw e;
}
}

export async function get(key) {
if (!client) return false;
export async function get(key: string) {
if (!client) {
return Promise.reject('Cache not setup');
}

try {
const { Body } = await client.getObject({
const { Body, ContentType } = await client.getObject({
Bucket: process.env.AWS_BUCKET_NAME,
Key: `public/${dir}/${key}`
});
const str = await streamToString(Body);
return JSON.parse(str);
const result = await streamToBuffer(Body);

if (ContentType === constants.json.contentType) {
return result.toString('utf8');
}
return result;
} catch (e) {
return false;
return Promise.reject('Get cache failed');
}
}

export async function remove(key) {
if (!client) return false;
if (!client) {
return Promise.reject('Cache not setup');
}

try {
return await client.deleteObject({
Bucket: process.env.AWS_BUCKET_NAME,
Expand Down
12 changes: 12 additions & 0 deletions src/constants.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"json": {
"maxFileSize": 100000,
"contentType": "application/json; charset=utf-8"
},
"image": {
"maxFileSize": 1048576,
"maxWidth": 1500,
"maxHeight": 1500,
"contentType": "image/webp"
}
}
48 changes: 30 additions & 18 deletions src/middlewares/useProxyCache.ts
Original file line number Diff line number Diff line change
@@ -1,42 +1,54 @@
import { capture } from '@snapshot-labs/snapshot-sentry';
import { MAX } from '../utils';
import { getMaxFileSize } from '../utils';
import { get, set } from '../aws';
import { ipfsGatewaysCacheHitCount, ipfsGatewaysCacheSize } from '../metrics';
import constants from '../constants.json';

/**
 * This middleware serves a cached response if one exists; otherwise it lets
 * the controller run and caches its result when it is smaller than 1MB for
 * images and 100kb for JSON.
 */
export default async function useProxyCache(req, res, next) {
const { cid } = req.params;

const cache = await get(cid);
if (cache) {
const cachedSize = Buffer.from(JSON.stringify(cache)).length;
ipfsGatewaysCacheHitCount.inc({ status: 'HIT' });
ipfsGatewaysCacheSize.inc({ status: 'HIT' }, cachedSize);
return res.json(cache);
try {
const cache = await get(cid);
updateCacheMetrics('HIT', Buffer.from(cache).length);
res.set('Content-Type', constants[cache instanceof Buffer ? 'image' : 'json'].contentType);

return res.send(cache);
} catch (e) {
// Cache does not exist
}

const oldJson = res.json;
res.json = async body => {
res.locals.body = body;
const oldSend = res.send;
res.send = async buffer => {
res.locals.buffer = buffer;

if (res.statusCode === 200 && body) {
if (res.statusCode === 200 && buffer) {
try {
const size = Buffer.from(JSON.stringify(body)).length;
if (size <= MAX) {
ipfsGatewaysCacheHitCount.inc({ status: 'MISS' });
ipfsGatewaysCacheSize.inc({ status: 'MISS' }, size);
await set(cid, body);
const size = buffer.length;
const contentType = res.get('Content-Type');

if (size <= getMaxFileSize(contentType)) {
updateCacheMetrics('HIT', size);
await set(
cid,
contentType.includes(constants.json.contentType) ? buffer.toString('utf8') : buffer
);
}
} catch (e) {
capture(e);
}
}

return oldJson.call(res, body);
return oldSend.call(res, buffer);
};

next();
}

// Increments both the hit-count and byte-size cache metrics for the given
// lookup outcome ('HIT' when served from cache, 'MISS' when freshly fetched).
function updateCacheMetrics(status: 'HIT' | 'MISS', size: number) {
  ipfsGatewaysCacheHitCount.inc({ status });
  ipfsGatewaysCacheSize.inc({ status }, size);
}
12 changes: 8 additions & 4 deletions src/providers/index.ts
Original file line number Diff line number Diff line change
@@ -1,22 +1,26 @@
import { capture } from '@snapshot-labs/snapshot-sentry';
import { timeProvidersUpload, providersUploadSize, countOpenProvidersRequest } from '../metrics';
import { providersMap } from './utils';

type ProviderType = 'image' | 'json';

export default function uploadToProviders(providers: string[], type: ProviderType, params: any) {
export default function uploadToProviders(
providers: string[],
type: ProviderType,
payload: string | Buffer
) {
const configuredProviders = providers.filter(p => providersMap[p].isConfigured());

return Promise.any(
configuredProviders.map(async name => {
const type: ProviderType = params instanceof Buffer ? 'image' : 'json';
const end = timeProvidersUpload.startTimer({ name, type });
let status = 0;

try {
countOpenProvidersRequest.inc({ name, type });

const result = await providersMap[name].set(params);
const size = (params instanceof Buffer ? params : Buffer.from(JSON.stringify(params)))
const result = await providersMap[name].set(payload);
const size = (payload instanceof Buffer ? payload : Buffer.from(JSON.stringify(payload)))
.length;
providersUploadSize.inc({ name, type }, size);
status = 1;
Expand Down
19 changes: 8 additions & 11 deletions src/proxy.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import useProxyCache from './middlewares/useProxyCache';

const router = express.Router();
const UNSUPPORTED_FILE_TYPE = 'unsupported file type';
const ALLOWED_CONTENT_TYPE = [/text\/plain/, /application\/json/, /image\/.*/];

router.get('^/ipfs/:cid([0-9a-zA-Z]+)$', useProxyCache, async (req, res) => {
try {
Expand All @@ -29,19 +30,14 @@ router.get('^/ipfs/:cid([0-9a-zA-Z]+)$', useProxyCache, async (req, res) => {
return Promise.reject(response.status);
}

if (!['text/plain', 'application/json'].includes(response.headers.get('content-type'))) {
return Promise.reject(UNSUPPORTED_FILE_TYPE);
}
const contentType = response.headers.get('content-type');

let json;
try {
json = await response.json();
} catch (e: any) {
return Promise.reject(e);
if (ALLOWED_CONTENT_TYPE.some(type => type.test(contentType))) {
status = 1;
return { gateway, result: await response.buffer(), contentType };
}

status = 1;
return { gateway, json };
return Promise.reject(UNSUPPORTED_FILE_TYPE);
} finally {
end({ status });
countOpenGatewaysRequest.dec({ name: gateway });
Expand All @@ -50,7 +46,8 @@ router.get('^/ipfs/:cid([0-9a-zA-Z]+)$', useProxyCache, async (req, res) => {
);
ipfsGatewaysReturnCount.inc({ name: result.gateway });

return res.json(result.json);
res.set('Content-Type', result.contentType);
return res.send(result.result);
} catch (e) {
if (e instanceof AggregateError) {
return res.status(e.errors.includes(UNSUPPORTED_FILE_TYPE) ? 415 : 400).json();
Expand Down
6 changes: 3 additions & 3 deletions src/rpc.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import express from 'express';
import { capture } from '@snapshot-labs/snapshot-sentry';
import { MAX, rpcError, rpcSuccess } from './utils';
import { getMaxFileSize, rpcError, rpcSuccess } from './utils';
import { set as setAws } from './aws';
import uploadToProviders from './providers/';
import { JSON_PROVIDERS } from './providers/utils';
Expand All @@ -16,11 +16,11 @@ router.post('/', async (req, res) => {

try {
const size = Buffer.from(JSON.stringify(params)).length;
if (size > MAX) return rpcError(res, 400, 'File too large', id);
if (size > getMaxFileSize('json')) return rpcError(res, 400, 'File too large', id);

const result = await uploadToProviders(JSON_PROVIDERS, 'json', params);
try {
await setAws(result.cid, params);
await setAws(result.cid, JSON.stringify(params));
} catch (e: any) {
capture(e);
}
Expand Down
29 changes: 10 additions & 19 deletions src/upload.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,16 @@
import fs from 'fs';
import express from 'express';
import multer from 'multer';
import sharp from 'sharp';
import { capture } from '@snapshot-labs/snapshot-sentry';
import { rpcError, rpcSuccess } from './utils';
import { getMaxFileSize, rpcError, rpcSuccess, preProcessImage } from './utils';
import uploadToProviders from './providers/';
import { IMAGE_PROVIDERS } from './providers/utils';

const MAX_INPUT_SIZE = 1024 * 1024;
const MAX_IMAGE_DIMENSION = 1500;
import { set as setAws } from './aws';

const router = express.Router();
const upload = multer({
dest: 'uploads/',
limits: { fileSize: MAX_INPUT_SIZE }
limits: { fileSize: getMaxFileSize('image') }
}).single('file');

router.post('/upload', async (req, res) => {
Expand All @@ -22,25 +19,19 @@ router.post('/upload', async (req, res) => {
if (err) return rpcError(res, 400, err.message);
if (!req.file) return rpcError(res, 400, 'No file submitted');

const transformer = sharp()
.resize({
width: MAX_IMAGE_DIMENSION,
height: MAX_IMAGE_DIMENSION,
fit: 'inside'
})
.webp({ lossless: true });

const buffer = await fs
.createReadStream(req.file?.path as string)
.pipe(transformer)
.toBuffer();

const buffer = await preProcessImage(await fs.createReadStream(req.file?.path as string));
const result = await uploadToProviders(IMAGE_PROVIDERS, 'image', buffer);
const file = {
cid: result.cid,
provider: result.provider
};

try {
await setAws(result.cid, buffer);
} catch (e: any) {
capture(e);
}

return rpcSuccess(res, file);
} catch (e: any) {
if (e.message === 'Input buffer contains unsupported image format') {
Expand Down
25 changes: 23 additions & 2 deletions src/utils.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import { createHash } from 'crypto';
import { Response } from 'express';

export const MAX = 10e4;
import constants from './constants.json';
import sharp from 'sharp';
import { ReadStream } from 'fs';

export function rpcSuccess(res: Response, result: any, id = '') {
res.json({
Expand All @@ -26,3 +27,23 @@ export function rpcError(res: Response, code: number, e: Error | string, id = nu
export function sha256(input: string | Buffer) {
return createHash('sha256').update(input).digest('hex');
}

/**
 * Maximum accepted payload size in bytes for the given content type:
 * `image/*` (or the bare string 'image') gets the image limit, everything
 * else — including undefined — falls back to the JSON limit.
 */
export function getMaxFileSize(contentType: string | undefined): number {
  const category = contentType?.split('/')[0];

  return category === 'image' ? constants.image.maxFileSize : constants.json.maxFileSize;
}

/**
 * Normalizes an uploaded image: resized to fit within the configured
 * max width/height (`fit: 'inside'` preserves aspect ratio) and re-encoded
 * as lossless webp.
 *
 * @param stream raw uploaded file contents
 * @returns promise of the processed image bytes
 */
export function preProcessImage(stream: ReadStream) {
  const { maxWidth, maxHeight } = constants.image;
  const pipeline = sharp()
    .resize({ width: maxWidth, height: maxHeight, fit: 'inside' })
    .webp({ lossless: true });

  return stream.pipe(pipeline).toBuffer();
}
9 changes: 9 additions & 0 deletions test/e2e/fixtures/image.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
import fs from 'fs';
import path from 'path';

// Resolves a fixture file in this directory to its raw bytes.
const readFixture = (name: string) => fs.promises.readFile(path.join(__dirname, name));

/**
 * Image fixture for e2e tests: a valid webp payload plus an alternate one.
 * NOTE(review): `cid` is presumably the IPFS CID expected for `content` —
 * confirm against the specs that consume this fixture.
 */
export default {
  contentType: 'image/webp',
  content: readFixture('./valid.webp'),
  alternateContent: readFixture('./valid-2.webp'),
  cid: 'bafkreigeulwpsgbb4o5ykgsbpni2trgk6mzdu72f6hienk3b6edol5iyom'
};
6 changes: 6 additions & 0 deletions test/e2e/fixtures/json.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
/**
 * JSON fixture for e2e tests: a small payload and an alternate variant.
 * NOTE(review): `cid` is presumably the IPFS CID expected for `content` —
 * confirm against the specs that consume this fixture.
 */
export default {
  contentType: 'application/json',
  content: { status: 'OK' },
  alternateContent: { status: 'CACHED' },
  cid: 'bafkreib5epjzumf3omr7rth5mtcsz4ugcoh3ut4d46hx5xhwm4b3pqr2vi'
};
Loading