Skip to content

Commit

Permalink
fmt
Browse files Browse the repository at this point in the history
  • Loading branch information
talentlessguy committed Nov 23, 2024
1 parent 488bd48 commit 96a7d9a
Show file tree
Hide file tree
Showing 3 changed files with 68 additions and 62 deletions.
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@
"test:coverage": "c8 --include=src pnpm test",
"test:report": "c8 report --reporter=text-lcov > coverage.lcov",
"build": "tsc -p tsconfig.build.json",
"prepublishOnly": "pnpm build && pnpm test"
"prepublishOnly": "pnpm build && pnpm test",
"check": "biome check --write"
},
"packageManager": "[email protected]",
"publishConfig": {
Expand Down
117 changes: 61 additions & 56 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,63 +37,65 @@ export const p =
payloadLimit = defaultPayloadLimit,
payloadLimitErrorFn: LimitErrorFn = defaultErrorFn
) =>
async (req: ReqWithBody<T>, _res: Response, next?: (err?: any) => void) => {
try {
const body: Buffer[] = []

for await (const chunk of req) {
const totalSize = body.reduce((total, buffer) => total + buffer.byteLength, 0)
if (totalSize > payloadLimit) throw payloadLimitErrorFn(payloadLimit)
body.push(chunk as Buffer)
}

return fn(Buffer.concat(body))
} catch (e) {
next?.(e)
async (req: ReqWithBody<T>, _res: Response, next?: (err?: any) => void) => {
try {
const body: Buffer[] = []

for await (const chunk of req) {
const totalSize = body.reduce((total, buffer) => total + buffer.byteLength, 0)
if (totalSize > payloadLimit) throw payloadLimitErrorFn(payloadLimit)
body.push(chunk as Buffer)
}

return fn(Buffer.concat(body))
} catch (e) {
next?.(e)
}
}

/**
 * Build a middleware that parses the request payload with a user-supplied function.
 * @param fn transformer applied to the concatenated raw body buffer; its result becomes `req.body`
 */
const custom =
  <T = any>(fn: (body: Buffer) => any) =>
  async (req: ReqWithBody, _res: Response, next?: NextFunction) => {
    // Methods that carry no payload (per hasBody) are passed through untouched.
    const shouldParse = hasBody(req.method!)
    if (shouldParse) {
      const parse = p<T>(fn)
      req.body = await parse(req, _res, next)
    }
    // NOTE(review): `p` forwards parse errors to next(err) itself, after which this
    // plain next() still fires — confirm downstream handlers tolerate that ordering.
    next?.()
  }

/**
* Parse JSON payload
* @param options
*/
const json =
({ payloadLimit, payloadLimitErrorFn }: ParserOptions = {}) =>
async (req: ReqWithBody, res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p(
(x) => {
const str = td.decode(x)
return str ? JSON.parse(str) : {}
},
payloadLimit,
payloadLimitErrorFn
)(req, res, next)
} next?.()
async (req: ReqWithBody, res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p(
(x) => {
const str = td.decode(x)
return str ? JSON.parse(str) : {}
},
payloadLimit,
payloadLimitErrorFn
)(req, res, next)
}
next?.()
}

/**
* Parse raw payload
* @param options
*/
const raw =
({ payloadLimit, payloadLimitErrorFn }: ParserOptions = {}) =>
async (req: ReqWithBody, _res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p((x) => x, payloadLimit, payloadLimitErrorFn)(req, _res, next)
} next?.()
async (req: ReqWithBody, _res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p((x) => x, payloadLimit, payloadLimitErrorFn)(req, _res, next)
}
next?.()
}

// Shared UTF-8 decoder reused by the text-based parsers below (json, text, multipart).
const td = new TextDecoder()
/**
 * Parse text payload
 * @param options
 */
const text =
({ payloadLimit, payloadLimitErrorFn }: ParserOptions = {}) =>
async (req: ReqWithBody, _res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p((x) => td.decode(x), payloadLimit, payloadLimitErrorFn)(req, _res, next)
} next?.()
async (req: ReqWithBody, _res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p((x) => td.decode(x), payloadLimit, payloadLimitErrorFn)(req, _res, next)
}
next?.()
}

/**
* Parse urlencoded payload
* @param options
*/
const urlencoded =
({ payloadLimit, payloadLimitErrorFn }: ParserOptions = {}) =>
async (req: ReqWithBody, _res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p(
(x) => Object.fromEntries(new URLSearchParams(x.toString()).entries()),
payloadLimit,
payloadLimitErrorFn
)(req, _res, next)
} next?.()
async (req: ReqWithBody, _res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p(
(x) => Object.fromEntries(new URLSearchParams(x.toString()).entries()),
payloadLimit,
payloadLimitErrorFn
)(req, _res, next)
}
next?.()
}

const getBoundary = (contentType: string) => {
const match = /boundary=(.+);?/.exec(contentType)
Expand Down Expand Up @@ -195,18 +199,19 @@ type MultipartOptions = Partial<{
*/
const multipart =
({ payloadLimit = Number.POSITIVE_INFINITY, payloadLimitErrorFn, ...opts }: MultipartOptions & ParserOptions = {}) =>
async (req: ReqWithBody, res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p(
(x) => {
const boundary = getBoundary(req.headers['content-type']!)
if (boundary) return parseMultipart(td.decode(x), boundary, opts)
return {}
},
payloadLimit,
payloadLimitErrorFn
)(req, res, next)
} next?.()
async (req: ReqWithBody, res: Response, next?: NextFunction) => {
if (hasBody(req.method!)) {
req.body = await p(
(x) => {
const boundary = getBoundary(req.headers['content-type']!)
if (boundary) return parseMultipart(td.decode(x), boundary, opts)
return {}
},
payloadLimit,
payloadLimitErrorFn
)(req, res, next)
}
next?.()
}

// Public middleware factories; the low-level `p` combinator is exported inline where it is defined.
export { custom, json, raw, text, urlencoded, multipart }
10 changes: 5 additions & 5 deletions test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -560,14 +560,14 @@ describe('Framework integration', { timeout: 500 }, () => {

await fetch('/json', {
body: JSON.stringify({ hello: 'world' }),
method: 'POST',
})
.expect(200, { hello: 'world' })
method: 'POST'
}).expect(200, { hello: 'world' })

await fetch('/url', {
body: 'hello=world',
method: 'POST'
})
.expect(200, { hello: 'world' }).then(() => server.close())
.expect(200, { hello: 'world' })
.then(() => server.close())
})
})
})

0 comments on commit 96a7d9a

Please sign in to comment.