Skip to content

Commit

Permalink
Update index.d.ts (#154)
Browse files — browse the repository at this point in the history
* Update MultipartFields

If the field names are the same, the value of the field will be an array.

* Add tests

* Update more definitions

* Update onFile handler (#144)

* Update index.d.ts

Co-authored-by: Maksim Sinik <[email protected]>

Co-authored-by: Maksim Sinik <[email protected]>
  • Loading branch information
BlackGlory and fox1t authored Sep 3, 2020
1 parent 16f7e6e commit 6f7803c
Show file tree
Hide file tree
Showing 2 changed files with 108 additions and 6 deletions.
34 changes: 28 additions & 6 deletions index.d.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import * as busboy from "busboy";
import { FastifyPlugin } from "fastify";
import { Readable } from 'stream';
import { FastifyError } from "fastify-error";

type MultipartHandler = (
field: string,
Expand All @@ -18,11 +19,11 @@ interface BodyEntry {
limit: false
}

interface MultipartFields {
[x: string]: Multipart;
export interface MultipartFields {
[x: string]: Multipart | Multipart[];
}

interface Multipart {
export interface Multipart {
toBuffer: () => Promise<Buffer>,
file: NodeJS.ReadableStream,
filepath: string,
Expand All @@ -33,19 +34,40 @@ interface Multipart {
fields: MultipartFields
}

// Map of error constructors exposed by the plugin on the Fastify instance
// (decorated as `FastifyInstance.multipartErrors`). Each value is a
// FastifyError (from "fastify-error", imported at the top of this file).
// NOTE(review): the names suggest each corresponds to a busboy limit or a
// multipart validation failure (parts/files/fields limits, prototype
// pollution, wrong content-type, oversized file) — confirm against the
// plugin's runtime error definitions.
interface MultipartErrors {
PartsLimitError: FastifyError,
FilesLimitError: FastifyError,
FieldsLimitError: FastifyError,
PrototypeViolationError: FastifyError,
InvalidMultipartContentTypeError: FastifyError,
RequestFileTooLargeError: FastifyError
}

declare module "fastify" {
interface FastifyRequest {
isMultipart: () => boolean;

parts: (options?: busboy.BusboyConfig) => AsyncIterableIterator<Multipart>

// legacy
multipart: (handler: MultipartHandler, next: (err: Error) => void, options?: busboy.BusboyConfig) => busboy.Busboy;

// promise api
multipartIterator: (options?: busboy.BusboyConfig) => AsyncIterator<Multipart>
multipartIterator: (options?: busboy.BusboyConfig) => AsyncIterableIterator<Multipart>

// Stream mode
file: (options?: busboy.BusboyConfig) => Promise<Multipart>
files: (options?: busboy.BusboyConfig) => AsyncIterator<Multipart>
files: (options?: busboy.BusboyConfig) => AsyncIterableIterator<Multipart>

// Disk mode
saveRequestFiles: (options?: busboy.BusboyConfig) => Promise<Array<Multipart>>
cleanRequestFiles: () => Promise<void>
tmpUploads: Array<Multipart>
}

interface FastifyInstance {
multipartErrors: MultipartErrors
}
}

export interface FastifyMultipartOptions {
Expand All @@ -67,7 +89,7 @@ export interface FastifyMultipartOptions {
/**
* Manage the file stream like you need
*/
onFile?: (fieldName: string, stream: Readable, filename: string, encoding: string, mimetype: string, body: Record<string, BodyEntry>) => void;
onFile?: (fieldName: string, stream: Readable, filename: string, encoding: string, mimetype: string, body: Record<string, BodyEntry>) => void | Promise<void>;

limits?: {
/**
Expand Down
80 changes: 80 additions & 0 deletions test/index.test-d.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
import fastify from 'fastify'
import fastifyMultipart from '..'
import { MultipartFields } from '..'
import * as util from 'util'
import { pipeline } from 'stream'
import * as fs from 'fs'
import { expectType } from 'tsd'

const pump = util.promisify(pipeline)

const runServer = async () => {
const app = fastify()
Expand Down Expand Up @@ -35,6 +42,79 @@ const runServer = async () => {
})
})

// usage
app.post('/', async (req, reply) => {
const data = await req.file()

expectType<NodeJS.ReadableStream>(data.file)
expectType<MultipartFields>(data.fields)
expectType<string>(data.fieldname)
expectType<string>(data.filename)
expectType<string>(data.encoding)
expectType<string>(data.mimetype)

await pump(data.file, fs.createWriteStream(data.filename))

reply.send()
})

// busboy
app.post('/', async function (req, reply) {
const options: busboy.BusboyConfig = { limits: { fileSize: 1000 } };
const data = await req.file(options)
await pump(data.file, fs.createWriteStream(data.filename))
reply.send()
})

// handle multiple file streams
app.post('/', async (req, reply) => {
const parts = await req.files()
for await (const part of parts) {
await pump(part.file, fs.createWriteStream(part.filename))
}
reply.send()
})

// handle multiple file streams and fields
app.post('/upload/raw/any', async function (req, reply) {
const parts = await req.parts()
for await (const part of parts) {
if (part.file) {
await pump(part.file, fs.createWriteStream(part.filename))
} else {
console.log(part)
}
}
reply.send()
})

// accumulate whole file in memory
app.post('/upload/raw/any', async function (req, reply) {
const data = await req.file()
const buffer = await data.toBuffer()
// upload to S3
reply.send()
})

// upload files to disk and work with temporary file paths
app.post('/upload/files', async function (req, reply) {
// stores files to tmp dir and return files
const files = await req.saveRequestFiles()
files[0].filepath
files[0].fieldname
files[0].filename
files[0].encoding
files[0].mimetype
files[0].fields // other parsed parts

reply.send()
})

// access all errors
app.post('/upload/files', async function (req, reply) {
const { FilesLimitError } = app.multipartErrors
})

await app.ready()
}

Expand Down

0 comments on commit 6f7803c

Please sign in to comment.