import crypto from 'node:crypto'
import fs from 'node:fs'
import path from 'node:path'
import { EventEmitter } from 'node:events'
import { Stream } from 'node:stream'
import { StringDecoder } from 'node:string_decoder'

import File from './file.js'
import { MultipartParser } from './multipart_parser.js'
import { QuerystringParser } from './querystring_parser.js'
import { OctetParser } from './octet_parser.js'
import { JSONParser } from './json_parser.js'

// No-op parser used when the request carries no body (bytesExpected === 0);
// it only marks the form as ended so 'end' can still be emitted.
function dummyParser(self) {
  return {
    end: function () {
      self.ended = true
      self._maybeEnd()
      return null
    }
  }
}
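
/*
  Typical usage (illustrative sketch only; assumes `req` is an incoming
  http.IncomingMessage whose body is multipart/form-data). Parsing starts as
  soon as the form is constructed, since the constructor wires up the
  request's 'data'/'end' events:

    import IncomingForm from './incoming_form.js'

    const form = new IncomingForm(req, { uploadDir: '/tmp', keepExtensions: true })
    form
      .on('field', (name, value) => { console.log('field', name, value) })
      .on('file', (name, file) => { console.log('uploaded to', file.path) })
      .on('error', err => { console.error(err) })
      .on('end', () => { console.log('done') })
*/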

export default class IncomingForm extends EventEmitter {
  #req = null

  error = null
  ended = false
  headers = null
  type = null

  bytesReceived = null
  bytesExpected = null

  _parser = null
  _flushing = 0
  _fieldsSize = 0
  openedFiles = []

  constructor(req, opts = {}) {
    super()

    this.#req = req

    this.maxFields = opts.maxFields || 1000
    this.maxFieldsSize = opts.maxFieldsSize || 2 * 1024 * 1024
    this.keepExtensions = opts.keepExtensions || false
    this.uploadDir = opts.uploadDir
    this.encoding = opts.encoding || 'utf-8'
    this.multiples = opts.multiples || false
    // handlePart() passes this.hash to File; assumed option, defaulting to false.
    this.hash = opts.hash || false

    // Parse the headers and set up the parser, ready to start listening for data.
    this.writeHeaders(req.headers)

    req
      .on('error', err => {
        this._error(err)
      })
      .on('aborted', () => {
        this.emit('aborted')
        this._error(new Error('Request aborted'))
      })
      .on('data', buffer => {
        this.write(buffer)
      })
      .on('end', () => {
        if (this.error) {
          return
        }

        var err = this._parser.end()
        if (err) {
          this._error(err)
        }
      })
  }

  writeHeaders(headers) {
    this.headers = headers
    this.#parseContentLength()
    this.#parseContentType()
  }

  write(buffer) {
    if (this.error) {
      return
    }
    if (!this._parser) {
      this._error(new Error('uninitialized parser'))
      return
    }

    this.bytesReceived += buffer.length
    this.emit('progress', this.bytesReceived, this.bytesExpected)

    var bytesParsed = this._parser.write(buffer)
    if (bytesParsed !== buffer.length) {
      this._error(
        new Error(
          'parser error, ' + bytesParsed + ' of ' + buffer.length + ' bytes parsed'
        )
      )
    }

    return bytesParsed
  }

  pause() {
    try {
      this.#req.pause()
    } catch (err) {
      // the stream was destroyed
      if (!this.ended) {
        // before it was completed, crash & burn
        this._error(err)
      }
      return false
    }
    return true
  }

  resume() {
    try {
      this.#req.resume()
    } catch (err) {
      // the stream was destroyed
      if (!this.ended) {
        // before it was completed, crash & burn
        this._error(err)
      }
      return false
    }

    return true
  }

  onPart(part) {
    // this method can be overwritten by the user
    this.handlePart(part)
  }
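
  /*
    Illustrative sketch (not part of this module): a caller may replace
    onPart() to take over part handling, e.g. to consume raw upload streams
    without writing them to disk. Each `part` is a readable Stream that emits
    'data' and 'end' and carries `name`, `filename`, and `mime`:

      form.onPart = part => {
        if (part.filename === undefined) {
          // keep the default handling for plain fields
          form.handlePart(part)
          return
        }
        part.on('data', chunk => sendChunkSomewhere(chunk)) // hypothetical sink
        part.on('end', () => finishUpload())                // hypothetical helper
      }
  */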

  handlePart(part) {
    var self = this

    // Parts without a filename are plain form fields: buffer their bytes into
    // a string and emit a single 'field' event when the part ends.
    if (part.filename === undefined) {
      var value = '',
        decoder = new StringDecoder(this.encoding)

      part.on('data', function (buffer) {
        self._fieldsSize += buffer.length
        if (self._fieldsSize > self.maxFieldsSize) {
          self._error(
            new Error(
              'maxFieldsSize exceeded, received ' + self._fieldsSize + ' bytes of field data'
            )
          )
          return
        }
        value += decoder.write(buffer)
      })

      part.on('end', function () {
        self.emit('field', part.name, value)
      })
      return
    }

    // Parts with a filename are file uploads: stream them to disk, pausing the
    // request while each chunk is being written.
    this._flushing++

    var file = new File({
      path: this._uploadPath(part.filename),
      name: part.filename,
      type: part.mime,
      hash: self.hash
    })

    this.emit('fileBegin', part.name, file)

    file.open()
    this.openedFiles.push(file)

    part.on('data', function (buffer) {
      if (buffer.length == 0) {
        return
      }
      self.pause()
      file.write(buffer, function () {
        self.resume()
      })
    })

    part.on('end', function () {
      file.end(function () {
        self._flushing--
        self.emit('file', part.name, file)
        self._maybeEnd()
      })
    })
  }
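
  /*
    Summary of the content-type routing below (a reading aid, matching the
    checks in source order):

      application/octet-stream            -> OctetParser (raw body saved as one file)
      application/x-www-form-urlencoded   -> QuerystringParser
      multipart/form-data; boundary=...   -> MultipartParser
      application/json, text/plain, ...   -> JSONParser
      anything else                       -> 'unknown content-type' error
  */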

  #parseContentType() {
    if (this.bytesExpected === 0) {
      this._parser = dummyParser(this)
      return
    }

    if (!this.headers['content-type']) {
      this._error(new Error('bad content-type header, no content-type'))
      return
    }

    if (this.headers['content-type'].match(/octet-stream/i)) {
      this._initOctetStream()
      return
    }

    if (this.headers['content-type'].match(/urlencoded/i)) {
      this._initUrlencoded()
      return
    }

    if (this.headers['content-type'].match(/multipart/i)) {
      var m = this.headers['content-type'].match(
        /boundary=(?:"([^"]+)"|([^;]+))/i
      )
      if (m) {
        this._initMultipart(m[1] || m[2])
      } else {
        this._error(new Error('bad content-type header, no multipart boundary'))
      }
      return
    }

    if (this.headers['content-type'].match(/json|application|plain|text/i)) {
      this._initJSONencoded()
      return
    }

    this._error(
      new Error(
        'bad content-type header, unknown content-type: ' +
          this.headers['content-type']
      )
    )
  }

  _error(err) {
    if (this.error || this.ended) {
      return
    }

    this.error = err
    this.emit('error', err)

    if (Array.isArray(this.openedFiles)) {
      this.openedFiles.forEach(function (file) {
        file._writeStream.destroy()
        setTimeout(fs.unlink, 0, file.path, function (error) {})
      })
    }
  }

  #parseContentLength() {
    this.bytesReceived = 0
    if (this.headers['content-length']) {
      this.bytesExpected = +this.headers['content-length']
    } else if (this.headers['transfer-encoding'] === undefined) {
      this.bytesExpected = 0
    }

    if (this.bytesExpected !== null) {
      this.emit('progress', this.bytesReceived, this.bytesExpected)
    }
  }

  _newParser() {
    return new MultipartParser()
  }

  _initMultipart(boundary) {
    this.type = 'multipart'

    var parser = new MultipartParser(),
      self = this,
      headerField,
      headerValue,
      part

    parser.initWithBoundary(boundary)

    parser.onPartBegin = function () {
      part = new Stream()
      part.readable = true
      part.headers = {}
      part.name = null
      part.filename = null
      part.mime = null

      part.transferEncoding = 'binary'
      part.transferBuffer = ''

      headerField = ''
      headerValue = ''
    }

    parser.onHeaderField = function (b, start, end) {
      headerField += b.toString(self.encoding, start, end)
    }

    parser.onHeaderValue = function (b, start, end) {
      headerValue += b.toString(self.encoding, start, end)
    }

    parser.onHeaderEnd = function () {
      headerField = headerField.toLowerCase()
      part.headers[headerField] = headerValue

      var m = headerValue.match(/\bname="([^"]+)"/i)
      if (headerField == 'content-disposition') {
        if (m) {
          part.name = m[1]
        }

        part.filename = self._fileName(headerValue)
      } else if (headerField == 'content-type') {
        part.mime = headerValue
      } else if (headerField == 'content-transfer-encoding') {
        part.transferEncoding = headerValue.toLowerCase()
      }

      headerField = ''
      headerValue = ''
    }

    parser.onHeadersEnd = function () {
      switch (part.transferEncoding) {
        case 'binary':
        case '7bit':
        case '8bit':
          parser.onPartData = function (b, start, end) {
            part.emit('data', b.slice(start, end))
          }

          parser.onPartEnd = function () {
            part.emit('end')
          }
          break

        case 'base64':
          parser.onPartData = function (b, start, end) {
            part.transferBuffer += b.slice(start, end).toString('ascii')

            /*
              Four bytes (chars) of base64 decode to three bytes of binary
              data, so always decode a chunk whose length is divisible by 4;
              it will produce a number of bytes divisible by 3.
            */
            var offset = parseInt(part.transferBuffer.length / 4, 10) * 4
            part.emit(
              'data',
              Buffer.from(part.transferBuffer.substring(0, offset), 'base64')
            )
            part.transferBuffer = part.transferBuffer.substring(offset)
          }

          parser.onPartEnd = function () {
            part.emit('data', Buffer.from(part.transferBuffer, 'base64'))
            part.emit('end')
          }
          break

        default:
          return self._error(new Error('unknown transfer-encoding'))
      }

      self.onPart(part)
    }

    parser.onEnd = function () {
      self.ended = true
      self._maybeEnd()
    }

    this._parser = parser
  }
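
  /*
    Reading aid for _fileName() below. Given an illustrative (made-up)
    Content-Disposition header value such as

      form-data; name="avatar"; filename="C:\fakepath\photo.jpg"

    it returns 'photo.jpg': any backslash path prefix is dropped, encoded
    quotes (%22) are restored, and 4-digit numeric character references
    (&#NNNN;) are decoded.
  */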

  _fileName(headerValue) {
    var m = headerValue.match(/\bfilename="(.*?)"($|; )/i)
    if (!m) return

    var filename = m[1].substr(m[1].lastIndexOf('\\') + 1)
    filename = filename.replace(/%22/g, '"')
    filename = filename.replace(/&#([\d]{4});/g, function (m, code) {
      return String.fromCharCode(code)
    })
    return filename
  }

  _initUrlencoded() {
    this.type = 'urlencoded'

    var parser = new QuerystringParser(this.maxFields)

    parser.onField = (key, val) => {
      this.emit('field', key, val)
    }

    parser.onEnd = () => {
      this.ended = true
      this._maybeEnd()
    }

    this._parser = parser
  }

  _initOctetStream() {
    this.type = 'octet-stream'
    var filename = this.headers['x-file-name']
    var mime = this.headers['content-type']

    var file = new File({
      path: this._uploadPath(filename),
      name: filename,
      type: mime
    })

    this.emit('fileBegin', filename, file)
    file.open()

    this._flushing++

    var self = this

    self._parser = new OctetParser()

    // Keep track of writes that haven't finished so we don't emit the file
    // before it's done being written.
    var outstandingWrites = 0

    self._parser.on('data', function (buffer) {
      self.pause()
      outstandingWrites++

      file.write(buffer, function () {
        outstandingWrites--
        self.resume()

        if (self.ended) {
          self._parser.emit('doneWritingFile')
        }
      })
    })

    self._parser.on('end', function () {
      self._flushing--
      self.ended = true

      var done = function () {
        file.end(function () {
          self.emit('file', 'file', file)
          self._maybeEnd()
        })
      }

      if (outstandingWrites === 0) {
        done()
      } else {
        self._parser.once('doneWritingFile', done)
      }
    })
  }

  _initJSONencoded() {
    this.type = 'json'

    var parser = new JSONParser(),
      self = this

    if (this.bytesExpected) {
      parser.initWithLength(this.bytesExpected)
    }

    parser.onField = function (key, val) {
      self.emit('field', key, val)
    }

    parser.onEnd = function () {
      self.ended = true
      self._maybeEnd()
    }

    this._parser = parser
  }
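
  /*
    Reading aid for _uploadPath() below: it builds a random destination inside
    uploadDir named 'upload_' plus 32 hex characters (16 random bytes), and,
    when keepExtensions is enabled, appends the original file extension
    truncated to its first '.ext' component.
  */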

  _uploadPath(filename) {
    var name = 'upload_'
    var buf = crypto.randomBytes(16)
    for (var i = 0; i < buf.length; ++i) {
      name += ('0' + buf[i].toString(16)).slice(-2)
    }

    if (this.keepExtensions) {
      var ext = path.extname(filename)
      ext = ext.replace(/(\.[a-z0-9]+).*/i, '$1')

      name += ext
    }

    return path.join(this.uploadDir, name)
  }

  // Emit 'end' only once parsing has finished and no file writes are still
  // flushing; errors suppress the 'end' event.
  _maybeEnd() {
    if (!this.ended || this._flushing || this.error) {
      return
    }

    this.emit('end')
  }
}