A big wave of refactoring

parent 94a997bb8a
commit 5e827928ba
@@ -7,6 +7,7 @@
._*
.idea
.vscode
.tmp

node_modules/
index.js (24 changed lines)
@@ -171,21 +171,25 @@ export default class Request {
this.#body[name] = value
})
.on('file', (name, file) => {
if (name.slice(-2) === '[]') {
name = name.slice(0, -2)
}
if (!this.#body.hasOwnProperty(name)) {
this.#body[name] = file
if (name === false) {
this.#body = file
} else {
if (!Array.isArray(this.#body[name])) {
this.#body[name] = [this.#body[name]]
if (name.slice(-2) === '[]') {
name = name.slice(0, -2)
}
if (!this.#body.hasOwnProperty(name)) {
this.#body[name] = file
} else {
if (!Array.isArray(this.#body[name])) {
this.#body[name] = [this.#body[name]]
}
this.#body[name].push(file)
}
this.#body[name].push(file)
}
})
.on('error', out.reject)
.on('end', err => {
if (~contentType.indexOf('urlencoded')) {
.on('end', _ => {
if (contentType.includes('urlencoded')) {
for (let i in this.#body) {
if (typeof this.#body[i] === 'string') {
if (!this.#body[i]) {
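For illustration, a minimal standalone sketch of the grouping rule the new 'file' handler applies: repeated `name[]` uploads collapse onto one key and are promoted to an array from the second file on. The `body`/`addFile` names are hypothetical and not part of the commit.

    // Illustrative only: mirrors the accumulation rule from the 'file' handler above.
    const body = {}

    function addFile(name, file) {
      if (name.slice(-2) === '[]') {
        name = name.slice(0, -2) // "photos[]" -> "photos"
      }
      if (!body.hasOwnProperty(name)) {
        body[name] = file // first file under this name
      } else {
        if (!Array.isArray(body[name])) {
          body[name] = [body[name]] // promote to an array on the second file
        }
        body[name].push(file)
      }
    }

    addFile('photos[]', { name: 'a.png' })
    addFile('photos[]', { name: 'b.png' })
    console.log(body.photos.length) // 2
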
lib/file.js (19 changed lines)
@@ -13,7 +13,7 @@ export default class File extends EventEmitter {
constructor(props = {}) {
super()

for (var key in props) {
for (let key in props) {
this[key] = props[key]
}
}

@@ -30,24 +30,19 @@ export default class File extends EventEmitter {
type: this.type,
mtime: this.lastModifiedDate,
length: this.length,
filename: this.filename,
mime: this.mime
filename: this.name,
mime: this.type
}
}

write(buffer, cb) {
write(buffer) {
this.#stream.write(buffer, _ => {
this.lastModifiedDate = new Date()
this.size += buffer.length
this.emit('progress', this.size)
cb()
})
}

end(cb) {
this.#stream.end(() => {
this.emit('end')
cb()
})
end(callback) {
this.lastModifiedDate = new Date()
this.#stream.end(callback)
}
}
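A rough sketch of how a caller drives File after this change: write() no longer takes a callback, while end() does (and stamps lastModifiedDate before closing the stream). The import path, target path and the 'progress' handler are assumptions for the example.

    // Sketch with assumed values: stream two chunks into a File, then flush it.
    import File from './lib/file.js' // path assumed

    const file = new File({
      path: '/tmp/upload_demo', // hypothetical target path
      name: 'avatar.png',
      type: 'image/png'
    })

    file.open()
    file.on('progress', bytes => console.log('written so far:', bytes)) // per write() in the hunk above

    file.write(Buffer.from('chunk one, '))
    file.write(Buffer.from('chunk two'))

    file.end(() => {
      // end(callback): lastModifiedDate is stamped, then the stream is closed
      console.log('flushed', file.name)
    })
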
lib/index.js (573 changed lines)
@@ -1,50 +1,45 @@
import crypto from 'node:crypto'
import fs from 'node:fs'
import path from 'node:path'
import { join } from 'node:path'
import { EventEmitter } from 'node:events'
import { Stream } from 'node:stream'
import { StringDecoder } from 'node:string_decoder'

import File from './file.js'
import { MultipartParser } from './multipart_parser.js'
import { QuerystringParser } from './querystring_parser.js'
import { OctetParser } from './octet_parser.js'
import { UrlencodedParser } from './urlencoded_parser.js'
import { OctetParser, EmptyParser } from './octet_parser.js'
import { JSONParser } from './json_parser.js'

function dummyParser(self) {
return {
end: function () {
self.ended = true
self._maybeEnd()
return null
}
}
function randomPath(uploadDir) {
var name = 'upload_' + crypto.randomBytes(16).toString('hex')
return join(uploadDir, name)
}

/* ------------------------------------- */

export default class IncomingForm extends EventEmitter {
#req = null

error = null
#error = false
#ended = false

ended = false
headers = null
type = null

bytesReceived = null
bytesExpected = null

_parser = null
_flushing = 0
_fieldsSize = 0
openedFiles = []
#parser = null
#pending = true

#openedFiles = []

constructor(req, opts = {}) {
super()

this.#req = req

this.maxFields = opts.maxFields || 1000
this.maxFieldsSize = opts.maxFieldsSize || 2 * 1024 * 1024
this.keepExtensions = opts.keepExtensions || false
this.uploadDir = opts.uploadDir
this.encoding = opts.encoding || 'utf-8'
this.multiples = opts.multiples || false
@@ -54,23 +49,21 @@ export default class IncomingForm extends EventEmitter {

req
.on('error', err => {
this._error(err)
this.#handleError(err)
this.#clearUploads()
})
.on('aborted', () => {
this.emit('aborted')
this._error(new Error('Request aborted'))
})
.on('data', buffer => {
this.write(buffer)
this.#clearUploads()
})
.on('data', buffer => this.write(buffer))
.on('end', () => {
if (this.error) {
if (this.#error) {
return
}

var err = this._parser.end()
let err = this.#parser.end()
if (err) {
this._error(err)
this.#handleError(err)
}
})
}
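A hedged usage sketch of the refactored IncomingForm: the event names ('field', 'file', 'progress', 'error', 'end') come from the emits in this file, while the HTTP server scaffolding, import path and option values are assumed.

    // Sketch: wiring the refactored form inside a bare node:http server (setup assumed).
    import http from 'node:http'
    import IncomingForm from './lib/index.js' // path assumed

    http
      .createServer((req, res) => {
        const form = new IncomingForm(req, { uploadDir: '/tmp' })

        form
          .on('field', (name, value) => console.log('field:', name, value))
          .on('file', (name, file) => console.log('file:', name, file.path))
          .on('progress', (received, expected) => console.log(received, '/', expected))
          .on('error', err => {
            res.statusCode = 400
            res.end(err.message)
          })
          .on('end', () => res.end('ok'))
      })
      .listen(3000)
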
@@ -82,41 +75,24 @@ export default class IncomingForm extends EventEmitter {
}

write(buffer) {
if (this.error) {
if (this.#error) {
return
}
if (!this._parser) {
this._error(new Error('uninitialized parser'))
return
if (!this.#parser) {
return this.#handleError(new Error('uninitialized parser'))
}

this.bytesReceived += buffer.length
this.emit('progress', this.bytesReceived, this.bytesExpected)

var bytesParsed = this._parser.write(buffer)
if (bytesParsed !== buffer.length) {
this._error(
new Error(
'parser error, ' +
bytesParsed +
' of ' +
buffer.length +
' bytes parsed'
)
)
}

return bytesParsed
this.#parser.write(buffer)
}

pause() {
try {
this.#req.pause()
} catch (err) {
// the stream was destroyed
if (!this.ended) {
// before it was completed, crash & burn
this._error(err)
if (!this.#ended) {
this.#handleError(err)
}
return false
}
@@ -127,10 +103,8 @@ export default class IncomingForm extends EventEmitter {
try {
this.#req.resume()
} catch (err) {
// the stream was destroyed
if (!this.ended) {
// before it was completed, crash & burn
this._error(err)
if (!this.#ended) {
this.#handleError(err)
}
return false
}
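For reference, the pre-refactor code paired pause()/resume() with the old callback-style file.write() to apply backpressure per chunk; a self-contained sketch of that pattern with stand-in objects (none of these names come from the commit):

    // Self-contained sketch of the old pause()/resume() idea, with stand-in objects.
    import { EventEmitter } from 'node:events'

    const part = new EventEmitter() // stand-in for a multipart part stream
    const source = {
      pause() { console.log('request paused') },
      resume() { console.log('request resumed') }
    }
    const writeWithCallback = (buffer, cb) => setImmediate(cb) // stand-in for the old file.write(buffer, cb)

    part.on('data', buffer => {
      if (buffer.length === 0) {
        return
      }
      source.pause() // stop pulling from the request while the chunk is flushed
      writeWithCallback(buffer, () => {
        source.resume() // pick the request back up once the write completes
      })
    })

    part.emit('data', Buffer.from('chunk'))
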
@@ -138,132 +112,81 @@ export default class IncomingForm extends EventEmitter {
return true
}

onPart(part) {
// this method can be overwritten by the user
this.handlePart(part)
}

handlePart(part) {
var self = this

#handlePart(part) {
if (part.filename === undefined) {
var value = '',
decoder = new StringDecoder(this.encoding)
let value = ''
let decoder = new StringDecoder(this.encoding)

part.on('data', function (buffer) {
self._fieldsSize += buffer.length
if (self._fieldsSize > self.maxFieldsSize) {
self._error(
new Error(
'maxFieldsSize exceeded, received ' +
self._fieldsSize +
' bytes of field data'
)
)
return
}
value += decoder.write(buffer)
part
.on('data', buffer => {
value += decoder.write(buffer)
})
.on('end', () => {
this.emit('field', part.name, value)
})
} else {
let file = new File({
path: randomPath(this.uploadDir),
name: part.filename,
type: part.mime
})

part.on('end', function () {
self.emit('field', part.name, value)
})
return
file.open()

this.#openedFiles.push(file)

this.#pending = true

part
.on('data', buffer => {
if (buffer.length == 0) {
return
}
file.write(buffer)
})
.on('end', () => {
console.log('file part end...')
file.end(() => {
console.log('<><><><>', part.name, file)
this.emit('file', part.name, file)
this.#pending = false
// this.#handleEnd()
})
})
}

this._flushing++

var file = new File({
path: this._uploadPath(part.filename),
name: part.filename,
type: part.mime,
hash: self.hash
})

this.emit('fileBegin', part.name, file)

file.open()
this.openedFiles.push(file)

part.on('data', function (buffer) {
if (buffer.length == 0) {
return
}
self.pause()
file.write(buffer, function () {
self.resume()
})
})

part.on('end', function () {
file.end(function () {
self._flushing--
self.emit('file', part.name, file)
self._maybeEnd()
})
})
}

#parseContentType() {
let contentType = this.headers['content-type']
let lower = contentType.toLowerCase()

if (this.bytesExpected === 0) {
this._parser = dummyParser(this)
return
return (this.#parser = new EmptyParser())
}

if (!this.headers['content-type']) {
this._error(new Error('bad content-type header, no content-type'))
return
if (lower.includes('octet-stream')) {
return this.#createStreamParser()
}

if (this.headers['content-type'].match(/octet-stream/i)) {
this._initOctetStream()
return
if (lower.includes('urlencoded')) {
return this.#createUrlencodedParser()
}

if (this.headers['content-type'].match(/urlencoded/i)) {
this._initUrlencoded()
return
}

if (this.headers['content-type'].match(/multipart/i)) {
var m = this.headers['content-type'].match(
/boundary=(?:"([^"]+)"|([^;]+))/i
)
if (m) {
this._initMultipart(m[1] || m[2])
if (lower.includes('multipart')) {
let matches = contentType.match(/boundary=(?:"([^"]+)"|([^;]+))/)
if (matches) {
this.#createMultipartParser(matches[1] || matches[2])
} else {
this._error(new Error('bad content-type header, no multipart boundary'))
this.#handleError(new TypeError('unknow multipart boundary'))
}
return
}

if (this.headers['content-type'].match(/json|appliation|plain|text/i)) {
this._initJSONencoded()
return
if (lower.match(/json|appliation|plain|text/)) {
return this.#createJsonParser()
}

this._error(
new Error(
'bad content-type header, unknown content-type: ' +
this.headers['content-type']
)
)
}

_error(err) {
if (this.error || this.ended) {
return
}

this.error = err
this.emit('error', err)

if (Array.isArray(this.openedFiles)) {
this.openedFiles.forEach(function (file) {
file._writeStream.destroy()
setTimeout(fs.unlink, 0, file.path, function (error) {})
})
}
this.#handleError(new TypeError('unknown content-type: ' + contentType))
}

#parseContentLength() {
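A compact sketch that mirrors the content-type dispatch above as a standalone function; the string return values merely label which parser the refactored code would pick, and all names here are illustrative rather than part of the commit.

    // Illustrative mirror of the dispatch: which parser the new code would construct.
    function pickParser(contentType, bytesExpected) {
      const lower = (contentType || '').toLowerCase()

      if (bytesExpected === 0) return 'EmptyParser'
      if (lower.includes('octet-stream')) return 'OctetParser'
      if (lower.includes('urlencoded')) return 'UrlencodedParser'
      if (lower.includes('multipart')) {
        const m = contentType.match(/boundary=(?:"([^"]+)"|([^;]+))/)
        return m ? `MultipartParser(${m[1] || m[2]})` : 'error: unknown multipart boundary'
      }
      if (lower.match(/json|plain|text/)) return 'JSONParser'
      return 'error: unknown content-type'
    }

    console.log(pickParser('multipart/form-data; boundary=----abc', 1024))
    // -> 'MultipartParser(----abc)'
    console.log(pickParser('application/json', 64)) // -> 'JSONParser'
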
@@ -273,122 +196,109 @@ export default class IncomingForm extends EventEmitter {
} else if (this.headers['transfer-encoding'] === undefined) {
this.bytesExpected = 0
}

if (this.bytesExpected !== null) {
this.emit('progress', this.bytesReceived, this.bytesExpected)
}
}

_newParser() {
return new MultipartParser()
}
#createMultipartParser(boundary) {
let parser = new MultipartParser(boundary)
let headerField, headerValue, part

_initMultipart(boundary) {
this.type = 'multipart'
parser
.on('partBegin', function () {
part = new Stream()
part.readable = true
part.headers = {}
part.name = null
part.filename = null
part.mime = null

var parser = new MultipartParser(),
self = this,
headerField,
headerValue,
part
part.transferEncoding = 'binary'
part.transferBuffer = ''

parser.initWithBoundary(boundary)
headerField = ''
headerValue = ''
})
.on('headerField', (b, start, end) => {
headerField += b.toString(this.encoding, start, end)
})
.on('headerValue', (b, start, end) => {
headerValue += b.toString(this.encoding, start, end)
})
.on('headerEnd', () => {
headerField = headerField.toLowerCase()
part.headers[headerField] = headerValue

parser.onPartBegin = function () {
part = new Stream()
part.readable = true
part.headers = {}
part.name = null
part.filename = null
part.mime = null
var m = headerValue.match(/\bname="([^"]+)"/i)
if (headerField == 'content-disposition') {
if (m) {
part.name = m[1]
}

part.transferEncoding = 'binary'
part.transferBuffer = ''

headerField = ''
headerValue = ''
}

parser.onHeaderField = function (b, start, end) {
headerField += b.toString(self.encoding, start, end)
}

parser.onHeaderValue = function (b, start, end) {
headerValue += b.toString(self.encoding, start, end)
}

parser.onHeaderEnd = function () {
headerField = headerField.toLowerCase()
part.headers[headerField] = headerValue

var m = headerValue.match(/\bname="([^"]+)"/i)
if (headerField == 'content-disposition') {
if (m) {
part.name = m[1]
part.filename = this._fileName(headerValue)
} else if (headerField == 'content-type') {
part.mime = headerValue
} else if (headerField == 'content-transfer-encoding') {
part.transferEncoding = headerValue.toLowerCase()
}

part.filename = self._fileName(headerValue)
} else if (headerField == 'content-type') {
part.mime = headerValue
} else if (headerField == 'content-transfer-encoding') {
part.transferEncoding = headerValue.toLowerCase()
}
headerField = ''
headerValue = ''
})
.on('headersEnd', () => {
switch (part.transferEncoding) {
case 'binary':
case '7bit':
case '8bit':
parser
.on('partData', function (b, start, end) {
part.emit('data', b.slice(start, end))
})
.on('partEnd', function () {
part.emit('end')
})
break

headerField = ''
headerValue = ''
}
case 'base64':
parser
.on('partData', function (b, start, end) {
part.transferBuffer += b.slice(start, end).toString('ascii')

parser.onHeadersEnd = function () {
switch (part.transferEncoding) {
case 'binary':
case '7bit':
case '8bit':
parser.onPartData = function (b, start, end) {
part.emit('data', b.slice(start, end))
}

parser.onPartEnd = function () {
part.emit('end')
}
break

case 'base64':
parser.onPartData = function (b, start, end) {
part.transferBuffer += b.slice(start, end).toString('ascii')

/*
/*
four bytes (chars) in base64 converts to three bytes in binary
encoding. So we should always work with a number of bytes that
can be divided by 4, it will result in a number of buytes that
can be divided vy 3.
*/
var offset = parseInt(part.transferBuffer.length / 4, 10) * 4
part.emit(
'data',
Buffer.from(part.transferBuffer.substring(0, offset), 'base64')
)
part.transferBuffer = part.transferBuffer.substring(offset)
}
var offset = parseInt(part.transferBuffer.length / 4, 10) * 4
part.emit(
'data',
Buffer.from(
part.transferBuffer.substring(0, offset),
'base64'
)
)
part.transferBuffer = part.transferBuffer.substring(offset)
})
.on('partEnd', function () {
part.emit('data', Buffer.from(part.transferBuffer, 'base64'))
part.emit('end')
})
break

parser.onPartEnd = function () {
part.emit('data', Buffer.from(part.transferBuffer, 'base64'))
part.emit('end')
}
break
default:
return this.#handleError(new Error('unknown transfer-encoding'))
}

default:
return self._error(new Error('unknown transfer-encoding'))
}
this.#handlePart(part)
})
.on('end', () => {
if (this.#pending) {
setTimeout(_ => parser.emit('end'))
} else {
this.#handleEnd()
}
})

self.onPart(part)
}

parser.onEnd = function () {
self.ended = true
self._maybeEnd()
}

this._parser = parser
this.#parser = parser
}

_fileName(headerValue) {
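A worked example of the base64 chunking used in the 'base64' branch above: only a multiple of four base64 characters is decoded per chunk (four characters map to three bytes), and the remainder is carried over to the next write.

    // Worked example: decode only complete 4-character base64 groups, carry the rest.
    let transferBuffer = 'SGVsbG8gd29ybGQh' // "Hello world!" (16 base64 chars)
    transferBuffer += 'QU'                  // a partial group that arrived with this chunk

    const offset = parseInt(transferBuffer.length / 4, 10) * 4 // 16, i.e. floor to a multiple of 4
    const decoded = Buffer.from(transferBuffer.substring(0, offset), 'base64')
    transferBuffer = transferBuffer.substring(offset) // 'QU' waits for the next chunk

    console.log(decoded.toString()) // 'Hello world!'
    console.log(transferBuffer)     // 'QU'
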
@@ -403,123 +313,72 @@ export default class IncomingForm extends EventEmitter {
return filename
}

_initUrlencoded() {
this.type = 'urlencoded'
#createUrlencodedParser() {
this.#parser = new UrlencodedParser()

var parser = new QuerystringParser(this.maxFields)

parser.onField = (key, val) => {
this.emit('field', key, val)
}

parser.onEnd = () => {
this.ended = true
this._maybeEnd()
}

this._parser = parser
this.#parser
.on('field', fields => this.emit('field', false, fields))
.on('end', () => this.#handleEnd())
}

_initOctetStream() {
this.type = 'octet-stream'
var filename = this.headers['x-file-name']
var mime = this.headers['content-type']
#createStreamParser() {
let filename = this.headers['x-file-name']
let mime = this.headers['x-file-type']

var file = new File({
path: this._uploadPath(filename),
name: filename,
type: mime
})

this.emit('fileBegin', filename, file)
file.open()

this._flushing++

var self = this

self._parser = new OctetParser()

//Keep track of writes that haven't finished so we don't emit the file before it's done being written
var outstandingWrites = 0

self._parser.on('data', function (buffer) {
self.pause()
outstandingWrites++

file.write(buffer, function () {
outstandingWrites--
self.resume()

if (self.ended) {
self._parser.emit('doneWritingFile')
}
})
})

self._parser.on('end', function () {
self._flushing--
self.ended = true

var done = function () {
file.end(function () {
self.emit('file', 'file', file)
self._maybeEnd()
})
}

if (outstandingWrites === 0) {
done()
} else {
self._parser.once('doneWritingFile', done)
}
})
}

_initJSONencoded() {
this.type = 'json'

var parser = new JSONParser(),
self = this
this.#parser = new OctetParser(filename, mime, randomPath(this.uploadDir))

if (this.bytesExpected) {
parser.initWithLength(this.bytesExpected)
this.#parser.initLength(this.bytesExpected)
}

parser.onField = function (key, val) {
self.emit('field', key, val)
}

parser.onEnd = function () {
self.ended = true
self._maybeEnd()
}

this._parser = parser
this.#parser
.on('file', file => {
this.emit('file', false, file)
})
.on('end', () => this.#handleEnd())
.on('error', err => this.#handleError(err))
}

_uploadPath(filename) {
var name = 'upload_'
var buf = crypto.randomBytes(16)
for (var i = 0; i < buf.length; ++i) {
name += ('0' + buf[i].toString(16)).slice(-2)
#createJsonParser() {
this.#parser = new JSONParser()

if (this.bytesExpected) {
this.#parser.initLength(this.bytesExpected)
}

if (this.keepExtensions) {
var ext = path.extname(filename)
ext = ext.replace(/(\.[a-z0-9]+).*/i, '$1')

name += ext
}

return path.join(this.uploadDir, name)
this.#parser
.on('field', (key, val) => {
this.emit('field', key, val)
})
.on('end', () => this.#handleEnd())
.on('error', err => this.#handleError(err))
}

_maybeEnd() {
if (!this.ended || this._flushing || this.error) {
#clearUploads() {
while (this.#openedFiles.length) {
let file = this.#openedFiles.pop()
file._writeStream.destroy()
setTimeout(_ => {
try {
fs.unlink(file.path)
} catch (e) {}
})
}
}

#handleError(err) {
if (this.#error || this.#ended) {
return
}
this.error = true
this.emit('error', err)
}

#handleEnd() {
if (this.#ended || this.#error) {
return
}
this.#ended = true
this.emit('end')
}
}
lib/json_parser.js

@@ -1,33 +1,36 @@
export class JSONParser {
data = Buffer.from('')
bytesWritten = 0
import { EventEmitter } from 'node:events'

initWithLength(length) {
this.data = Buffer.alloc(length)
export class JSONParser extends EventEmitter {
#buff = Buffer.from('')
#byteLen = 0

initLength(length) {
this.#byteLen = length
}

write(buffer) {
if (this.data.length >= this.bytesWritten + buffer.length) {
buffer.copy(this.data, this.bytesWritten)
} else {
this.data = Buffer.concat([this.data, buffer])
}
this.bytesWritten += buffer.length
return buffer.length
this.#buff = Buffer.concat([this.#buff, buffer])
}

end() {
var data = this.data.toString('utf8')
var fields
try {
fields = JSON.parse(data)
} catch (e) {
fields = Function(`try{return ${data}}catch(e){}`)() || data
if (this.#buff.length === this.#byteLen) {
let data = this.#buff.toString()
let fields = data
try {
fields = JSON.parse(data)
} catch (e) {
try{
// Non-standard JSON syntax; try parsing it with Function
fields = Function(`try{return ${data}}catch(e){}`)()
}catch(err){}
}

this.emit('field', false, fields)
this.emit('end')

this.#buff = null
} else {
this.emit('error', new Error(`The uploaded data is incomplete. Expected ${this.#byteLen}, Received ${this.#buff.length} .`))
}

this.onField(false, fields)
this.data = null

this.onEnd()
}
}
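A rough sketch of feeding the rewritten JSONParser by hand (import path assumed): initLength() records the expected byte count, write() buffers, and end() either emits the parsed fields or an error when the sizes disagree.

    // Sketch: feeding the event-based JSONParser by hand (import path assumed).
    import { JSONParser } from './lib/json_parser.js'

    const payload = Buffer.from('{"name":"yutent","tags":["a","b"]}')

    const parser = new JSONParser()
    parser.initLength(payload.length) // expected total size, e.g. from Content-Length

    parser
      .on('field', (name, fields) => console.log(name, fields)) // name is false for a JSON body
      .on('error', err => console.error(err.message))
      .on('end', () => console.log('parsed'))

    parser.write(payload.subarray(0, 10)) // the body may arrive in pieces
    parser.write(payload.subarray(10))
    parser.end()
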
lib/multipart_parser.js

@@ -1,5 +1,7 @@
import { EventEmitter } from 'node:events'

var s = 0,
S = {
STATE_DICT = {
PARSER_UNINITIALIZED: s++,
START: s++,
START_BOUNDARY: s++,

@@ -30,38 +32,43 @@ var s = 0,
return c | 0x20
}

export class MultipartParser {
function stateToString(stateNumber) {
for (let state in STATE_DICT) {
let number = STATE_DICT[state]
if (number === stateNumber) {
return state
}
}
}

export class MultipartParser extends EventEmitter {
boundary = null
boundaryChars = null
lookbehind = null
state = S.PARSER_UNINITIALIZED
state = STATE_DICT.PARSER_UNINITIALIZED

index = null
flags = 0

static stateToString(stateNumber) {
for (var state in S) {
var number = S[state]
if (number === stateNumber) return state
}
}

initWithBoundary(str) {

constructor(str) {
super()

this.boundary = Buffer.alloc(str.length + 4)
this.boundary.write('\r\n--', 0)
this.boundary.write(str, 4)
this.lookbehind = Buffer.alloc(this.boundary.length + 8)
this.state = S.START
this.state = STATE_DICT.START

this.boundaryChars = {}
for (var i = 0; i < this.boundary.length; i++) {
for (let i = 0; i < this.boundary.length; i++) {
this.boundaryChars[this.boundary[i]] = true
}
}

write(buffer) {
var self = this,
i = 0,
var i = 0,
len = buffer.length,
prevIndex = this.index,
index = this.index,

@@ -74,48 +81,38 @@ export class MultipartParser {
boundaryEnd = boundaryLength - 1,
bufferLength = buffer.length,
c,
cl,
mark = function (name) {
self[name + 'Mark'] = i
},
clear = function (name) {
delete self[name + 'Mark']
},
callback = function (name, buffer, start, end) {
if (start !== undefined && start === end) {
return
}
cl

var callbackSymbol =
'on' + name.substr(0, 1).toUpperCase() + name.substr(1)
if (callbackSymbol in self) {
self[callbackSymbol](buffer, start, end)
}
let mark = (name) => {
this[name + 'Mark'] = i
},
dataCallback = function (name, clear) {
dataCallback = (name, clear) => {
var markSymbol = name + 'Mark'
if (!(markSymbol in self)) {
return
if ((markSymbol in this)) {
if (clear) {
this.emit(name, buffer, this[markSymbol], i)
delete this[markSymbol]
} else {
this.emit(name, buffer, this[markSymbol], buffer.length)
this[markSymbol] = 0
}
}

if (!clear) {
callback(name, buffer, self[markSymbol], buffer.length)
self[markSymbol] = 0
} else {
callback(name, buffer, self[markSymbol], i)
delete self[markSymbol]
}
}

// console.log('???? ', state, 'len: ', len);
for (i = 0; i < len; i++) {
c = buffer[i]
switch (state) {
case S.PARSER_UNINITIALIZED:
case STATE_DICT.PARSER_UNINITIALIZED:
return i
case S.START:

case STATE_DICT.START:
index = 0
state = S.START_BOUNDARY
case S.START_BOUNDARY:
state = STATE_DICT.START_BOUNDARY

case STATE_DICT.START_BOUNDARY:
// console.log('=====>>>', index, c, boundary);
if (index == boundary.length - 2) {
if (c == HYPHEN) {
flags |= F.LAST_BOUNDARY

@@ -126,13 +123,13 @@ export class MultipartParser {
break
} else if (index - 1 == boundary.length - 2) {
if (flags & F.LAST_BOUNDARY && c == HYPHEN) {
callback('end')
state = S.END
this.emit('end')
state = STATE_DICT.END
flags = 0
} else if (!(flags & F.LAST_BOUNDARY) && c == LF) {
index = 0
callback('partBegin')
state = S.HEADER_FIELD_START
this.emit('partBegin')
state = STATE_DICT.HEADER_FIELD_START
} else {
return i
}

@@ -146,14 +143,16 @@ export class MultipartParser {
index++
}
break
case S.HEADER_FIELD_START:
state = S.HEADER_FIELD

case STATE_DICT.HEADER_FIELD_START:
state = STATE_DICT.HEADER_FIELD
mark('headerField')
index = 0
case S.HEADER_FIELD:

case STATE_DICT.HEADER_FIELD:
if (c == CR) {
clear('headerField')
state = S.HEADERS_ALMOST_DONE
delete this.headerFieldMark
state = STATE_DICT.HEADERS_ALMOST_DONE
break
}

@@ -168,7 +167,7 @@ export class MultipartParser {
return i
}
dataCallback('headerField', true)
state = S.HEADER_VALUE_START
state = STATE_DICT.HEADER_VALUE_START
break
}

@@ -177,38 +176,44 @@ export class MultipartParser {
return i
}
break
case S.HEADER_VALUE_START:

case STATE_DICT.HEADER_VALUE_START:
if (c == SPACE) {
break
}

mark('headerValue')
state = S.HEADER_VALUE
case S.HEADER_VALUE:
state = STATE_DICT.HEADER_VALUE

case STATE_DICT.HEADER_VALUE:
if (c == CR) {
dataCallback('headerValue', true)
callback('headerEnd')
state = S.HEADER_VALUE_ALMOST_DONE
this.emit('headerEnd')
state = STATE_DICT.HEADER_VALUE_ALMOST_DONE
}
break
case S.HEADER_VALUE_ALMOST_DONE:

case STATE_DICT.HEADER_VALUE_ALMOST_DONE:
if (c != LF) {
return i
}
state = S.HEADER_FIELD_START
state = STATE_DICT.HEADER_FIELD_START
break
case S.HEADERS_ALMOST_DONE:

case STATE_DICT.HEADERS_ALMOST_DONE:
if (c != LF) {
return i
}

callback('headersEnd')
state = S.PART_DATA_START
this.emit('headersEnd')
state = STATE_DICT.PART_DATA_START
break
case S.PART_DATA_START:
state = S.PART_DATA

case STATE_DICT.PART_DATA_START:
state = STATE_DICT.PART_DATA
mark('partData')
case S.PART_DATA:

case STATE_DICT.PART_DATA:
prevIndex = index

if (index === 0) {

@@ -247,16 +252,16 @@ export class MultipartParser {
if (c == LF) {
// unset the PART_BOUNDARY flag
flags &= ~F.PART_BOUNDARY
callback('partEnd')
callback('partBegin')
state = S.HEADER_FIELD_START
this.emit('partEnd')
this.emit('partBegin')
state = STATE_DICT.HEADER_FIELD_START
break
}
} else if (flags & F.LAST_BOUNDARY) {
if (c == HYPHEN) {
callback('partEnd')
callback('end')
state = S.END
this.emit('partEnd')
this.emit('end')
state = STATE_DICT.END
flags = 0
} else {
index = 0

@@ -273,7 +278,7 @@ export class MultipartParser {
} else if (prevIndex > 0) {
// if our boundary turned out to be rubbish, the captured lookbehind
// belongs to partData
callback('partData', lookbehind, 0, prevIndex)
this.emit('partData', lookbehind, 0, prevIndex)
prevIndex = 0
mark('partData')

@@ -283,8 +288,10 @@ export class MultipartParser {
}

break
case S.END:

case STATE_DICT.END:
break

default:
return i
}

@@ -298,24 +305,17 @@ export class MultipartParser {
this.state = state
this.flags = flags

return len

}

end() {
var callback = function (self, name) {
var callbackSymbol =
'on' + name.substr(0, 1).toUpperCase() + name.substr(1)
if (callbackSymbol in self) {
self[callbackSymbol]()
}
}

if (
(this.state == S.HEADER_FIELD_START && this.index === 0) ||
(this.state == S.PART_DATA && this.index == this.boundary.length)
(this.state === STATE_DICT.HEADER_FIELD_START && this.index === 0) ||
(this.state === STATE_DICT.PART_DATA && this.index == this.boundary.length)
) {
callback(this, 'partEnd')
callback(this, 'end')
} else if (this.state != S.END) {
this.emit('end')
} else if (this.state !== STATE_DICT.END) {
return new Error(
'MultipartParser.end(): stream ended unexpectedly: ' + this.explain()
)

@@ -323,6 +323,6 @@ export class MultipartParser {
}

explain() {
return 'state = ' + MultipartParser.stateToString(this.state)
return 'state = ' + stateToString(this.state)
}
}
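A sketch of driving the event-based MultipartParser directly, with a handcrafted boundary and body (both assumed, as is the import path); the header and data events deliver (buffer, start, end) slices rather than strings.

    // Sketch: one handcrafted multipart body, boundary 'abc' (both assumed).
    import { MultipartParser } from './lib/multipart_parser.js' // path assumed

    const body = Buffer.from(
      '--abc\r\n' +
        'content-disposition: form-data; name="title"\r\n' +
        '\r\n' +
        'hello\r\n' +
        '--abc--\r\n'
    )

    const parser = new MultipartParser('abc') // the boundary now goes to the constructor
    let field = ''
    let value = ''

    parser
      .on('partBegin', () => console.log('part begins'))
      .on('headerField', (b, start, end) => (field += b.toString('utf8', start, end)))
      .on('headerValue', (b, start, end) => (value += b.toString('utf8', start, end)))
      .on('headerEnd', () => console.log('header:', field, '->', value))
      .on('partData', (b, start, end) => console.log('data:', b.toString('utf8', start, end)))
      .on('partEnd', () => console.log('part ends'))
      .on('end', () => console.log('done'))

    parser.write(body)
    parser.end()
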
lib/octet_parser.js

@@ -1,11 +1,55 @@
import { EventEmitter } from 'events'
/**
 * {}
 * @author yutent<yutent.io@gmail.com>
 * @date 2023/10/27 14:23:22
 */

import { EventEmitter } from 'node:events'
import File from './file.js'

export class OctetParser extends EventEmitter {
write(buffer) {
this.emit('data', buffer)
return buffer.length
#file = null
#byteLen = 0
#wroteLen = 0

constructor(name, type, path) {
super()

this.#file = new File({ path, name, type })
this.#file.open()
}

initLength(length) {
this.#byteLen = length
}

write(buffer) {
this.#file.write(buffer)
this.#wroteLen += buffer.length
}

end() {
this.#file.end(_ => {
if (this.#wroteLen === this.#byteLen) {
this.emit('file', this.#file)
this.emit('end')
} else {
this.emit(
'error',
new Error(
`The uploaded data is incomplete. Expected ${
this.#byteLen
}, Received ${this.#wroteLen} .`
)
)
}
})
}
}

export class EmptyParser extends EventEmitter {
write() {}

end() {
this.emit('end')
}
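A standalone sketch of the new OctetParser (file name, MIME type, target path and import path assumed): it streams into a File and, on end(), emits the finished file or an error when the received size does not match initLength().

    // Standalone sketch (name, type and path assumed).
    import { OctetParser } from './lib/octet_parser.js'

    const chunk1 = Buffer.from('raw upload, part one; ')
    const chunk2 = Buffer.from('part two')

    const parser = new OctetParser('report.bin', 'application/octet-stream', '/tmp/upload_demo')
    parser.initLength(chunk1.length + chunk2.length) // usually taken from Content-Length

    parser
      .on('file', file => console.log('stored at', file.path, 'as', file.name))
      .on('error', err => console.error(err.message))
      .on('end', () => console.log('octet stream finished'))

    parser.write(chunk1)
    parser.write(chunk2)
    parser.end()
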
lib/querystring_parser.js (deleted)

@@ -1,27 +0,0 @@
// This is a buffering parser, not quite as nice as the multipart one.
// If I find time I'll rewrite this to be fully streaming as well
import { parse } from 'node:querystring'

export class QuerystringParser {
constructor(maxKeys) {
this.maxKeys = maxKeys
this.buffer = ''
}

write(buffer) {
this.buffer += buffer.toString('ascii')
return buffer.length
}

end() {
var fields = parse(this.buffer, '&', '=', {
maxKeys: this.maxKeys
})
for (var field in fields) {
this.onField(field, fields[field])
}
this.buffer = ''

this.onEnd()
}
}
lib/urlencoded_parser.js (new file)

@@ -0,0 +1,27 @@
/**
 * {}
 * @author yutent<yutent.io@gmail.com>
 * @date 2023/10/27 12:14:05
 */

import { parse } from 'node:querystring'
import { EventEmitter } from 'node:events'

export class UrlencodedParser extends EventEmitter {
#buff = Buffer.from('')

write(buffer) {
this.#buff = Buffer.concat([this.#buff, buffer])
}

end() {
let data = this.#buff.toString()
let fields = parse(data)

this.#buff = null

this.emit('field', fields)
this.emit('end')

}
}
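A sketch of the replacement UrlencodedParser (import path assumed): it buffers the whole body, parses it once in end(), and emits a single 'field' event carrying every parsed pair.

    // Sketch (import path assumed): all pairs arrive in one 'field' event at the end.
    import { UrlencodedParser } from './lib/urlencoded_parser.js'

    const parser = new UrlencodedParser()

    parser
      .on('field', fields => console.log(fields)) // { name: 'yutent', tags: [ 'a', 'b' ] }
      .on('end', () => console.log('urlencoded body parsed'))

    parser.write(Buffer.from('name=yutent&tags=a'))
    parser.write(Buffer.from('&tags=b'))
    parser.end()
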