A big wave of refactoring

v2
yutent 2023-10-27 19:16:32 +08:00
parent 94a997bb8a
commit 5e827928ba
9 changed files with 428 additions and 522 deletions

.gitignore (vendored)

@@ -7,6 +7,7 @@
 ._*
 .idea
 .vscode
+.tmp
 node_modules/


@@ -171,6 +171,9 @@ export default class Request {
           this.#body[name] = value
         })
         .on('file', (name, file) => {
+          if (name === false) {
+            this.#body = file
+          } else {
             if (name.slice(-2) === '[]') {
               name = name.slice(0, -2)
             }
@@ -182,10 +185,11 @@ export default class Request {
             }
             this.#body[name].push(file)
           }
+          }
         })
         .on('error', out.reject)
-        .on('end', err => {
-          if (~contentType.indexOf('urlencoded')) {
+        .on('end', _ => {
+          if (contentType.includes('urlencoded')) {
             for (let i in this.#body) {
               if (typeof this.#body[i] === 'string') {
                 if (!this.#body[i]) {
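
Note: with the new `name === false` branch above, a raw octet-stream upload no longer lands under a field key; the whole request body becomes that single file object. A minimal client-side sketch of such an upload, assuming the x-file-name / x-file-type headers that the new #createStreamParser reads further down (the endpoint URL is illustrative):

import fs from 'node:fs'

// hypothetical endpoint; only the header names come from this commit
await fetch('http://localhost:3000/upload', {
  method: 'POST',
  headers: {
    'content-type': 'application/octet-stream',
    'x-file-name': 'avatar.png', // becomes the file name on the server
    'x-file-type': 'image/png'   // becomes the file mime on the server
  },
  body: fs.readFileSync('avatar.png')
})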


@@ -13,7 +13,7 @@ export default class File extends EventEmitter {
   constructor(props = {}) {
     super()
-    for (var key in props) {
+    for (let key in props) {
       this[key] = props[key]
     }
   }
@@ -30,24 +30,19 @@ export default class File extends EventEmitter {
       type: this.type,
       mtime: this.lastModifiedDate,
       length: this.length,
-      filename: this.filename,
-      mime: this.mime
+      filename: this.name,
+      mime: this.type
     }
   }
-  write(buffer, cb) {
+  write(buffer) {
     this.#stream.write(buffer, _ => {
-      this.lastModifiedDate = new Date()
       this.size += buffer.length
-      this.emit('progress', this.size)
-      cb()
     })
   }
-  end(cb) {
-    this.#stream.end(() => {
-      this.emit('end')
-      cb()
-    })
+  end(callback) {
+    this.lastModifiedDate = new Date()
+    this.#stream.end(callback)
   }
 }
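
Note: File#write is now fire-and-forget (no callback, no 'progress' event) and File#end stamps lastModifiedDate before closing the stream. A minimal sketch against the new signatures; the import path, file path and chunk are made up:

import File from './lib/file.js'

let file = new File({ path: '/tmp/upload_demo', name: 'demo.txt', type: 'text/plain' })
file.open()
file.write(Buffer.from('hello')) // size is accumulated internally
file.end(() => {
  // runs once the underlying stream has been closed
  console.log('saved', file.path, file.size, 'bytes')
})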


@@ -1,50 +1,45 @@
 import crypto from 'node:crypto'
 import fs from 'node:fs'
-import path from 'node:path'
+import { join } from 'node:path'
 import { EventEmitter } from 'node:events'
 import { Stream } from 'node:stream'
 import { StringDecoder } from 'node:string_decoder'
 import File from './file.js'
 import { MultipartParser } from './multipart_parser.js'
-import { QuerystringParser } from './querystring_parser.js'
-import { OctetParser } from './octet_parser.js'
+import { UrlencodedParser } from './urlencoded_parser.js'
+import { OctetParser, EmptyParser } from './octet_parser.js'
 import { JSONParser } from './json_parser.js'
-function dummyParser(self) {
-  return {
-    end: function () {
-      self.ended = true
-      self._maybeEnd()
-      return null
-    }
-  }
+function randomPath(uploadDir) {
+  var name = 'upload_' + crypto.randomBytes(16).toString('hex')
+  return join(uploadDir, name)
 }
-/* ------------------------------------- */
 export default class IncomingForm extends EventEmitter {
   #req = null
-  error = null
+  #error = false
+  #ended = false
   ended = false
   headers = null
-  type = null
   bytesReceived = null
   bytesExpected = null
-  _parser = null
-  _flushing = 0
-  _fieldsSize = 0
-  openedFiles = []
+  #parser = null
+  #pending = true
+  #openedFiles = []
   constructor(req, opts = {}) {
     super()
     this.#req = req
-    this.maxFields = opts.maxFields || 1000
-    this.maxFieldsSize = opts.maxFieldsSize || 2 * 1024 * 1024
-    this.keepExtensions = opts.keepExtensions || false
     this.uploadDir = opts.uploadDir
     this.encoding = opts.encoding || 'utf-8'
     this.multiples = opts.multiples || false
@@ -54,23 +49,21 @@ export default class IncomingForm extends EventEmitter {
     req
       .on('error', err => {
-        this._error(err)
+        this.#handleError(err)
+        this.#clearUploads()
       })
       .on('aborted', () => {
         this.emit('aborted')
-        this._error(new Error('Request aborted'))
-      })
-      .on('data', buffer => {
-        this.write(buffer)
+        this.#clearUploads()
       })
+      .on('data', buffer => this.write(buffer))
       .on('end', () => {
-        if (this.error) {
+        if (this.#error) {
           return
         }
-        var err = this._parser.end()
+        let err = this.#parser.end()
         if (err) {
-          this._error(err)
+          this.#handleError(err)
         }
       })
   }
@@ -82,41 +75,24 @@ export default class IncomingForm extends EventEmitter {
   }
   write(buffer) {
-    if (this.error) {
+    if (this.#error) {
       return
     }
-    if (!this._parser) {
-      this._error(new Error('uninitialized parser'))
-      return
+    if (!this.#parser) {
+      return this.#handleError(new Error('uninitialized parser'))
     }
     this.bytesReceived += buffer.length
     this.emit('progress', this.bytesReceived, this.bytesExpected)
-    var bytesParsed = this._parser.write(buffer)
-    if (bytesParsed !== buffer.length) {
-      this._error(
-        new Error(
-          'parser error, ' +
-            bytesParsed +
-            ' of ' +
-            buffer.length +
-            ' bytes parsed'
-        )
-      )
-    }
-    return bytesParsed
+    this.#parser.write(buffer)
   }
   pause() {
     try {
       this.#req.pause()
     } catch (err) {
-      // the stream was destroyed
-      if (!this.ended) {
-        // before it was completed, crash & burn
-        this._error(err)
+      if (!this.#ended) {
+        this.#handleError(err)
       }
       return false
     }
@@ -127,10 +103,8 @@ export default class IncomingForm extends EventEmitter {
     try {
       this.#req.resume()
     } catch (err) {
-      // the stream was destroyed
-      if (!this.ended) {
-        // before it was completed, crash & burn
-        this._error(err)
+      if (!this.#ended) {
+        this.#handleError(err)
       }
       return false
     }
@@ -138,132 +112,81 @@ export default class IncomingForm extends EventEmitter {
     return true
   }
-  onPart(part) {
-    // this method can be overwritten by the user
-    this.handlePart(part)
-  }
-  handlePart(part) {
-    var self = this
+  #handlePart(part) {
     if (part.filename === undefined) {
-      var value = '',
-        decoder = new StringDecoder(this.encoding)
-      part.on('data', function (buffer) {
-        self._fieldsSize += buffer.length
-        if (self._fieldsSize > self.maxFieldsSize) {
-          self._error(
-            new Error(
-              'maxFieldsSize exceeded, received ' +
-                self._fieldsSize +
-                ' bytes of field data'
-            )
-          )
-          return
-        }
+      let value = ''
+      let decoder = new StringDecoder(this.encoding)
+      part
+        .on('data', buffer => {
           value += decoder.write(buffer)
         })
-      part.on('end', function () {
-        self.emit('field', part.name, value)
-      })
-      return
-    }
-    this._flushing++
-    var file = new File({
-      path: this._uploadPath(part.filename),
+        .on('end', () => {
+          this.emit('field', part.name, value)
+        })
+    } else {
+      let file = new File({
+        path: randomPath(this.uploadDir),
         name: part.filename,
-      type: part.mime,
-      hash: self.hash
-    })
-    this.emit('fileBegin', part.name, file)
+        type: part.mime
+      })
       file.open()
-    this.openedFiles.push(file)
-    part.on('data', function (buffer) {
+      this.#openedFiles.push(file)
+      this.#pending = true
+      part
+        .on('data', buffer => {
           if (buffer.length == 0) {
             return
           }
-      self.pause()
-      file.write(buffer, function () {
-        self.resume()
-      })
-    })
-    part.on('end', function () {
-      file.end(function () {
-        self._flushing--
-        self.emit('file', part.name, file)
-        self._maybeEnd()
-      })
-    })
+          file.write(buffer)
+        })
+        .on('end', () => {
+          console.log('file part end...')
+          file.end(() => {
+            console.log('<><><><>', part.name, file)
+            this.emit('file', part.name, file)
+            this.#pending = false
+            // this.#handleEnd()
+          })
+        })
+    }
   }
   #parseContentType() {
+    let contentType = this.headers['content-type']
+    let lower = contentType.toLowerCase()
     if (this.bytesExpected === 0) {
-      this._parser = dummyParser(this)
-      return
+      return (this.#parser = new EmptyParser())
     }
-    if (!this.headers['content-type']) {
-      this._error(new Error('bad content-type header, no content-type'))
-      return
+    if (lower.includes('octet-stream')) {
+      return this.#createStreamParser()
     }
-    if (this.headers['content-type'].match(/octet-stream/i)) {
-      this._initOctetStream()
-      return
+    if (lower.includes('urlencoded')) {
+      return this.#createUrlencodedParser()
     }
-    if (this.headers['content-type'].match(/urlencoded/i)) {
-      this._initUrlencoded()
-      return
-    }
-    if (this.headers['content-type'].match(/multipart/i)) {
-      var m = this.headers['content-type'].match(
-        /boundary=(?:"([^"]+)"|([^;]+))/i
-      )
-      if (m) {
-        this._initMultipart(m[1] || m[2])
+    if (lower.includes('multipart')) {
+      let matches = contentType.match(/boundary=(?:"([^"]+)"|([^;]+))/)
+      if (matches) {
+        this.#createMultipartParser(matches[1] || matches[2])
       } else {
-        this._error(new Error('bad content-type header, no multipart boundary'))
+        this.#handleError(new TypeError('unknow multipart boundary'))
       }
       return
     }
-    if (this.headers['content-type'].match(/json|appliation|plain|text/i)) {
-      this._initJSONencoded()
-      return
+    if (lower.match(/json|appliation|plain|text/)) {
+      return this.#createJsonParser()
     }
-    this._error(
-      new Error(
-        'bad content-type header, unknown content-type: ' +
-          this.headers['content-type']
-      )
-    )
-  }
-  _error(err) {
-    if (this.error || this.ended) {
-      return
-    }
-    this.error = err
-    this.emit('error', err)
-    if (Array.isArray(this.openedFiles)) {
-      this.openedFiles.forEach(function (file) {
-        file._writeStream.destroy()
-        setTimeout(fs.unlink, 0, file.path, function (error) {})
-      })
-    }
+    this.#handleError(new TypeError('unknown content-type: ' + contentType))
   }
   #parseContentLength() {
@@ -273,28 +196,14 @@ export default class IncomingForm extends EventEmitter {
     } else if (this.headers['transfer-encoding'] === undefined) {
       this.bytesExpected = 0
     }
-    if (this.bytesExpected !== null) {
-      this.emit('progress', this.bytesReceived, this.bytesExpected)
-    }
   }
-  _newParser() {
-    return new MultipartParser()
-  }
-  _initMultipart(boundary) {
-    this.type = 'multipart'
-    var parser = new MultipartParser(),
-      self = this,
-      headerField,
-      headerValue,
-      part
-    parser.initWithBoundary(boundary)
-    parser.onPartBegin = function () {
+  #createMultipartParser(boundary) {
+    let parser = new MultipartParser(boundary)
+    let headerField, headerValue, part
+    parser
+      .on('partBegin', function () {
         part = new Stream()
         part.readable = true
         part.headers = {}
@@ -307,17 +216,14 @@ export default class IncomingForm extends EventEmitter {
         headerField = ''
         headerValue = ''
-    }
-    parser.onHeaderField = function (b, start, end) {
-      headerField += b.toString(self.encoding, start, end)
-    }
-    parser.onHeaderValue = function (b, start, end) {
-      headerValue += b.toString(self.encoding, start, end)
-    }
-    parser.onHeaderEnd = function () {
+      })
+      .on('headerField', (b, start, end) => {
+        headerField += b.toString(this.encoding, start, end)
+      })
+      .on('headerValue', (b, start, end) => {
+        headerValue += b.toString(this.encoding, start, end)
+      })
+      .on('headerEnd', () => {
         headerField = headerField.toLowerCase()
         part.headers[headerField] = headerValue
@@ -327,7 +233,7 @@ export default class IncomingForm extends EventEmitter {
           part.name = m[1]
         }
-        part.filename = self._fileName(headerValue)
+        part.filename = this._fileName(headerValue)
       } else if (headerField == 'content-type') {
         part.mime = headerValue
       } else if (headerField == 'content-transfer-encoding') {
@@ -336,24 +242,24 @@ export default class IncomingForm extends EventEmitter {
         headerField = ''
         headerValue = ''
-    }
-    parser.onHeadersEnd = function () {
+      })
+      .on('headersEnd', () => {
         switch (part.transferEncoding) {
           case 'binary':
           case '7bit':
           case '8bit':
-            parser.onPartData = function (b, start, end) {
+            parser
+              .on('partData', function (b, start, end) {
                 part.emit('data', b.slice(start, end))
-            }
-            parser.onPartEnd = function () {
+              })
+              .on('partEnd', function () {
                 part.emit('end')
-            }
+              })
             break
           case 'base64':
-            parser.onPartData = function (b, start, end) {
+            parser
+              .on('partData', function (b, start, end) {
                 part.transferBuffer += b.slice(start, end).toString('ascii')
                 /*
@@ -365,30 +271,34 @@ export default class IncomingForm extends EventEmitter {
                 var offset = parseInt(part.transferBuffer.length / 4, 10) * 4
                 part.emit(
                   'data',
-                  Buffer.from(part.transferBuffer.substring(0, offset), 'base64')
+                  Buffer.from(
+                    part.transferBuffer.substring(0, offset),
+                    'base64'
+                  )
                 )
                 part.transferBuffer = part.transferBuffer.substring(offset)
-            }
-            parser.onPartEnd = function () {
+              })
+              .on('partEnd', function () {
                 part.emit('data', Buffer.from(part.transferBuffer, 'base64'))
                 part.emit('end')
-            }
+              })
             break
           default:
-            return self._error(new Error('unknown transfer-encoding'))
+            return this.#handleError(new Error('unknown transfer-encoding'))
         }
-        self.onPart(part)
+        this.#handlePart(part)
+      })
+      .on('end', () => {
+        if (this.#pending) {
+          setTimeout(_ => parser.emit('end'))
+        } else {
+          this.#handleEnd()
+        }
+      })
-    parser.onEnd = function () {
-      self.ended = true
-      self._maybeEnd()
-    }
-    this._parser = parser
+    this.#parser = parser
   }
   _fileName(headerValue) {
@@ -403,123 +313,72 @@ export default class IncomingForm extends EventEmitter {
     return filename
   }
-  _initUrlencoded() {
-    this.type = 'urlencoded'
-    var parser = new QuerystringParser(this.maxFields)
-    parser.onField = (key, val) => {
-      this.emit('field', key, val)
-    }
-    parser.onEnd = () => {
-      this.ended = true
-      this._maybeEnd()
-    }
-    this._parser = parser
-  }
-  _initOctetStream() {
-    this.type = 'octet-stream'
-    var filename = this.headers['x-file-name']
-    var mime = this.headers['content-type']
-    var file = new File({
-      path: this._uploadPath(filename),
-      name: filename,
-      type: mime
-    })
-    this.emit('fileBegin', filename, file)
-    file.open()
-    this._flushing++
-    var self = this
-    self._parser = new OctetParser()
-    //Keep track of writes that haven't finished so we don't emit the file before it's done being written
-    var outstandingWrites = 0
-    self._parser.on('data', function (buffer) {
-      self.pause()
-      outstandingWrites++
-      file.write(buffer, function () {
-        outstandingWrites--
-        self.resume()
-        if (self.ended) {
-          self._parser.emit('doneWritingFile')
-        }
-      })
-    })
-    self._parser.on('end', function () {
-      self._flushing--
-      self.ended = true
-      var done = function () {
-        file.end(function () {
-          self.emit('file', 'file', file)
-          self._maybeEnd()
-        })
-      }
-      if (outstandingWrites === 0) {
-        done()
-      } else {
-        self._parser.once('doneWritingFile', done)
-      }
-    })
-  }
+  #createUrlencodedParser() {
+    this.#parser = new UrlencodedParser()
+    this.#parser
+      .on('field', fields => this.emit('field', false, fields))
+      .on('end', () => this.#handleEnd())
+  }
+  #createStreamParser() {
+    let filename = this.headers['x-file-name']
+    let mime = this.headers['x-file-type']
+    this.#parser = new OctetParser(filename, mime, randomPath(this.uploadDir))
-  _initJSONencoded() {
-    this.type = 'json'
-    var parser = new JSONParser(),
-      self = this
     if (this.bytesExpected) {
-      parser.initWithLength(this.bytesExpected)
+      this.#parser.initLength(this.bytesExpected)
     }
-    parser.onField = function (key, val) {
-      self.emit('field', key, val)
-    }
-    parser.onEnd = function () {
-      self.ended = true
-      self._maybeEnd()
-    }
-    this._parser = parser
+    this.#parser
+      .on('file', file => {
+        this.emit('file', false, file)
+      })
+      .on('end', () => this.#handleEnd())
+      .on('error', err => this.#handleError(err))
+  }
+  #createJsonParser() {
+    this.#parser = new JSONParser()
+    if (this.bytesExpected) {
+      this.#parser.initLength(this.bytesExpected)
+    }
+    this.#parser
+      .on('field', (key, val) => {
+        this.emit('field', key, val)
+      })
+      .on('end', () => this.#handleEnd())
+      .on('error', err => this.#handleError(err))
   }
-  _uploadPath(filename) {
-    var name = 'upload_'
-    var buf = crypto.randomBytes(16)
-    for (var i = 0; i < buf.length; ++i) {
-      name += ('0' + buf[i].toString(16)).slice(-2)
-    }
-    if (this.keepExtensions) {
-      var ext = path.extname(filename)
-      ext = ext.replace(/(\.[a-z0-9]+).*/i, '$1')
-      name += ext
-    }
-    return path.join(this.uploadDir, name)
-  }
-  _maybeEnd() {
-    if (!this.ended || this._flushing || this.error) {
-      return
-    }
+  #clearUploads() {
+    while (this.#openedFiles.length) {
+      let file = this.#openedFiles.pop()
+      file._writeStream.destroy()
+      setTimeout(_ => {
+        try {
+          fs.unlink(file.path)
+        } catch (e) {}
+      })
+    }
+  }
+  #handleError(err) {
+    if (this.#error || this.#ended) {
+      return
+    }
+    this.error = true
+    this.emit('error', err)
+  }
+  #handleEnd() {
+    if (this.#ended || this.#error) {
+      return
+    }
+    this.#ended = true
     this.emit('end')
   }
 }
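
Note: after this refactor IncomingForm pushes everything out as events ('progress', 'field', 'file', 'aborted', 'error', 'end'); _error/_maybeEnd and the per-type _init* methods are gone. A sketch of the consuming side, mirroring how the Request class in the first hunk listens to it (the import path is assumed, `req` is a node:http IncomingMessage, and how the content-type sniffing is kicked off is not part of this diff):

import IncomingForm from './lib/incoming_form.js' // hypothetical path

let form = new IncomingForm(req, { uploadDir: '/tmp', encoding: 'utf-8' })
form
  .on('progress', (received, expected) => console.log(received, '/', expected))
  .on('field', (name, value) => {
    // name === false means a whole urlencoded/json payload arrived at once
  })
  .on('file', (name, file) => {
    // a File instance; name === false for raw octet-stream bodies
  })
  .on('error', err => console.error(err))
  .on('end', () => console.log('done'))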


@@ -1,33 +1,36 @@
-export class JSONParser {
-  data = Buffer.from('')
-  bytesWritten = 0
-  initWithLength(length) {
-    this.data = Buffer.alloc(length)
+import { EventEmitter } from 'node:events'
+
+export class JSONParser extends EventEmitter {
+  #buff = Buffer.from('')
+  #byteLen = 0
+  initLength(length) {
+    this.#byteLen = length
   }
   write(buffer) {
-    if (this.data.length >= this.bytesWritten + buffer.length) {
-      buffer.copy(this.data, this.bytesWritten)
-    } else {
-      this.data = Buffer.concat([this.data, buffer])
-    }
-    this.bytesWritten += buffer.length
-    return buffer.length
+    this.#buff = Buffer.concat([this.#buff, buffer])
   }
   end() {
-    var data = this.data.toString('utf8')
-    var fields
+    if (this.#buff.length === this.#byteLen) {
+      let data = this.#buff.toString()
+      let fields = data
       try {
         fields = JSON.parse(data)
       } catch (e) {
-      fields = Function(`try{return ${data}}catch(e){}`)() || data
+        try{
+          // Non-standard JSON syntax; fall back to parsing it with Function
+          fields = Function(`try{return ${data}}catch(e){}`)()
+        }catch(err){}
       }
-    this.onField(false, fields)
-    this.data = null
-    this.onEnd()
+      this.emit('field', false, fields)
+      this.emit('end')
+      this.#buff = null
+    } else {
+      this.emit('error', new Error(`The uploaded data is incomplete. Expected ${this.#byteLen}, Received ${this.#buff.length} .`))
+    }
   }
 }
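
Note: JSONParser is now an EventEmitter: feed it chunks with write(), call end(), and read the result from the 'field' event (the name argument is always false); if fewer bytes than initLength() announced were received, it emits 'error' instead. A standalone sketch with a made-up payload:

import { JSONParser } from './lib/json_parser.js' // hypothetical path

let body = Buffer.from('{"id": 1, "tags": ["a", "b"]}')
let parser = new JSONParser()
parser.initLength(body.length) // normally taken from content-length
parser
  .on('field', (_, fields) => console.log(fields)) // { id: 1, tags: [ 'a', 'b' ] }
  .on('error', err => console.error(err))
  .on('end', () => console.log('json parsed'))
parser.write(body)
parser.end()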


@@ -1,5 +1,7 @@
+import { EventEmitter } from 'node:events'
+
 var s = 0,
-  S = {
+  STATE_DICT = {
     PARSER_UNINITIALIZED: s++,
     START: s++,
     START_BOUNDARY: s++,
@@ -30,38 +32,43 @@
     return c | 0x20
   }
-export class MultipartParser {
+function stateToString(stateNumber) {
+  for (let state in STATE_DICT) {
+    let number = STATE_DICT[state]
+    if (number === stateNumber) {
+      return state
+    }
+  }
+}
+export class MultipartParser extends EventEmitter {
   boundary = null
   boundaryChars = null
   lookbehind = null
-  state = S.PARSER_UNINITIALIZED
+  state = STATE_DICT.PARSER_UNINITIALIZED
   index = null
   flags = 0
-  static stateToString(stateNumber) {
-    for (var state in S) {
-      var number = S[state]
-      if (number === stateNumber) return state
-    }
-  }
-  initWithBoundary(str) {
+  constructor(str) {
+    super()
     this.boundary = Buffer.alloc(str.length + 4)
     this.boundary.write('\r\n--', 0)
     this.boundary.write(str, 4)
     this.lookbehind = Buffer.alloc(this.boundary.length + 8)
-    this.state = S.START
+    this.state = STATE_DICT.START
     this.boundaryChars = {}
-    for (var i = 0; i < this.boundary.length; i++) {
+    for (let i = 0; i < this.boundary.length; i++) {
       this.boundaryChars[this.boundary[i]] = true
     }
   }
   write(buffer) {
-    var self = this,
-      i = 0,
+    var i = 0,
       len = buffer.length,
       prevIndex = this.index,
       index = this.index,
@@ -74,48 +81,38 @@ export class MultipartParser {
       boundaryEnd = boundaryLength - 1,
       bufferLength = buffer.length,
       c,
-      cl,
-      mark = function (name) {
-        self[name + 'Mark'] = i
-      },
-      clear = function (name) {
-        delete self[name + 'Mark']
-      },
-      callback = function (name, buffer, start, end) {
-        if (start !== undefined && start === end) {
-          return
-        }
-        var callbackSymbol =
-          'on' + name.substr(0, 1).toUpperCase() + name.substr(1)
-        if (callbackSymbol in self) {
-          self[callbackSymbol](buffer, start, end)
-        }
+      cl
+    let mark = (name) => {
+        this[name + 'Mark'] = i
       },
-      dataCallback = function (name, clear) {
+      dataCallback = (name, clear) => {
         var markSymbol = name + 'Mark'
-        if (!(markSymbol in self)) {
-          return
-        }
-        if (!clear) {
-          callback(name, buffer, self[markSymbol], buffer.length)
-          self[markSymbol] = 0
+        if ((markSymbol in this)) {
+          if (clear) {
+            this.emit(name, buffer, this[markSymbol], i)
+            delete this[markSymbol]
         } else {
-          callback(name, buffer, self[markSymbol], i)
-          delete self[markSymbol]
+            this.emit(name, buffer, this[markSymbol], buffer.length)
+            this[markSymbol] = 0
         }
       }
+    }
+    // console.log('???? ', state, 'len: ', len);
     for (i = 0; i < len; i++) {
       c = buffer[i]
       switch (state) {
-        case S.PARSER_UNINITIALIZED:
+        case STATE_DICT.PARSER_UNINITIALIZED:
           return i
-        case S.START:
+        case STATE_DICT.START:
           index = 0
-          state = S.START_BOUNDARY
-        case S.START_BOUNDARY:
+          state = STATE_DICT.START_BOUNDARY
+        case STATE_DICT.START_BOUNDARY:
+          // console.log('=====>>>', index, c, boundary);
           if (index == boundary.length - 2) {
             if (c == HYPHEN) {
               flags |= F.LAST_BOUNDARY
@@ -126,13 +123,13 @@ export class MultipartParser {
             break
           } else if (index - 1 == boundary.length - 2) {
             if (flags & F.LAST_BOUNDARY && c == HYPHEN) {
-              callback('end')
-              state = S.END
+              this.emit('end')
+              state = STATE_DICT.END
               flags = 0
             } else if (!(flags & F.LAST_BOUNDARY) && c == LF) {
               index = 0
-              callback('partBegin')
-              state = S.HEADER_FIELD_START
+              this.emit('partBegin')
+              state = STATE_DICT.HEADER_FIELD_START
             } else {
               return i
             }
@@ -146,14 +143,16 @@ export class MultipartParser {
             index++
           }
           break
-        case S.HEADER_FIELD_START:
-          state = S.HEADER_FIELD
+        case STATE_DICT.HEADER_FIELD_START:
+          state = STATE_DICT.HEADER_FIELD
           mark('headerField')
           index = 0
-        case S.HEADER_FIELD:
+        case STATE_DICT.HEADER_FIELD:
           if (c == CR) {
-            clear('headerField')
-            state = S.HEADERS_ALMOST_DONE
+            delete this.headerFieldMark
+            state = STATE_DICT.HEADERS_ALMOST_DONE
             break
           }
@@ -168,7 +167,7 @@ export class MultipartParser {
             return i
           }
           dataCallback('headerField', true)
-          state = S.HEADER_VALUE_START
+          state = STATE_DICT.HEADER_VALUE_START
           break
         }
@@ -177,38 +176,44 @@ export class MultipartParser {
             return i
           }
           break
-        case S.HEADER_VALUE_START:
+        case STATE_DICT.HEADER_VALUE_START:
          if (c == SPACE) {
            break
          }
          mark('headerValue')
-          state = S.HEADER_VALUE
-        case S.HEADER_VALUE:
+          state = STATE_DICT.HEADER_VALUE
+        case STATE_DICT.HEADER_VALUE:
          if (c == CR) {
            dataCallback('headerValue', true)
-            callback('headerEnd')
-            state = S.HEADER_VALUE_ALMOST_DONE
+            this.emit('headerEnd')
+            state = STATE_DICT.HEADER_VALUE_ALMOST_DONE
          }
          break
-        case S.HEADER_VALUE_ALMOST_DONE:
+        case STATE_DICT.HEADER_VALUE_ALMOST_DONE:
          if (c != LF) {
            return i
          }
-          state = S.HEADER_FIELD_START
+          state = STATE_DICT.HEADER_FIELD_START
          break
-        case S.HEADERS_ALMOST_DONE:
+        case STATE_DICT.HEADERS_ALMOST_DONE:
          if (c != LF) {
            return i
          }
-          callback('headersEnd')
-          state = S.PART_DATA_START
+          this.emit('headersEnd')
+          state = STATE_DICT.PART_DATA_START
          break
-        case S.PART_DATA_START:
-          state = S.PART_DATA
+        case STATE_DICT.PART_DATA_START:
+          state = STATE_DICT.PART_DATA
          mark('partData')
-        case S.PART_DATA:
+        case STATE_DICT.PART_DATA:
          prevIndex = index
          if (index === 0) {
@@ -247,16 +252,16 @@ export class MultipartParser {
            if (c == LF) {
              // unset the PART_BOUNDARY flag
              flags &= ~F.PART_BOUNDARY
-              callback('partEnd')
-              callback('partBegin')
-              state = S.HEADER_FIELD_START
+              this.emit('partEnd')
+              this.emit('partBegin')
+              state = STATE_DICT.HEADER_FIELD_START
              break
            }
          } else if (flags & F.LAST_BOUNDARY) {
            if (c == HYPHEN) {
-              callback('partEnd')
-              callback('end')
-              state = S.END
+              this.emit('partEnd')
+              this.emit('end')
+              state = STATE_DICT.END
              flags = 0
            } else {
              index = 0
@@ -273,7 +278,7 @@ export class MultipartParser {
          } else if (prevIndex > 0) {
            // if our boundary turned out to be rubbish, the captured lookbehind
            // belongs to partData
-            callback('partData', lookbehind, 0, prevIndex)
+            this.emit('partData', lookbehind, 0, prevIndex)
            prevIndex = 0
            mark('partData')
@@ -283,8 +288,10 @@ export class MultipartParser {
          }
          break
-        case S.END:
+        case STATE_DICT.END:
          break
        default:
          return i
      }
@@ -298,24 +305,17 @@ export class MultipartParser {
    this.state = state
    this.flags = flags
-    return len
  }
  end() {
-    var callback = function (self, name) {
-      var callbackSymbol =
-        'on' + name.substr(0, 1).toUpperCase() + name.substr(1)
-      if (callbackSymbol in self) {
-        self[callbackSymbol]()
-      }
-    }
    if (
-      (this.state == S.HEADER_FIELD_START && this.index === 0) ||
-      (this.state == S.PART_DATA && this.index == this.boundary.length)
+      (this.state === STATE_DICT.HEADER_FIELD_START && this.index === 0) ||
+      (this.state === STATE_DICT.PART_DATA && this.index == this.boundary.length)
    ) {
-      callback(this, 'partEnd')
-      callback(this, 'end')
-    } else if (this.state != S.END) {
+      this.emit('end')
+    } else if (this.state !== STATE_DICT.END) {
      return new Error(
        'MultipartParser.end(): stream ended unexpectedly: ' + this.explain()
      )
@@ -323,6 +323,6 @@ export class MultipartParser {
    }
  }
  explain() {
-    return 'state = ' + MultipartParser.stateToString(this.state)
+    return 'state = ' + stateToString(this.state)
  }
 }
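
Note: the parser now takes the boundary in its constructor (initWithBoundary is gone) and reports through events instead of the old onPartBegin/onHeaderField/... callback properties. A minimal wiring sketch with a hand-built payload; only the event names and the write()/end() calls come from this file:

import { MultipartParser } from './lib/multipart_parser.js' // hypothetical path

let boundary = '----demo'
let payload = Buffer.from(
  `--${boundary}\r\n` +
    'content-disposition: form-data; name="title"\r\n\r\n' +
    `hello\r\n--${boundary}--\r\n`
)
let parser = new MultipartParser(boundary)
parser
  .on('partBegin', () => console.log('part begin'))
  .on('headerField', (b, start, end) => console.log('header:', b.toString('utf8', start, end)))
  .on('headerValue', (b, start, end) => console.log('value:', b.toString('utf8', start, end)))
  .on('partData', (b, start, end) => console.log('data:', b.toString('utf8', start, end)))
  .on('partEnd', () => console.log('part end'))
  .on('end', () => console.log('all parts done'))
parser.write(payload)
parser.end()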


@@ -1,11 +1,55 @@
-import { EventEmitter } from 'events'
+/**
+ * {}
+ * @author yutent<yutent.io@gmail.com>
+ * @date 2023/10/27 14:23:22
+ */
+import { EventEmitter } from 'node:events'
+import File from './file.js'
 export class OctetParser extends EventEmitter {
-  write(buffer) {
-    this.emit('data', buffer)
-    return buffer.length
+  #file = null
+  #byteLen = 0
+  #wroteLen = 0
+  constructor(name, type, path) {
+    super()
+    this.#file = new File({ path, name, type })
+    this.#file.open()
   }
+  initLength(length) {
+    this.#byteLen = length
+  }
+  write(buffer) {
+    this.#file.write(buffer)
+    this.#wroteLen += buffer.length
+  }
+  end() {
+    this.#file.end(_ => {
+      if (this.#wroteLen === this.#byteLen) {
+        this.emit('file', this.#file)
+        this.emit('end')
+      } else {
+        this.emit(
+          'error',
+          new Error(
+            `The uploaded data is incomplete. Expected ${
+              this.#byteLen
+            }, Received ${this.#wroteLen} .`
+          )
+        )
+      }
+    })
+  }
+}
+
+export class EmptyParser extends EventEmitter {
+  write() {}
   end() {
     this.emit('end')
   }
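
Note: OctetParser no longer just re-emits 'data'; it owns a File built from the constructor arguments, counts the bytes written, and only emits 'file' followed by 'end' when the total matches what initLength() announced (otherwise 'error'). EmptyParser is the new stand-in for bodyless requests. A small sketch with made-up arguments:

import { OctetParser } from './lib/octet_parser.js' // hypothetical path

let chunk = Buffer.from('raw bytes')
let parser = new OctetParser('blob.bin', 'application/octet-stream', '/tmp/upload_demo')
parser.initLength(chunk.length) // normally the request's content-length
parser
  .on('file', file => console.log('stored at', file.path))
  .on('end', () => console.log('upload complete'))
  .on('error', err => console.error(err))
parser.write(chunk)
parser.end()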


@@ -1,27 +0,0 @@
-// This is a buffering parser, not quite as nice as the multipart one.
-// If I find time I'll rewrite this to be fully streaming as well
-import { parse } from 'node:querystring'
-
-export class QuerystringParser {
-  constructor(maxKeys) {
-    this.maxKeys = maxKeys
-    this.buffer = ''
-  }
-  write(buffer) {
-    this.buffer += buffer.toString('ascii')
-    return buffer.length
-  }
-  end() {
-    var fields = parse(this.buffer, '&', '=', {
-      maxKeys: this.maxKeys
-    })
-    for (var field in fields) {
-      this.onField(field, fields[field])
-    }
-    this.buffer = ''
-    this.onEnd()
-  }
-}

lib/urlencoded_parser.js (new file)

@@ -0,0 +1,27 @@
+/**
+ * {}
+ * @author yutent<yutent.io@gmail.com>
+ * @date 2023/10/27 12:14:05
+ */
+import { parse } from 'node:querystring'
+import { EventEmitter } from 'node:events'
+
+export class UrlencodedParser extends EventEmitter {
+  #buff = Buffer.from('')
+  write(buffer) {
+    this.#buff = Buffer.concat([this.#buff, buffer])
+  }
+  end() {
+    let data = this.#buff.toString()
+    let fields = parse(data)
+    this.#buff = null
+    this.emit('field', fields)
+    this.emit('end')
+  }
+}
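
Note: unlike the removed QuerystringParser, this parser buffers the raw bytes, parses them once in end(), and hands the whole object to a single 'field' event (there is no maxKeys option any more). A standalone sketch:

import { UrlencodedParser } from './lib/urlencoded_parser.js'

let parser = new UrlencodedParser()
parser
  .on('field', fields => console.log(fields)) // { name: 'tom', age: '18' }
  .on('end', () => console.log('parsed'))
parser.write(Buffer.from('name=tom&age='))
parser.write(Buffer.from('18'))
parser.end()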