diff --git a/index.js b/index.js index 1110b9b..8cd60bf 100644 --- a/index.js +++ b/index.js @@ -14,11 +14,11 @@ import PATH from 'path' const DEFAULT_FORM_TYPE = 'application/x-www-form-urlencoded' -var __dirname = PATH.dirname(URL.fileURLToPath(import.meta.url)) +const __dirname = PATH.dirname(URL.fileURLToPath(import.meta.url)) -var tmpdir = PATH.resolve(__dirname, '.tmp/') -var encode = encodeURIComponent -var decode = decodeURIComponent +const tmpdir = PATH.resolve(__dirname, '.tmp/') +const encode = encodeURIComponent +const decode = decodeURIComponent if (fs.isdir(tmpdir)) { fs.rm(tmpdir, true) @@ -44,7 +44,6 @@ export default class Request { hideProperty(this, '__GET__', null) hideProperty(this, '__POST__', null) hideProperty(this, '__COOKIE__', parseCookie(this.header('cookie') || '')) - this.__fixUrl() } diff --git a/lib/cookie.js b/lib/cookie.js index 4c10d6d..2377ce7 100644 --- a/lib/cookie.js +++ b/lib/cookie.js @@ -4,16 +4,16 @@ */ // var KEY_REGEXP = /^[\u0009\u0020-\u007e\u0080-\u00ff]+$/ -var SPLIT_REGEXP = /; */ +const SPLIT_REGEXP = /; */ // var encode = encodeURIComponent -var decode = decodeURIComponent +const decode = decodeURIComponent /** * [parse 格式化字符串] */ export function parseCookie(str) { - var obj = {} - var pairs + let obj = {} + let pairs if (typeof str !== 'string') { return {} @@ -27,8 +27,8 @@ export function parseCookie(str) { continue } - var key = item[0].trim() - var val = item[1].trim() + let key = item[0].trim() + let val = item[1].trim() obj[key] = decode(val) } diff --git a/lib/file.js b/lib/file.js index f569083..89393cb 100644 --- a/lib/file.js +++ b/lib/file.js @@ -1,70 +1,61 @@ -import util from 'util' -import { WriteStream } from 'fs' -import { EventEmitter } from 'events' -import crypto from 'crypto' +import { WriteStream } from 'node:fs' +import { EventEmitter } from 'node:events' -export default function File(properties) { - EventEmitter.call(this) - this.size = 0 - this.path = null - this.name = null - this.type = null - this.hash = null - this.lastModifiedDate = null +export default class File extends EventEmitter { - this._writeStream = null + #stream = null - for (var key in properties) { - this[key] = properties[key] + size = 0 + path = null + name = null + type = null + lastModifiedDate = null + + constructor(props = {}){ + super() + + for (var key in props) { + this[key] = props[key] + } } - if (typeof this.hash === 'string') { - this.hash = crypto.createHash(properties.hash) - } else { - this.hash = null + open() { + this.#stream = new WriteStream(this.path) + } + + toJSON() { + return { + size: this.size, + path: this.path, + name: this.name, + type: this.type, + mtime: this.lastModifiedDate, + length: this.length, + filename: this.filename, + mime: this.mime + } + } + + write(buffer, cb) { + + + this.#stream.write(buffer, _ =>{ + this.lastModifiedDate = new Date() + this.size += buffer.length + this.emit('progress', this.size) + cb() + }) + } + + end(cb) { + + + this.#stream.end(() => { + this.emit('end') + cb() + }) } } -util.inherits(File, EventEmitter) -File.prototype.open = function() { - this._writeStream = new WriteStream(this.path) -} - -File.prototype.toJSON = function() { - return { - size: this.size, - path: this.path, - name: this.name, - type: this.type, - mtime: this.lastModifiedDate, - length: this.length, - filename: this.filename, - mime: this.mime - } -} - -File.prototype.write = function(buffer, cb) { - var self = this - if (self.hash) { - self.hash.update(buffer) - } - this._writeStream.write(buffer, function() 
{ - self.lastModifiedDate = new Date() - self.size += buffer.length - self.emit('progress', self.size) - cb() - }) -} - -File.prototype.end = function(cb) { - var self = this - if (self.hash) { - self.hash = self.hash.digest('hex') - } - this._writeStream.end(function() { - self.emit('end') - cb() - }) -} diff --git a/lib/index.js b/lib/index.js index c49621d..8df205c 100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,245 +1,17 @@ -import crypto from 'crypto' -import fs from 'fs' -import util from 'util' -import path from 'path' +import crypto from 'node:crypto' +import fs from 'node:fs' +import util from 'node:util' +import path from 'node:path' import File from './file.js' -import { EventEmitter } from 'events' -import { Stream } from 'stream' -import { StringDecoder } from 'string_decoder' +import { EventEmitter } from 'node:events' +import { Stream } from 'node:stream' +import { StringDecoder } from 'node:string_decoder' import { MultipartParser } from './multipart_parser.js' import { QuerystringParser } from './querystring_parser.js' import { OctetParser } from './octet_parser.js' import { JSONParser } from './json_parser.js' -export default function IncomingForm(opts) { - EventEmitter.call(this) - - opts = opts || {} - - this.error = null - this.ended = false - - this.maxFields = opts.maxFields || 1000 - this.maxFieldsSize = opts.maxFieldsSize || 2 * 1024 * 1024 - this.keepExtensions = opts.keepExtensions || false - this.uploadDir = opts.uploadDir - this.encoding = opts.encoding || 'utf-8' - this.headers = null - this.type = null - this.hash = opts.hash || false - this.multiples = opts.multiples || false - - this.bytesReceived = null - this.bytesExpected = null - - this._parser = null - this._flushing = 0 - this._fieldsSize = 0 - this.openedFiles = [] -} - -util.inherits(IncomingForm, EventEmitter) - -IncomingForm.prototype.parse = function(req, cb) { - this.pause = function() { - try { - req.pause() - } catch (err) { - // the stream was destroyed - if (!this.ended) { - // before it was completed, crash & burn - this._error(err) - } - return false - } - return true - } - - this.resume = function() { - try { - req.resume() - } catch (err) { - // the stream was destroyed - if (!this.ended) { - // before it was completed, crash & burn - this._error(err) - } - return false - } - - return true - } - - // Setup callback first, so we don't miss anything from data events emitted - // immediately. - if (cb) { - var fields = {}, - files = {} - this.on('field', function(name, value) { - fields[name] = value - }) - .on('file', function(name, file) { - if (this.multiples) { - if (files[name]) { - if (!Array.isArray(files[name])) { - files[name] = [files[name]] - } - files[name].push(file) - } else { - files[name] = file - } - } else { - files[name] = file - } - }) - .on('error', function(err) { - cb(err, fields, files) - }) - .on('end', function() { - cb(null, fields, files) - }) - } - - // Parse headers and setup the parser, ready to start listening for data. - this.writeHeaders(req.headers) - - // Start listening for data. 
- var self = this - req - .on('error', function(err) { - self._error(err) - }) - .on('aborted', function() { - self.emit('aborted') - self._error(new Error('Request aborted')) - }) - .on('data', function(buffer) { - self.write(buffer) - }) - .on('end', function() { - if (self.error) { - return - } - - var err = self._parser.end() - if (err) { - self._error(err) - } - }) - - return this -} - -IncomingForm.prototype.writeHeaders = function(headers) { - this.headers = headers - this._parseContentLength() - this._parseContentType() -} - -IncomingForm.prototype.write = function(buffer) { - if (this.error) { - return - } - if (!this._parser) { - this._error(new Error('uninitialized parser')) - return - } - - this.bytesReceived += buffer.length - this.emit('progress', this.bytesReceived, this.bytesExpected) - - var bytesParsed = this._parser.write(buffer) - if (bytesParsed !== buffer.length) { - this._error( - new Error( - 'parser error, ' + - bytesParsed + - ' of ' + - buffer.length + - ' bytes parsed' - ) - ) - } - - return bytesParsed -} - -IncomingForm.prototype.pause = function() { - // this does nothing, unless overwritten in IncomingForm.parse - return false -} - -IncomingForm.prototype.resume = function() { - // this does nothing, unless overwritten in IncomingForm.parse - return false -} - -IncomingForm.prototype.onPart = function(part) { - // this method can be overwritten by the user - this.handlePart(part) -} - -IncomingForm.prototype.handlePart = function(part) { - var self = this - - if (part.filename === undefined) { - var value = '', - decoder = new StringDecoder(this.encoding) - - part.on('data', function(buffer) { - self._fieldsSize += buffer.length - if (self._fieldsSize > self.maxFieldsSize) { - self._error( - new Error( - 'maxFieldsSize exceeded, received ' + - self._fieldsSize + - ' bytes of field data' - ) - ) - return - } - value += decoder.write(buffer) - }) - - part.on('end', function() { - self.emit('field', part.name, value) - }) - return - } - - this._flushing++ - - var file = new File({ - path: this._uploadPath(part.filename), - name: part.filename, - type: part.mime, - hash: self.hash - }) - - this.emit('fileBegin', part.name, file) - - file.open() - this.openedFiles.push(file) - - part.on('data', function(buffer) { - if (buffer.length == 0) { - return - } - self.pause() - file.write(buffer, function() { - self.resume() - }) - }) - - part.on('end', function() { - file.end(function() { - self._flushing-- - self.emit('file', part.name, file) - self._maybeEnd() - }) - }) -} function dummyParser(self) { return { @@ -251,321 +23,555 @@ function dummyParser(self) { } } -IncomingForm.prototype._parseContentType = function() { - if (this.bytesExpected === 0) { - this._parser = dummyParser(this) - return +export default class IncomingForm{ + + constructor(opts = {}) { + + this.error = null + this.ended = false + + this.maxFields = opts.maxFields || 1000 + this.maxFieldsSize = opts.maxFieldsSize || 2 * 1024 * 1024 + this.keepExtensions = opts.keepExtensions || false + this.uploadDir = opts.uploadDir + this.encoding = opts.encoding || 'utf-8' + this.headers = null + this.type = null + this.hash = opts.hash || false + this.multiples = opts.multiples || false + + this.bytesReceived = null + this.bytesExpected = null + + this._parser = null + this._flushing = 0 + this._fieldsSize = 0 + this.openedFiles = [] + } - if (!this.headers['content-type']) { - this._error(new Error('bad content-type header, no content-type')) - return - } - if 
(this.headers['content-type'].match(/octet-stream/i)) { - this._initOctetStream() - return - } - - if (this.headers['content-type'].match(/urlencoded/i)) { - this._initUrlencoded() - return - } - - if (this.headers['content-type'].match(/multipart/i)) { - var m = this.headers['content-type'].match( - /boundary=(?:"([^"]+)"|([^;]+))/i - ) - if (m) { - this._initMultipart(m[1] || m[2]) - } else { - this._error(new Error('bad content-type header, no multipart boundary')) + + + parse(req, cb) { + this.pause = function() { + try { + req.pause() + } catch (err) { + // the stream was destroyed + if (!this.ended) { + // before it was completed, crash & burn + this._error(err) + } + return false + } + return true } - return + + this.resume = function() { + try { + req.resume() + } catch (err) { + // the stream was destroyed + if (!this.ended) { + // before it was completed, crash & burn + this._error(err) + } + return false + } + + return true + } + + // Setup callback first, so we don't miss anything from data events emitted + // immediately. + if (cb) { + var fields = {}, + files = {} + this.on('field', function(name, value) { + fields[name] = value + }) + .on('file', function(name, file) { + if (this.multiples) { + if (files[name]) { + if (!Array.isArray(files[name])) { + files[name] = [files[name]] + } + files[name].push(file) + } else { + files[name] = file + } + } else { + files[name] = file + } + }) + .on('error', function(err) { + cb(err, fields, files) + }) + .on('end', function() { + cb(null, fields, files) + }) + } + + // Parse headers and setup the parser, ready to start listening for data. + this.writeHeaders(req.headers) + + // Start listening for data. + var self = this + req + .on('error', function(err) { + self._error(err) + }) + .on('aborted', function() { + self.emit('aborted') + self._error(new Error('Request aborted')) + }) + .on('data', function(buffer) { + self.write(buffer) + }) + .on('end', function() { + if (self.error) { + return + } + + var err = self._parser.end() + if (err) { + self._error(err) + } + }) + + return this } - - if (this.headers['content-type'].match(/json|appliation|plain|text/i)) { - this._initJSONencoded() - return + + writeHeaders(headers) { + this.headers = headers + this._parseContentLength() + this._parseContentType() } - - this._error( - new Error( - 'bad content-type header, unknown content-type: ' + - this.headers['content-type'] - ) - ) -} - -IncomingForm.prototype._error = function(err) { - if (this.error || this.ended) { - return - } - - this.error = err - this.emit('error', err) - - if (Array.isArray(this.openedFiles)) { - this.openedFiles.forEach(function(file) { - file._writeStream.destroy() - setTimeout(fs.unlink, 0, file.path, function(error) {}) - }) - } -} - -IncomingForm.prototype._parseContentLength = function() { - this.bytesReceived = 0 - if (this.headers['content-length']) { - this.bytesExpected = parseInt(this.headers['content-length'], 10) - } else if (this.headers['transfer-encoding'] === undefined) { - this.bytesExpected = 0 - } - - if (this.bytesExpected !== null) { + + write(buffer) { + if (this.error) { + return + } + if (!this._parser) { + this._error(new Error('uninitialized parser')) + return + } + + this.bytesReceived += buffer.length this.emit('progress', this.bytesReceived, this.bytesExpected) - } -} - -IncomingForm.prototype._newParser = function() { - return new MultipartParser() -} - -IncomingForm.prototype._initMultipart = function(boundary) { - this.type = 'multipart' - - var parser = new MultipartParser(), - self 
= this, - headerField, - headerValue, - part - - parser.initWithBoundary(boundary) - - parser.onPartBegin = function() { - part = new Stream() - part.readable = true - part.headers = {} - part.name = null - part.filename = null - part.mime = null - - part.transferEncoding = 'binary' - part.transferBuffer = '' - - headerField = '' - headerValue = '' - } - - parser.onHeaderField = function(b, start, end) { - headerField += b.toString(self.encoding, start, end) - } - - parser.onHeaderValue = function(b, start, end) { - headerValue += b.toString(self.encoding, start, end) - } - - parser.onHeaderEnd = function() { - headerField = headerField.toLowerCase() - part.headers[headerField] = headerValue - - var m = headerValue.match(/\bname="([^"]+)"/i) - if (headerField == 'content-disposition') { - if (m) { - part.name = m[1] - } - - part.filename = self._fileName(headerValue) - } else if (headerField == 'content-type') { - part.mime = headerValue - } else if (headerField == 'content-transfer-encoding') { - part.transferEncoding = headerValue.toLowerCase() + + var bytesParsed = this._parser.write(buffer) + if (bytesParsed !== buffer.length) { + this._error( + new Error( + 'parser error, ' + + bytesParsed + + ' of ' + + buffer.length + + ' bytes parsed' + ) + ) } - - headerField = '' - headerValue = '' + + return bytesParsed } - - parser.onHeadersEnd = function() { - switch (part.transferEncoding) { - case 'binary': - case '7bit': - case '8bit': - parser.onPartData = function(b, start, end) { - part.emit('data', b.slice(start, end)) - } - - parser.onPartEnd = function() { - part.emit('end') - } - break - - case 'base64': - parser.onPartData = function(b, start, end) { - part.transferBuffer += b.slice(start, end).toString('ascii') - - /* - four bytes (chars) in base64 converts to three bytes in binary - encoding. So we should always work with a number of bytes that - can be divided by 4, it will result in a number of buytes that - can be divided vy 3. 
- */ - var offset = parseInt(part.transferBuffer.length / 4, 10) * 4 - part.emit( - 'data', - Buffer.from(part.transferBuffer.substring(0, offset), 'base64') + + pause() { + // this does nothing, unless overwritten in IncomingForm.parse + return false + } + + resume() { + // this does nothing, unless overwritten in IncomingForm.parse + return false + } + + onPart(part) { + // this method can be overwritten by the user + this.handlePart(part) + } + + handlePart(part) { + var self = this + + if (part.filename === undefined) { + var value = '', + decoder = new StringDecoder(this.encoding) + + part.on('data', function(buffer) { + self._fieldsSize += buffer.length + if (self._fieldsSize > self.maxFieldsSize) { + self._error( + new Error( + 'maxFieldsSize exceeded, received ' + + self._fieldsSize + + ' bytes of field data' + ) ) - part.transferBuffer = part.transferBuffer.substring(offset) + return } - - parser.onPartEnd = function() { - part.emit('data', Buffer.from(part.transferBuffer, 'base64')) - part.emit('end') - } - break - - default: - return self._error(new Error('unknown transfer-encoding')) + value += decoder.write(buffer) + }) + + part.on('end', function() { + self.emit('field', part.name, value) + }) + return } - - self.onPart(part) - } - - parser.onEnd = function() { - self.ended = true - self._maybeEnd() - } - - this._parser = parser -} - -IncomingForm.prototype._fileName = function(headerValue) { - var m = headerValue.match(/\bfilename="(.*?)"($|; )/i) - if (!m) return - - var filename = m[1].substr(m[1].lastIndexOf('\\') + 1) - filename = filename.replace(/%22/g, '"') - filename = filename.replace(/&#([\d]{4});/g, function(m, code) { - return String.fromCharCode(code) - }) - return filename -} - -IncomingForm.prototype._initUrlencoded = function() { - this.type = 'urlencoded' - - var parser = new QuerystringParser(this.maxFields) - - parser.onField = (key, val) => { - this.emit('field', key, val) - } - - parser.onEnd = () => { - this.ended = true - this._maybeEnd() - } - - this._parser = parser -} - -IncomingForm.prototype._initOctetStream = function() { - this.type = 'octet-stream' - var filename = this.headers['x-file-name'] - var mime = this.headers['content-type'] - - var file = new File({ - path: this._uploadPath(filename), - name: filename, - type: mime - }) - - this.emit('fileBegin', filename, file) - file.open() - - this._flushing++ - - var self = this - - self._parser = new OctetParser() - - //Keep track of writes that haven't finished so we don't emit the file before it's done being written - var outstandingWrites = 0 - - self._parser.on('data', function(buffer) { - self.pause() - outstandingWrites++ - - file.write(buffer, function() { - outstandingWrites-- - self.resume() - - if (self.ended) { - self._parser.emit('doneWritingFile') - } + + this._flushing++ + + var file = new File({ + path: this._uploadPath(part.filename), + name: part.filename, + type: part.mime, + hash: self.hash }) - }) - - self._parser.on('end', function() { - self._flushing-- - self.ended = true - - var done = function() { + + this.emit('fileBegin', part.name, file) + + file.open() + this.openedFiles.push(file) + + part.on('data', function(buffer) { + if (buffer.length == 0) { + return + } + self.pause() + file.write(buffer, function() { + self.resume() + }) + }) + + part.on('end', function() { file.end(function() { - self.emit('file', 'file', file) + self._flushing-- + self.emit('file', part.name, file) self._maybeEnd() }) + }) + } + + + _parseContentType() { + if (this.bytesExpected === 0) { + 
this._parser = dummyParser(this) + return } - - if (outstandingWrites === 0) { - done() - } else { - self._parser.once('doneWritingFile', done) + + if (!this.headers['content-type']) { + this._error(new Error('bad content-type header, no content-type')) + return } - }) + + if (this.headers['content-type'].match(/octet-stream/i)) { + this._initOctetStream() + return + } + + if (this.headers['content-type'].match(/urlencoded/i)) { + this._initUrlencoded() + return + } + + if (this.headers['content-type'].match(/multipart/i)) { + var m = this.headers['content-type'].match( + /boundary=(?:"([^"]+)"|([^;]+))/i + ) + if (m) { + this._initMultipart(m[1] || m[2]) + } else { + this._error(new Error('bad content-type header, no multipart boundary')) + } + return + } + + if (this.headers['content-type'].match(/json|appliation|plain|text/i)) { + this._initJSONencoded() + return + } + + this._error( + new Error( + 'bad content-type header, unknown content-type: ' + + this.headers['content-type'] + ) + ) + } + + _error(err) { + if (this.error || this.ended) { + return + } + + this.error = err + this.emit('error', err) + + if (Array.isArray(this.openedFiles)) { + this.openedFiles.forEach(function(file) { + file._writeStream.destroy() + setTimeout(fs.unlink, 0, file.path, function(error) {}) + }) + } + } + + _parseContentLength() { + this.bytesReceived = 0 + if (this.headers['content-length']) { + this.bytesExpected = parseInt(this.headers['content-length'], 10) + } else if (this.headers['transfer-encoding'] === undefined) { + this.bytesExpected = 0 + } + + if (this.bytesExpected !== null) { + this.emit('progress', this.bytesReceived, this.bytesExpected) + } + } + + _newParser() { + return new MultipartParser() + } + + _initMultipart(boundary) { + this.type = 'multipart' + + var parser = new MultipartParser(), + self = this, + headerField, + headerValue, + part + + parser.initWithBoundary(boundary) + + parser.onPartBegin = function() { + part = new Stream() + part.readable = true + part.headers = {} + part.name = null + part.filename = null + part.mime = null + + part.transferEncoding = 'binary' + part.transferBuffer = '' + + headerField = '' + headerValue = '' + } + + parser.onHeaderField = function(b, start, end) { + headerField += b.toString(self.encoding, start, end) + } + + parser.onHeaderValue = function(b, start, end) { + headerValue += b.toString(self.encoding, start, end) + } + + parser.onHeaderEnd = function() { + headerField = headerField.toLowerCase() + part.headers[headerField] = headerValue + + var m = headerValue.match(/\bname="([^"]+)"/i) + if (headerField == 'content-disposition') { + if (m) { + part.name = m[1] + } + + part.filename = self._fileName(headerValue) + } else if (headerField == 'content-type') { + part.mime = headerValue + } else if (headerField == 'content-transfer-encoding') { + part.transferEncoding = headerValue.toLowerCase() + } + + headerField = '' + headerValue = '' + } + + parser.onHeadersEnd = function() { + switch (part.transferEncoding) { + case 'binary': + case '7bit': + case '8bit': + parser.onPartData = function(b, start, end) { + part.emit('data', b.slice(start, end)) + } + + parser.onPartEnd = function() { + part.emit('end') + } + break + + case 'base64': + parser.onPartData = function(b, start, end) { + part.transferBuffer += b.slice(start, end).toString('ascii') + + /* + four bytes (chars) in base64 converts to three bytes in binary + encoding. 
So we should always work with a number of bytes that + can be divided by 4, it will result in a number of buytes that + can be divided vy 3. + */ + var offset = parseInt(part.transferBuffer.length / 4, 10) * 4 + part.emit( + 'data', + Buffer.from(part.transferBuffer.substring(0, offset), 'base64') + ) + part.transferBuffer = part.transferBuffer.substring(offset) + } + + parser.onPartEnd = function() { + part.emit('data', Buffer.from(part.transferBuffer, 'base64')) + part.emit('end') + } + break + + default: + return self._error(new Error('unknown transfer-encoding')) + } + + self.onPart(part) + } + + parser.onEnd = function() { + self.ended = true + self._maybeEnd() + } + + this._parser = parser + } + + _fileName(headerValue) { + var m = headerValue.match(/\bfilename="(.*?)"($|; )/i) + if (!m) return + + var filename = m[1].substr(m[1].lastIndexOf('\\') + 1) + filename = filename.replace(/%22/g, '"') + filename = filename.replace(/&#([\d]{4});/g, function(m, code) { + return String.fromCharCode(code) + }) + return filename + } + + _initUrlencoded() { + this.type = 'urlencoded' + + var parser = new QuerystringParser(this.maxFields) + + parser.onField = (key, val) => { + this.emit('field', key, val) + } + + parser.onEnd = () => { + this.ended = true + this._maybeEnd() + } + + this._parser = parser + } + + _initOctetStream() { + this.type = 'octet-stream' + var filename = this.headers['x-file-name'] + var mime = this.headers['content-type'] + + var file = new File({ + path: this._uploadPath(filename), + name: filename, + type: mime + }) + + this.emit('fileBegin', filename, file) + file.open() + + this._flushing++ + + var self = this + + self._parser = new OctetParser() + + //Keep track of writes that haven't finished so we don't emit the file before it's done being written + var outstandingWrites = 0 + + self._parser.on('data', function(buffer) { + self.pause() + outstandingWrites++ + + file.write(buffer, function() { + outstandingWrites-- + self.resume() + + if (self.ended) { + self._parser.emit('doneWritingFile') + } + }) + }) + + self._parser.on('end', function() { + self._flushing-- + self.ended = true + + var done = function() { + file.end(function() { + self.emit('file', 'file', file) + self._maybeEnd() + }) + } + + if (outstandingWrites === 0) { + done() + } else { + self._parser.once('doneWritingFile', done) + } + }) + } + + _initJSONencoded() { + this.type = 'json' + + var parser = new JSONParser(), + self = this + + if (this.bytesExpected) { + parser.initWithLength(this.bytesExpected) + } + + parser.onField = function(key, val) { + self.emit('field', key, val) + } + + parser.onEnd = function() { + self.ended = true + self._maybeEnd() + } + + this._parser = parser + } + + _uploadPath(filename) { + var name = 'upload_' + var buf = crypto.randomBytes(16) + for (var i = 0; i < buf.length; ++i) { + name += ('0' + buf[i].toString(16)).slice(-2) + } + + if (this.keepExtensions) { + var ext = path.extname(filename) + ext = ext.replace(/(\.[a-z0-9]+).*/i, '$1') + + name += ext + } + + return path.join(this.uploadDir, name) + } + + _maybeEnd() { + if (!this.ended || this._flushing || this.error) { + return + } + + this.emit('end') + } + } -IncomingForm.prototype._initJSONencoded = function() { - this.type = 'json' - - var parser = new JSONParser(), - self = this - - if (this.bytesExpected) { - parser.initWithLength(this.bytesExpected) - } - - parser.onField = function(key, val) { - self.emit('field', key, val) - } - - parser.onEnd = function() { - self.ended = true - self._maybeEnd() - } - - 
this._parser = parser -} - -IncomingForm.prototype._uploadPath = function(filename) { - var name = 'upload_' - var buf = crypto.randomBytes(16) - for (var i = 0; i < buf.length; ++i) { - name += ('0' + buf[i].toString(16)).slice(-2) - } - - if (this.keepExtensions) { - var ext = path.extname(filename) - ext = ext.replace(/(\.[a-z0-9]+).*/i, '$1') - - name += ext - } - - return path.join(this.uploadDir, name) -} - -IncomingForm.prototype._maybeEnd = function() { - if (!this.ended || this._flushing || this.error) { - return - } - - this.emit('end') -} diff --git a/lib/json_parser.js b/lib/json_parser.js index 73824d2..633dafe 100644 --- a/lib/json_parser.js +++ b/lib/json_parser.js @@ -1,33 +1,36 @@ -export function JSONParser() { - this.data = Buffer.from('') - this.bytesWritten = 0 -} +export class JSONParser { + + data = Buffer.from('') + bytesWritten = 0 -JSONParser.prototype.initWithLength = function(length) { - this.data = Buffer.alloc(length) -} - -JSONParser.prototype.write = function(buffer) { - if (this.data.length >= this.bytesWritten + buffer.length) { - buffer.copy(this.data, this.bytesWritten) - } else { - this.data = Buffer.concat([this.data, buffer]) + initWithLength(length) { + this.data = Buffer.alloc(length) } - this.bytesWritten += buffer.length - return buffer.length -} - -JSONParser.prototype.end = function() { - var data = this.data.toString('utf8') - var fields - try { - fields = JSON.parse(data) - } catch (e) { - fields = Function(`try{return ${data}}catch(e){}`)() || data + + write(buffer) { + if (this.data.length >= this.bytesWritten + buffer.length) { + buffer.copy(this.data, this.bytesWritten) + } else { + this.data = Buffer.concat([this.data, buffer]) + } + this.bytesWritten += buffer.length + return buffer.length + } + + end() { + var data = this.data.toString('utf8') + var fields + try { + fields = JSON.parse(data) + } catch (e) { + fields = Function(`try{return ${data}}catch(e){}`)() || data + } + + this.onField(false, fields) + this.data = null + + this.onEnd() } - this.onField(false, fields) - this.data = null - - this.onEnd() } + diff --git a/lib/multipart_parser.js b/lib/multipart_parser.js index 911ab88..4c18dbb 100644 --- a/lib/multipart_parser.js +++ b/lib/multipart_parser.js @@ -30,298 +30,302 @@ var s = 0, return c | 0x20 } -export function MultipartParser() { - this.boundary = null - this.boundaryChars = null - this.lookbehind = null - this.state = S.PARSER_UNINITIALIZED +export class MultipartParser { + boundary = null + boundaryChars = null + lookbehind = null + state = S.PARSER_UNINITIALIZED - this.index = null - this.flags = 0 -} + index = null + flags = 0 -MultipartParser.stateToString = function(stateNumber) { - for (var state in S) { - var number = S[state] - if (number === stateNumber) return state - } -} -MultipartParser.prototype.initWithBoundary = function(str) { - this.boundary = Buffer.alloc(str.length + 4) - this.boundary.write('\r\n--', 0) - this.boundary.write(str, 4) - this.lookbehind = Buffer.alloc(this.boundary.length + 8) - this.state = S.START - - this.boundaryChars = {} - for (var i = 0; i < this.boundary.length; i++) { - this.boundaryChars[this.boundary[i]] = true - } -} - -MultipartParser.prototype.write = function(buffer) { - var self = this, - i = 0, - len = buffer.length, - prevIndex = this.index, - index = this.index, - state = this.state, - flags = this.flags, - lookbehind = this.lookbehind, - boundary = this.boundary, - boundaryChars = this.boundaryChars, - boundaryLength = this.boundary.length, - boundaryEnd = 
boundaryLength - 1, - bufferLength = buffer.length, - c, - cl, - mark = function(name) { - self[name + 'Mark'] = i - }, - clear = function(name) { - delete self[name + 'Mark'] - }, - callback = function(name, buffer, start, end) { - if (start !== undefined && start === end) { - return - } - - var callbackSymbol = - 'on' + name.substr(0, 1).toUpperCase() + name.substr(1) - if (callbackSymbol in self) { - self[callbackSymbol](buffer, start, end) - } - }, - dataCallback = function(name, clear) { - var markSymbol = name + 'Mark' - if (!(markSymbol in self)) { - return - } - - if (!clear) { - callback(name, buffer, self[markSymbol], buffer.length) - self[markSymbol] = 0 - } else { - callback(name, buffer, self[markSymbol], i) - delete self[markSymbol] - } + static stateToString(stateNumber) { + for (var state in S) { + var number = S[state] + if (number === stateNumber) return state } + } - for (i = 0; i < len; i++) { - c = buffer[i] - switch (state) { - case S.PARSER_UNINITIALIZED: - return i - case S.START: - index = 0 - state = S.START_BOUNDARY - case S.START_BOUNDARY: - if (index == boundary.length - 2) { - if (c == HYPHEN) { - flags |= F.LAST_BOUNDARY - } else if (c != CR) { - return i - } - index++ - break - } else if (index - 1 == boundary.length - 2) { - if (flags & F.LAST_BOUNDARY && c == HYPHEN) { - callback('end') - state = S.END - flags = 0 - } else if (!(flags & F.LAST_BOUNDARY) && c == LF) { - index = 0 - callback('partBegin') - state = S.HEADER_FIELD_START - } else { - return i - } - break - } - if (c != boundary[index + 2]) { - index = -2 + initWithBoundary(str) { + this.boundary = Buffer.alloc(str.length + 4) + this.boundary.write('\r\n--', 0) + this.boundary.write(str, 4) + this.lookbehind = Buffer.alloc(this.boundary.length + 8) + this.state = S.START + + this.boundaryChars = {} + for (var i = 0; i < this.boundary.length; i++) { + this.boundaryChars[this.boundary[i]] = true + } + } + + write(buffer) { + var self = this, + i = 0, + len = buffer.length, + prevIndex = this.index, + index = this.index, + state = this.state, + flags = this.flags, + lookbehind = this.lookbehind, + boundary = this.boundary, + boundaryChars = this.boundaryChars, + boundaryLength = this.boundary.length, + boundaryEnd = boundaryLength - 1, + bufferLength = buffer.length, + c, + cl, + mark = function(name) { + self[name + 'Mark'] = i + }, + clear = function(name) { + delete self[name + 'Mark'] + }, + callback = function(name, buffer, start, end) { + if (start !== undefined && start === end) { + return } - if (c == boundary[index + 2]) { - index++ + + var callbackSymbol = + 'on' + name.substr(0, 1).toUpperCase() + name.substr(1) + if (callbackSymbol in self) { + self[callbackSymbol](buffer, start, end) } - break - case S.HEADER_FIELD_START: - state = S.HEADER_FIELD - mark('headerField') - index = 0 - case S.HEADER_FIELD: - if (c == CR) { - clear('headerField') - state = S.HEADERS_ALMOST_DONE - break + }, + dataCallback = function(name, clear) { + var markSymbol = name + 'Mark' + if (!(markSymbol in self)) { + return } - - index++ - if (c == HYPHEN) { - break + + if (!clear) { + callback(name, buffer, self[markSymbol], buffer.length) + self[markSymbol] = 0 + } else { + callback(name, buffer, self[markSymbol], i) + delete self[markSymbol] } - - if (c == COLON) { - if (index == 1) { - // empty header field - return i - } - dataCallback('headerField', true) - state = S.HEADER_VALUE_START - break - } - - cl = lower(c) - if (cl < A || cl > Z) { + } + + for (i = 0; i < len; i++) { + c = buffer[i] + switch 
(state) { + case S.PARSER_UNINITIALIZED: return i - } - break - case S.HEADER_VALUE_START: - if (c == SPACE) { - break - } - - mark('headerValue') - state = S.HEADER_VALUE - case S.HEADER_VALUE: - if (c == CR) { - dataCallback('headerValue', true) - callback('headerEnd') - state = S.HEADER_VALUE_ALMOST_DONE - } - break - case S.HEADER_VALUE_ALMOST_DONE: - if (c != LF) { - return i - } - state = S.HEADER_FIELD_START - break - case S.HEADERS_ALMOST_DONE: - if (c != LF) { - return i - } - - callback('headersEnd') - state = S.PART_DATA_START - break - case S.PART_DATA_START: - state = S.PART_DATA - mark('partData') - case S.PART_DATA: - prevIndex = index - - if (index === 0) { - // boyer-moore derrived algorithm to safely skip non-boundary data - i += boundaryEnd - while (i < bufferLength && !(buffer[i] in boundaryChars)) { - i += boundaryLength - } - i -= boundaryEnd - c = buffer[i] - } - - if (index < boundary.length) { - if (boundary[index] == c) { - if (index === 0) { - dataCallback('partData', true) + case S.START: + index = 0 + state = S.START_BOUNDARY + case S.START_BOUNDARY: + if (index == boundary.length - 2) { + if (c == HYPHEN) { + flags |= F.LAST_BOUNDARY + } else if (c != CR) { + return i } index++ - } else { - index = 0 - } - } else if (index == boundary.length) { - index++ - if (c == CR) { - // CR = part boundary - flags |= F.PART_BOUNDARY - } else if (c == HYPHEN) { - // HYPHEN = end boundary - flags |= F.LAST_BOUNDARY - } else { - index = 0 - } - } else if (index - 1 == boundary.length) { - if (flags & F.PART_BOUNDARY) { - index = 0 - if (c == LF) { - // unset the PART_BOUNDARY flag - flags &= ~F.PART_BOUNDARY - callback('partEnd') - callback('partBegin') - state = S.HEADER_FIELD_START - break - } - } else if (flags & F.LAST_BOUNDARY) { - if (c == HYPHEN) { - callback('partEnd') + break + } else if (index - 1 == boundary.length - 2) { + if (flags & F.LAST_BOUNDARY && c == HYPHEN) { callback('end') state = S.END flags = 0 + } else if (!(flags & F.LAST_BOUNDARY) && c == LF) { + index = 0 + callback('partBegin') + state = S.HEADER_FIELD_START + } else { + return i + } + break + } + + if (c != boundary[index + 2]) { + index = -2 + } + if (c == boundary[index + 2]) { + index++ + } + break + case S.HEADER_FIELD_START: + state = S.HEADER_FIELD + mark('headerField') + index = 0 + case S.HEADER_FIELD: + if (c == CR) { + clear('headerField') + state = S.HEADERS_ALMOST_DONE + break + } + + index++ + if (c == HYPHEN) { + break + } + + if (c == COLON) { + if (index == 1) { + // empty header field + return i + } + dataCallback('headerField', true) + state = S.HEADER_VALUE_START + break + } + + cl = lower(c) + if (cl < A || cl > Z) { + return i + } + break + case S.HEADER_VALUE_START: + if (c == SPACE) { + break + } + + mark('headerValue') + state = S.HEADER_VALUE + case S.HEADER_VALUE: + if (c == CR) { + dataCallback('headerValue', true) + callback('headerEnd') + state = S.HEADER_VALUE_ALMOST_DONE + } + break + case S.HEADER_VALUE_ALMOST_DONE: + if (c != LF) { + return i + } + state = S.HEADER_FIELD_START + break + case S.HEADERS_ALMOST_DONE: + if (c != LF) { + return i + } + + callback('headersEnd') + state = S.PART_DATA_START + break + case S.PART_DATA_START: + state = S.PART_DATA + mark('partData') + case S.PART_DATA: + prevIndex = index + + if (index === 0) { + // boyer-moore derrived algorithm to safely skip non-boundary data + i += boundaryEnd + while (i < bufferLength && !(buffer[i] in boundaryChars)) { + i += boundaryLength + } + i -= boundaryEnd + c = buffer[i] + } + + if (index < 
boundary.length) { + if (boundary[index] == c) { + if (index === 0) { + dataCallback('partData', true) + } + index++ + } else { + index = 0 + } + } else if (index == boundary.length) { + index++ + if (c == CR) { + // CR = part boundary + flags |= F.PART_BOUNDARY + } else if (c == HYPHEN) { + // HYPHEN = end boundary + flags |= F.LAST_BOUNDARY + } else { + index = 0 + } + } else if (index - 1 == boundary.length) { + if (flags & F.PART_BOUNDARY) { + index = 0 + if (c == LF) { + // unset the PART_BOUNDARY flag + flags &= ~F.PART_BOUNDARY + callback('partEnd') + callback('partBegin') + state = S.HEADER_FIELD_START + break + } + } else if (flags & F.LAST_BOUNDARY) { + if (c == HYPHEN) { + callback('partEnd') + callback('end') + state = S.END + flags = 0 + } else { + index = 0 + } } else { index = 0 } - } else { - index = 0 } - } - - if (index > 0) { - // when matching a possible boundary, keep a lookbehind reference - // in case it turns out to be a false lead - lookbehind[index - 1] = c - } else if (prevIndex > 0) { - // if our boundary turned out to be rubbish, the captured lookbehind - // belongs to partData - callback('partData', lookbehind, 0, prevIndex) - prevIndex = 0 - mark('partData') - - // reconsider the current character even so it interrupted the sequence - // it could be the beginning of a new sequence - i-- - } - - break - case S.END: - break - default: - return i + + if (index > 0) { + // when matching a possible boundary, keep a lookbehind reference + // in case it turns out to be a false lead + lookbehind[index - 1] = c + } else if (prevIndex > 0) { + // if our boundary turned out to be rubbish, the captured lookbehind + // belongs to partData + callback('partData', lookbehind, 0, prevIndex) + prevIndex = 0 + mark('partData') + + // reconsider the current character even so it interrupted the sequence + // it could be the beginning of a new sequence + i-- + } + + break + case S.END: + break + default: + return i + } + } + + dataCallback('headerField') + dataCallback('headerValue') + dataCallback('partData') + + this.index = index + this.state = state + this.flags = flags + + return len + } + + end() { + var callback = function(self, name) { + var callbackSymbol = 'on' + name.substr(0, 1).toUpperCase() + name.substr(1) + if (callbackSymbol in self) { + self[callbackSymbol]() + } + } + if ( + (this.state == S.HEADER_FIELD_START && this.index === 0) || + (this.state == S.PART_DATA && this.index == this.boundary.length) + ) { + callback(this, 'partEnd') + callback(this, 'end') + } else if (this.state != S.END) { + return new Error( + 'MultipartParser.end(): stream ended unexpectedly: ' + this.explain() + ) } } - - dataCallback('headerField') - dataCallback('headerValue') - dataCallback('partData') - - this.index = index - this.state = state - this.flags = flags - - return len -} - -MultipartParser.prototype.end = function() { - var callback = function(self, name) { - var callbackSymbol = 'on' + name.substr(0, 1).toUpperCase() + name.substr(1) - if (callbackSymbol in self) { - self[callbackSymbol]() - } - } - if ( - (this.state == S.HEADER_FIELD_START && this.index === 0) || - (this.state == S.PART_DATA && this.index == this.boundary.length) - ) { - callback(this, 'partEnd') - callback(this, 'end') - } else if (this.state != S.END) { - return new Error( - 'MultipartParser.end(): stream ended unexpectedly: ' + this.explain() - ) + + explain() { + return 'state = ' + MultipartParser.stateToString(this.state) } } -MultipartParser.prototype.explain = function() { - return 'state = ' + 
MultipartParser.stateToString(this.state) -} + diff --git a/lib/octet_parser.js b/lib/octet_parser.js index 87c0b27..db1fab6 100644 --- a/lib/octet_parser.js +++ b/lib/octet_parser.js @@ -1,17 +1,15 @@ import { EventEmitter } from 'events' -import util from 'util' -export function OctetParser() { - EventEmitter.call(this) -} -util.inherits(OctetParser, EventEmitter) - -OctetParser.prototype.write = function(buffer) { +export class OctetParser extends EventEmitter { +write(buffer) { this.emit('data', buffer) return buffer.length } -OctetParser.prototype.end = function() { +end () { this.emit('end') } +} + + diff --git a/lib/querystring_parser.js b/lib/querystring_parser.js index 72bde23..e502c77 100644 --- a/lib/querystring_parser.js +++ b/lib/querystring_parser.js @@ -1,6 +1,6 @@ // This is a buffering parser, not quite as nice as the multipart one. // If I find time I'll rewrite this to be fully streaming as well -import querystring from 'querystring' +import {parse} from 'node:querystring' export class QuerystringParser { constructor(maxKeys) { @@ -14,7 +14,7 @@ export class QuerystringParser { } end() { - var fields = querystring.parse(this.buffer, '&', '=', { + var fields = parse(this.buffer, '&', '=', { maxKeys: this.maxKeys }) for (var field in fields) {
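
Usage sketch (illustrative, not part of the patch): a minimal example of how the refactored classes might be exercised end to end. It assumes IncomingForm still exposes the EventEmitter-style on/emit interface that parse() and the 'field'/'file'/'error'/'end' events rely on; the HTTP server, port, and uploadDir value are placeholders, not anything defined in this diff.

import http from 'node:http'
import IncomingForm from './lib/index.js'

const server = http.createServer((req, res) => {
  // Options mirror the constructor defaults shown above; uploadDir is illustrative.
  const form = new IncomingForm({
    uploadDir: '/tmp',
    keepExtensions: true,
    multiples: true
  })

  // parse() collects the 'field' and 'file' events into a single callback.
  form.parse(req, (err, fields, files) => {
    if (err) {
      res.statusCode = 500
      res.end(String(err))
      return
    }
    res.setHeader('content-type', 'application/json')
    // File#toJSON() determines how each uploaded file is serialized here.
    res.end(JSON.stringify({ fields, files }))
  })
})

server.listen(3000)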