Optimize worker thread handling
parent ff5a225cc1
commit 50a54a152c
lib/prod.js (142 changed lines)
@@ -9,14 +9,36 @@ const IS_WIN = process.platform === 'win32'
const PREFIX = IS_WIN ? 'pages\\' : 'pages/'
// Too many threads actually hurts throughput, so cap at 4 (e.g. 8 cores -> 4, 2 cores -> 1)
const THREADS_NUM = os.cpus().length > 4 ? 4 : os.cpus().length - 1
// const THREADS_NUM = os.cpus().length
const __filename = normalize(import.meta.url.slice(IS_WIN ? 8 : 7))
const __dirname = dirname(__filename)
const WORKER_POOL = new Set() // pool of idle workers
const JOBS_QUEUE = [] // pending jobs

function readFile(file) {
  return (file && fs.cat(file)?.toString()) || ''
}

function doJob() {
  // console.log('<><><>', JOBS_QUEUE.length, WORKER_POOL.size)
  // if (JOBS_QUEUE.length === 0 && WORKER_POOL.size === THREADS_NUM) {
  //   process.exit()
  // }

  // pair queued jobs with idle workers until either runs out
  while (JOBS_QUEUE.length && WORKER_POOL.size) {
    let job = JOBS_QUEUE.shift()
    let worker = WORKER_POOL.values().next().value

    WORKER_POOL.delete(worker)

    worker.once('message', _ => {
      WORKER_POOL.add(worker)

      doJob()
    })
    worker.postMessage(job)
  }
}

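Reviewer note: doJob() together with WORKER_POOL and JOBS_QUEUE forms a small pool-and-queue dispatcher: idle workers live in a Set, pending jobs in an array, and every finished job returns its worker to the pool and re-runs the dispatcher. The following is only a minimal, self-contained sketch of that same pattern, not part of this commit; the inline worker script and the numeric jobs are placeholders.

// --- illustrative sketch (not part of this commit) ---
import os from 'node:os'
import { Worker } from 'node:worker_threads'

const THREADS = Math.max(1, Math.min(4, os.cpus().length - 1))
const pool = new Set()   // idle workers
const queue = []         // pending jobs
let pending = 0

// tiny CommonJS worker evaluated from a string; it just echoes the job back
const workerSrc = `
  const { parentPort } = require('node:worker_threads')
  parentPort.on('message', job => parentPort.postMessage('done: ' + job))
`

for (let i = 0; i < THREADS; i++) {
  pool.add(new Worker(workerSrc, { eval: true }))
}

function dispatch() {
  while (queue.length && pool.size) {
    const job = queue.shift()
    const worker = pool.values().next().value

    pool.delete(worker) // this worker is busy now

    worker.once('message', () => {
      pool.add(worker)  // idle again
      if (--pending === 0) {
        for (const w of pool) w.terminate() // all jobs done, let the process exit
      }
      dispatch()        // pull the next queued job, if any
    })
    worker.postMessage(job)
  }
}

for (let i = 0; i < 8; i++) queue.push(i)
pending = queue.length
dispatch()
// --- end sketch ---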
export default function compile(root = '', dist = '', conf = {}, verbose) {
  //
  const SOURCE_DIR = join(root, 'src')

@@ -91,54 +113,64 @@ export default function compile(root = '', dist = '', conf = {}, verbose) {
    })
  }

  if (IS_MPA) {
    // if the machine has more threads than there are pages, use the smaller number
    let num = Math.min(PAGES_KEYS.length, THREADS_NUM)
    let chunkSize = Math.ceil(PAGES_KEYS.length / num)

    for (let i = 0; i < num; i++) {
      let start = i * chunkSize
      let end = start + chunkSize
      let pages = PAGES_KEYS.slice(start, end)
      let chunk = new Map()

      for (let currentPage of pages) {
        let page = conf.pages[currentPage]
        let dir = dirname(page.entry)
        let files = new Map()

        fs.ls(dir, true).forEach(path => {
          if (fs.isdir(path)) {
            return
          }

          let name = path.slice(dir.length + 1)
          let ext = parse(name).ext

          if (ext === '') {
            return
          }

          list.delete(path)
          files.set(path, { name, path, ext })
        })

        chunk.set(currentPage, { page, files })
      }

  // create the worker pool
  for (let i = 0; i < THREADS_NUM; i++) {
    WORKER_POOL.add(
      new Worker(join(__dirname, './thread.js'), {
        workerData: {
          options,
          data: {
            chunk,
            verbose,
            dist,
            imports: conf.imports,
            timeStart: Date.now(),
            title: '正在解析currentPage'
          }
          verbose,
          dist,
          imports: conf.imports
        }
      })
    )
  }

  if (IS_MPA) {
    // if the machine has more threads than there are pages, use the smaller number
    // let num = Math.min(PAGES_KEYS.length, THREADS_NUM)
    // let chunkSize = Math.ceil(PAGES_KEYS.length / num)

    // for (let i = 0; i < num; i++) {
    //   let start = i * chunkSize
    //   let end = start + chunkSize
    //   let pages = PAGES_KEYS.slice(start, end)
    //   let chunk = new Map()

    for (let currentPage of PAGES_KEYS) {
      let page = conf.pages[currentPage]
      let dir = dirname(page.entry)
      let files = new Map()
      let chunk = new Map()

      fs.ls(dir, true).forEach(path => {
        if (fs.isdir(path)) {
          return
        }

        let name = path.slice(dir.length + 1)
        let ext = parse(name).ext

        if (ext === '') {
          return
        }

        list.delete(path)
        files.set(path, { name, path, ext })
      })
      chunk.set(currentPage, { page, files })
      JOBS_QUEUE.push(chunk)
      doJob()
    }

    //
    {
      let chunk = new Map()

      chunk.set('', { page: null, files: list })
      JOBS_QUEUE.push(chunk)
      doJob()
    }
  } else {
    // number of files each thread handles

@@ -169,28 +201,6 @@ export default function compile(root = '', dist = '', conf = {}, verbose) {
    }
  }

  if (IS_MPA) {
    console.log('\n正在解析公共依赖 ...')
    // compileFiles('', null, list)
    let chunk = new Map()

    chunk.set('', { page: null, files: [...list] })

    new Worker(join(__dirname, './thread.js'), {
      workerData: {
        options,
        data: {
          chunk,
          verbose,
          dist,
          imports: conf.imports,
          timeStart: Date.now(),
          title: '正在解析公共依赖'
        }
      }
    })
  }

  process.on('exit', _ => {
    console.log('\n页面处理完成, 耗时 %ss\n', (Date.now() - timeStart) / 1000)
  })
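Reviewer note: after this change, each job handed to a worker is a Map with a single entry keyed by the page name (or the empty string for the shared-dependency pass), whose value is { page, files }. The snippet below only illustrates that payload shape and how the main thread would queue it; the 'home' key, the entry path and the file names are made up, and it assumes the JOBS_QUEUE and doJob() defined in lib/prod.js above.

// --- illustrative sketch (not part of this commit) ---
// One job per page: a single-entry Map of pageName -> { page, files }.
const job = new Map()
job.set('home', {
  page: { entry: 'src/pages/home/index.js' },          // hypothetical page config
  files: new Map([['index.js', { name: 'index.js' }]]) // hypothetical file list
})
JOBS_QUEUE.push(job)
doJob()

// The shared-dependency pass reuses the same shape with an empty key:
const commonJob = new Map()
commonJob.set('', { page: null, files: new Map() })
JOBS_QUEUE.push(commonJob)
doJob()
// --- end sketch ---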

lib/thread.js
@@ -3,16 +3,19 @@
 * @author yutent<yutent.io@gmail.com>
 * @date 2023/06/14 16:15:39
 */
import { workerData } from 'node:worker_threads'
import { parentPort, workerData } from 'node:worker_threads'
import { compileFiles } from './compile.js'

const { options, data } = workerData
const { options, verbose, dist, imports } = workerData

// rebuild isCustomElement from its serialized source (functions cannot cross the worker boundary)
options.isCustomElement = Function('return ' + options.isCustomElement)()

console.log('data: ', data.title, Date.now() - data.timeStart)
parentPort.once('message', job => {
  let [currentPage, { page, files }] = job.entries().next().value

  // for (let [currentPage, { page, files }] of data.chunk.entries()) {
  //   currentPage && console.log('正在生成 %s ...', `${currentPage}.html`)
  //   compileFiles(currentPage, page, files, options, data)
  // }
  console.log(
    currentPage ? `正在生成 ${currentPage}.html ...` : '\n正在解析公共依赖 ...'
  )
  compileFiles(currentPage, page, files, options, { verbose, dist, imports })
  parentPort.postMessage('ok')
})
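Reviewer note: a stripped-down version of the worker-side protocol is sketched below. It is not this project's thread.js: the real work (the compileFiles call) is left as a comment, and it uses parentPort.on(...) rather than once(...) so that the same worker can keep accepting jobs after reporting 'ok', which is what the pool in lib/prod.js expects when it re-adds a finished worker and posts it another job.

// --- illustrative sketch (not part of this commit) ---
import { parentPort, workerData } from 'node:worker_threads'

// job-independent settings arrive once, at worker construction time
const { options = {}, verbose, dist, imports } = workerData ?? {}

// a function passed across the boundary as source text has to be rebuilt here
if (typeof options.isCustomElement === 'string') {
  options.isCustomElement = Function('return ' + options.isCustomElement)()
}

if (parentPort) {
  // on(...) keeps the listener alive, so one worker can process many jobs
  parentPort.on('message', job => {
    const [currentPage, { page, files }] = job.entries().next().value
    if (currentPage) console.log('building %s.html ...', currentPage)
    // real work would happen here, e.g.
    // compileFiles(currentPage, page, files, options, { verbose, dist, imports })
    parentPort.postMessage('ok') // tells the main thread this worker is idle again
  })
}
// --- end sketch ---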