process chunks strictly sequentially to avoid overloading the event queue
Henry Jameson 2024-03-04 19:03:29 +02:00
parent a190389f3c
commit 545c875a85
1 changed file with 18 additions and 8 deletions

@@ -50,15 +50,25 @@ export const applyTheme = async (input) => {
   // Optimization - instead of processing all lazy rules in one go, process them in small chunks
   // so that UI can do other things and be somewhat responsive while less important rules are being
   // processed
-  chunk(themes3.lazy, 5).forEach(chunk => {
-    setTimeout(() => {
-      Promise.all(chunk.map(x => x())).then(result => {
-        getCssRules(result.filter(x => x), themes3.staticVars).forEach(rule => {
-          styleSheet.insertRule(rule, 'index-max')
-        })
-      })
-    }, 200)
-  })
+  let counter = 0
+  const chunks = chunk(themes3.lazy, 200)
+  // let t0 = performance.now()
+  const processChunk = () => {
+    const chunk = chunks[counter]
+    Promise.all(chunk.map(x => x())).then(result => {
+      getCssRules(result.filter(x => x), themes3.staticVars).forEach(rule => {
+        styleSheet.insertRule(rule, 'index-max')
+      })
+      // const t1 = performance.now()
+      // console.debug('Chunk ' + counter + ' took ' + (t1 - t0) + 'ms')
+      // t0 = t1
+      counter += 1
+      if (counter < chunks.length) {
+        setTimeout(processChunk, 0)
+      }
+    })
+  }
+  setTimeout(processChunk, 0)
   return Promise.resolve()
 }
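For context, a minimal standalone sketch of the pattern this commit switches to, with illustrative names (tasks, chunkSize, processSequentially) that are not from the codebase: each chunk runs only after the previous one has resolved, and the next chunk is handed back to the event loop via setTimeout(..., 0) so rendering and input handling can run in between.

// Illustrative sketch only - not the project's code.
// Splits an array of thunks into fixed-size chunks and processes them
// strictly one after another, yielding to the event loop between chunks.
const chunk = (arr, size) => {
  const out = []
  for (let i = 0; i < arr.length; i += size) {
    out.push(arr.slice(i, i + size))
  }
  return out
}

const processSequentially = (tasks, chunkSize = 200) => {
  const chunks = chunk(tasks, chunkSize)
  let counter = 0
  const processChunk = () => {
    // Run every task in the current chunk, then schedule the next chunk.
    Promise.all(chunks[counter].map(task => task())).then(() => {
      counter += 1
      if (counter < chunks.length) {
        // setTimeout(fn, 0) queues a macrotask, so the browser can paint
        // and handle input before the next chunk starts.
        setTimeout(processChunk, 0)
      }
    })
  }
  if (chunks.length > 0) setTimeout(processChunk, 0)
}

Unlike the previous approach, which scheduled every chunk up front with a fixed 200ms delay, this only ever has one pending timer, so chunks cannot pile up in the event queue faster than they are processed.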