Make it easier to benchmark and profile the code (#4780)
* Make it easier to benchmark and profile the code
* Remove an unnecessary escape
* Clarify that the backend server is started without the frontend
* Add the missing NODE_CONFIG_ENV environment variable
* Add an error message when the user has not included console.time statements
* Fix lint issue
* Handle multiple console.time statements
* Switch NODE_CONFIG_ENV to test
* Switch to const as the variable is never re-assigned
This commit is contained in:
26
scripts/benchmark-performance.js
Normal file
26
scripts/benchmark-performance.js
Normal file
@@ -0,0 +1,26 @@
|
||||
'use strict'
|
||||
|
||||
const config = require('config').util.toObject()
|
||||
const got = require('got')
|
||||
const minimist = require('minimist')
|
||||
const Server = require('../core/server/server')
|
||||
|
||||
async function main() {
|
||||
const server = new Server(config)
|
||||
await server.start()
|
||||
const args = minimist(process.argv)
|
||||
const iterations = parseInt(args.iterations) || 10000
|
||||
for (let i = 0; i < iterations; ++i) {
|
||||
await got(`${server.baseUrl}badge/coverage-${i}-green.svg`)
|
||||
}
|
||||
await server.stop()
|
||||
}
|
||||
|
||||
;(async () => {
|
||||
try {
|
||||
await main()
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
process.exit(1)
|
||||
}
|
||||
})()
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
PROFILE_MAKE_BADGE=1 node server 1111 >perftest.log &
|
||||
sleep 2
|
||||
for ((i=0;i<10000;i++)); do
|
||||
curl -s http://localhost:1111/badge/coverage-"$i"%-green.svg >/dev/null
|
||||
done
|
||||
kill $(jobs -p)
|
||||
<perftest.log grep 'makeBadge total' | \
|
||||
grep -Eo '[0-9\.]+' | \
|
||||
awk '{s+=$1;n++} END {print s/n}'
|
||||
61
scripts/capture-timings.js
Normal file
61
scripts/capture-timings.js
Normal file
@@ -0,0 +1,61 @@
|
||||
'use strict'
|
||||
|
||||
const readline = require('readline')
|
||||
const minimist = require('minimist')
|
||||
|
||||
async function captureTimings(warmupIterations) {
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
})
|
||||
|
||||
const times = {}
|
||||
let timingsCount = 0
|
||||
let labelsCount = 0
|
||||
const timing = /^(.+): ([0-9.]+)ms$/i
|
||||
|
||||
for await (const line of rl) {
|
||||
const match = timing.exec(line)
|
||||
if (match) {
|
||||
labelsCount = Object.keys(times).length
|
||||
if (timingsCount > warmupIterations * labelsCount) {
|
||||
const label = match[1]
|
||||
const time = parseFloat(match[2])
|
||||
times[label] = time + (times[label] || 0)
|
||||
}
|
||||
++timingsCount
|
||||
}
|
||||
}
|
||||
return { times, iterations: timingsCount / labelsCount }
|
||||
}
|
||||
|
||||
function logResults({ times, iterations, warmupIterations }) {
|
||||
if (isNaN(iterations)) {
|
||||
console.log(
|
||||
`No timings captured. Have you included console.time statements in the badge creation code path?`
|
||||
)
|
||||
} else {
|
||||
const timedIterations = iterations - warmupIterations
|
||||
for (const [label, time] of Object.entries(times)) {
|
||||
const averageTime = time / timedIterations
|
||||
console.log(
|
||||
`Average '${label}' time over ${timedIterations} iterations: ${averageTime}ms`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const args = minimist(process.argv)
|
||||
const warmupIterations = parseInt(args['warmup-iterations']) || 100
|
||||
const { times, iterations } = await captureTimings(warmupIterations)
|
||||
logResults({ times, iterations, warmupIterations })
|
||||
}
|
||||
|
||||
;(async () => {
|
||||
try {
|
||||
await main()
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
process.exit(1)
|
||||
}
|
||||
})()
|
||||
Reference in New Issue
Block a user