Make it easier to benchmark and profile the code (#4780)
* Make it easier to benchmark and profile the code
* Remove unnecessary escape
* Clarify that the backend server is started without the frontend
* Add missing NODE_CONFIG_ENV environment variable
* Add an error message when the user has not included console.time statements
* Fix lint issue
* Handle multiple console.time statements
* Switch NODE_CONFIG_ENV to test
* Switch to const as the variable is never reassigned
.gitignore (vendored, +3)

@@ -113,3 +113,6 @@ service-definitions.yml
 
 # Rendered API docs
 /api-docs/
+
+# Flamebearer
+flamegraph.html
@@ -48,9 +48,6 @@ public:
       authorizedOrigins: 'TEAMCITY_ORIGINS'
     trace: 'TRACE_SERVICES'
 
-  profiling:
-    makeBadge: 'PROFILE_MAKE_BADGE'
-
   cacheHeaders:
     defaultCacheLengthSeconds: 'BADGE_MAX_AGE_SECONDS'
 
@@ -23,9 +23,6 @@ public:
       intervalSeconds: 200
     trace: false
 
-  profiling:
-    makeBadge: false
-
   cacheHeaders:
     defaultCacheLengthSeconds: 120
 
@@ -13,9 +13,6 @@ const { prepareRoute, namedParamsForMatch } = require('./route')
 
 module.exports = class BaseStaticService extends BaseService {
   static register({ camp, metricInstance }, serviceConfig) {
-    const {
-      profiling: { makeBadge: shouldProfileMakeBadge },
-    } = serviceConfig
     const { regex, captureNames } = prepareRoute(this.route)
 
     const metricHelper = MetricHelper.create({
@@ -52,16 +49,9 @@ module.exports = class BaseStaticService extends BaseService {
       const format = (match.slice(-1)[0] || '.svg').replace(/^\./, '')
       badgeData.format = format
 
-      if (shouldProfileMakeBadge) {
-        console.time('makeBadge total')
-      }
-      const svg = makeBadge(badgeData)
-      if (shouldProfileMakeBadge) {
-        console.timeEnd('makeBadge total')
-      }
-
       setCacheHeadersForStaticResource(ask.res)
 
+      const svg = makeBadge(badgeData)
       makeSend(format, ask.res, end)(svg)
 
       metricHandle.noteResponseSent()
@@ -123,9 +123,6 @@ const publicConfigSchema = Joi.object({
       teamcity: defaultService,
       trace: Joi.boolean().required(),
     }).required(),
-    profiling: {
-      makeBadge: Joi.boolean().required(),
-    },
     cacheHeaders: {
       defaultCacheLengthSeconds: Joi.number()
         .integer()
@@ -341,7 +338,6 @@ class Server {
       {
         handleInternalErrors: config.public.handleInternalErrors,
         cacheHeaders: config.public.cacheHeaders,
-        profiling: config.public.profiling,
         fetchLimitBytes: bytes(config.public.fetchLimit),
         rasterUrl: config.public.rasterUrl,
         private: config.private,
BIN doc/flamegraph.png (new binary file, 30 KiB; binary file not shown)
doc/performance-testing.md (new file, +45)

@@ -0,0 +1,45 @@
# Performance testing

Shields has some basic tooling available to help you get started with
performance testing.

## Benchmarking the badge generation

Want to micro-benchmark a section of the code responsible for generating the
static badges? Follow these two simple steps:

1. Surround the code you want to time with `console.time` and `console.timeEnd`
   statements. For example:

   ```js
   console.time('makeBadge')
   const svg = makeBadge(badgeData)
   console.timeEnd('makeBadge')
   ```

2. Run `npm run benchmark:badge` in your terminal. An average timing will
   be displayed!
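   With the default iteration counts (10100 in total, of which 100 are
   warmup), the result looks something like this (the timing value here is
   illustrative):

   ```
   Average 'makeBadge' time over 10000 iterations: 0.123ms
   ```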
If you want to change the number of iterations in the benchmark, you can modify
the values specified by the `benchmark:badge` script in _package.json_. If you
want to benchmark a specific code path not covered by the static badge, you can
modify the badge URL in _scripts/benchmark-performance.js_, as sketched below.
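For example, a hypothetical tweak to the `got` call inside the benchmark loop,
exercising the npm version badge instead of the static badge (the
`npm/v/express` path is only illustrative, and hits the live npm registry):

```js
// scripts/benchmark-performance.js, inside the benchmark loop
await got(`${server.baseUrl}npm/v/express.svg`)
```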
## Profiling the full code

Want to have an overview of how the entire application is performing? Simply
run `npm run profile:server` in your terminal. This will start the
backend server (i.e. without the frontend) in profiling mode, and any requests
you make on `localhost:8080` will generate data in a file with a name
similar to _isolate-00000244AB6ED3B0-11920-v8.log_.

You can then make use of this profiling data in various tools, for example
[flamebearer](https://github.com/mapbox/flamebearer):

```
npm install -g flamebearer
node --prof-process --preprocess -j isolate-00000244AB6ED3B0-11920-v8.log | flamebearer
```

An example output is the following:


@@ -106,6 +106,8 @@
     "start:server:e2e-on-build": "node server 8080",
     "start:server": "cross-env NODE_CONFIG_ENV=development nodemon server 8080",
     "debug:server": "cross-env NODE_CONFIG_ENV=development nodemon --inspect server.js 8080",
+    "profile:server": "cross-env NODE_CONFIG_ENV=development node --prof server 8080",
+    "benchmark:badge": "cross-env NODE_CONFIG_ENV=test node scripts/benchmark-performance.js --iterations 10100 | node scripts/capture-timings.js --warmup-iterations 100",
     "prestart": "run-s --silent depcheck defs features",
     "start": "concurrently --names server,frontend \"npm run start:server\" \"cross-env GATSBY_BASE_URL=http://localhost:8080 gatsby develop --port 3000\"",
     "e2e": "start-server-and-test start http://localhost:3000 test:e2e",
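The two new scripts communicate through `console.timeEnd`'s output format. A
minimal sketch of that contract (the label and timing here are illustrative):

```js
// console.timeEnd('makeBadge') writes a line like "makeBadge: 0.123ms" to
// stdout; capture-timings.js recovers the label and duration with this regex:
const timing = /^(.+): ([0-9.]+)ms$/i
const match = timing.exec('makeBadge: 0.123ms')
console.log(match[1], parseFloat(match[2])) // -> makeBadge 0.123
```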
scripts/benchmark-performance.js (new file, +26)

@@ -0,0 +1,26 @@
'use strict'

const config = require('config').util.toObject()
const got = require('got')
const minimist = require('minimist')
const Server = require('../core/server/server')

async function main() {
  // Start the backend server (without the frontend) using the current config.
  const server = new Server(config)
  await server.start()

  const args = minimist(process.argv)
  const iterations = parseInt(args.iterations) || 10000

  // Request a static badge repeatedly, varying the label so each request is
  // distinct.
  for (let i = 0; i < iterations; ++i) {
    await got(`${server.baseUrl}badge/coverage-${i}-green.svg`)
  }
  await server.stop()
}

;(async () => {
  try {
    await main()
  } catch (e) {
    console.error(e)
    process.exit(1)
  }
})()
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-PROFILE_MAKE_BADGE=1 node server 1111 >perftest.log &
-sleep 2
-for ((i=0;i<10000;i++)); do
-  curl -s http://localhost:1111/badge/coverage-"$i"%-green.svg >/dev/null
-done
-kill $(jobs -p)
-<perftest.log grep 'makeBadge total' | \
-  grep -Eo '[0-9\.]+' | \
-  awk '{s+=$1;n++} END {print s/n}'
scripts/capture-timings.js (new file, +61)

@@ -0,0 +1,61 @@
'use strict'

const readline = require('readline')
const minimist = require('minimist')

async function captureTimings(warmupIterations) {
  const rl = readline.createInterface({
    input: process.stdin,
  })

  const times = {}
  let timingsCount = 0
  let labelsCount = 0
  // console.timeEnd() writes lines of the form "label: 1.234ms".
  const timing = /^(.+): ([0-9.]+)ms$/i

  for await (const line of rl) {
    const match = timing.exec(line)
    if (match) {
      labelsCount = Object.keys(times).length
      // Skip the warmup iterations, then accumulate a running total per label.
      if (timingsCount > warmupIterations * labelsCount) {
        const label = match[1]
        const time = parseFloat(match[2])
        times[label] = time + (times[label] || 0)
      }
      ++timingsCount
    }
  }
  return { times, iterations: timingsCount / labelsCount }
}

function logResults({ times, iterations, warmupIterations }) {
  if (isNaN(iterations)) {
    console.log(
      `No timings captured. Have you included console.time statements in the badge creation code path?`
    )
  } else {
    const timedIterations = iterations - warmupIterations
    for (const [label, time] of Object.entries(times)) {
      const averageTime = time / timedIterations
      console.log(
        `Average '${label}' time over ${timedIterations} iterations: ${averageTime}ms`
      )
    }
  }
}

async function main() {
  const args = minimist(process.argv)
  const warmupIterations = parseInt(args['warmup-iterations']) || 100
  const { times, iterations } = await captureTimings(warmupIterations)
  logResults({ times, iterations, warmupIterations })
}

;(async () => {
  try {
    await main()
  } catch (e) {
    console.error(e)
    process.exit(1)
  }
})()
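As a worked example of the averaging arithmetic (all numbers assumed):

```js
// One label, 10100 timing lines, 100 warmup iterations (the defaults).
const iterations = 10100 / 1 // timingsCount / labelsCount
const timedIterations = iterations - 100 // 10000 timed iterations
console.log(1230 / timedIterations) // 1230ms summed -> 0.123ms average
```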