Combine bundle stats comments in PR (#6101)

* Combine bundle stats comments in PR

* Refactor bundle stats comment generation to use a mapping approach for base and head stats, improving argument parsing and validation in the CI action.

* Enhance size comparison workflow by adding steps to checkout the base branch and set up the environment, ensuring accurate build status checks for pull requests.

* Implement size comparison job in GitHub Actions workflow, replacing the deprecated size-compare.yml. This new job downloads build stats from both the base and head branches, processes the stats files, and generates a combined comment for pull requests, enhancing visibility into bundle size changes.

* Add release notes for bundle size stats update, consolidating workflow and PR comment generation.

* Refactor GitHub Actions workflows by removing the deprecated size comparison job from build.yml and introducing a new size-compare.yml workflow. This new workflow enhances build status checks and ensures accurate reporting of bundle size changes for pull requests.

* chore: update action versions and improve error messages in bundle stats scripts

- Updated GitHub Actions to specific commit versions for `upload-artifact` and `checkout`.
- Enhanced error messages in `bundle-stats-comment.mjs` and `update-bundle-stats-comment.mjs` for better clarity and consistency, replacing standard quotes with typographic quotes.

* fix: standardize error messages in bundle-stats-comment.mjs

- Updated error messages in `bundle-stats-comment.mjs` to remove typographic quotes for consistency and clarity.
This commit is contained in:
Matiss Janis Aboltins
2025-11-13 18:11:34 +00:00
committed by GitHub
parent 033fd2d7e1
commit 7648fc6809
5 changed files with 924 additions and 28 deletions

View File

@@ -30,11 +30,18 @@ jobs:
run: cd packages/api && yarn build
- name: Create package tgz
run: cd packages/api && yarn pack && mv package.tgz actual-api.tgz
- name: Prepare bundle stats artifact
run: cp packages/api/app/stats.json api-stats.json
- name: Upload Build
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: actual-api
path: packages/api/actual-api.tgz
- name: Upload API bundle stats
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: api-build-stats
path: api-stats.json
crdt:
runs-on: ubuntu-latest

View File

@@ -26,39 +26,72 @@ jobs:
runs-on: ubuntu-latest
permissions:
pull-requests: write
contents: read
steps:
- name: Wait for ${{github.base_ref}} build to succeed
- name: Checkout base branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.base_ref }}
- name: Set up environment
uses: ./.github/actions/setup
with:
download-translations: 'false'
- name: Wait for ${{github.base_ref}} web build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: master-build
id: master-web-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: web
ref: ${{github.base_ref}}
- name: Wait for ${{github.base_ref}} API build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: master-api-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: api
ref: ${{github.base_ref}}
- name: Wait for PR build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: wait-for-build
id: wait-for-web-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: web
ref: ${{github.event.pull_request.head.sha}}
- name: Wait for API PR build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: wait-for-api-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: api
ref: ${{github.event.pull_request.head.sha}}
- name: Report build failure
if: steps.wait-for-build.outputs.conclusion == 'failure'
if: steps.wait-for-web-build.outputs.conclusion == 'failure' || steps.wait-for-api-build.outputs.conclusion == 'failure'
run: |
echo "Build failed on PR branch or ${{github.base_ref}}"
exit 1
- name: Download build artifact from ${{github.base_ref}}
- name: Download web build artifact from ${{github.base_ref}}
uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
id: pr-build
id: pr-web-build
with:
branch: ${{github.base_ref}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: build-stats
path: base
- name: Download build artifact from PR
- name: Download API build artifact from ${{github.base_ref}}
uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
id: pr-api-build
with:
branch: ${{github.base_ref}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: api-build-stats
path: base
- name: Download build stats from PR
uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
with:
pr: ${{github.event.pull_request.number}}
@@ -67,25 +100,46 @@ jobs:
name: build-stats
path: head
allow_forks: true
- name: Download API stats from PR
uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
with:
pr: ${{github.event.pull_request.number}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: api-build-stats
path: head
allow_forks: true
- name: Strip content hashes from stats files
run: |
if [ -f ./head/web-stats.json ]; then
sed -i -E 's/index\.[0-9a-zA-Z_-]{8,}\./index./g' ./head/web-stats.json
sed -i -E 's/\.[0-9a-zA-Z_-]{8,}\.chunk\././g' ./head/web-stats.json
sed -i -E 's/\.[0-9a-f]{8,}\././g' ./head/*.json
fi
if [ -f ./base/web-stats.json ]; then
sed -i -E 's/index\.[0-9a-zA-Z_-]{8,}\./index./g' ./base/web-stats.json
sed -i -E 's/\.[0-9a-zA-Z_-]{8,}\.chunk\././g' ./base/web-stats.json
sed -i -E 's/\.[0-9a-f]{8,}\././g' ./base/*.json
- uses: twk3/rollup-size-compare-action@a1f8628fee0e40899ab2b46c1b6e14552b99281e # v1.2.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
current-stats-json-path: ./head/web-stats.json
base-stats-json-path: ./base/web-stats.json
title: desktop-client
- uses: twk3/rollup-size-compare-action@a1f8628fee0e40899ab2b46c1b6e14552b99281e # v1.2.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
current-stats-json-path: ./head/loot-core-stats.json
base-stats-json-path: ./base/loot-core-stats.json
title: loot-core
fi
for file in ./head/*.json ./base/*.json; do
if [ -f "$file" ]; then
sed -i -E 's/\.[0-9a-f]{8,}\././g' "$file"
fi
done
- name: Generate combined bundle stats comment
run: |
node packages/ci-actions/bin/bundle-stats-comment.mjs \
--base desktop-client=./base/web-stats.json \
--base loot-core=./base/loot-core-stats.json \
--base api=./base/api-stats.json \
--head desktop-client=./head/web-stats.json \
--head loot-core=./head/loot-core-stats.json \
--head api=./head/api-stats.json \
--identifier combined > bundle-stats-comment.md
- name: Post combined bundle stats comment
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPOSITORY: ${{ github.repository }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
node packages/ci-actions/bin/update-bundle-stats-comment.mjs \
--comment-file bundle-stats-comment.md \
--identifier '<!--- bundlestats-action-comment key:combined --->'

View File

@@ -0,0 +1,678 @@
#!/usr/bin/env node
/**
* Generates a combined bundle stats comment for GitHub Actions.
* Heavily inspired by https://github.com/twk3/rollup-size-compare-action (MIT).
*/
import { readFile } from 'node:fs/promises';
import path from 'node:path';
import process from 'node:process';
const REQUIRED_ARGS = new Map([
['base', 'Mapping of bundle names to base stats JSON'],
['head', 'Mapping of bundle names to head stats JSON'],
]);
function parseRawArgs(argv) {
  // Collect "--key value..." pairs from argv (skipping node + script).
  // A repeated key accumulates all of its values in order.
  const parsed = new Map();
  let cursor = 2;
  while (cursor < argv.length) {
    const flag = argv[cursor];
    if (!flag?.startsWith('--')) {
      throw new Error(
        `Unexpected argument “${flag ?? ''}”. Use --key value pairs.`,
      );
    }
    // Consume every following token up to the next "--" flag.
    const collected = [];
    while (cursor + 1 < argv.length && !argv[cursor + 1].startsWith('--')) {
      collected.push(argv[cursor + 1]);
      cursor += 1;
    }
    if (collected.length === 0) {
      throw new Error(`Missing value for argument “${flag}”.`);
    }
    const name = flag.slice(2);
    const existing = parsed.get(name);
    parsed.set(name, existing ? [...existing, ...collected] : collected);
    cursor += 1;
  }
  return parsed;
}
function getSingleValue(args, key) {
  // Fetch the value list for `key`; an absent flag yields undefined.
  const values = args.get(key);
  if (!values) {
    return undefined;
  }
  // A single-valued flag must not have been repeated or given extras.
  if (values.length === 1) {
    return values[0];
  }
  throw new Error(`Argument “--${key}” must have exactly one value.`);
}
function parseMapping(values, key, description) {
  // Turns the raw values of --base/--head into a Map of bundle name →
  // stats-file path. Accepts either a single JSON object literal
  // ({"name": "path", ...}) or one or more name=path pairs.
  if (!values || values.length === 0) {
    throw new Error(`Missing required argument “--${key}” (${description}).`);
  }
  if (values.length === 1) {
    const [rawValue] = values;
    const trimmed = rawValue.trim();
    if (trimmed.startsWith('{')) {
      try {
        const parsed = JSON.parse(trimmed);
        if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) {
          throw new Error('Value must be a JSON object.');
        }
        return new Map(
          Object.entries(parsed).map(([name, pathValue]) => {
            if (typeof pathValue !== 'string') {
              throw new Error(
                `Value for “${name}” in “--${key}” must be a string path.`,
              );
            }
            return [name, pathValue];
          }),
        );
      } catch (error) {
        // Both JSON.parse failures and the validation errors thrown
        // above are rewrapped here with the argument name for context.
        const message =
          error instanceof Error ? error.message : 'Unknown parsing error';
        throw new Error(
          `Failed to parse “--${key}” value as JSON object: ${message}`,
        );
      }
    }
    // A single non-JSON value falls through to name=path parsing below.
  }
  const entries = new Map();
  for (const value of values) {
    // Split on the first '='; any further '=' remain part of the path.
    const [rawName, ...rawPathParts] = value.split('=');
    if (!rawName || rawPathParts.length === 0) {
      throw new Error(
        `Argument “--${key}” must be provided as name=path pairs or a JSON object.`,
      );
    }
    const name = rawName.trim();
    const pathValue = rawPathParts.join('=').trim();
    if (!name) {
      throw new Error(`Argument “--${key}” contains an empty bundle name.`);
    }
    if (!pathValue) {
      throw new Error(
        `Argument “--${key}” for bundle “${name}” must include a non-empty path.`,
      );
    }
    // Later duplicates of the same bundle name overwrite earlier ones.
    entries.set(name, pathValue);
  }
  if (entries.size === 0) {
    throw new Error(`Argument “--${key}” must define at least one bundle.`);
  }
  return entries;
}
function parseArgs(argv) {
  // Parses CLI arguments into { sections, identifier }. Each section
  // pairs one bundle's base and head stats paths; every bundle name
  // must appear in both --base and --head.
  const args = parseRawArgs(argv);
  const baseMap = parseMapping(
    args.get('base'),
    'base',
    REQUIRED_ARGS.get('base'),
  );
  const headMap = parseMapping(
    args.get('head'),
    'head',
    REQUIRED_ARGS.get('head'),
  );
  const sections = [];
  for (const [name, basePath] of baseMap.entries()) {
    const headPath = headMap.get(name);
    if (!headPath) {
      throw new Error(
        `Bundle “${name}” is missing a corresponding “--head” entry.`,
      );
    }
    sections.push({
      name,
      basePath,
      headPath,
    });
  }
  // Symmetric check: a head-only bundle is also an error.
  for (const name of headMap.keys()) {
    if (!baseMap.has(name)) {
      throw new Error(
        `Bundle “${name}” is missing a corresponding “--base” entry.`,
      );
    }
  }
  return {
    sections,
    // Used to build the hidden HTML marker that identifies the comment.
    identifier: getSingleValue(args, 'identifier') ?? 'bundle-stats',
  };
}
async function loadStats(filePath) {
  // Reads and parses a bundle stats JSON file, resolving the path
  // against the current working directory.
  try {
    const absolutePath = path.resolve(process.cwd(), filePath);
    const fileContents = await readFile(absolutePath, 'utf8');
    const parsed = JSON.parse(fileContents);
    // Validate that we got a meaningful stats object
    if (!parsed || typeof parsed !== 'object') {
      throw new Error('Stats file does not contain a valid JSON object');
    }
    return parsed;
  } catch (error) {
    const message =
      error instanceof Error
        ? error.message
        : 'Unknown error while parsing stats file';
    // Log to stderr — stdout is reserved for the generated comment.
    console.error(`[bundle-stats] Failed to parse “${filePath}”: ${message}`);
    throw new Error(`Failed to load stats file “${filePath}”: ${message}`);
  }
}
function findAllChildren(node = {}) {
  // Depth-first flatten: returns every leaf reachable through nested
  // `children` arrays; a node without a children array is itself a leaf.
  const { children } = node;
  return Array.isArray(children) ? children.flatMap(findAllChildren) : [node];
}
function trimPath(input) {
  // Normalise module ids: everything up to (and including) the last
  // "node_modules" collapses to a bare "/node_modules" prefix (the
  // regex is greedy). Falsy input → ''.
  return input ? input.replace(/.*node_modules/, '/node_modules') : '';
}
function assetNameToSizeMap(statAssets = {}) {
  // Builds a Map of asset name → { size, gzipSize } from the stats
  // tree. Each top-level child of the tree is one emitted asset; its
  // size is the sum of the rendered lengths of all of its descendants.
  const children = statAssets?.tree?.children;
  if (!Array.isArray(children) || children.length === 0) {
    return new Map();
  }
  return new Map(
    children.map(asset => {
      const descendants = findAllChildren(asset);
      let size = 0;
      // Gzip totals are only meaningful when the stats were generated
      // with the gzip option enabled; otherwise report null.
      let gzipSize = statAssets?.options?.gzip ? 0 : null;
      for (const mod of descendants) {
        const nodePart = statAssets?.nodeParts?.[mod.uid];
        if (!nodePart) {
          continue;
        }
        size += nodePart.renderedLength ?? 0;
        if (gzipSize !== null) {
          gzipSize += nodePart.gzipLength ?? 0;
        }
      }
      return [trimPath(asset.name), { size, gzipSize }];
    }),
  );
}
function chunkModuleNameToSizeMap(statChunks = {}) {
  // Builds a Map of module id → { size, gzipSize } covering every
  // individual module in the stats tree (used for the per-file
  // changeset table).
  if (!statChunks?.tree) {
    return new Map();
  }
  return new Map(
    findAllChildren(statChunks.tree).map(mod => {
      // nodeParts carries the sizes; nodeMetas maps back to the id.
      const modInfo = statChunks?.nodeParts?.[mod.uid] ?? {};
      const meta = statChunks?.nodeMetas?.[modInfo.metaUid] ?? {};
      const id = trimPath(meta.id ?? '');
      return [
        id,
        {
          size: modInfo.renderedLength ?? 0,
          gzipSize: statChunks?.options?.gzip
            ? (modInfo.gzipLength ?? 0)
            : null,
        },
      ];
    }),
  );
}
function sortDiffDescending(items) {
  // In-place sort, largest absolute byte change first; returns `items`.
  items.sort((left, right) => Math.abs(right.diff) - Math.abs(left.diff));
  return items;
}
function normaliseGzip(value) {
  // Gzip sizes may be absent (null/undefined) when gzip reporting is
  // off; map those — and NaN — to NaN so formatting shows "N/A".
  const missing = value === null || value === undefined || Number.isNaN(value);
  return missing ? NaN : value;
}
function getAssetDiff(name, oldSize, newSize) {
  // Builds the diff record for one asset: byte delta plus percentage
  // change relative to the old size. A brand-new asset (old size 0)
  // reports Infinity unless the new size is also 0.
  // Missing gzip measurements normalise to NaN so downstream
  // formatting renders them as "N/A".
  const gzipOrNaN = value =>
    value == null || Number.isNaN(value) ? NaN : value;
  const diff = newSize.size - oldSize.size;
  let diffPercentage;
  if (oldSize.size === 0) {
    diffPercentage = newSize.size === 0 ? 0 : Infinity;
  } else {
    diffPercentage =
      +((1 - newSize.size / oldSize.size) * -100).toFixed(5) || 0;
  }
  return {
    name,
    new: { size: newSize.size, gzipSize: gzipOrNaN(newSize.gzipSize) },
    old: { size: oldSize.size, gzipSize: gzipOrNaN(oldSize.gzipSize) },
    diff,
    diffPercentage,
  };
}
function webpackStatsDiff(oldAssets, newAssets) {
  // Diffs two name → { size, gzipSize } maps, classifying every asset
  // as added / removed / bigger / smaller / unchanged, and computes
  // overall totals. Sorted groups are ordered by absolute byte change.
  const added = [];
  const removed = [];
  const bigger = [];
  const smaller = [];
  const unchanged = [];
  let newSizeTotal = 0;
  let oldSizeTotal = 0;
  let newGzipSizeTotal = 0;
  let oldGzipSizeTotal = 0;
  for (const [name, oldAssetSizes] of oldAssets) {
    oldSizeTotal += oldAssetSizes.size;
    // A single null gzip size poisons the total to NaN, which the
    // formatter renders as "N/A" — intentional.
    oldGzipSizeTotal += oldAssetSizes.gzipSize ?? NaN;
    const newAsset = newAssets.get(name);
    if (!newAsset) {
      removed.push(getAssetDiff(name, oldAssetSizes, { size: 0, gzipSize: 0 }));
      continue;
    }
    const diff = getAssetDiff(name, oldAssetSizes, newAsset);
    if (diff.diffPercentage > 0) {
      bigger.push(diff);
    } else if (diff.diffPercentage < 0) {
      smaller.push(diff);
    } else {
      unchanged.push(diff);
    }
  }
  for (const [name, newAssetSizes] of newAssets) {
    newSizeTotal += newAssetSizes.size;
    newGzipSizeTotal += newAssetSizes.gzipSize ?? NaN;
    if (!oldAssets.has(name)) {
      added.push(getAssetDiff(name, { size: 0, gzipSize: 0 }, newAssetSizes));
    }
  }
  const oldFilesCount = oldAssets.size;
  const newFilesCount = newAssets.size;
  return {
    added: sortDiffDescending(added),
    removed: sortDiffDescending(removed),
    bigger: sortDiffDescending(bigger),
    smaller: sortDiffDescending(smaller),
    unchanged,
    total: getAssetDiff(
      // The "name" of the total row is the file count; when it changed,
      // show "old → new". Fix: the original literal concatenated the
      // two counts with no separator (e.g. "23" for 2 → 3).
      oldFilesCount === newFilesCount
        ? `${newFilesCount}`
        : `${oldFilesCount} → ${newFilesCount}`,
      { size: oldSizeTotal, gzipSize: oldGzipSizeTotal },
      { size: newSizeTotal, gzipSize: newGzipSizeTotal },
    ),
  };
}
function getStatsDiff(oldStats, newStats) {
  // Asset-level diff (one row per emitted bundle file).
  return webpackStatsDiff(
    assetNameToSizeMap(oldStats),
    assetNameToSizeMap(newStats),
  );
}
function getChunkModuleDiff(oldStats, newStats) {
  // Module-level diff for the changeset table. Returns null when no
  // module was added, removed, or changed size, so the caller can
  // omit the table entirely.
  const diff = webpackStatsDiff(
    chunkModuleNameToSizeMap(oldStats),
    chunkModuleNameToSizeMap(newStats),
  );
  if (
    diff.added.length === 0 &&
    diff.removed.length === 0 &&
    diff.bigger.length === 0 &&
    diff.smaller.length === 0
  ) {
    return null;
  }
  return diff;
}
// 1024-based size formatting. NOTE(review): despite the IEC name, the
// labels are SI-style suffixes (kB, MB, …) while dividing by 1024 —
// this mirrors the output of the upstream comparison action.
const BYTES_PER_KILOBYTE = 1024;
const FILE_SIZE_DENOMINATIONS = [
  'B',
  'kB',
  'MB',
  'GB',
  'TB',
  'PB',
  'EB',
  'ZB',
  'YB',
  'BB',
];
function formatFileSizeIEC(bytes, precision = 2) {
  // Human-readable file size; nullish or NaN input renders as "N/A".
  if (bytes == null || Number.isNaN(bytes)) {
    return 'N/A';
  }
  if (bytes === 0) {
    return `0 ${FILE_SIZE_DENOMINATIONS[0]}`;
  }
  // Work on the magnitude so negative deltas format like positives.
  const magnitude = Math.abs(bytes);
  const unitIndex = Math.floor(
    Math.log(magnitude) / Math.log(BYTES_PER_KILOBYTE),
  );
  const scaled = magnitude / BYTES_PER_KILOBYTE ** unitIndex;
  // parseFloat(toFixed(…)) drops trailing zeros ("1.50" → "1.5").
  const rounded = parseFloat(scaled.toFixed(precision));
  return `${rounded} ${FILE_SIZE_DENOMINATIONS[unitIndex]}`;
}
function conditionalPercentage(number) {
  // "-" for ±Infinity: brand-new or fully-removed assets have no
  // meaningful percentage.
  if (number === Infinity || number === -Infinity) {
    return '-';
  }
  const magnitude = Math.abs(number);
  // 0% and ±100% print without a forced sign and without decimals.
  if (magnitude === 0 || magnitude === 100) {
    return `${number}%`;
  }
  // Same sign rule as signFor(): '' for 0, '+' positive, '-' otherwise.
  const sign = number === 0 ? '' : number > 0 ? '+' : '-';
  const rendered = Number.isFinite(magnitude)
    ? magnitude.toFixed(2)
    : magnitude;
  return `${sign}${rendered}%`;
}
function capitalize(text) {
  // Uppercase the first character; empty/falsy input stays ''.
  return text ? text.charAt(0).toUpperCase() + text.slice(1) : '';
}
function makeHeader(columns) {
  // Builds a two-line markdown table header: the column titles, then a
  // separator row whose dash run matches each title's width.
  const header = columns.join(' | ');
  // '-'.repeat() is the direct idiom for the Array.from/map/join dance.
  const separator = columns
    .map(column => '-'.repeat(column.length))
    .join(' | ');
  return `${header}\n${separator}`;
}
// Pre-built markdown headers for the three table flavours used below.
const TOTAL_HEADERS = makeHeader([
  'Files count',
  'Total bundle size',
  '% Changed',
]);
const TABLE_HEADERS = makeHeader(['Asset', 'File Size', '% Changed']);
const CHUNK_TABLE_HEADERS = makeHeader(['File', 'Δ', 'Size']);
function signFor(num) {
  // '' for zero, '+' for positive, '-' otherwise (negatives and NaN).
  if (num === 0) {
    return '';
  }
  if (num > 0) {
    return '+';
  }
  return '-';
}
function toFileSizeDiff(oldSize, newSize, diff) {
  // Renders "old → new" with an optional signed absolute delta, e.g.
  // "1 kB → 2 kB (+1 kB)". Fix: the original template concatenated
  // the two sizes with no separator ("1 kB2 kB"); the "→" separator
  // appears to have been lost and is restored here, matching the
  // upstream rollup-size-compare output.
  const diffLine = [
    `${formatFileSizeIEC(oldSize)} → ${formatFileSizeIEC(newSize)}`,
  ];
  if (typeof diff !== 'undefined') {
    diffLine.push(`(${signFor(diff)}${formatFileSizeIEC(diff)})`);
  }
  return diffLine.join(' ');
}
function toFileSizeDiffCell(asset) {
  // Renders the "File Size" cell: just the current size for unchanged
  // assets, otherwise an old/new comparison, with a gzip line appended
  // when gzip data is available.
  const lines = [];
  if (asset.diff === 0) {
    lines.push(formatFileSizeIEC(asset.new.size));
    // Falsy gzip sizes (0, null, NaN) are omitted entirely.
    if (asset.new.gzipSize) {
      lines.push(formatFileSizeIEC(asset.new.gzipSize));
    }
  } else {
    lines.push(toFileSizeDiff(asset.old.size, asset.new.size, asset.diff));
    if (asset.old.gzipSize || asset.new.gzipSize) {
      lines.push(
        `${toFileSizeDiff(asset.old.gzipSize, asset.new.gzipSize)} (gzip)`,
      );
    }
  }
  // <br /> keeps both lines inside a single markdown table cell.
  return lines.join('<br />');
}
function printAssetTableRow(asset) {
  // One markdown table row: name | size cell | percentage cell.
  return [
    asset.name,
    toFileSizeDiffCell(asset),
    conditionalPercentage(asset.diffPercentage),
  ].join(' | ');
}
function printAssetTablesByGroup(statsDiff) {
  // One markdown table per change group, in a fixed order; an empty
  // group gets a short "No assets were …" placeholder instead.
  const statsFields = ['added', 'removed', 'bigger', 'smaller', 'unchanged'];
  return statsFields
    .map(field => {
      const assets = statsDiff[field] ?? [];
      if (assets.length === 0) {
        return `**${capitalize(field)}**\nNo assets were ${field}`;
      }
      return `**${capitalize(field)}**\n${TABLE_HEADERS}\n${assets
        .map(asset => printAssetTableRow(asset))
        .join('\n')}`;
    })
    .join('\n\n');
}
function getDiffEmoji(diff) {
  // 🆕 brand-new, 🔥 fully removed, 📈/📉 grew/shrank, blank unchanged.
  const pct = diff.diffPercentage;
  if (pct === Infinity) return '🆕';
  if (pct <= -100) return '🔥';
  if (pct > 0) return '📈';
  return pct < 0 ? '📉' : ' ';
}
function getTrimmedChunkName(chunkModule) {
  // Strip a leading "./" or "/" from the module name; missing name → ''.
  const chunkName = chunkModule.name ?? '';
  if (chunkName.startsWith('./')) {
    return chunkName.slice(2);
  }
  if (chunkName.startsWith('/')) {
    return chunkName.slice(1);
  }
  return chunkName;
}
function printChunkModuleRow(chunkModule) {
  // One changeset-table row: `path` | emoji signed-delta (percent) |
  // old → new sizes.
  const emoji = getDiffEmoji(chunkModule);
  const chunkName = getTrimmedChunkName(chunkModule);
  // formatFileSizeIEC formats the magnitude only, so the sign is
  // prefixed explicitly here.
  const diffPart = `${chunkModule.diff >= 0 ? '+' : '-'}${formatFileSizeIEC(chunkModule.diff)}`;
  const percentPart = Number.isFinite(chunkModule.diffPercentage)
    ? ` (${conditionalPercentage(chunkModule.diffPercentage)})`
    : '';
  return [
    `\`${chunkName}\``,
    `${emoji} ${diffPart}${percentPart}`,
    // Fix: the original template concatenated old and new sizes with
    // no separator ("1 kB2 kB"); the "→" separator is restored.
    `${formatFileSizeIEC(chunkModule.old.size)} → ${formatFileSizeIEC(chunkModule.new.size)}`,
  ].join(' | ');
}
function printChunkModulesTable(statsDiff) {
  // Collapsible <details> section listing every changed module,
  // ordered by percentage change (descending) and capped at 100 rows.
  // Returns '' when there is no diff at all (caller skips the section).
  if (!statsDiff) {
    return '';
  }
  const changedModules = [
    ...(statsDiff.added ?? []),
    ...(statsDiff.removed ?? []),
    ...(statsDiff.bigger ?? []),
    ...(statsDiff.smaller ?? []),
  ].sort((a, b) => b.diffPercentage - a.diffPercentage);
  if (changedModules.length === 0) {
    return `<details>\n<summary>Changeset</summary>\nNo files were changed\n</details>`;
  }
  const rows = changedModules
    .slice(0, 100)
    .map(chunkModule => printChunkModuleRow(chunkModule))
    .join('\n');
  // Flag the truncation in the summary when more than 100 rows exist.
  const summarySuffix =
    changedModules.length > 100 ? ' (largest 100 files by percent change)' : '';
  return `<details>\n<summary>Changeset${summarySuffix}</summary>\n${CHUNK_TABLE_HEADERS}\n${rows}\n</details>`;
}
function printTotalAssetTable(statsDiff) {
  // Single-row table summarising file count and total bundle size.
  return `**Total**\n${TOTAL_HEADERS}\n${printAssetTableRow(statsDiff.total)}`;
}
function renderSection(title, statsDiff, chunkModuleDiff) {
  // Renders one bundle's markdown section: total table, optional
  // changeset table, and a collapsible per-group breakdown.
  const { total, ...groups } = statsDiff;
  const parts = [`#### ${title}`, '', printTotalAssetTable({ total })];
  // chunkModuleDiff may be null (no changes); the table is skipped then.
  const chunkTable = printChunkModulesTable(chunkModuleDiff);
  if (chunkTable) {
    parts.push('', chunkTable);
  }
  parts.push(
    '',
    `<details>\n<summary>View detailed bundle breakdown</summary>\n<div>\n${printAssetTablesByGroup(
      groups,
    )}\n</div>\n</details>`,
  );
  return parts.join('\n');
}
async function main() {
  // Entry point: parses CLI args, diffs each bundle's base/head stats,
  // and writes the combined markdown comment to stdout. All progress
  // logging goes to stderr so stdout stays clean for redirection.
  const args = parseArgs(process.argv);
  console.error(
    `[bundle-stats] Found ${args.sections.length} sections to process`,
  );
  args.sections.forEach((section, index) => {
    console.error(
      `[bundle-stats] Section ${index + 1}: ${section.name} (base: ${section.basePath}, head: ${section.headPath})`,
    );
  });
  const sections = [];
  for (const section of args.sections) {
    console.error(`[bundle-stats] Processing section: ${section.name}`);
    console.error(
      `[bundle-stats] Loading base stats from: ${section.basePath}`,
    );
    const baseStats = await loadStats(section.basePath);
    console.error(
      `[bundle-stats] Loading head stats from: ${section.headPath}`,
    );
    const headStats = await loadStats(section.headPath);
    const statsDiff = getStatsDiff(baseStats, headStats);
    const chunkDiff = getChunkModuleDiff(baseStats, headStats);
    // NOTE(review): the old/new totals below print with no separator
    // between them — looks like a dropped "→"; confirm intended format.
    console.error(
      `[bundle-stats] Section ${section.name}: ${statsDiff.total.name} files, total size ${statsDiff.total.old.size}${statsDiff.total.new.size}`,
    );
    sections.push({
      name: section.name,
      statsDiff,
      chunkDiff,
    });
  }
  // Hidden marker so update-bundle-stats-comment.mjs can find and
  // update this comment on subsequent runs.
  const identifier = `<!--- bundlestats-action-comment key:${args.identifier} --->`;
  const comment = [
    '### Bundle Stats',
    '',
    sections
      .map(section =>
        renderSection(section.name, section.statsDiff, section.chunkDiff),
      )
      .join('\n\n---\n\n'),
    '',
    identifier,
    '',
  ].join('\n');
  process.stdout.write(comment);
}
main().catch(error => {
  // Report the failure on stderr and fail the CI step via exit code.
  console.error(error);
  process.exitCode = 1;
});

View File

@@ -0,0 +1,151 @@
#!/usr/bin/env node
/**
* Updates (or creates) a bundle stats comment on a pull request.
* Requires the following environment variables to be set:
* - GITHUB_TOKEN
* - GITHUB_REPOSITORY (owner/repo)
* - PR_NUMBER
*/
import { readFile } from 'node:fs/promises';
import path from 'node:path';
import process from 'node:process';
import { Octokit } from '@octokit/rest';
function parseArgs(argv) {
  // Parses --comment-file <path> and --identifier <marker> from argv.
  // Both are required; any other flag is rejected.
  const args = {
    commentFile: null,
    identifier: null,
  };
  for (let i = 2; i < argv.length; i += 2) {
    const key = argv[i];
    const value = argv[i + 1];
    if (!key?.startsWith('--')) {
      throw new Error(
        `Unexpected argument “${key ?? ''}”. Use --key value pairs.`,
      );
    }
    if (typeof value === 'undefined') {
      throw new Error(`Missing value for argument “${key}”.`);
    }
    switch (key) {
      case '--comment-file':
        args.commentFile = value;
        break;
      case '--identifier':
        args.identifier = value;
        break;
      default:
        throw new Error(`Unknown argument “${key}”.`);
    }
  }
  // Fix: both messages below previously ended with a second *opening*
  // typographic quote (“…“) instead of the closing one (“…”).
  if (!args.commentFile) {
    throw new Error('Missing required argument “--comment-file”.');
  }
  if (!args.identifier) {
    throw new Error('Missing required argument “--identifier”.');
  }
  return args;
}
async function loadCommentBody(commentFile) {
  // Reads the pre-generated comment markdown, resolved against cwd.
  const absolutePath = path.resolve(process.cwd(), commentFile);
  return readFile(absolutePath, 'utf8');
}
function getRepoInfo() {
  // Resolves { owner, repo } from GITHUB_REPOSITORY ("owner/repo").
  // Throws when the variable is missing or not of that shape.
  const repository = process.env.GITHUB_REPOSITORY;
  if (!repository) {
    throw new Error('GITHUB_REPOSITORY environment variable is required.');
  }
  // Note: any path segments beyond "owner/repo" are silently ignored.
  const [owner, repo] = repository.split('/');
  if (owner && repo) {
    return { owner, repo };
  }
  throw new Error(`Invalid GITHUB_REPOSITORY value “${repository}”.`);
}
function getPullRequestNumber() {
  // PR_NUMBER must parse (base 10) to a positive integer.
  const prNumber = Number.parseInt(process.env.PR_NUMBER ?? '', 10);
  if (Number.isInteger(prNumber) && prNumber > 0) {
    return prNumber;
  }
  throw new Error(
    'PR_NUMBER environment variable must be a positive integer.',
  );
}
function assertGitHubToken() {
  // Fail fast when GITHUB_TOKEN is unset or empty; return it otherwise.
  const { GITHUB_TOKEN: token } = process.env;
  if (token) {
    return token;
  }
  throw new Error('GITHUB_TOKEN environment variable is required.');
}
async function listComments(octokit, owner, repo, issueNumber) {
  // Fetches ALL comments on the PR (paginated, 100 per page) so the
  // identifier search cannot miss an older comment.
  return octokit.paginate(octokit.rest.issues.listComments, {
    owner,
    repo,
    issue_number: issueNumber,
    per_page: 100,
  });
}
function isGitHubActionsBot(comment) {
  // Comments posted via the default GITHUB_TOKEN carry this login.
  const login = comment.user?.login;
  return login === 'github-actions[bot]';
}
async function main() {
  // Finds an existing bot comment containing `identifier` on the PR
  // and updates it in place; otherwise posts a fresh comment.
  const { commentFile, identifier } = parseArgs(process.argv);
  const commentBody = await loadCommentBody(commentFile);
  const token = assertGitHubToken();
  const { owner, repo } = getRepoInfo();
  const issueNumber = getPullRequestNumber();
  const octokit = new Octokit({ auth: token });
  const comments = await listComments(octokit, owner, repo, issueNumber);
  // Only consider comments authored by the Actions bot, matched via
  // the hidden HTML marker embedded in the comment body.
  const existingComment = comments.find(
    comment =>
      isGitHubActionsBot(comment) && comment.body?.includes(identifier),
  );
  if (existingComment) {
    await octokit.rest.issues.updateComment({
      owner,
      repo,
      comment_id: existingComment.id,
      body: commentBody,
    });
    console.log('Updated existing bundle stats comment.');
  } else {
    await octokit.rest.issues.createComment({
      owner,
      repo,
      issue_number: issueNumber,
      body: commentBody,
    });
    console.log('Created new bundle stats comment.');
  }
}
main().catch(error => {
  // Report the failure on stderr and fail the CI step via exit code.
  console.error(error);
  process.exitCode = 1;
});

View File

@@ -0,0 +1,6 @@
---
category: Maintenance
authors: [MatissJanis]
---
Update bundle size stats and comparison to run in one workflow and summarize in a single PR comment.