mirror of https://github.com/actualbudget/actual.git
synced 2026-05-11 01:18:59 -05:00

Compare commits: ai/stabili ... matiss/crd (16 commits)
| SHA1 |
|---|
| 04a2e8a309 |
| 988edc4a7d |
| 4b6baab79f |
| 1e142e055d |
| 8393a65d7a |
| 6b351eafc7 |
| 43fba254b5 |
| 6e8ac07846 |
| 99682268cc |
| 749aee4f44 |
| 531b1a1914 |
| f08490052f |
| 145868f9da |
| 9513c1e160 |
| e661951753 |
| fc5e836a02 |
145 .github/workflows/size-compare.yml (vendored)
@@ -33,7 +33,6 @@ jobs:
permissions:
pull-requests: write
contents: read
actions: read
steps:
- name: Checkout base branch
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -45,120 +44,140 @@ jobs:
with:
download-translations: 'false'

# Resolve one successful `build.yml` run for each side (master and PR
# head) up front, then pin every download below to its `run_id`. This
# ensures artifact downloads are consistent and prevents race conditions.
- name: Resolve build runs
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
id: build-runs
env:
BASE_REF: ${{ github.base_ref }}
HEAD_SHA: ${{ github.event.pull_request.head.sha }}
- name: Wait for ${{github.base_ref}} web build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: master-web-build
with:
script: |
const TIMEOUT_MS = 30 * 60 * 1000;
const SLEEP_MS = 15000;
token: ${{ secrets.GITHUB_TOKEN }}
checkName: web
ref: ${{github.base_ref}}
- name: Wait for ${{github.base_ref}} API build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: master-api-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: api
ref: ${{github.base_ref}}
- name: Wait for ${{github.base_ref}} CLI build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: master-cli-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: cli
ref: ${{github.base_ref}}
- name: Wait for ${{github.base_ref}} CRDT build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: master-crdt-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: crdt
ref: ${{github.base_ref}}

async function resolveRun({ label, filter, notFoundHint }) {
const deadline = Date.now() + TIMEOUT_MS;
while (true) {
const { data } = await github.rest.actions.listWorkflowRuns({
owner: context.repo.owner,
repo: context.repo.repo,
workflow_id: 'build.yml',
...filter,
status: 'success',
per_page: 1,
});
if (data.workflow_runs.length > 0) {
const run = data.workflow_runs[0];
core.info(`Found ${label} build run ${run.id} (${run.html_url})`);
return run.id;
}
if (Date.now() > deadline) {
throw new Error(
`No successful build.yml run found for ${label} within 30 min — ${notFoundHint}.`,
);
}
core.info(`No successful ${label} build run yet — sleeping 15s.`);
await new Promise(r => setTimeout(r, SLEEP_MS));
}
}
- name: Wait for PR build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: wait-for-web-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: web
ref: ${{github.event.pull_request.head.sha}}
- name: Wait for API PR build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: wait-for-api-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: api
ref: ${{github.event.pull_request.head.sha}}
- name: Wait for CLI PR build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: wait-for-cli-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: cli
ref: ${{github.event.pull_request.head.sha}}
- name: Wait for CRDT PR build to succeed
uses: fountainhead/action-wait-for-check@5a908a24814494009c4bb27c242ea38c93c593be # v1.2.0
id: wait-for-crdt-build
with:
token: ${{ secrets.GITHUB_TOKEN }}
checkName: crdt
ref: ${{github.event.pull_request.head.sha}}

const baseRef = process.env.BASE_REF;
const headSha = process.env.HEAD_SHA;
const [masterRunId, headRunId] = await Promise.all([
resolveRun({
label: baseRef,
filter: { branch: baseRef },
notFoundHint: `${baseRef} may be broken`,
}),
resolveRun({
label: `PR head ${headSha}`,
filter: { head_sha: headSha },
notFoundHint:
'build may still be running, have failed, or the branch may have been force-pushed',
}),
]);
core.setOutput('master_run_id', masterRunId);
core.setOutput('head_run_id', headRunId);
- name: Report build failure
if: steps.wait-for-web-build.outputs.conclusion == 'failure' || steps.wait-for-api-build.outputs.conclusion == 'failure' || steps.wait-for-cli-build.outputs.conclusion == 'failure' || steps.wait-for-crdt-build.outputs.conclusion == 'failure'
run: |
echo "Build failed on PR branch or ${GITHUB_BASE_REF}"
exit 1

- name: Download web build artifact from ${{github.base_ref}}
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
id: pr-web-build
with:
run_id: ${{ steps.build-runs.outputs.master_run_id }}
branch: ${{github.base_ref}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: build-stats
path: base
- name: Download API build artifact from ${{github.base_ref}}
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
id: pr-api-build
with:
run_id: ${{ steps.build-runs.outputs.master_run_id }}
branch: ${{github.base_ref}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: api-build-stats
path: base
- name: Download build stats from PR
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
with:
run_id: ${{ steps.build-runs.outputs.head_run_id }}
pr: ${{github.event.pull_request.number}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: build-stats
path: head
allow_forks: true
- name: Download API stats from PR
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
with:
run_id: ${{ steps.build-runs.outputs.head_run_id }}
pr: ${{github.event.pull_request.number}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: api-build-stats
path: head
allow_forks: true
- name: Download CLI build artifact from ${{github.base_ref}}
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
with:
run_id: ${{ steps.build-runs.outputs.master_run_id }}
branch: ${{github.base_ref}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: cli-build-stats
path: base
- name: Download CLI stats from PR
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
with:
run_id: ${{ steps.build-runs.outputs.head_run_id }}
pr: ${{github.event.pull_request.number}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: cli-build-stats
path: head
allow_forks: true
- name: Download CRDT build artifact from ${{github.base_ref}}
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
with:
run_id: ${{ steps.build-runs.outputs.master_run_id }}
branch: ${{github.base_ref}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: crdt-build-stats
path: base
- name: Download CRDT stats from PR
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
with:
run_id: ${{ steps.build-runs.outputs.head_run_id }}
pr: ${{github.event.pull_request.number}}
workflow: build.yml
workflow_conclusion: '' # ignore the conclusion of the workflow, since we already checked it
name: crdt-build-stats
path: head
allow_forks: true
- name: Strip content hashes from stats files
run: |
if [ -f ./head/web-stats.json ]; then
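The `resolveRun` helper above is a standard poll-until-deadline loop: probe, return on success, give up past the deadline, otherwise sleep and retry. A minimal generic sketch of the same pattern in TypeScript (the helper name, timings, and error text are illustrative, not part of the workflow):

// Poll an async probe until it yields a value or the deadline passes.
// Mirrors resolveRun above: bounded wait, fixed sleep between attempts.
async function pollUntil<T>(
  probe: () => Promise<T | null>,
  { timeoutMs = 30 * 60 * 1000, sleepMs = 15_000 } = {},
): Promise<T> {
  const deadline = Date.now() + timeoutMs;
  for (;;) {
    const result = await probe();
    if (result !== null) return result;
    if (Date.now() > deadline) {
      throw new Error(`No result within ${timeoutMs / 60_000} min`);
    }
    await new Promise(resolve => setTimeout(resolve, sleepMs));
  }
}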
9 .github/workflows/vrt-update-apply.yml (vendored)
@@ -75,12 +75,9 @@ jobs:

echo "Found patch file: $PATCH_FILE"

# Validate patch only contains PNG files. `git format-patch` emits a
# `GIT binary patch` block for PNGs (no +++/--- lines), so check
# `diff --git` headers — those are present for both text and binary.
# Validate patch only contains PNG files
echo "Validating patch contains only PNG files..."
if grep -E '^diff --git ' "$PATCH_FILE" \
| grep -vE '^diff --git a/[^[:space:]]+\.png b/[^[:space:]]+\.png$'; then
if grep -E '^(\+\+\+|---) [ab]/' "$PATCH_FILE" | grep -v '\.png$'; then
echo "ERROR: Patch contains non-PNG files! Rejecting for security."
echo "applied=false" >> "$GITHUB_OUTPUT"
echo "error=Patch validation failed: contains non-PNG files" >> "$GITHUB_OUTPUT"
@@ -88,7 +85,7 @@ jobs:
fi

# Extract file list for verification
FILES_CHANGED=$(grep -cE '^diff --git ' "$PATCH_FILE")
FILES_CHANGED=$(grep -E '^\+\+\+ b/' "$PATCH_FILE" | sed 's/^+++ b\///' | wc -l)
echo "Patch modifies $FILES_CHANGED PNG file(s)"

# Configure git
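Why the validation moved from `+++/---` lines to `diff --git` headers: for a PNG, `git format-patch` emits a `GIT binary patch` block that has no `+++` or `---` lines at all, so a `+++/---` filter can silently pass a patch whose binary hunks touch non-PNG paths. A minimal TypeScript sketch of the header-based check (a hypothetical helper, not part of the workflow; `\S` stands in for the shell's `[^[:space:]]`):

// True when every file header in the patch names a .png on both sides.
const PNG_ONLY = /^diff --git a\/\S+\.png b\/\S+\.png$/;

function patchTouchesOnlyPngs(patch: string): boolean {
  return patch
    .split('\n')
    .filter(line => line.startsWith('diff --git '))
    .every(line => PNG_ONLY.test(line));
}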
240 .github/workflows/vrt-update-generate.yml (vendored)
@@ -36,16 +36,15 @@ jobs:
content: 'eyes'
});

get-pr:
name: Resolve PR details
generate-vrt-updates:
name: Generate VRT Updates
runs-on: ubuntu-latest
# Only run on PR comments containing /update-vrt
if: >
github.event.issue.pull_request &&
startsWith(github.event.comment.body, '/update-vrt')
outputs:
head_sha: ${{ steps.pr.outputs.head_sha }}
head_ref: ${{ steps.pr.outputs.head_ref }}
head_repo: ${{ steps.pr.outputs.head_repo }}
container:
image: mcr.microsoft.com/playwright:v1.59.1-jammy
steps:
- name: Get PR details
id: pr
@@ -61,125 +60,9 @@ jobs:
core.setOutput('head_ref', pr.head.ref);
core.setOutput('head_repo', pr.head.repo.full_name);

build-web:
name: Build web bundle
runs-on: ubuntu-latest
needs: get-pr
container:
image: mcr.microsoft.com/playwright:v1.59.1-jammy
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ needs.get-pr.outputs.head_sha }}
persist-credentials: false

- name: Trust workspace directory
run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
shell: bash

- name: Set up environment
uses: ./.github/actions/setup
with:
download-translations: 'false'
- name: Build browser bundle
# REACT_APP_NETLIFY=true keeps the "Create test file" button in the
# production bundle — every VRT test's beforeEach relies on it via
# ConfigurationPage.createTestFile().
env:
REACT_APP_NETLIFY: 'true'
run: |
yarn workspace plugins-service build
yarn workspace @actual-app/crdt build
yarn workspace @actual-app/core build:browser
yarn workspace @actual-app/web build:browser
- name: Upload build artifact
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: desktop-client-build
path: packages/desktop-client/build/
retention-days: 1
overwrite: true

browser-vrt:
name: Browser VRT (shard ${{ matrix.shard }}/3)
runs-on: ubuntu-latest
needs: [get-pr, build-web]
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3]
container:
image: mcr.microsoft.com/playwright:v1.59.1-jammy
env:
E2E_USE_BUILD: '1'
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ needs.get-pr.outputs.head_sha }}
persist-credentials: false

- name: Trust workspace directory
run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
shell: bash

- name: Set up environment
uses: ./.github/actions/setup
with:
download-translations: 'false'
- name: Download web build
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
name: desktop-client-build
path: packages/desktop-client/build/
- name: Run VRT Tests
continue-on-error: true
run: yarn vrt --update-snapshots --shard=${{ matrix.shard }}/3
- name: Create shard patch with PNG changes only
id: create-patch
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"

git add "**/*.png"

if git diff --staged --quiet; then
echo "has_changes=false" >> "$GITHUB_OUTPUT"
echo "No VRT changes in this shard"
exit 0
fi

echo "has_changes=true" >> "$GITHUB_OUTPUT"

git commit -m "Update VRT screenshots (browser shard ${{ matrix.shard }})"
git format-patch -1 HEAD --stdout > vrt-shard.patch

# Validate patch only contains PNG files. `git format-patch` emits a
# `GIT binary patch` block for PNGs (no +++/--- lines), so check
# `diff --git` headers — those are present for both text and binary.
if grep -E '^diff --git ' vrt-shard.patch \
| grep -vE '^diff --git a/[^[:space:]]+\.png b/[^[:space:]]+\.png$'; then
echo "ERROR: Shard patch contains non-PNG files!"
exit 1
fi
- name: Upload shard patch
if: steps.create-patch.outputs.has_changes == 'true'
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: vrt-shard-browser-${{ matrix.shard }}
path: vrt-shard.patch
retention-days: 1
overwrite: true

desktop-vrt:
name: Desktop VRT
runs-on: ubuntu-latest
needs: get-pr
container:
image: mcr.microsoft.com/playwright:v1.59.1-jammy
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ needs.get-pr.outputs.head_sha }}
ref: ${{ steps.pr.outputs.head_sha }}
persist-credentials: false

- name: Trust workspace directory
@@ -195,120 +78,45 @@ jobs:
- name: Install build tools
run: apt-get update && apt-get install -y build-essential python3

- name: Run Desktop VRT Tests
- name: Run VRT Tests on Desktop app
continue-on-error: true
run: |
yarn rebuild-electron
xvfb-run --auto-servernum --server-args="-screen 0 1920x1080x24" -- yarn e2e:desktop --update-snapshots

- name: Create shard patch with PNG changes only
- name: Run VRT Tests
continue-on-error: true
run: yarn vrt --update-snapshots

- name: Create patch with PNG changes only
id: create-patch
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"

# Stage only PNG files
git add "**/*.png"

# Check if there are any changes
if git diff --staged --quiet; then
echo "has_changes=false" >> "$GITHUB_OUTPUT"
echo "No VRT changes in desktop shard"
exit 0
fi

echo "has_changes=true" >> "$GITHUB_OUTPUT"

git commit -m "Update VRT screenshots (desktop)"
git format-patch -1 HEAD --stdout > vrt-shard.patch

# See validation note in browser-vrt above.
if grep -E '^diff --git ' vrt-shard.patch \
| grep -vE '^diff --git a/[^[:space:]]+\.png b/[^[:space:]]+\.png$'; then
echo "ERROR: Desktop shard patch contains non-PNG files!"
exit 1
fi

- name: Upload shard patch
if: steps.create-patch.outputs.has_changes == 'true'
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: vrt-shard-desktop
path: vrt-shard.patch
retention-days: 1
overwrite: true

merge-patch:
name: Merge VRT Patches
runs-on: ubuntu-latest
needs: [get-pr, browser-vrt, desktop-vrt]
if: ${{ !cancelled() && needs.get-pr.result == 'success' }}
container:
image: mcr.microsoft.com/playwright:v1.59.1-jammy
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ needs.get-pr.outputs.head_sha }}
persist-credentials: false

- name: Download all shard patches
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
path: /tmp/shard-patches
pattern: vrt-shard-*

- name: Merge shard patches
id: create-patch
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"

shopt -s nullglob
patches=(/tmp/shard-patches/*/vrt-shard.patch)

if [ ${#patches[@]} -eq 0 ]; then
echo "has_changes=false" >> "$GITHUB_OUTPUT"
echo "No shard patches to merge"
exit 0
fi

# Defense in depth: re-validate every shard patch before applying.
# See validation note in browser-vrt above for why we match
# `diff --git` headers instead of +++/--- lines.
for patch in "${patches[@]}"; do
echo "Validating $patch"
if grep -E '^diff --git ' "$patch" \
| grep -vE '^diff --git a/[^[:space:]]+\.png b/[^[:space:]]+\.png$'; then
echo "ERROR: $patch contains non-PNG files!"
exit 1
fi
done

# Apply each shard patch. Shards touch disjoint PNG files so
# order does not matter. --index stages the applied changes.
for patch in "${patches[@]}"; do
echo "Applying $patch"
git apply --index "$patch"
done

if git diff --staged --quiet; then
echo "has_changes=false" >> "$GITHUB_OUTPUT"
echo "No VRT changes after merge"
echo "No VRT changes to commit"
exit 0
fi

echo "has_changes=true" >> "$GITHUB_OUTPUT"

# Create commit and patch
git commit -m "Update VRT screenshots"
git format-patch -1 HEAD --stdout > vrt-update.patch

# Final guard on the combined patch.
if grep -E '^diff --git ' vrt-update.patch \
| grep -vE '^diff --git a/[^[:space:]]+\.png b/[^[:space:]]+\.png$'; then
echo "ERROR: Merged patch contains non-PNG files!"
# Validate patch only contains PNG files
if grep -E '^(\+\+\+|---) [ab]/' vrt-update.patch | grep -v '\.png$'; then
echo "ERROR: Patch contains non-PNG files!"
exit 1
fi

echo "Merged patch created successfully with PNG changes only"
echo "Patch created successfully with PNG changes only"

- name: Upload patch artifact
if: steps.create-patch.outputs.has_changes == 'true'
@@ -323,11 +131,11 @@ jobs:
run: |
mkdir -p pr-metadata
echo "${{ github.event.issue.number }}" > pr-metadata/pr-number.txt
echo "${NEEDS_GET_PR_OUTPUTS_HEAD_REF}" > pr-metadata/head-ref.txt
echo "${NEEDS_GET_PR_OUTPUTS_HEAD_REPO}" > pr-metadata/head-repo.txt
echo "${STEPS_PR_OUTPUTS_HEAD_REF}" > pr-metadata/head-ref.txt
echo "${STEPS_PR_OUTPUTS_HEAD_REPO}" > pr-metadata/head-repo.txt
env:
NEEDS_GET_PR_OUTPUTS_HEAD_REF: ${{ needs.get-pr.outputs.head_ref }}
NEEDS_GET_PR_OUTPUTS_HEAD_REPO: ${{ needs.get-pr.outputs.head_repo }}
STEPS_PR_OUTPUTS_HEAD_REF: ${{ steps.pr.outputs.head_ref }}
STEPS_PR_OUTPUTS_HEAD_REPO: ${{ steps.pr.outputs.head_repo }}

- name: Upload PR metadata
if: steps.create-patch.outputs.has_changes == 'true'
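The merge job's invariant, restated as a sketch: validate every shard patch before applying any of them, then apply in arbitrary order, since shards touch disjoint PNG files. A hypothetical Node/TypeScript helper, not part of the workflow:

import { execFileSync } from 'node:child_process';
import { readFileSync } from 'node:fs';

const PNG_ONLY = /^diff --git a\/\S+\.png b\/\S+\.png$/;

function applyShardPatches(patchPaths: string[]): void {
  // Validate everything first so one bad shard rejects the whole batch.
  for (const p of patchPaths) {
    const headers = readFileSync(p, 'utf-8')
      .split('\n')
      .filter(line => line.startsWith('diff --git '));
    if (!headers.every(h => PNG_ONLY.test(h))) {
      throw new Error(`${p} touches non-PNG files`);
    }
  }
  // Disjoint files make apply order irrelevant; --index stages the
  // applied changes for the follow-up commit.
  for (const p of patchPaths) {
    execFileSync('git', ['apply', '--index', p]);
  }
}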
@@ -15,7 +15,8 @@
"vi": "readonly",
"backend": "readonly",
"importScripts": "readonly",
"FS": "readonly"
"FS": "readonly",
"__APP_VERSION__": "readonly"
},
"rules": {
// Import sorting
@@ -52,7 +52,7 @@
"playwright": "yarn workspace @actual-app/web run playwright",
"vrt": "yarn workspace @actual-app/web run vrt",
"vrt:docker": "./bin/run-vrt",
"rebuild-electron": "./node_modules/.bin/electron-rebuild -f -m ./packages/desktop-electron -o better-sqlite3,bcrypt",
"rebuild-electron": "./node_modules/.bin/electron-rebuild -m ./packages/desktop-electron -o better-sqlite3,bcrypt --build-from-source -f",
"rebuild-node": "yarn workspace @actual-app/core rebuild",
"lint": "oxfmt --check . && oxlint --type-aware --quiet",
"lint:fix": "oxfmt . && oxlint --fix --type-aware --quiet",
@@ -10,14 +10,10 @@
"!dist/**/*.spec.d.ts",
"!dist/**/*.spec.d.ts.map"
],
"main": "dist/index.js",
"types": "dist/index.d.ts",
"main": "src/index.ts",
"types": "src/index.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"development": "./src/index.ts",
"default": "./dist/index.js"
}
".": "./src/index.ts"
},
"publishConfig": {
"exports": {
@@ -25,7 +21,9 @@
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
}
}
},
"main": "dist/index.js",
"types": "dist/index.d.ts"
},
"scripts": {
"build:node": "vite build",
@@ -4,8 +4,8 @@
"rootDir": "./src",
"composite": true,
"target": "ES2021",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"module": "ES2022",
"moduleResolution": "bundler",
"noEmit": false,
"emitDeclarationOnly": true,
"declaration": true,
@@ -335,10 +335,17 @@ const isUpdateReadyForDownloadPromise = new Promise(resolve => {
resolve(true);
};
});
const updateSW = registerSW({
immediate: true,
onNeedRefresh: markUpdateReadyForDownload,
});
// Skip SW registration in dev so stale cached assets don't override edits
// between page loads. Plugin code that needs a SW can register one itself.
// In dev there is no SW to install, so applyAppUpdate() can't rely on the
// SW lifecycle to swap the page — fall back to a plain reload so callers
// don't hang on the never-resolving promise inside applyAppUpdate.
const updateSW = IS_DEV
? () => window.location.reload()
: registerSW({
immediate: true,
onNeedRefresh: markUpdateReadyForDownload,
});

global.Actual = {
IS_DEV,
@@ -376,7 +376,9 @@ export default defineConfig(async ({ mode, command }) => {
// swSrc: `service-worker/plugin-sw.js`,
// },
devOptions: {
enabled: true, // We need service worker in dev mode to work with plugins
// Disabled: caches stale assets across reloads in dev. Plugin
// code that explicitly needs a SW can register one itself.
enabled: false,
type: 'module',
},
workbox: {
@@ -239,12 +239,11 @@ async function startSyncServer() {
),
};

const serverPath = path.join(
// require.resolve will recursively search up the workspace for the module
path.dirname(require.resolve('@actual-app/sync-server/package.json')),
'build',
'app.js',
// require.resolve will recursively search up the workspace for the module
const syncServerRoot = path.dirname(
require.resolve('@actual-app/sync-server/package.json'),
);
const serverPath = path.join(syncServerRoot, 'build/app.js');

const webRoot = path.join(
// require.resolve will recursively search up the workspace for the module
@@ -1,7 +1,6 @@
#!/usr/bin/env node
import { existsSync, readFileSync } from 'node:fs';
import { dirname, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { resolve } from 'node:path';
import { parseArgs } from 'node:util';

const args = process.argv;
@@ -54,11 +53,7 @@ if (values.help) {
}

if (values.version) {
const __dirname = dirname(fileURLToPath(import.meta.url));
const packageJsonPath = resolve(__dirname, '../../package.json');
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));

console.log('v' + packageJson.version);
console.log('v' + __APP_VERSION__);
process.exit();
}
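`__APP_VERSION__` is not a runtime variable: Vite's `define` option (see the new vite.config.mts later in this diff) substitutes it textually at build time, which is also why the lint globals gain `__APP_VERSION__: "readonly"` above. A sketch of the mechanics in TypeScript (the version string is illustrative):

// In source:
declare const __APP_VERSION__: string; // satisfies the type checker / linter
console.log('v' + __APP_VERSION__);

// After `vite build` with define: { __APP_VERSION__: JSON.stringify(pkg.version) },
// the emitted bundle contains the literal, e.g.:
//   console.log('v' + "25.2.1");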
@@ -1,146 +0,0 @@
#!/usr/bin/env node
import { existsSync, readFileSync } from 'node:fs';
import { readdir, readFile, writeFile } from 'node:fs/promises';
import { dirname, extname, join, relative, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

const buildDir = resolve(__dirname, '../build');
const packageRoot = resolve(__dirname, '..');

const packageJson = JSON.parse(
readFileSync(join(packageRoot, 'package.json'), 'utf-8'),
);
// publishConfig.imports already has ./build/src/ paths with .js extensions
const importsMap = packageJson.publishConfig?.imports || {};

// Sort wildcard patterns longest-prefix-first so more specific patterns
// (e.g. #app-gocardless/services/tests/*) match before broader ones (#app-gocardless/*)
const wildcardEntries = Object.entries(importsMap)
.filter(([p]) => p.includes('*'))
.sort(([a], [b]) => b.length - a.length);

async function getAllJsFiles(dir) {
const files = [];
const entries = await readdir(dir, { withFileTypes: true });

for (const entry of entries) {
const fullPath = join(dir, entry.name);
if (entry.isDirectory()) {
files.push(...(await getAllJsFiles(fullPath)));
} else if (entry.isFile() && extname(entry.name) === '.js') {
files.push(fullPath);
}
}

return files;
}

function resolveImportPath(importPath, fromFile) {
const baseDir = dirname(fromFile);
const resolvedPath = resolve(baseDir, importPath);

// Check if it's a file with .js extension
if (existsSync(`${resolvedPath}.js`)) {
return `${importPath}.js`;
}

// Check if it's a directory with index.js
if (existsSync(resolvedPath) && existsSync(join(resolvedPath, 'index.js'))) {
return `${importPath}/index.js`;
}

// Verify the file exists before adding extension
if (!existsSync(`${resolvedPath}.js`)) {
console.warn(
`Warning: Could not resolve import '${importPath}' from ${relative(buildDir, fromFile)}`,
);
}

// Default: assume it's a file and add .js
return `${importPath}.js`;
}

function toRelativePath(target, fromFile) {
const absoluteTarget = resolve(packageRoot, target);
let rel = relative(dirname(fromFile), absoluteTarget);
if (!rel.startsWith('.')) rel = './' + rel;
return rel.split('\\').join('/');
}

function resolveSubpathImport(importPath, fromFile) {
if (importsMap[importPath]) {
return toRelativePath(importsMap[importPath], fromFile);
}

for (const [pattern, target] of wildcardEntries) {
const prefix = pattern.replaceAll('*', '');
if (importPath.startsWith(prefix)) {
const wildcard = importPath.slice(prefix.length);
return toRelativePath(target.replaceAll('*', wildcard), fromFile);
}
}

console.warn(
`Warning: Could not resolve subpath import '${importPath}' from ${relative(buildDir, fromFile)}`,
);
return null;
}

function addExtensionsToImports(content, filePath) {
const importRegex =
/(?:import\s+(?:(?:\{[^}]*\}|\*\s+as\s+\w+|\w+)(?:\s*,\s*(?:\{[^}]*\}|\*\s+as\s+\w+|\w+))*\s+from\s+)?|import\s*\(|require\s*\()['"]((\.\.?\/[^'"]+)|(#[^'"]+))['"]/g;

return content.replace(importRegex, (match, importPath) => {
if (!importPath || typeof importPath !== 'string') {
return match;
}

if (importPath.startsWith('#')) {
const resolved = resolveSubpathImport(importPath, filePath);
if (resolved) {
return match.replace(importPath, resolved);
}
return match;
}

// Skip if already has an extension
if (/\.(js|mjs|ts|mts|json)$/.test(importPath)) {
return match;
}

// Skip if ends with / (directory import that already has trailing slash)
if (importPath.endsWith('/')) {
return match;
}

const newImportPath = resolveImportPath(importPath, filePath);
return match.replace(importPath, newImportPath);
});
}

async function processFile(filePath) {
const content = await readFile(filePath, 'utf-8');
const newContent = addExtensionsToImports(content, filePath);

if (content !== newContent) {
await writeFile(filePath, newContent, 'utf-8');
const relativePath = relative(buildDir, filePath);
console.log(`Updated imports in ${relativePath}`);
}
}

async function main() {
try {
const files = await getAllJsFiles(buildDir);
await Promise.all(files.map(processFile));
console.log(`Processed ${files.length} files`);
} catch (error) {
console.error('Error processing files:', error);
process.exit(1);
}
}

void main();
@@ -1,26 +0,0 @@
import { existsSync } from 'node:fs';
import { dirname, extname, resolve as nodeResolve } from 'node:path';
import { pathToFileURL } from 'node:url';

const extensions = ['.ts', '.js', '.mts', '.mjs'];

export async function resolve(specifier, context, nextResolve) {
// Only handle relative imports without extensions
if (specifier.startsWith('.') && !extname(specifier)) {
const parentURL = context.parentURL;
if (parentURL) {
const parentPath = new URL(parentURL).pathname;
const parentDir = dirname(parentPath);

// Try extensions in order
for (const ext of extensions) {
const resolvedPath = nodeResolve(parentDir, `${specifier}${ext}`);
if (existsSync(resolvedPath)) {
return nextResolve(pathToFileURL(resolvedPath).href, context);
}
}
}
}

return nextResolve(specifier, context);
}
@@ -41,47 +41,19 @@
"#util/title": "./src/util/title/index.js",
"#util/*": "./src/util/*.ts"
},
"publishConfig": {
"imports": {
"#db": "./build/src/db.js",
"#account-db": "./build/src/account-db.js",
"#load-config": "./build/src/load-config.js",
"#migrations": "./build/src/migrations.js",
"#accounts/*": "./build/src/accounts/*.js",
"#app-gocardless/banks/bank.interface": "./build/src/app-gocardless/banks/bank.interface.js",
"#app-gocardless/banks/*": "./build/src/app-gocardless/banks/*.js",
"#app-gocardless/errors": "./build/src/app-gocardless/errors.js",
"#app-gocardless/gocardless-node.types": "./build/src/app-gocardless/gocardless-node.types.js",
"#app-gocardless/gocardless.types": "./build/src/app-gocardless/gocardless.types.js",
"#app-gocardless/services/*": "./build/src/app-gocardless/services/*.js",
"#app-gocardless/services/tests/*": "./build/src/app-gocardless/services/tests/*.js",
"#app-gocardless/util/*": "./build/src/app-gocardless/util/*.js",
"#app-gocardless/*": "./build/src/app-gocardless/*.js",
"#app-pluggyai/*": "./build/src/app-pluggyai/*.js",
"#app-simplefin/*": "./build/src/app-simplefin/*.js",
"#app-sync/services/*": "./build/src/app-sync/services/*.js",
"#app-sync/*": "./build/src/app-sync/*.js",
"#scripts/*": "./build/src/scripts/*.js",
"#services/*": "./build/src/services/*.js",
"#util/title": "./build/src/util/title/index.js",
"#util/*": "./build/src/util/*.js"
}
},
"scripts": {
"start": "yarn build && node build/app",
"start-monitor": "nodemon --exec 'yarn build && node build/app' --ignore './build/**/*' --ext 'ts,js' build/app",
"build": "tsgo -b && yarn add-import-extensions && yarn copy-static-assets",
"start": "yarn build && node build/app.js",
"start-monitor": "nodemon --exec 'yarn build && node build/app.js' --ignore './build/**/*' --ext 'ts,js' build/app.js",
"build": "vite build",
"typecheck": "tsgo -b && tsc-strict",
"add-import-extensions": "node bin/add-import-extensions.mjs",
"copy-static-assets": "rm -rf build/src/sql && cp -r src/sql build/src/sql",
"test": "NODE_ENV=test NODE_OPTIONS='--experimental-vm-modules --import ./register-loader.mjs --trace-warnings' vitest --run",
"db:migrate": "yarn build && cross-env NODE_ENV=development node build/src/scripts/run-migrations.js up",
"db:downgrade": "yarn build && cross-env NODE_ENV=development node build/src/scripts/run-migrations.js down",
"db:test-migrate": "yarn build && cross-env NODE_ENV=test node build/src/scripts/run-migrations.js up",
"db:test-downgrade": "yarn build && cross-env NODE_ENV=test node build/src/scripts/run-migrations.js down",
"reset-password": "yarn build && node build/src/scripts/reset-password.js",
"disable-openid": "yarn build && node build/src/scripts/disable-openid.js",
"health-check": "yarn build && node build/src/scripts/health-check.js"
"test": "NODE_ENV=test NODE_OPTIONS='--experimental-vm-modules --trace-warnings' vitest --run",
"db:migrate": "yarn build && cross-env NODE_ENV=development node build/scripts/run-migrations.js up",
"db:downgrade": "yarn build && cross-env NODE_ENV=development node build/scripts/run-migrations.js down",
"db:test-migrate": "yarn build && cross-env NODE_ENV=test node build/scripts/run-migrations.js up",
"db:test-downgrade": "yarn build && cross-env NODE_ENV=test node build/scripts/run-migrations.js down",
"reset-password": "yarn build && node build/scripts/reset-password.js",
"disable-openid": "yarn build && node build/scripts/disable-openid.js",
"health-check": "yarn build && node build/scripts/health-check.js"
},
"dependencies": {
"@actual-app/crdt": "workspace:*",
@@ -116,6 +88,7 @@
"nodemon": "^3.1.14",
"supertest": "^7.2.2",
"typescript-strict-plugin": "^2.4.4",
"vite": "^8.0.5",
"vitest": "^4.1.2"
},
"engines": {
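For context: Node resolves `#`-prefixed specifiers through the package's `imports` map, and `publishConfig` swaps that map at publish time so the same specifiers point at the compiled `build/src/**/*.js` files instead of TypeScript sources. A hedged sketch of what a consumer of this package sees (the dev-side mapping for `#db` is assumed from the surrounding pattern):

import { openDatabase } from '#db';
// during development: resolves via the source "imports" map (e.g. ./src/db.ts)
// in the published package: resolves via "publishConfig.imports" to ./build/src/db.js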
@@ -1,9 +0,0 @@
import { register } from 'node:module';
import { dirname, resolve } from 'node:path';
import { fileURLToPath, pathToFileURL } from 'node:url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const loaderPath = resolve(__dirname, 'loader.mjs');

register(pathToFileURL(loaderPath).href, pathToFileURL(__dirname));
@@ -1,22 +1,14 @@
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath, pathToFileURL } from 'node:url';

import IntegrationBank from './banks/integration-bank';

const dirname = path.resolve(fileURLToPath(import.meta.url), '..');
const banksDir = path.resolve(dirname, 'banks');
// Filename convention: <name>_<bic>.{ts,js} (skips bank.interface,
// integration-bank, and any other helper without an underscore).
const bankLoaders = import.meta.glob('./banks/*_*.{ts,js}');

async function loadBanks() {
const bankHandlers = fs
.readdirSync(banksDir)
.filter(filename => filename.includes('_') && filename.endsWith('.js'));

const imports = await Promise.all(
bankHandlers.map(file => {
const fileUrlToBank = pathToFileURL(path.resolve(banksDir, file)); // pathToFileURL for ESM compatibility
return import(fileUrlToBank.toString()).then(handler => handler.default);
}),
Object.values(bankLoaders).map(loader =>
loader().then(handler => handler.default),
),
);

return imports;
@@ -124,17 +124,32 @@ app.get('/metrics', (_req, res) => {
});
});

// The web frontend
// The web frontend.
// Dev mode proxies to Vite, which injects inline preamble scripts and uses
// a websocket for HMR. Loosen script-src and connect-src accordingly.
// `'unsafe-eval'` is required at runtime for the Electron app, so it is
// kept in both branches.
const isDev = process.env.NODE_ENV === 'development';
const scriptSrc = isDev
? "'self' 'unsafe-inline' 'unsafe-eval' blob:"
: "'self' 'unsafe-eval' blob:";
const connectSrc = isDev ? "'self' ws: wss: http: https:" : 'http: https:';
const csp = [
"default-src 'self' blob:",
"img-src 'self' blob: data:",
`script-src ${scriptSrc}`,
"style-src 'self' 'unsafe-inline'",
"font-src 'self' data:",
`connect-src ${connectSrc}`,
].join('; ');

app.use((req, res, next) => {
res.set('Cross-Origin-Opener-Policy', 'same-origin');
res.set('Cross-Origin-Embedder-Policy', 'require-corp');
res.set(
'Content-Security-Policy',
"default-src 'self' blob:; img-src 'self' blob: data:; script-src 'self' 'unsafe-eval' blob:; style-src 'self' 'unsafe-inline'; font-src 'self' data:; connect-src http: https:;",
);
res.set('Content-Security-Policy', csp);
next();
});
if (process.env.NODE_ENV === 'development') {
if (isDev) {
console.log(
'Running in development mode - Proxying frontend routes to React Dev Server',
);
@@ -21,8 +21,6 @@ const defaultDataDir = process.env.ACTUAL_DATA_DIR

debug(`Project root: '${projectRoot}'`);

export const sqlDir = path.join(__dirname, 'sql');

const actualAppWebBuildPath = path.join(
path.dirname(require.resolve('@actual-app/web/package.json')),
'build',
@@ -1,40 +1,34 @@
import { readdir } from 'node:fs/promises';
import path, { dirname } from 'node:path';
import { fileURLToPath, pathToFileURL } from 'node:url';
import path from 'node:path';

import { load } from 'migrate';

import { config } from './load-config';

type MigrationCallback = (err?: Error) => void;
type MigrationModule = {
up: (next?: MigrationCallback) => void;
down: (next?: MigrationCallback) => void;
};

// Vite resolves this glob at build time and inlines a static map of
// () => import('chunks/...js') calls. Each migration becomes its own chunk.
// Runtime fs reads against a migrations/ directory disappear.
const migrationsLoaders = import.meta.glob<MigrationModule>(
'../migrations/*.{ts,js}',
);

export async function run(direction: 'up' | 'down' = 'up'): Promise<void> {
console.log(
`Checking if there are any migrations to run for direction "${direction}"...`,
);

const __dirname = dirname(fileURLToPath(import.meta.url)); // this directory
const migrationsDir = path.join(__dirname, '../migrations');

try {
// Load all script files in the migrations directory
const files = await readdir(migrationsDir);
const migrationsModules: Record<
string,
{
up: (next?: MigrationCallback) => void;
down: (next?: MigrationCallback) => void;
}
> = {};
const sortedKeys = Object.keys(migrationsLoaders).sort();
const migrationsModules: Record<string, MigrationModule> = {};

for (const f of files
.filter(
f => (f.endsWith('.js') || f.endsWith('.ts')) && !f.endsWith('.d.ts'),
)
.sort((a, b) => (a > b ? 1 : a < b ? -1 : 0))) {
migrationsModules[f] = await import(
pathToFileURL(path.join(migrationsDir, f)).href
);
for (const key of sortedKeys) {
const fileName = key.split('/').pop()!;
migrationsModules[fileName] = await migrationsLoaders[key]();
}

return new Promise<void>((resolve, reject) => {
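For context on the comment above: `import.meta.glob` is resolved statically by Vite, so the built output contains a literal map of lazy dynamic imports rather than any filesystem scan. Conceptually the compiled form looks like this (the migration filename and chunk name are illustrative; real chunk names are generated at build time):

// Roughly what Vite compiles the glob into:
const migrationsLoaders: Record<string, () => Promise<MigrationModule>> = {
  '../migrations/1632571489012_add_users.ts': () =>
    import('./chunks/1632571489012_add_users-abc123.js'),
  // ...one lazy loader per matching file
};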
@@ -1,10 +1,9 @@
import { existsSync, readFileSync } from 'node:fs';
import { join } from 'node:path';
import { existsSync } from 'node:fs';

import { merkle, SyncProtoBuf, Timestamp } from '@actual-app/crdt';

import { openDatabase } from './db';
import { sqlDir } from './load-config';
import messagesSql from './sql/messages.sql?raw';
import { getPathForGroupFile } from './util/paths';

function getGroupDb(groupId) {
@@ -14,8 +13,7 @@ function getGroupDb(groupId) {
const db = openDatabase(path);

if (needsInit) {
const sql = readFileSync(join(sqlDir, 'messages.sql'), 'utf8');
db.exec(sql);
db.exec(messagesSql);
}

return db;
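The `?raw` suffix is Vite's mechanism for inlining a file's text as a string at build time (paired here with `assetsInclude: ['**/*.sql']` in the new vite.config.mts below), which is what lets the runtime `readFileSync` against `sqlDir` disappear. A minimal sketch:

import messagesSql from './sql/messages.sql?raw'; // a plain string after build
// Initializing a fresh group database needs no filesystem access:
db.exec(messagesSql);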
73 packages/sync-server/vite.config.mts (new file)
@@ -0,0 +1,73 @@
import { readFileSync } from 'node:fs';
import path from 'node:path';

import { defineConfig } from 'vite';
import type { Plugin } from 'vite';

const pkg = JSON.parse(
readFileSync(path.resolve(__dirname, 'package.json'), 'utf-8'),
);

const shebangPlugin = (entryFile: string): Plugin => ({
name: 'sync-server-shebang',
generateBundle(_options, bundle) {
const chunk = bundle[entryFile];
if (chunk?.type === 'chunk' && !chunk.code.startsWith('#!')) {
chunk.code = `#!/usr/bin/env node\n${chunk.code}`;
}
},
});

export default defineConfig({
ssr: {
target: 'node',
// Inline workspace deps that ship as TS source. Anything else
// (express, better-sqlite3, bcrypt, @actual-app/web, etc.) stays
// external so Node resolves it at runtime.
noExternal: ['@actual-app/crdt'],
},
build: {
ssr: true,
target: 'node22',
outDir: path.resolve(__dirname, 'build'),
emptyOutDir: true,
sourcemap: true,
minify: false,
rollupOptions: {
input: {
app: path.resolve(__dirname, 'app.ts'),
'bin/actual-server': path.resolve(__dirname, 'bin/actual-server.js'),
'scripts/run-migrations': path.resolve(
__dirname,
'src/scripts/run-migrations.js',
),
'scripts/reset-password': path.resolve(
__dirname,
'src/scripts/reset-password.js',
),
'scripts/disable-openid': path.resolve(
__dirname,
'src/scripts/disable-openid.js',
),
'scripts/enable-openid': path.resolve(
__dirname,
'src/scripts/enable-openid.js',
),
'scripts/health-check': path.resolve(
__dirname,
'src/scripts/health-check.js',
),
},
output: {
format: 'esm',
entryFileNames: '[name].js',
chunkFileNames: 'chunks/[name]-[hash].js',
},
},
},
define: {
__APP_VERSION__: JSON.stringify(pkg.version),
},
assetsInclude: ['**/*.sql'],
plugins: [shebangPlugin('bin/actual-server.js')],
});
@@ -1,6 +0,0 @@
---
category: Maintenance
authors: [MatissJanis]
---

Refactor VRT workflow to enable parallel execution of browser and desktop tests.
6 upcoming-release-notes/7702.md (new file)
@@ -0,0 +1,6 @@
---
category: Maintenance
authors: [MatissJanis]
---

Refactor module resolution to load `@actual-app/crdt` from source during development.
@@ -1,6 +0,0 @@
---
category: Maintenance
authors: [MatissJanis]
---

Stabilize size comparison workflow by pinning artifact downloads to specific run IDs.