mirror of
https://github.com/actualbudget/actual.git
synced 2026-05-06 15:12:35 -05:00
Compare commits
25 Commits
worktree-c
...
matiss/bro
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
95a99200d0 | ||
|
|
3ec327ccc9 | ||
|
|
9ea4021516 | ||
|
|
4f013dc3ed | ||
|
|
88e148c168 | ||
|
|
afce78503a | ||
|
|
89891a8151 | ||
|
|
0c5fc1b38c | ||
|
|
35f84b3f7f | ||
|
|
45a733f2ac | ||
|
|
59e7f858a7 | ||
|
|
d5a75a831a | ||
|
|
35d208a978 | ||
|
|
b9b3c7ecf5 | ||
|
|
f95a881d24 | ||
|
|
9a71b66929 | ||
|
|
e161eefc02 | ||
|
|
bb7e0b63bc | ||
|
|
579e50f727 | ||
|
|
1165b5ad1e | ||
|
|
55889c560b | ||
|
|
9787894535 | ||
|
|
f3850cae1d | ||
|
|
05f4b84a85 | ||
|
|
54cb04f1c9 |
1
.github/workflows/electron-master.yml
vendored
1
.github/workflows/electron-master.yml
vendored
@@ -22,7 +22,6 @@ jobs:
|
||||
permissions:
|
||||
contents: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os:
|
||||
- ubuntu-22.04
|
||||
|
||||
1
.github/workflows/electron-pr.yml
vendored
1
.github/workflows/electron-pr.yml
vendored
@@ -26,7 +26,6 @@ concurrency:
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os:
|
||||
- ubuntu-22.04
|
||||
|
||||
@@ -20,7 +20,6 @@ concurrency:
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os:
|
||||
- ubuntu-22.04
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -92,3 +92,4 @@ storybook-static
|
||||
.actualrc.yaml
|
||||
.actualrc.yml
|
||||
actual.config.js
|
||||
.playwright-cli/
|
||||
|
||||
102
packages/api/browser-worker.ts
Normal file
102
packages/api/browser-worker.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
/// <reference lib="webworker" />
|
||||
|
||||
// Worker entry for @actual-app/api's browser build.
|
||||
//
|
||||
// This owns the real loot-core instance (sql.js + absurd-sql + IndexedDB)
|
||||
// and speaks loot-core's existing backend protocol over postMessage:
|
||||
// main → worker: {id, name, args, undoTag?, catchErrors?}
|
||||
// worker → main: {type:'reply', id, result, mutated, undoTag}
|
||||
// {type:'error', id, error}
|
||||
// {type:'connect'} (handshake heartbeat)
|
||||
//
|
||||
// Bootstrapping:
|
||||
// - We register an `api-browser/init` handler that runs loot-core's public
|
||||
// init(config), so the main-thread facade can kick off the DB + auth via
|
||||
// a normal RPC call. The reply carries no return value (loot-core's
|
||||
// `init(config)` resolves to `lib`, which isn't structured-cloneable).
|
||||
// - connection.init(self, handlers) starts the message loop and the
|
||||
// `{type:'connect'}` handshake loot-core's client connection expects.
|
||||
|
||||
import * as connection from '@actual-app/core/platform/server/connection';
|
||||
import { handlers, init } from '@actual-app/core/server/main';
|
||||
import type { InitConfig } from '@actual-app/core/server/main';
|
||||
|
||||
// Dev-server friendliness: consumer bundlers (Vite first, others too) run
|
||||
// import-analysis on every `.js` URL they serve. loot-core's JS migrations
|
||||
// use `#`-subpath imports that only resolve inside loot-core — analysis
|
||||
// fails when those files live under node_modules/@actual-app/api/dist/.
|
||||
// Our build writes those files with an extra `.data` suffix, so bundlers
|
||||
// leave them alone. Translate the URLs here so loot-core's fetch layer
|
||||
// still sees `.js` names both in the manifest and on-disk.
|
||||
//
|
||||
// The wrap has to install before connection.init() runs, and populateDefault-
|
||||
// Filesystem is kicked off lazily from the first `load-budget` / init call.
|
||||
{
|
||||
const origFetch = globalThis.fetch;
|
||||
const MIGRATION_JS = /\/data\/migrations\/[^/?]+\.js(\?.*)?$/;
|
||||
globalThis.fetch = (async (
|
||||
input: RequestInfo | URL,
|
||||
initArg?: RequestInit,
|
||||
): Promise<Response> => {
|
||||
const url =
|
||||
typeof input === 'string' ? input : (input as URL | Request).toString();
|
||||
if (MIGRATION_JS.test(url)) {
|
||||
// Re-target .js → .js.data before hitting the network.
|
||||
const patched = url.replace(/(\.js)(\?|$)/, '.js.data$2');
|
||||
return origFetch(patched, initArg);
|
||||
}
|
||||
if (
|
||||
url.endsWith('/data-file-index.txt') ||
|
||||
url.endsWith('data-file-index.txt')
|
||||
) {
|
||||
const res = await origFetch(input as RequestInfo | URL, initArg);
|
||||
if (!res.ok) return res;
|
||||
const text = await res.text();
|
||||
const rewritten = text.replace(/\.js\.data(\r?\n|$)/g, '.js$1');
|
||||
return new Response(rewritten, {
|
||||
status: res.status,
|
||||
statusText: res.statusText,
|
||||
headers: res.headers,
|
||||
});
|
||||
}
|
||||
return origFetch(input as RequestInfo | URL, initArg);
|
||||
}) as typeof fetch;
|
||||
}
|
||||
|
||||
// `api-browser/init` is a worker-local handler; it isn't part of the shared
|
||||
// Handlers type. Assign via the index-signature cast rather than extending
|
||||
// the type globally.
|
||||
(handlers as Record<string, (args?: unknown) => Promise<unknown>>)[
|
||||
'api-browser/init'
|
||||
] = async function (args?: unknown) {
|
||||
const payload = (args ?? {}) as InitConfig & { __assetsBaseUrl?: string };
|
||||
// Main thread hands us a URL pointing at the api's own dist/ dir. Setting
|
||||
// PUBLIC_URL here is what makes loot-core's populateDefaultFilesystem
|
||||
// fetch `data-file-index.txt` / `data/<name>` / `sql-wasm.wasm` from our
|
||||
// package instead of the consumer's page origin — no manual copy step.
|
||||
const { __assetsBaseUrl, ...config } = payload;
|
||||
if (__assetsBaseUrl) {
|
||||
process.env.PUBLIC_URL = __assetsBaseUrl;
|
||||
}
|
||||
await init(config);
|
||||
// Nothing to return — the resolved `lib` has functions and isn't
|
||||
// structured-cloneable anyway.
|
||||
};
|
||||
|
||||
self.addEventListener('error', e => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(
|
||||
'[api worker] uncaught',
|
||||
(e as ErrorEvent).error ?? (e as ErrorEvent).message,
|
||||
);
|
||||
});
|
||||
|
||||
self.addEventListener('unhandledrejection', e => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(
|
||||
'[api worker] unhandled rejection',
|
||||
(e as PromiseRejectionEvent).reason,
|
||||
);
|
||||
});
|
||||
|
||||
connection.init(self as unknown as Window, handlers);
|
||||
39
packages/api/browser/lib-stub.ts
Normal file
39
packages/api/browser/lib-stub.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
// Browser main-thread stub for `@actual-app/core/server/main`.
|
||||
//
|
||||
// The real loot-core runs inside the worker (see browser-worker.ts). The
|
||||
// main-thread bundle reuses packages/api/methods.ts verbatim, but that file
|
||||
// reads `lib.send(...)` from loot-core. Resolving that import to this stub
|
||||
// routes every call over postMessage instead of touching loot-core on the
|
||||
// main thread.
|
||||
|
||||
export type BrowserSendFn = (name: string, args?: unknown) => Promise<unknown>;
|
||||
|
||||
let workerSend: BrowserSendFn = () => {
|
||||
return Promise.reject(
|
||||
new Error('@actual-app/api: call init() before any other method'),
|
||||
);
|
||||
};
|
||||
|
||||
// Shape-cast rather than `typeof import(...)` so this stub stays
|
||||
// module-graph-independent from the real loot-core.
|
||||
export const lib = {
|
||||
send(name: string, args?: unknown) {
|
||||
return workerSend(name, args);
|
||||
},
|
||||
} as unknown as {
|
||||
send: <T = unknown>(name: string, args?: unknown) => Promise<T>;
|
||||
};
|
||||
|
||||
export function _setBrowserSend(fn: BrowserSendFn) {
|
||||
workerSend = fn;
|
||||
}
|
||||
|
||||
// Inline InitConfig (matches loot-core's shape) so this stub does not force
|
||||
// TS to pull in the real @actual-app/core/server/main module graph at all.
|
||||
export type InitConfig = {
|
||||
dataDir?: string;
|
||||
serverURL?: string;
|
||||
password?: string;
|
||||
sessionToken?: string;
|
||||
verbose?: boolean;
|
||||
};
|
||||
132
packages/api/browser/rpc.ts
Normal file
132
packages/api/browser/rpc.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
// Main-thread RPC bridge to the api worker.
|
||||
//
|
||||
// Reuses `createBackendWorker` from loot-core so absurd-sql's main-thread
|
||||
// plumbing (IDB helper worker, __absurd:* filtering) stays in one place.
|
||||
// Speaks loot-core's existing backend protocol:
|
||||
// out: {id, name, args, catchErrors?}
|
||||
// in : {type:'reply', id, result, error?}
|
||||
// {type:'error', id, error}
|
||||
// {type:'connect'} (handshake heartbeat)
|
||||
// {type:'push', name, args}
|
||||
//
|
||||
// We handle the handshake by replying {name:'client-connected-to-backend'}
|
||||
// on the first 'connect'. Messages sent before handshake completes are
|
||||
// queued.
|
||||
|
||||
import { createBackendWorker } from '@actual-app/core/platform/client/backend-worker';
|
||||
import type { BackendWorker } from '@actual-app/core/platform/client/backend-worker';
|
||||
|
||||
type Pending = {
|
||||
resolve: (v: unknown) => void;
|
||||
reject: (e: unknown) => void;
|
||||
};
|
||||
|
||||
type Reply =
|
||||
| {
|
||||
type: 'reply';
|
||||
id: string;
|
||||
result?: unknown;
|
||||
error?: { type?: string; message?: string; [k: string]: unknown };
|
||||
}
|
||||
| {
|
||||
type: 'error';
|
||||
id: string;
|
||||
error: { type?: string; message?: string; [k: string]: unknown };
|
||||
};
|
||||
|
||||
let backend: BackendWorker | null = null;
|
||||
let connected = false;
|
||||
let queue: Array<{ id: string; name: string; args?: unknown }> = [];
|
||||
const pending = new Map<string, Pending>();
|
||||
|
||||
function nextId(): string {
|
||||
if (typeof crypto !== 'undefined' && 'randomUUID' in crypto) {
|
||||
return crypto.randomUUID();
|
||||
}
|
||||
return Date.now().toString(36) + '-' + Math.random().toString(36).slice(2);
|
||||
}
|
||||
|
||||
function toError(info: { type?: string; message?: string } | undefined) {
|
||||
const msg = info?.message || info?.type || 'api worker error';
|
||||
const err = new Error(msg);
|
||||
if (info?.type) err.name = info.type;
|
||||
return err;
|
||||
}
|
||||
|
||||
export function setWorker(worker: Worker): BackendWorker {
|
||||
if (backend) {
|
||||
backend.terminate();
|
||||
}
|
||||
|
||||
connected = false;
|
||||
queue = [];
|
||||
pending.clear();
|
||||
|
||||
backend = createBackendWorker(worker);
|
||||
|
||||
backend.onMessage((data: unknown) => {
|
||||
if (!data || typeof data !== 'object') return;
|
||||
const msg = data as { type?: string; name?: string };
|
||||
|
||||
if (msg.type === 'connect') {
|
||||
if (!connected) {
|
||||
connected = true;
|
||||
backend!.postMessage({ name: 'client-connected-to-backend' });
|
||||
// Drain anything queued while waiting for the handshake.
|
||||
const drained = queue;
|
||||
queue = [];
|
||||
for (const m of drained) backend!.postMessage(m);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (msg.type === 'reply' || msg.type === 'error') {
|
||||
const reply = msg as Reply;
|
||||
const p = pending.get(reply.id);
|
||||
if (!p) return;
|
||||
pending.delete(reply.id);
|
||||
if (reply.type === 'error') {
|
||||
p.reject(toError(reply.error));
|
||||
} else if ('error' in reply && reply.error) {
|
||||
// api/* handlers funnel errors through the reply envelope.
|
||||
p.reject(toError(reply.error));
|
||||
} else {
|
||||
p.resolve(reply.result);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// push/capture-exception/etc. — ignore for now; the api consumer
|
||||
// doesn't subscribe to loot-core's server events.
|
||||
});
|
||||
|
||||
return backend;
|
||||
}
|
||||
|
||||
export function rpc(name: string, args?: unknown): Promise<unknown> {
|
||||
if (!backend) {
|
||||
return Promise.reject(
|
||||
new Error('@actual-app/api: init() must be called before any api method'),
|
||||
);
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
const id = nextId();
|
||||
pending.set(id, { resolve, reject });
|
||||
const msg = { id, name, args };
|
||||
if (connected) {
|
||||
backend!.postMessage(msg);
|
||||
} else {
|
||||
queue.push(msg);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function terminate() {
|
||||
if (backend) {
|
||||
backend.terminate();
|
||||
backend = null;
|
||||
}
|
||||
connected = false;
|
||||
queue = [];
|
||||
pending.clear();
|
||||
}
|
||||
66
packages/api/index.browser.ts
Normal file
66
packages/api/index.browser.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
// Main-thread browser entry for @actual-app/api.
|
||||
//
|
||||
// Public surface matches the Node entry. The worker is spawned internally
|
||||
// so consumers write:
|
||||
//
|
||||
// import * as api from '@actual-app/api';
|
||||
// await api.init({ dataDir: '/documents', serverURL, password });
|
||||
// await api.getAccounts();
|
||||
//
|
||||
// worker.js must be a sibling of browser.js at runtime. Our build ships
|
||||
// them together in dist/; the consumer's bundler resolves the worker URL
|
||||
// via `new URL(..., import.meta.url)`.
|
||||
|
||||
import { _setBrowserSend } from './browser/lib-stub';
|
||||
import type { InitConfig } from './browser/lib-stub';
|
||||
import { rpc, setWorker, terminate } from './browser/rpc';
|
||||
|
||||
export * from './methods';
|
||||
export * as utils from './utils';
|
||||
|
||||
// Wire methods.ts's `lib.send` through the worker.
|
||||
_setBrowserSend((name, args) => rpc(name, args));
|
||||
|
||||
function createWorker(): Worker {
|
||||
// Vite's `vite:worker-import-meta-url` plugin rewrites this pattern at
|
||||
// the CONSUMER's build time (emit worker.js as an asset, substitute the
|
||||
// hashed URL). Feeding it a non-literal first argument keeps the api's
|
||||
// OWN lib build from trying to pre-bundle it, which would fail because
|
||||
// ./worker.js is not a source-tree sibling of this file.
|
||||
const rel = './worker.js';
|
||||
return new Worker(new URL(rel, import.meta.url), { type: 'module' });
|
||||
}
|
||||
|
||||
export async function init(config: InitConfig = {}) {
|
||||
setWorker(createWorker());
|
||||
// Point loot-core's browser fs at our dist/ directory. We want the
|
||||
// directory portion of this bundle's own URL so loot-core's fetches land
|
||||
// on files we ship (data-file-index.txt, migrations/, default-db.sqlite,
|
||||
// sql-wasm.wasm). Vite's asset plugin tries to pre-bundle
|
||||
// `new URL('.', import.meta.url)` at consumer build time and picks up
|
||||
// the `development` export condition (inlining index.ts as a data URL!).
|
||||
// Derive the base URL via string manipulation instead so static analyzers
|
||||
// leave it alone.
|
||||
const assetsBaseUrl = import.meta.url.replace(/[^/]+$/, '');
|
||||
await rpc('api-browser/init', { ...config, __assetsBaseUrl: assetsBaseUrl });
|
||||
// Return a {send} handle compatible with the Node entry so existing
|
||||
// consumer code that does `const internal = await api.init(...); internal.send(...)`
|
||||
// keeps working on the browser build too.
|
||||
return {
|
||||
send: (name: string, args?: unknown) => rpc(name, args),
|
||||
};
|
||||
}
|
||||
|
||||
export async function shutdown() {
|
||||
try {
|
||||
await rpc('sync');
|
||||
} catch {
|
||||
// most likely no budget loaded
|
||||
}
|
||||
try {
|
||||
await rpc('close-budget');
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
terminate();
|
||||
}
|
||||
@@ -13,6 +13,7 @@
|
||||
".": {
|
||||
"types": "./@types/index.d.ts",
|
||||
"development": "./index.ts",
|
||||
"browser": "./dist/browser.js",
|
||||
"default": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
@@ -20,26 +21,37 @@
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./@types/index.d.ts",
|
||||
"browser": "./dist/browser.js",
|
||||
"default": "./dist/index.js"
|
||||
}
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"build": "vite build && tsgo --emitDeclarationOnly",
|
||||
"test": "vitest --run",
|
||||
"build": "npm-run-all -s build:node build:browser-worker build:browser",
|
||||
"build:node": "vite build --config vite.config.mts && tsgo --emitDeclarationOnly",
|
||||
"build:browser": "vite build --config vite.browser.config.mts",
|
||||
"build:browser-worker": "vite build --config vite.browser-worker.config.mts",
|
||||
"test": "npm-run-all -cp 'test:*'",
|
||||
"test:node": "vitest --run --config vite.config.mts",
|
||||
"test:browser": "vitest --run --config vitest.browser.config.mts",
|
||||
"typecheck": "tsgo -b && tsc-strict"
|
||||
},
|
||||
"dependencies": {
|
||||
"@actual-app/core": "workspace:*",
|
||||
"@actual-app/crdt": "workspace:*",
|
||||
"absurd-sql": "0.0.54",
|
||||
"better-sqlite3": "^12.8.0",
|
||||
"compare-versions": "^6.1.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@typescript/native-preview": "^7.0.0-dev.20260404.1",
|
||||
"fake-indexeddb": "^6.2.5",
|
||||
"jsdom": "^27.4.0",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"rollup-plugin-visualizer": "^7.0.1",
|
||||
"typescript-strict-plugin": "^2.4.4",
|
||||
"vite": "^8.0.5",
|
||||
"vite-plugin-node-polyfills": "^0.26.0",
|
||||
"vite-plugin-peggy-loader": "^2.0.1",
|
||||
"vitest": "^4.1.2"
|
||||
},
|
||||
|
||||
183
packages/api/test/browser-facade.test.ts
Normal file
183
packages/api/test/browser-facade.test.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import { afterEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import * as api from '../index.browser';
|
||||
|
||||
// Swap the real Worker constructor for a mock that the tests control. Vitest
|
||||
// picks this up via vite.config resolve.alias; here we just stand in globally
|
||||
// because jsdom does not ship Worker at all.
|
||||
class MockWorker {
|
||||
public posted: Array<unknown> = [];
|
||||
public responder: (
|
||||
req: { id: string; name: string; args?: unknown },
|
||||
reply: (res: unknown) => void,
|
||||
) => void = () => undefined;
|
||||
|
||||
private listeners: Array<(e: MessageEvent) => void> = [];
|
||||
onmessage: ((e: MessageEvent) => void) | null = null;
|
||||
onerror: ((e: ErrorEvent) => void) | null = null;
|
||||
private connected = false;
|
||||
|
||||
addEventListener(type: string, handler: (e: MessageEvent) => void) {
|
||||
if (type === 'message') this.listeners.push(handler);
|
||||
}
|
||||
|
||||
removeEventListener() {
|
||||
// no-op for tests
|
||||
}
|
||||
|
||||
postMessage(msg: unknown) {
|
||||
this.posted.push(msg);
|
||||
|
||||
if (
|
||||
msg &&
|
||||
typeof msg === 'object' &&
|
||||
(msg as { name?: string }).name === 'client-connected-to-backend'
|
||||
) {
|
||||
// Handshake complete; we won't keep sending 'connect' heartbeats.
|
||||
return;
|
||||
}
|
||||
|
||||
const req = msg as { id: string; name: string; args?: unknown };
|
||||
queueMicrotask(() => {
|
||||
this.responder(req, (data: unknown) => {
|
||||
const ev = { data } as MessageEvent;
|
||||
this.onmessage?.(ev);
|
||||
for (const l of this.listeners) l(ev);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/** Simulate loot-core's connect handshake from the worker side. */
|
||||
fireConnect() {
|
||||
if (this.connected) return;
|
||||
this.connected = true;
|
||||
const ev = { data: { type: 'connect' } } as MessageEvent;
|
||||
this.onmessage?.(ev);
|
||||
for (const l of this.listeners) l(ev);
|
||||
}
|
||||
|
||||
terminate() {
|
||||
this.listeners = [];
|
||||
}
|
||||
}
|
||||
|
||||
// Every Worker the api spawns inside init() comes through here.
|
||||
let lastMockWorker: MockWorker | null = null;
|
||||
const mockWorkerResponder = vi.fn<
|
||||
(
|
||||
req: { id: string; name: string; args?: unknown },
|
||||
reply: (res: unknown) => void,
|
||||
) => void
|
||||
>(() => undefined);
|
||||
|
||||
// Global Worker stub — the api's internal `new Worker(...)` will call this.
|
||||
// @ts-expect-error jsdom has no Worker; we override the global for the test.
|
||||
globalThis.Worker = class {
|
||||
constructor(_url: URL | string, _opts?: WorkerOptions) {
|
||||
const w = new MockWorker();
|
||||
w.responder = (req, reply) => mockWorkerResponder(req, reply);
|
||||
lastMockWorker = w;
|
||||
// Fire the connect handshake on the next tick so init() resolves.
|
||||
queueMicrotask(() => w.fireConnect());
|
||||
return w as unknown as Worker;
|
||||
}
|
||||
};
|
||||
|
||||
// absurd-sql's main-thread bridge expects real Worker event semantics. The
|
||||
// mock above exposes addEventListener; initSQLBackend just attaches a
|
||||
// message listener, so it's safe with jsdom.
|
||||
|
||||
afterEach(async () => {
|
||||
// Keep whatever responder the test installed so shutdown's sync/close-budget
|
||||
// calls resolve rather than hang.
|
||||
await api.shutdown().catch(() => undefined);
|
||||
mockWorkerResponder.mockReset();
|
||||
lastMockWorker = null;
|
||||
});
|
||||
|
||||
describe('@actual-app/api browser facade', () => {
|
||||
test('spawns a worker on init and forwards config via api-browser/init', async () => {
|
||||
mockWorkerResponder.mockImplementation((req, reply) => {
|
||||
reply({ type: 'reply', id: req.id, result: undefined });
|
||||
});
|
||||
|
||||
await api.init({
|
||||
dataDir: '/documents',
|
||||
serverURL: 'https://example.test',
|
||||
password: 'pw',
|
||||
});
|
||||
|
||||
expect(lastMockWorker).toBeTruthy();
|
||||
// First post after the handshake ack is the api-browser/init request.
|
||||
const initCall = lastMockWorker!.posted.find(
|
||||
m =>
|
||||
m &&
|
||||
typeof m === 'object' &&
|
||||
(m as { name?: string }).name === 'api-browser/init',
|
||||
) as { name: string; args: unknown } | undefined;
|
||||
expect(initCall).toBeTruthy();
|
||||
expect(initCall!.args).toMatchObject({
|
||||
dataDir: '/documents',
|
||||
serverURL: 'https://example.test',
|
||||
password: 'pw',
|
||||
});
|
||||
// The api also hands over its own asset base URL so loot-core's fs
|
||||
// can fetch migrations / default-db / WASM from the api's dist/
|
||||
// instead of the consumer's page origin.
|
||||
expect(
|
||||
(initCall!.args as { __assetsBaseUrl?: string }).__assetsBaseUrl,
|
||||
).toBeTypeOf('string');
|
||||
});
|
||||
|
||||
test('rpc methods forward as {id, name, args} and read {type:reply, result}', async () => {
|
||||
mockWorkerResponder.mockImplementation((req, reply) => {
|
||||
if (req.name === 'api-browser/init') {
|
||||
reply({ type: 'reply', id: req.id, result: undefined });
|
||||
return;
|
||||
}
|
||||
if (req.name === 'api/accounts-get') {
|
||||
reply({
|
||||
type: 'reply',
|
||||
id: req.id,
|
||||
result: [{ id: 'a1', name: 'Checking' }],
|
||||
});
|
||||
return;
|
||||
}
|
||||
reply({
|
||||
type: 'error',
|
||||
id: req.id,
|
||||
error: { type: 'APIError', message: 'unexpected' },
|
||||
});
|
||||
});
|
||||
|
||||
await api.init({ dataDir: '/documents' });
|
||||
const accounts = await api.getAccounts();
|
||||
expect(accounts).toEqual([{ id: 'a1', name: 'Checking' }]);
|
||||
|
||||
const sendCalls = lastMockWorker!.posted.filter(
|
||||
m =>
|
||||
m &&
|
||||
typeof m === 'object' &&
|
||||
(m as { name?: string }).name === 'api/accounts-get',
|
||||
);
|
||||
expect(sendCalls).toHaveLength(1);
|
||||
expect((sendCalls[0] as { args?: unknown }).args).toBeUndefined();
|
||||
});
|
||||
|
||||
test('worker errors reject at the call site', async () => {
|
||||
mockWorkerResponder.mockImplementation((req, reply) => {
|
||||
if (req.name === 'api-browser/init') {
|
||||
reply({ type: 'reply', id: req.id, result: undefined });
|
||||
return;
|
||||
}
|
||||
reply({
|
||||
type: 'reply',
|
||||
id: req.id,
|
||||
error: { type: 'APIError', message: 'budget not loaded' },
|
||||
});
|
||||
});
|
||||
|
||||
await api.init({ dataDir: '/documents' });
|
||||
await expect(api.getAccounts()).rejects.toThrow(/budget not loaded/);
|
||||
});
|
||||
});
|
||||
43
packages/api/test/integration.test.ts
Normal file
43
packages/api/test/integration.test.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { afterEach, describe, expect, test } from 'vitest';
|
||||
|
||||
import * as api from '../index';
|
||||
|
||||
declare const __API_DATA_DIR__: string;
|
||||
|
||||
afterEach(async () => {
|
||||
await api.shutdown();
|
||||
});
|
||||
|
||||
describe('api CRUD roundtrip (Node)', () => {
|
||||
test('creates a budget, writes, reads it back', async () => {
|
||||
const internal = await api.init({ dataDir: __API_DATA_DIR__ });
|
||||
|
||||
await internal.send('create-budget', {
|
||||
budgetName: 'Integration Test',
|
||||
testMode: true,
|
||||
testBudgetId: 'integration-test',
|
||||
});
|
||||
await api.loadBudget('integration-test');
|
||||
|
||||
const accountId = await api.createAccount(
|
||||
{ name: 'Checking', offbudget: false },
|
||||
0,
|
||||
);
|
||||
|
||||
await api.addTransactions(accountId, [
|
||||
{ date: '2026-04-01', amount: 1000, payee_name: 'Coffee' },
|
||||
{ date: '2026-04-02', amount: -500, payee_name: 'Book' },
|
||||
]);
|
||||
|
||||
const accounts = await api.getAccounts();
|
||||
expect(accounts.map(a => a.name)).toContain('Checking');
|
||||
|
||||
const txns = await api.getTransactions(
|
||||
accountId,
|
||||
'2026-04-01',
|
||||
'2026-04-30',
|
||||
);
|
||||
expect(txns).toHaveLength(2);
|
||||
expect(txns.map(t => t.amount).sort((a, b) => a - b)).toEqual([-500, 1000]);
|
||||
});
|
||||
});
|
||||
@@ -2,38 +2,18 @@ import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
|
||||
import type { RuleEntity } from '@actual-app/core/types/models';
|
||||
import { vi } from 'vitest';
|
||||
|
||||
import * as api from './index';
|
||||
|
||||
// In tests we run from source; loot-core's API fs uses __dirname (for the built dist/).
|
||||
// Mock the fs so path constants point at loot-core package root where migrations live.
|
||||
vi.mock(
|
||||
'../loot-core/src/platform/server/fs/index.api',
|
||||
async importOriginal => {
|
||||
const actual = (await importOriginal()) as Record<string, unknown>;
|
||||
const pathMod = await import('path');
|
||||
const lootCoreRoot = pathMod.join(__dirname, '..', 'loot-core');
|
||||
return {
|
||||
...actual,
|
||||
migrationsPath: pathMod.join(lootCoreRoot, 'migrations'),
|
||||
bundledDatabasePath: pathMod.join(lootCoreRoot, 'default-db.sqlite'),
|
||||
demoBudgetPath: pathMod.join(lootCoreRoot, 'demo-budget'),
|
||||
};
|
||||
},
|
||||
);
|
||||
import * as api from '../index';
|
||||
|
||||
const budgetName = 'test-budget';
|
||||
|
||||
global.IS_TESTING = true;
|
||||
|
||||
beforeEach(async () => {
|
||||
const budgetPath = path.join(__dirname, '/mocks/budgets/', budgetName);
|
||||
const budgetPath = path.join(__dirname, '/../mocks/budgets/', budgetName);
|
||||
await fs.rm(budgetPath, { force: true, recursive: true });
|
||||
|
||||
await createTestBudget('default-budget-template', budgetName);
|
||||
await api.init({
|
||||
dataDir: path.join(__dirname, '/mocks/budgets/'),
|
||||
dataDir: path.join(__dirname, '/../mocks/budgets/'),
|
||||
});
|
||||
});
|
||||
|
||||
@@ -45,10 +25,10 @@ afterEach(async () => {
|
||||
async function createTestBudget(templateName: string, name: string) {
|
||||
const templatePath = path.join(
|
||||
__dirname,
|
||||
'/../loot-core/src/mocks/files',
|
||||
'/../../loot-core/src/mocks/files',
|
||||
templateName,
|
||||
);
|
||||
const budgetPath = path.join(__dirname, '/mocks/budgets/', name);
|
||||
const budgetPath = path.join(__dirname, '/../mocks/budgets/', name);
|
||||
|
||||
await fs.mkdir(budgetPath);
|
||||
await fs.copyFile(
|
||||
31
packages/api/test/setup.node.ts
Normal file
31
packages/api/test/setup.node.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import * as fsPromises from 'fs/promises';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
|
||||
import { vi } from 'vitest';
|
||||
|
||||
// In tests we run from source; loot-core's API fs uses __dirname (for the built dist/).
|
||||
// Mock the fs so path constants point at loot-core package root where migrations live.
|
||||
vi.mock(
|
||||
'../../loot-core/src/platform/server/fs/index.api',
|
||||
async importOriginal => {
|
||||
const actual = (await importOriginal()) as Record<string, unknown>;
|
||||
const lootCoreRoot = path.join(__dirname, '..', '..', 'loot-core');
|
||||
return {
|
||||
...actual,
|
||||
migrationsPath: path.join(lootCoreRoot, 'migrations'),
|
||||
bundledDatabasePath: path.join(lootCoreRoot, 'default-db.sqlite'),
|
||||
demoBudgetPath: path.join(lootCoreRoot, 'demo-budget'),
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
global.IS_TESTING = true;
|
||||
|
||||
// Shared integration test lives in a filesystem-backed tmp dir.
|
||||
const dataDir = path.join(
|
||||
os.tmpdir(),
|
||||
`api-it-${Date.now()}-${Math.random().toString(36).slice(2)}`,
|
||||
);
|
||||
await fsPromises.mkdir(dataDir, { recursive: true });
|
||||
globalThis.__API_DATA_DIR__ = dataDir;
|
||||
@@ -8,7 +8,12 @@
|
||||
"module": "es2022",
|
||||
"moduleResolution": "bundler",
|
||||
"customConditions": ["api"],
|
||||
// composite + declaration: true require `noEmit: false`, so use
|
||||
// emitDeclarationOnly to keep typecheck + project refs working without
|
||||
// clobbering the Vite build artifacts in dist/. build:node also passes
|
||||
// --emitDeclarationOnly on the CLI (redundant but explicit).
|
||||
"noEmit": false,
|
||||
"emitDeclarationOnly": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"outDir": "dist",
|
||||
@@ -23,7 +28,8 @@
|
||||
"**/node_modules/*",
|
||||
"dist",
|
||||
"@types",
|
||||
"*.test.ts",
|
||||
"**/*.test.ts",
|
||||
"test/setup.*.ts",
|
||||
"*.config.ts",
|
||||
"*.config.mts"
|
||||
]
|
||||
|
||||
62
packages/api/vite.browser-worker.config.mts
Normal file
62
packages/api/vite.browser-worker.config.mts
Normal file
@@ -0,0 +1,62 @@
|
||||
import path from 'path';
|
||||
|
||||
import { defineConfig } from 'vite';
|
||||
import { nodePolyfills } from 'vite-plugin-node-polyfills';
|
||||
import peggyLoader from 'vite-plugin-peggy-loader';
|
||||
|
||||
const distDir = path.resolve(__dirname, 'dist');
|
||||
|
||||
// Worker bundle: contains the full loot-core + sql.js + absurd-sql stack.
|
||||
// Runs inside a Web Worker where absurd-sql's Atomics.wait has the right
|
||||
// thread context. Consumer spawns the worker with this file as the entry.
|
||||
export default defineConfig({
|
||||
define: {
|
||||
// NODE_ENV is read at build time by dead-code elimination paths and
|
||||
// must stay a literal. The others (PUBLIC_URL, DATA_DIR, SERVER_URL,
|
||||
// DOCUMENT_DIR) are set at runtime via the `api-browser/init` handler
|
||||
// which receives them from the main thread — so they stay as
|
||||
// `process.env.<name>` references and the nodePolyfills-provided
|
||||
// process shim serves as the backing store.
|
||||
'process.env.NODE_ENV': JSON.stringify('production'),
|
||||
},
|
||||
build: {
|
||||
target: 'esnext',
|
||||
outDir: distDir,
|
||||
emptyOutDir: false,
|
||||
sourcemap: true,
|
||||
lib: {
|
||||
entry: path.resolve(__dirname, 'browser-worker.ts'),
|
||||
formats: ['es'],
|
||||
fileName: () => 'worker.js',
|
||||
},
|
||||
rollupOptions: {
|
||||
output: {
|
||||
codeSplitting: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: [
|
||||
peggyLoader(),
|
||||
nodePolyfills({
|
||||
include: [
|
||||
'process',
|
||||
'buffer',
|
||||
'stream',
|
||||
'path',
|
||||
'crypto',
|
||||
'timers',
|
||||
'util',
|
||||
'zlib',
|
||||
'fs',
|
||||
'assert',
|
||||
],
|
||||
globals: {
|
||||
process: true,
|
||||
Buffer: true,
|
||||
global: true,
|
||||
},
|
||||
}),
|
||||
],
|
||||
// Intentionally no resolve.conditions: ['api'] — loot-core falls back to
|
||||
// its default (browser) platform files.
|
||||
});
|
||||
39
packages/api/vite.browser.config.mts
Normal file
39
packages/api/vite.browser.config.mts
Normal file
@@ -0,0 +1,39 @@
|
||||
import path from 'path';
|
||||
|
||||
import { defineConfig } from 'vite';
|
||||
|
||||
const distDir = path.resolve(__dirname, 'dist');
|
||||
|
||||
// Main-thread facade only. Tiny bundle: no loot-core, no sql.js, no absurd-sql.
|
||||
// The worker is built separately by vite.browser-worker.config.mts. The
|
||||
// consumer constructs the Worker (handling URL resolution through their own
|
||||
// bundler) and hands it to init().
|
||||
export default defineConfig({
|
||||
build: {
|
||||
target: 'esnext',
|
||||
outDir: distDir,
|
||||
emptyOutDir: false,
|
||||
sourcemap: true,
|
||||
lib: {
|
||||
entry: path.resolve(__dirname, 'index.browser.ts'),
|
||||
formats: ['es'],
|
||||
fileName: () => 'browser.js',
|
||||
},
|
||||
rollupOptions: {
|
||||
output: {
|
||||
codeSplitting: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
// methods.ts reads `lib.send` from loot-core's server/main. Route it
|
||||
// through the main-thread stub so loot-core is never pulled into
|
||||
// the main bundle.
|
||||
'@actual-app/core/server/main': path.resolve(
|
||||
__dirname,
|
||||
'browser/lib-stub.ts',
|
||||
),
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -49,10 +49,61 @@ function copyMigrationsAndDefaultDb() {
|
||||
throw new Error(`default-db.sqlite not found at ${defaultDbPath}`);
|
||||
}
|
||||
fs.copyFileSync(defaultDbPath, path.join(distDir, 'default-db.sqlite'));
|
||||
|
||||
// Browser consumers need sql.js' WASM to be served at the same origin
|
||||
// as the bundle. Ship it alongside dist/ so downstream apps just point
|
||||
// a static handler at dist and don't have to reach into node_modules.
|
||||
const sqlJsWasm = require.resolve('@jlongster/sql.js/dist/sql-wasm.wasm');
|
||||
fs.copyFileSync(sqlJsWasm, path.join(distDir, 'sql-wasm.wasm'));
|
||||
|
||||
// loot-core's browser fs bootstraps by fetching:
|
||||
// `${PUBLIC_URL}data-file-index.txt` - flat manifest
|
||||
// `${PUBLIC_URL}data/<name>` - each file listed in the manifest
|
||||
// We point PUBLIC_URL at the api's dist dir at runtime (see
|
||||
// index.browser.ts), so these two shapes need to exist here.
|
||||
//
|
||||
// JS migrations get a `.data` suffix on the *wire* path. Consumer
|
||||
// bundlers (Vite's dev server first, others to varying degrees)
|
||||
// auto-transform `.js` URLs through their import-analysis pipelines,
|
||||
// which fails on loot-core's `#`-subpath imports. The api's worker
|
||||
// (browser-worker.ts) wraps `fetch` to translate back to `.js` so
|
||||
// loot-core's migration runner finds the file under its original
|
||||
// name in the virtual FS. `.sql` migrations stay as-is.
|
||||
const dataDir = path.join(distDir, 'data');
|
||||
const dataMigrationsDir = path.join(dataDir, 'migrations');
|
||||
fs.mkdirSync(dataMigrationsDir, { recursive: true });
|
||||
|
||||
linkOrCopy(
|
||||
path.join(distDir, 'default-db.sqlite'),
|
||||
path.join(dataDir, 'default-db.sqlite'),
|
||||
);
|
||||
const wireMigrationNames: string[] = [];
|
||||
for (const name of fs.readdirSync(migrationsDest)) {
|
||||
const wireName = name.endsWith('.js') ? `${name}.data` : name;
|
||||
linkOrCopy(
|
||||
path.join(migrationsDest, name),
|
||||
path.join(dataMigrationsDir, wireName),
|
||||
);
|
||||
wireMigrationNames.push(`migrations/${wireName}`);
|
||||
}
|
||||
wireMigrationNames.sort();
|
||||
|
||||
// data-file-index.txt: one path per line, relative to `data/`.
|
||||
const manifest =
|
||||
['default-db.sqlite', ...wireMigrationNames].join('\n') + '\n';
|
||||
fs.writeFileSync(path.join(distDir, 'data-file-index.txt'), manifest);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function linkOrCopy(src: string, dest: string) {
|
||||
try {
|
||||
fs.linkSync(src, dest);
|
||||
} catch {
|
||||
fs.copyFileSync(src, dest);
|
||||
}
|
||||
}
|
||||
|
||||
export default defineConfig({
|
||||
ssr: {
|
||||
noExternal: true,
|
||||
@@ -82,6 +133,9 @@ export default defineConfig({
|
||||
},
|
||||
test: {
|
||||
globals: true,
|
||||
environment: 'node',
|
||||
setupFiles: ['./test/setup.node.ts'],
|
||||
exclude: ['**/node_modules/**', '**/browser-facade.test.ts'],
|
||||
onConsoleLog(log: string, type: 'stdout' | 'stderr'): boolean | void {
|
||||
// print only console.error
|
||||
return type === 'stderr';
|
||||
|
||||
35
packages/api/vitest.browser.config.mts
Normal file
35
packages/api/vitest.browser.config.mts
Normal file
@@ -0,0 +1,35 @@
|
||||
import path from 'path';
|
||||
|
||||
import { defineConfig } from 'vite';
|
||||
import peggyLoader from 'vite-plugin-peggy-loader';
|
||||
|
||||
// Deliberately independent from vite.browser.config.mts: the build config
|
||||
// applies node polyfills that would swap out Node fs in the test setup
|
||||
// file. The test setup uses real Node fs to stream the on-disk fixtures
|
||||
// (default-db.sqlite, migrations, sql.js WASM) through a fetch polyfill.
|
||||
|
||||
export default defineConfig({
|
||||
plugins: [peggyLoader()],
|
||||
// The facade test imports `../index.browser` directly and uses a mock
|
||||
// Worker. loot-core never loads on the main thread, so no platform
|
||||
// condition juggling is needed here. The sibling vite.browser.config.mts
|
||||
// aliases loot-core to the stub for the bundled facade; for the test we
|
||||
// mirror that so `methods.ts` resolves correctly.
|
||||
resolve: {
|
||||
alias: {
|
||||
'@actual-app/core/server/main': path.resolve(
|
||||
__dirname,
|
||||
'browser/lib-stub.ts',
|
||||
),
|
||||
},
|
||||
},
|
||||
test: {
|
||||
globals: true,
|
||||
environment: 'jsdom',
|
||||
include: ['test/browser-facade.test.ts'],
|
||||
onConsoleLog(log: string, type: 'stdout' | 'stderr'): boolean | void {
|
||||
return type === 'stderr';
|
||||
},
|
||||
maxWorkers: 2,
|
||||
},
|
||||
});
|
||||
@@ -43,16 +43,13 @@ Configuration is resolved in this order (highest priority first):
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Description |
|
||||
| ---------------------- | ----------------------------------------------------- |
|
||||
| `ACTUAL_SERVER_URL` | URL of the Actual sync server (required) |
|
||||
| `ACTUAL_PASSWORD` | Server password (required unless using token) |
|
||||
| `ACTUAL_SESSION_TOKEN` | Session token (alternative to password) |
|
||||
| `ACTUAL_SYNC_ID` | Budget Sync ID (required for most commands) |
|
||||
| `ACTUAL_DATA_DIR` | Local directory for cached budget data |
|
||||
| `ACTUAL_CACHE_TTL` | Cache TTL in seconds (default: 60) |
|
||||
| `ACTUAL_LOCK_TIMEOUT` | Budget-dir lock wait timeout in seconds (default: 10) |
|
||||
| `ACTUAL_NO_LOCK` | Set to `1` to disable budget-dir locking |
|
||||
| Variable | Description |
|
||||
| ---------------------- | --------------------------------------------- |
|
||||
| `ACTUAL_SERVER_URL` | URL of the Actual sync server (required) |
|
||||
| `ACTUAL_PASSWORD` | Server password (required unless using token) |
|
||||
| `ACTUAL_SESSION_TOKEN` | Session token (alternative to password) |
|
||||
| `ACTUAL_SYNC_ID` | Budget Sync ID (required for most commands) |
|
||||
| `ACTUAL_DATA_DIR` | Local directory for cached budget data |
|
||||
|
||||
### Config File
|
||||
|
||||
@@ -62,10 +59,7 @@ Create an `.actualrc.json` (or `.actualrc`, `.actualrc.yaml`, `actual.config.js`
|
||||
{
|
||||
"serverUrl": "http://localhost:5006",
|
||||
"password": "your-password",
|
||||
"syncId": "1cfdbb80-6274-49bf-b0c2-737235a4c81f",
|
||||
"cacheTtl": 60,
|
||||
"lockTimeout": 10,
|
||||
"noLock": false
|
||||
"syncId": "1cfdbb80-6274-49bf-b0c2-737235a4c81f"
|
||||
}
|
||||
```
|
||||
|
||||
@@ -80,11 +74,6 @@ Create an `.actualrc.json` (or `.actualrc`, `.actualrc.yaml`, `actual.config.js`
|
||||
| `--session-token <token>` | Session token |
|
||||
| `--sync-id <id>` | Budget Sync ID |
|
||||
| `--data-dir <path>` | Data directory |
|
||||
| `--cache-ttl <seconds>` | Cache TTL; `0` disables caching (default: 60) |
|
||||
| `--refresh` | Force a sync on this call, ignoring the cache |
|
||||
| `--no-cache` | Alias for `--refresh` |
|
||||
| `--lock-timeout <secs>` | Lock wait timeout (default: 10) |
|
||||
| `--no-lock` | Disable budget-dir locking (use with care) |
|
||||
| `--format <format>` | Output format: `json` (default), `table`, `csv` |
|
||||
| `--verbose` | Show informational messages |
|
||||
|
||||
@@ -103,7 +92,6 @@ Create an `.actualrc.json` (or `.actualrc`, `.actualrc.yaml`, `actual.config.js`
|
||||
| `schedules` | Manage scheduled transactions |
|
||||
| `query` | Run an ActualQL query |
|
||||
| `server` | Server utilities and lookups |
|
||||
| `sync` | Refresh or inspect local cache |
|
||||
|
||||
Run `actual <command> --help` for subcommands and options.
|
||||
|
||||
@@ -147,32 +135,22 @@ All monetary amounts are **integer cents** when passed as input (flags, JSON):
|
||||
|
||||
- **Split transactions:** When summing or counting transactions, filter `"is_parent": false` to avoid double-counting. A split parent holds the total amount, and its children hold the individual parts — including both would count the total twice.
|
||||
|
||||
- **Rapid sequential requests:** The CLI caches the budget locally (see [Caching](#caching)), so read-heavy scripts no longer need a single-query workaround by default. For very chatty scripts, run `actual sync` once and then use a long `--cache-ttl` for reads:
|
||||
- **Avoid rapid sequential requests:** Each CLI invocation opens a new server connection. Running queries in a tight loop (e.g. one per month) may trigger rate limiting or authentication failures. Instead, fetch all data in a single query with a date range filter and process locally:
|
||||
|
||||
```bash
|
||||
actual sync
|
||||
actual --cache-ttl 3600 query run ...
|
||||
actual --cache-ttl 3600 accounts list
|
||||
# Good: single query for the full year
|
||||
actual query run --table transactions \
|
||||
--filter '{"$and":[{"date":{"$gte":"2025-01-01"}},{"date":{"$lte":"2025-12-31"}}]}' \
|
||||
--limit 5000
|
||||
|
||||
# Bad: one query per month in a loop (may fail with auth errors)
|
||||
for month in 01 02 03 ...; do actual query run ...; done
|
||||
```
|
||||
|
||||
- **Uncategorized transactions:** `category.name` is `null` for transactions without a category. Account for this when filtering or grouping by category.
|
||||
|
||||
- **No date sub-fields in AQL:** `date.month`, `date.year`, etc. are not supported as query fields. To group by month, fetch raw transactions with a date range filter and aggregate locally in a script.
|
||||
|
||||
## Caching
|
||||
|
||||
The CLI keeps a local copy of your budget so repeated commands don't hit the sync server on every call. Within the TTL (default `60` seconds), read commands (`list`, `balance`, `query run`, …) reuse the cached budget without a network round-trip. Write commands (`add`, `update`, `set-amount`, …) always sync with the server before and after the write.
|
||||
|
||||
- `actual sync` — refresh the cache now.
|
||||
- `actual sync --status` — show how stale the local cache is.
|
||||
- `actual sync --clear` — delete the local cache; the next command re-downloads.
|
||||
- `--refresh` (or `--no-cache`) — force a sync on a single call.
|
||||
- `--cache-ttl <seconds>` — override the TTL for a single call (use `0` to disable caching).
|
||||
|
||||
### Concurrency
|
||||
|
||||
The CLI takes a shared lock for reads and an exclusive lock for writes on the per-budget cache directory. Many parallel reads are safe; writes serialize. If another CLI process is holding the lock, subsequent invocations wait up to `--lock-timeout` seconds (default `10`) before failing with an error. Pass `--no-lock` to opt out in trusted single-process setups.
|
||||
|
||||
## Running Locally (Development)
|
||||
|
||||
If you're working on the CLI within the monorepo:
|
||||
|
||||
@@ -12,12 +12,10 @@
|
||||
],
|
||||
"type": "module",
|
||||
"imports": {
|
||||
"#cache": "./src/cache.ts",
|
||||
"#commands/*": "./src/commands/*.ts",
|
||||
"#config": "./src/config.ts",
|
||||
"#connection": "./src/connection.ts",
|
||||
"#input": "./src/input.ts",
|
||||
"#lock": "./src/lock.ts",
|
||||
"#output": "./src/output.ts",
|
||||
"#utils": "./src/utils.ts"
|
||||
},
|
||||
@@ -30,12 +28,10 @@
|
||||
"@actual-app/api": "workspace:*",
|
||||
"cli-table3": "^0.6.5",
|
||||
"commander": "^14.0.3",
|
||||
"cosmiconfig": "^9.0.1",
|
||||
"proper-lockfile": "^4.1.2"
|
||||
"cosmiconfig": "^9.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.19.17",
|
||||
"@types/proper-lockfile": "^4",
|
||||
"@typescript/native-preview": "^7.0.0-dev.20260404.1",
|
||||
"rollup-plugin-visualizer": "^7.0.1",
|
||||
"vite": "^8.0.5",
|
||||
|
||||
@@ -1,206 +0,0 @@
|
||||
import {
|
||||
existsSync,
|
||||
mkdtempSync,
|
||||
readFileSync,
|
||||
rmSync,
|
||||
writeFileSync,
|
||||
} from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import {
|
||||
CACHE_FILE_NAME,
|
||||
decideSyncAction,
|
||||
readCacheState,
|
||||
writeCacheState,
|
||||
} from './cache';
|
||||
|
||||
describe('readCacheState', () => {
|
||||
let dir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
dir = mkdtempSync(join(tmpdir(), 'actual-cli-cache-'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(dir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('returns null when the file does not exist', () => {
|
||||
expect(readCacheState(dir)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null when the file is corrupt', () => {
|
||||
writeFileSync(join(dir, CACHE_FILE_NAME), 'not json');
|
||||
expect(readCacheState(dir)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null when the file has the wrong version', () => {
|
||||
writeFileSync(
|
||||
join(dir, CACHE_FILE_NAME),
|
||||
JSON.stringify({
|
||||
version: 999,
|
||||
syncId: 'a',
|
||||
budgetId: 'b',
|
||||
serverUrl: 'c',
|
||||
lastSyncedAt: 1,
|
||||
lastDownloadedAt: 1,
|
||||
}),
|
||||
);
|
||||
expect(readCacheState(dir)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns the parsed state when the file is valid', () => {
|
||||
writeFileSync(
|
||||
join(dir, CACHE_FILE_NAME),
|
||||
JSON.stringify({
|
||||
version: 1,
|
||||
syncId: 'a',
|
||||
budgetId: 'b',
|
||||
serverUrl: 'c',
|
||||
lastSyncedAt: 1234,
|
||||
lastDownloadedAt: 5678,
|
||||
}),
|
||||
);
|
||||
expect(readCacheState(dir)).toEqual({
|
||||
version: 1,
|
||||
syncId: 'a',
|
||||
budgetId: 'b',
|
||||
serverUrl: 'c',
|
||||
lastSyncedAt: 1234,
|
||||
lastDownloadedAt: 5678,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('writeCacheState', () => {
|
||||
let dir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
dir = mkdtempSync(join(tmpdir(), 'actual-cli-cache-'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(dir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('writes the state to the cache file', () => {
|
||||
writeCacheState(dir, {
|
||||
version: 1,
|
||||
syncId: 'a',
|
||||
budgetId: 'b',
|
||||
serverUrl: 'c',
|
||||
lastSyncedAt: 1,
|
||||
lastDownloadedAt: 1,
|
||||
});
|
||||
const raw = readFileSync(join(dir, CACHE_FILE_NAME), 'utf-8');
|
||||
expect(JSON.parse(raw).syncId).toBe('a');
|
||||
});
|
||||
|
||||
it('is atomic: removes the tmp file after rename', () => {
|
||||
writeCacheState(dir, {
|
||||
version: 1,
|
||||
syncId: 'a',
|
||||
budgetId: 'b',
|
||||
serverUrl: 'c',
|
||||
lastSyncedAt: 1,
|
||||
lastDownloadedAt: 1,
|
||||
});
|
||||
expect(existsSync(join(dir, `${CACHE_FILE_NAME}.tmp`))).toBe(false);
|
||||
});
|
||||
|
||||
it('does not throw when the filesystem refuses the write', () => {
|
||||
// Force ENOTDIR by pointing writeCacheState at a path whose parent is a
|
||||
// regular file — no OS-specific pseudo-filesystem semantics needed.
|
||||
const file = join(dir, 'not-a-dir');
|
||||
writeFileSync(file, '');
|
||||
expect(() =>
|
||||
writeCacheState(join(file, 'nested'), {
|
||||
version: 1,
|
||||
syncId: 'a',
|
||||
budgetId: 'b',
|
||||
serverUrl: 'c',
|
||||
lastSyncedAt: 1,
|
||||
lastDownloadedAt: 1,
|
||||
}),
|
||||
).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('decideSyncAction', () => {
|
||||
const base = {
|
||||
state: {
|
||||
version: 1 as const,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-1',
|
||||
serverUrl: 'http://s',
|
||||
lastSyncedAt: 1_000_000,
|
||||
lastDownloadedAt: 1_000_000,
|
||||
},
|
||||
config: { syncId: 'sync-1', serverUrl: 'http://s' },
|
||||
now: 1_000_000,
|
||||
ttlMs: 60_000,
|
||||
mutates: false,
|
||||
refresh: false,
|
||||
encrypted: false,
|
||||
};
|
||||
|
||||
it('returns "download" when state is null', () => {
|
||||
expect(decideSyncAction({ ...base, state: null }).action).toBe('download');
|
||||
});
|
||||
|
||||
it('returns "download" when syncId changed', () => {
|
||||
expect(
|
||||
decideSyncAction({
|
||||
...base,
|
||||
config: { ...base.config, syncId: 'other' },
|
||||
}).action,
|
||||
).toBe('download');
|
||||
});
|
||||
|
||||
it('returns "download" when serverUrl changed', () => {
|
||||
expect(
|
||||
decideSyncAction({
|
||||
...base,
|
||||
config: { ...base.config, serverUrl: 'http://other' },
|
||||
}).action,
|
||||
).toBe('download');
|
||||
});
|
||||
|
||||
it('returns "skip" for a read within the TTL', () => {
|
||||
expect(decideSyncAction({ ...base, now: 1_000_000 + 30_000 }).action).toBe(
|
||||
'skip',
|
||||
);
|
||||
});
|
||||
|
||||
it('returns "sync" for a read past the TTL', () => {
|
||||
expect(decideSyncAction({ ...base, now: 1_000_000 + 61_000 }).action).toBe(
|
||||
'sync',
|
||||
);
|
||||
});
|
||||
|
||||
it('returns "sync" for a write even when fresh', () => {
|
||||
expect(decideSyncAction({ ...base, mutates: true }).action).toBe('sync');
|
||||
});
|
||||
|
||||
it('returns "sync" when refresh is true', () => {
|
||||
expect(decideSyncAction({ ...base, refresh: true }).action).toBe('sync');
|
||||
});
|
||||
|
||||
it('returns "sync" when ttlMs is 0', () => {
|
||||
expect(decideSyncAction({ ...base, ttlMs: 0 }).action).toBe('sync');
|
||||
});
|
||||
|
||||
it('returns "sync" for encrypted budgets within the TTL', () => {
|
||||
expect(decideSyncAction({ ...base, encrypted: true }).action).toBe('sync');
|
||||
});
|
||||
|
||||
it('treats clock skew (negative age) as stale', () => {
|
||||
expect(decideSyncAction({ ...base, now: 999_999 }).action).toBe('sync');
|
||||
});
|
||||
|
||||
it('carries cached state on non-download actions', () => {
|
||||
const decision = decideSyncAction({ ...base, mutates: true });
|
||||
expect(decision).toEqual({ action: 'sync', state: base.state });
|
||||
});
|
||||
});
|
||||
@@ -1,102 +0,0 @@
|
||||
import { mkdirSync, readFileSync, renameSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { isRecord } from './utils';
|
||||
|
||||
export const CACHE_FILE_NAME = 'state.json';
|
||||
export const CACHE_VERSION = 1;
|
||||
export const META_ROOT_DIR = '.actual-cli';
|
||||
|
||||
export type CacheState = {
|
||||
version: typeof CACHE_VERSION;
|
||||
syncId: string;
|
||||
budgetId: string;
|
||||
serverUrl: string;
|
||||
lastSyncedAt: number;
|
||||
lastDownloadedAt: number;
|
||||
};
|
||||
|
||||
export function getMetaDir(dataDir: string, syncId: string): string {
|
||||
return join(dataDir, META_ROOT_DIR, syncId);
|
||||
}
|
||||
|
||||
function cachePath(metaDir: string): string {
|
||||
return join(metaDir, CACHE_FILE_NAME);
|
||||
}
|
||||
|
||||
function isCacheState(value: unknown): value is CacheState {
|
||||
if (!isRecord(value)) return false;
|
||||
return (
|
||||
value.version === CACHE_VERSION &&
|
||||
typeof value.syncId === 'string' &&
|
||||
typeof value.budgetId === 'string' &&
|
||||
typeof value.serverUrl === 'string' &&
|
||||
typeof value.lastSyncedAt === 'number' &&
|
||||
typeof value.lastDownloadedAt === 'number'
|
||||
);
|
||||
}
|
||||
|
||||
export function readCacheState(metaDir: string): CacheState | null {
|
||||
let raw: string;
|
||||
try {
|
||||
raw = readFileSync(cachePath(metaDir), 'utf-8');
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(raw);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
return isCacheState(parsed) ? parsed : null;
|
||||
}
|
||||
|
||||
export function writeCacheState(metaDir: string, state: CacheState): void {
|
||||
try {
|
||||
mkdirSync(metaDir, { recursive: true });
|
||||
const target = cachePath(metaDir);
|
||||
const tmp = `${target}.tmp`;
|
||||
writeFileSync(tmp, JSON.stringify(state));
|
||||
renameSync(tmp, target);
|
||||
} catch {
|
||||
// Cache persistence is best-effort. A read-only or unreachable dir must
|
||||
// not crash the CLI; the next invocation simply won't find a cache.
|
||||
}
|
||||
}
|
||||
|
||||
export type SyncDecision =
|
||||
| { action: 'download' }
|
||||
| { action: 'skip'; state: CacheState }
|
||||
| { action: 'sync'; state: CacheState };
|
||||
|
||||
export type DecideSyncArgs = {
|
||||
state: CacheState | null;
|
||||
config: { syncId: string; serverUrl: string };
|
||||
now: number;
|
||||
ttlMs: number;
|
||||
mutates: boolean;
|
||||
refresh: boolean;
|
||||
encrypted: boolean;
|
||||
};
|
||||
|
||||
export function decideSyncAction({
|
||||
state,
|
||||
config,
|
||||
now,
|
||||
ttlMs,
|
||||
mutates,
|
||||
refresh,
|
||||
encrypted,
|
||||
}: DecideSyncArgs): SyncDecision {
|
||||
if (state === null) return { action: 'download' };
|
||||
if (state.syncId !== config.syncId) return { action: 'download' };
|
||||
if (state.serverUrl !== config.serverUrl) return { action: 'download' };
|
||||
if (mutates || refresh || ttlMs === 0 || encrypted) {
|
||||
return { action: 'sync', state };
|
||||
}
|
||||
const age = now - state.lastSyncedAt;
|
||||
if (age < 0) return { action: 'sync', state };
|
||||
if (age < ttlMs) return { action: 'skip', state };
|
||||
return { action: 'sync', state };
|
||||
}
|
||||
@@ -14,30 +14,26 @@ export function registerAccountsCommand(program: Command) {
|
||||
.option('--include-closed', 'Include closed accounts', false)
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const allAccounts = await api.getAccounts();
|
||||
const accounts = allAccounts.filter(
|
||||
a => cmdOpts.includeClosed || !a.closed,
|
||||
);
|
||||
// Stable sort: on-budget first, off-budget second
|
||||
// (preserves API sort_order within each group)
|
||||
accounts.sort((a, b) => Number(a.offbudget) - Number(b.offbudget));
|
||||
const balances = await Promise.all(
|
||||
accounts.map(a => api.getAccountBalance(a.id)),
|
||||
);
|
||||
const output = accounts.map((a, i) => ({
|
||||
id: a.id,
|
||||
name: a.name,
|
||||
offbudget: a.offbudget,
|
||||
closed: a.closed,
|
||||
balance: balances[i],
|
||||
}));
|
||||
printOutput(output, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const allAccounts = await api.getAccounts();
|
||||
const accounts = allAccounts.filter(
|
||||
a => cmdOpts.includeClosed || !a.closed,
|
||||
);
|
||||
// Stable sort: on-budget first, off-budget second
|
||||
// (preserves API sort_order within each group)
|
||||
accounts.sort((a, b) => Number(a.offbudget) - Number(b.offbudget));
|
||||
const balances = await Promise.all(
|
||||
accounts.map(a => api.getAccountBalance(a.id)),
|
||||
);
|
||||
const output = accounts.map((a, i) => ({
|
||||
id: a.id,
|
||||
name: a.name,
|
||||
offbudget: a.offbudget,
|
||||
closed: a.closed,
|
||||
balance: balances[i],
|
||||
}));
|
||||
printOutput(output, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
accounts
|
||||
@@ -53,17 +49,13 @@ export function registerAccountsCommand(program: Command) {
|
||||
.action(async cmdOpts => {
|
||||
const balance = parseIntFlag(cmdOpts.balance, '--balance');
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const id = await api.createAccount(
|
||||
{ name: cmdOpts.name, offbudget: cmdOpts.offbudget },
|
||||
balance,
|
||||
);
|
||||
printOutput({ id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const id = await api.createAccount(
|
||||
{ name: cmdOpts.name, offbudget: cmdOpts.offbudget },
|
||||
balance,
|
||||
);
|
||||
printOutput({ id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
accounts
|
||||
@@ -89,14 +81,10 @@ export function registerAccountsCommand(program: Command) {
|
||||
'No update fields provided. Use --name or --offbudget.',
|
||||
);
|
||||
}
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.updateAccount(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.updateAccount(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
accounts
|
||||
@@ -112,18 +100,14 @@ export function registerAccountsCommand(program: Command) {
|
||||
)
|
||||
.action(async (id: string, cmdOpts) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.closeAccount(
|
||||
id,
|
||||
cmdOpts.transferAccount,
|
||||
cmdOpts.transferCategory,
|
||||
);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.closeAccount(
|
||||
id,
|
||||
cmdOpts.transferAccount,
|
||||
cmdOpts.transferCategory,
|
||||
);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
accounts
|
||||
@@ -131,14 +115,10 @@ export function registerAccountsCommand(program: Command) {
|
||||
.description('Reopen a closed account')
|
||||
.action(async (id: string) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.reopenAccount(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.reopenAccount(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
accounts
|
||||
@@ -146,14 +126,10 @@ export function registerAccountsCommand(program: Command) {
|
||||
.description('Delete an account')
|
||||
.action(async (id: string) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.deleteAccount(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.deleteAccount(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
accounts
|
||||
@@ -172,13 +148,9 @@ export function registerAccountsCommand(program: Command) {
|
||||
cutoff = cutoffDate;
|
||||
}
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const balance = await api.getAccountBalance(id, cutoff);
|
||||
printOutput({ id, balance }, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const balance = await api.getAccountBalance(id, cutoff);
|
||||
printOutput({ id, balance }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import * as api from '@actual-app/api';
|
||||
import type { Command } from 'commander';
|
||||
|
||||
import { resolveConfig } from '#config';
|
||||
import { withConnection } from '#connection';
|
||||
import { printOutput } from '#output';
|
||||
import { parseBoolFlag, parseIntFlag } from '#utils';
|
||||
@@ -19,7 +20,7 @@ export function registerBudgetsCommand(program: Command) {
|
||||
const result = await api.getBudgets();
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false, skipBudget: true },
|
||||
{ loadBudget: false },
|
||||
);
|
||||
});
|
||||
|
||||
@@ -29,33 +30,40 @@ export function registerBudgetsCommand(program: Command) {
|
||||
.option('--encryption-password <password>', 'Encryption password')
|
||||
.action(async (syncId: string, cmdOpts) => {
|
||||
const opts = program.opts();
|
||||
const config = await resolveConfig(opts);
|
||||
const password = config.encryptionPassword ?? cmdOpts.encryptionPassword;
|
||||
await withConnection(
|
||||
opts,
|
||||
async config => {
|
||||
const password =
|
||||
config.encryptionPassword ?? cmdOpts.encryptionPassword;
|
||||
async () => {
|
||||
await api.downloadBudget(syncId, {
|
||||
password,
|
||||
});
|
||||
printOutput({ success: true, syncId }, opts.format);
|
||||
},
|
||||
{ mutates: false, skipBudget: true },
|
||||
{ loadBudget: false },
|
||||
);
|
||||
});
|
||||
|
||||
budgets
|
||||
.command('sync')
|
||||
.description('Sync the current budget')
|
||||
.action(async () => {
|
||||
const opts = program.opts();
|
||||
await withConnection(opts, async () => {
|
||||
await api.sync();
|
||||
printOutput({ success: true }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
budgets
|
||||
.command('months')
|
||||
.description('List available budget months')
|
||||
.action(async () => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getBudgetMonths();
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getBudgetMonths();
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
budgets
|
||||
@@ -63,14 +71,10 @@ export function registerBudgetsCommand(program: Command) {
|
||||
.description('Get budget data for a specific month (YYYY-MM)')
|
||||
.action(async (month: string) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getBudgetMonth(month);
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getBudgetMonth(month);
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
budgets
|
||||
@@ -85,14 +89,10 @@ export function registerBudgetsCommand(program: Command) {
|
||||
.action(async cmdOpts => {
|
||||
const amount = parseIntFlag(cmdOpts.amount, '--amount');
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.setBudgetAmount(cmdOpts.month, cmdOpts.category, amount);
|
||||
printOutput({ success: true }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.setBudgetAmount(cmdOpts.month, cmdOpts.category, amount);
|
||||
printOutput({ success: true }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
budgets
|
||||
@@ -104,14 +104,10 @@ export function registerBudgetsCommand(program: Command) {
|
||||
.action(async cmdOpts => {
|
||||
const flag = parseBoolFlag(cmdOpts.flag, '--flag');
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.setBudgetCarryover(cmdOpts.month, cmdOpts.category, flag);
|
||||
printOutput({ success: true }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.setBudgetCarryover(cmdOpts.month, cmdOpts.category, flag);
|
||||
printOutput({ success: true }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
budgets
|
||||
@@ -125,14 +121,10 @@ export function registerBudgetsCommand(program: Command) {
|
||||
.action(async cmdOpts => {
|
||||
const parsedAmount = parseIntFlag(cmdOpts.amount, '--amount');
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.holdBudgetForNextMonth(cmdOpts.month, parsedAmount);
|
||||
printOutput({ success: true }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.holdBudgetForNextMonth(cmdOpts.month, parsedAmount);
|
||||
printOutput({ success: true }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
budgets
|
||||
@@ -141,13 +133,9 @@ export function registerBudgetsCommand(program: Command) {
|
||||
.requiredOption('--month <month>', 'Budget month (YYYY-MM)')
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.resetBudgetHold(cmdOpts.month);
|
||||
printOutput({ success: true }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.resetBudgetHold(cmdOpts.month);
|
||||
printOutput({ success: true }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -15,14 +15,10 @@ export function registerCategoriesCommand(program: Command) {
|
||||
.description('List all categories')
|
||||
.action(async () => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getCategories();
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getCategories();
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
categories
|
||||
@@ -33,19 +29,15 @@ export function registerCategoriesCommand(program: Command) {
|
||||
.option('--is-income', 'Mark as income category', false)
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const id = await api.createCategory({
|
||||
name: cmdOpts.name,
|
||||
group_id: cmdOpts.groupId,
|
||||
is_income: cmdOpts.isIncome,
|
||||
hidden: false,
|
||||
});
|
||||
printOutput({ id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const id = await api.createCategory({
|
||||
name: cmdOpts.name,
|
||||
group_id: cmdOpts.groupId,
|
||||
is_income: cmdOpts.isIncome,
|
||||
hidden: false,
|
||||
});
|
||||
printOutput({ id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
categories
|
||||
@@ -63,14 +55,10 @@ export function registerCategoriesCommand(program: Command) {
|
||||
throw new Error('No update fields provided. Use --name or --hidden.');
|
||||
}
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.updateCategory(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.updateCategory(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
categories
|
||||
@@ -79,13 +67,9 @@ export function registerCategoriesCommand(program: Command) {
|
||||
.option('--transfer-to <id>', 'Transfer transactions to this category')
|
||||
.action(async (id: string, cmdOpts) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.deleteCategory(id, cmdOpts.transferTo);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.deleteCategory(id, cmdOpts.transferTo);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -15,14 +15,10 @@ export function registerCategoryGroupsCommand(program: Command) {
|
||||
.description('List all category groups')
|
||||
.action(async () => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getCategoryGroups();
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getCategoryGroups();
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
groups
|
||||
@@ -32,18 +28,14 @@ export function registerCategoryGroupsCommand(program: Command) {
|
||||
.option('--is-income', 'Mark as income group', false)
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const id = await api.createCategoryGroup({
|
||||
name: cmdOpts.name,
|
||||
is_income: cmdOpts.isIncome,
|
||||
hidden: false,
|
||||
});
|
||||
printOutput({ id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const id = await api.createCategoryGroup({
|
||||
name: cmdOpts.name,
|
||||
is_income: cmdOpts.isIncome,
|
||||
hidden: false,
|
||||
});
|
||||
printOutput({ id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
groups
|
||||
@@ -61,14 +53,10 @@ export function registerCategoryGroupsCommand(program: Command) {
|
||||
throw new Error('No update fields provided. Use --name or --hidden.');
|
||||
}
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.updateCategoryGroup(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.updateCategoryGroup(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
groups
|
||||
@@ -77,13 +65,9 @@ export function registerCategoryGroupsCommand(program: Command) {
|
||||
.option('--transfer-to <id>', 'Transfer transactions to this category ID')
|
||||
.action(async (id: string, cmdOpts) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.deleteCategoryGroup(id, cmdOpts.transferTo);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.deleteCategoryGroup(id, cmdOpts.transferTo);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -12,14 +12,10 @@ export function registerPayeesCommand(program: Command) {
|
||||
.description('List all payees')
|
||||
.action(async () => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getPayees();
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getPayees();
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
payees
|
||||
@@ -27,14 +23,10 @@ export function registerPayeesCommand(program: Command) {
|
||||
.description('List frequently used payees')
|
||||
.action(async () => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getCommonPayees();
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getCommonPayees();
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
payees
|
||||
@@ -43,14 +35,10 @@ export function registerPayeesCommand(program: Command) {
|
||||
.requiredOption('--name <name>', 'Payee name')
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const id = await api.createPayee({ name: cmdOpts.name });
|
||||
printOutput({ id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const id = await api.createPayee({ name: cmdOpts.name });
|
||||
printOutput({ id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
payees
|
||||
@@ -66,14 +54,10 @@ export function registerPayeesCommand(program: Command) {
|
||||
);
|
||||
}
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.updatePayee(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.updatePayee(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
payees
|
||||
@@ -81,14 +65,10 @@ export function registerPayeesCommand(program: Command) {
|
||||
.description('Delete a payee')
|
||||
.action(async (id: string) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.deletePayee(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.deletePayee(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
payees
|
||||
@@ -107,13 +87,9 @@ export function registerPayeesCommand(program: Command) {
|
||||
);
|
||||
}
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.mergePayees(cmdOpts.target, mergeIds);
|
||||
printOutput({ success: true }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.mergePayees(cmdOpts.target, mergeIds);
|
||||
printOutput({ success: true }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -301,31 +301,27 @@ export function registerQueryCommand(program: Command) {
|
||||
.addHelpText('after', RUN_EXAMPLES)
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const parsed = cmdOpts.file ? readJsonInput(cmdOpts) : undefined;
|
||||
if (parsed !== undefined && !isRecord(parsed)) {
|
||||
throw new Error('Query file must contain a JSON object');
|
||||
}
|
||||
const queryObj = parsed
|
||||
? buildQueryFromFile(parsed, cmdOpts.table)
|
||||
: buildQueryFromFlags(cmdOpts);
|
||||
await withConnection(opts, async () => {
|
||||
const parsed = cmdOpts.file ? readJsonInput(cmdOpts) : undefined;
|
||||
if (parsed !== undefined && !isRecord(parsed)) {
|
||||
throw new Error('Query file must contain a JSON object');
|
||||
}
|
||||
const queryObj = parsed
|
||||
? buildQueryFromFile(parsed, cmdOpts.table)
|
||||
: buildQueryFromFlags(cmdOpts);
|
||||
|
||||
const result = await api.aqlQuery(queryObj);
|
||||
const result = await api.aqlQuery(queryObj);
|
||||
|
||||
if (!isRecord(result) || !('data' in result)) {
|
||||
throw new Error('Query result missing data');
|
||||
}
|
||||
if (!isRecord(result) || !('data' in result)) {
|
||||
throw new Error('Query result missing data');
|
||||
}
|
||||
|
||||
if (cmdOpts.count) {
|
||||
printOutput({ count: result.data }, opts.format);
|
||||
} else {
|
||||
printOutput(result.data, opts.format);
|
||||
}
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
if (cmdOpts.count) {
|
||||
printOutput({ count: result.data }, opts.format);
|
||||
} else {
|
||||
printOutput(result.data, opts.format);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
query
|
||||
|
||||
@@ -15,14 +15,10 @@ export function registerRulesCommand(program: Command) {
|
||||
.description('List all rules')
|
||||
.action(async () => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getRules();
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getRules();
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
rules
|
||||
@@ -30,14 +26,10 @@ export function registerRulesCommand(program: Command) {
|
||||
.description('List rules for a specific payee')
|
||||
.action(async (payeeId: string) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getPayeeRules(payeeId);
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getPayeeRules(payeeId);
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
rules
|
||||
@@ -47,17 +39,13 @@ export function registerRulesCommand(program: Command) {
|
||||
.option('--file <path>', 'Read rule from JSON file (use - for stdin)')
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const rule = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.createRule
|
||||
>[0];
|
||||
const id = await api.createRule(rule);
|
||||
printOutput({ id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const rule = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.createRule
|
||||
>[0];
|
||||
const id = await api.createRule(rule);
|
||||
printOutput({ id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
rules
|
||||
@@ -67,17 +55,13 @@ export function registerRulesCommand(program: Command) {
|
||||
.option('--file <path>', 'Read rule from JSON file (use - for stdin)')
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const rule = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.updateRule
|
||||
>[0];
|
||||
await api.updateRule(rule);
|
||||
printOutput({ success: true }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const rule = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.updateRule
|
||||
>[0];
|
||||
await api.updateRule(rule);
|
||||
printOutput({ success: true }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
rules
|
||||
@@ -85,13 +69,9 @@ export function registerRulesCommand(program: Command) {
|
||||
.description('Delete a rule')
|
||||
.action(async (id: string) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.deleteRule(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.deleteRule(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -15,14 +15,10 @@ export function registerSchedulesCommand(program: Command) {
|
||||
.description('List all schedules')
|
||||
.action(async () => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getSchedules();
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getSchedules();
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
schedules
|
||||
@@ -32,17 +28,13 @@ export function registerSchedulesCommand(program: Command) {
|
||||
.option('--file <path>', 'Read schedule from JSON file (use - for stdin)')
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const schedule = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.createSchedule
|
||||
>[0];
|
||||
const id = await api.createSchedule(schedule);
|
||||
printOutput({ id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const schedule = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.createSchedule
|
||||
>[0];
|
||||
const id = await api.createSchedule(schedule);
|
||||
printOutput({ id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
schedules
|
||||
@@ -53,17 +45,13 @@ export function registerSchedulesCommand(program: Command) {
|
||||
.option('--reset-next-date', 'Reset next occurrence date', false)
|
||||
.action(async (id: string, cmdOpts) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const fields = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.updateSchedule
|
||||
>[1];
|
||||
await api.updateSchedule(id, fields, cmdOpts.resetNextDate);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const fields = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.updateSchedule
|
||||
>[1];
|
||||
await api.updateSchedule(id, fields, cmdOpts.resetNextDate);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
schedules
|
||||
@@ -71,13 +59,9 @@ export function registerSchedulesCommand(program: Command) {
|
||||
.description('Delete a schedule')
|
||||
.action(async (id: string) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.deleteSchedule(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.deleteSchedule(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -19,7 +19,7 @@ export function registerServerCommand(program: Command) {
|
||||
const version = await api.getServerVersion();
|
||||
printOutput({ version }, opts.format);
|
||||
},
|
||||
{ mutates: false, skipBudget: true },
|
||||
{ loadBudget: false },
|
||||
);
|
||||
});
|
||||
|
||||
@@ -34,17 +34,13 @@ export function registerServerCommand(program: Command) {
|
||||
.requiredOption('--name <name>', 'Entity name')
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const id = await api.getIDByName(cmdOpts.type, cmdOpts.name);
|
||||
printOutput(
|
||||
{ id, type: cmdOpts.type, name: cmdOpts.name },
|
||||
opts.format,
|
||||
);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const id = await api.getIDByName(cmdOpts.type, cmdOpts.name);
|
||||
printOutput(
|
||||
{ id, type: cmdOpts.type, name: cmdOpts.name },
|
||||
opts.format,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
server
|
||||
@@ -53,16 +49,12 @@ export function registerServerCommand(program: Command) {
|
||||
.option('--account <id>', 'Specific account ID to sync')
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const args = cmdOpts.account
|
||||
? { accountId: cmdOpts.account }
|
||||
: undefined;
|
||||
await api.runBankSync(args);
|
||||
printOutput({ success: true }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const args = cmdOpts.account
|
||||
? { accountId: cmdOpts.account }
|
||||
: undefined;
|
||||
await api.runBankSync(args);
|
||||
printOutput({ success: true }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,124 +0,0 @@
|
||||
import { existsSync, mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { Command } from 'commander';
|
||||
|
||||
import { CACHE_FILE_NAME, getMetaDir, writeCacheState } from '#cache';
|
||||
import { resolveConfig } from '#config';
|
||||
|
||||
import { registerSyncCommand } from './sync';
|
||||
|
||||
vi.mock('@actual-app/api', () => ({
|
||||
init: vi.fn().mockResolvedValue(undefined),
|
||||
downloadBudget: vi.fn().mockResolvedValue(undefined),
|
||||
loadBudget: vi.fn().mockResolvedValue(undefined),
|
||||
sync: vi.fn().mockResolvedValue(undefined),
|
||||
shutdown: vi.fn().mockResolvedValue(undefined),
|
||||
getBudgets: vi
|
||||
.fn()
|
||||
.mockResolvedValue([{ id: 'bud-disk-1', groupId: 'sync-1' }]),
|
||||
}));
|
||||
|
||||
vi.mock('#config', () => ({
|
||||
resolveConfig: vi.fn(),
|
||||
}));
|
||||
|
||||
let dataDir: string;
|
||||
|
||||
function metaDirFor(syncId: string) {
|
||||
return getMetaDir(dataDir, syncId);
|
||||
}
|
||||
|
||||
function program() {
|
||||
const p = new Command();
|
||||
p.exitOverride();
|
||||
p.option('--sync-id <id>');
|
||||
p.option('--data-dir <path>');
|
||||
p.option('--format <fmt>');
|
||||
p.option('--verbose');
|
||||
registerSyncCommand(p);
|
||||
return p;
|
||||
}
|
||||
|
||||
describe('actual sync', () => {
|
||||
let stdoutSpy: ReturnType<typeof vi.spyOn>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
dataDir = mkdtempSync(join(tmpdir(), 'actual-cli-sync-'));
|
||||
vi.mocked(resolveConfig).mockResolvedValue({
|
||||
serverUrl: 'http://test',
|
||||
password: 'pw',
|
||||
dataDir,
|
||||
syncId: 'sync-1',
|
||||
cacheTtl: 60,
|
||||
lockTimeout: 10,
|
||||
refresh: false,
|
||||
noLock: true,
|
||||
});
|
||||
stdoutSpy = vi
|
||||
.spyOn(process.stdout, 'write')
|
||||
.mockImplementation(() => true);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
stdoutSpy.mockRestore();
|
||||
rmSync(dataDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('runs a sync and prints the syncId', async () => {
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: 0,
|
||||
lastDownloadedAt: 0,
|
||||
});
|
||||
await program().parseAsync(['node', 'actual', 'sync']);
|
||||
const out = stdoutSpy.mock.calls
|
||||
.map((c: unknown[]) => String(c[0]))
|
||||
.join('');
|
||||
expect(out).toMatch(/"syncId":\s*"sync-1"/);
|
||||
});
|
||||
|
||||
it('--status prints cache info without syncing', async () => {
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: Date.now() - 5000,
|
||||
lastDownloadedAt: Date.now() - 5000,
|
||||
});
|
||||
await program().parseAsync(['node', 'actual', 'sync', '--status']);
|
||||
const out = stdoutSpy.mock.calls
|
||||
.map((c: unknown[]) => String(c[0]))
|
||||
.join('');
|
||||
expect(out).toMatch(/"stale":\s*(true|false)/);
|
||||
expect(out).toMatch(/"ageSeconds":\s*\d+/);
|
||||
});
|
||||
|
||||
it('--status on no prior sync reports "never synced" and exits 0', async () => {
|
||||
await program().parseAsync(['node', 'actual', 'sync', '--status']);
|
||||
const out = stdoutSpy.mock.calls
|
||||
.map((c: unknown[]) => String(c[0]))
|
||||
.join('');
|
||||
expect(out).toMatch(/"neverSynced":\s*true/);
|
||||
});
|
||||
|
||||
it('--clear removes the cache file', async () => {
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: Date.now(),
|
||||
lastDownloadedAt: Date.now(),
|
||||
});
|
||||
expect(existsSync(join(metaDirFor('sync-1'), CACHE_FILE_NAME))).toBe(true);
|
||||
await program().parseAsync(['node', 'actual', 'sync', '--clear']);
|
||||
expect(existsSync(join(metaDirFor('sync-1'), CACHE_FILE_NAME))).toBe(false);
|
||||
});
|
||||
});
|
||||
@@ -1,118 +0,0 @@
|
||||
import { rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import type { Command } from 'commander';
|
||||
|
||||
import { CACHE_FILE_NAME, getMetaDir, readCacheState } from '#cache';
|
||||
import type { CliConfig } from '#config';
|
||||
import { resolveConfig } from '#config';
|
||||
import { withConnection } from '#connection';
|
||||
import { acquireExclusive } from '#lock';
|
||||
import { printOutput } from '#output';
|
||||
|
||||
type SyncCmdOpts = {
|
||||
status?: boolean;
|
||||
clear?: boolean;
|
||||
};
|
||||
|
||||
async function requireSyncIdAndMeta(
|
||||
opts: Record<string, unknown>,
|
||||
flag: string,
|
||||
): Promise<{ config: CliConfig; meta: string }> {
|
||||
const config = await resolveConfig(opts);
|
||||
if (!config.syncId) {
|
||||
throw new Error(
|
||||
`Sync ID is required for sync ${flag}. Set --sync-id or ACTUAL_SYNC_ID.`,
|
||||
);
|
||||
}
|
||||
return { config, meta: getMetaDir(config.dataDir, config.syncId) };
|
||||
}
|
||||
|
||||
export function registerSyncCommand(program: Command) {
|
||||
program
|
||||
.command('sync')
|
||||
.description(
|
||||
'Sync the local cached budget with the server, print cache status, or clear the cache',
|
||||
)
|
||||
.option('--status', 'Print cache status without syncing', false)
|
||||
.option(
|
||||
'--clear',
|
||||
'Delete the local cache; next command re-downloads',
|
||||
false,
|
||||
)
|
||||
.action(async (cmdOpts: SyncCmdOpts) => {
|
||||
const opts = program.opts();
|
||||
|
||||
if (cmdOpts.status) {
|
||||
const { config, meta } = await requireSyncIdAndMeta(opts, '--status');
|
||||
const state = readCacheState(meta);
|
||||
if (state === null) {
|
||||
printOutput(
|
||||
{
|
||||
neverSynced: true,
|
||||
syncId: config.syncId,
|
||||
ttlSeconds: config.cacheTtl,
|
||||
},
|
||||
opts.format,
|
||||
);
|
||||
return;
|
||||
}
|
||||
const ageSeconds = Math.max(
|
||||
0,
|
||||
Math.round((Date.now() - state.lastSyncedAt) / 1000),
|
||||
);
|
||||
printOutput(
|
||||
{
|
||||
neverSynced: false,
|
||||
syncId: state.syncId,
|
||||
budgetId: state.budgetId,
|
||||
syncedAt: new Date(state.lastSyncedAt).toISOString(),
|
||||
lastDownloadedAt: new Date(state.lastDownloadedAt).toISOString(),
|
||||
ageSeconds,
|
||||
ttlSeconds: config.cacheTtl,
|
||||
stale: ageSeconds > config.cacheTtl,
|
||||
},
|
||||
opts.format,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (cmdOpts.clear) {
|
||||
const { config, meta } = await requireSyncIdAndMeta(opts, '--clear');
|
||||
// Serialize with concurrent writers so we don't rm a half-written
|
||||
// state.json that's about to be renamed into place.
|
||||
const release = config.noLock
|
||||
? null
|
||||
: await acquireExclusive(meta, {
|
||||
timeoutMs: config.lockTimeout * 1000,
|
||||
});
|
||||
try {
|
||||
rmSync(join(meta, CACHE_FILE_NAME), { force: true });
|
||||
} finally {
|
||||
await release?.();
|
||||
}
|
||||
printOutput({ cleared: true, syncId: config.syncId }, opts.format);
|
||||
return;
|
||||
}
|
||||
|
||||
await withConnection(
|
||||
opts,
|
||||
async config => {
|
||||
const state = config.syncId
|
||||
? readCacheState(getMetaDir(config.dataDir, config.syncId))
|
||||
: null;
|
||||
printOutput(
|
||||
{
|
||||
syncedAt: new Date(
|
||||
state?.lastSyncedAt ?? Date.now(),
|
||||
).toISOString(),
|
||||
syncId: config.syncId,
|
||||
budgetId: state?.budgetId ?? config.syncId,
|
||||
},
|
||||
opts.format,
|
||||
);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
});
|
||||
}
|
||||
@@ -12,14 +12,10 @@ export function registerTagsCommand(program: Command) {
|
||||
.description('List all tags')
|
||||
.action(async () => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getTags();
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getTags();
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
tags
|
||||
@@ -30,18 +26,14 @@ export function registerTagsCommand(program: Command) {
|
||||
.option('--description <description>', 'Tag description')
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const id = await api.createTag({
|
||||
tag: cmdOpts.tag,
|
||||
color: cmdOpts.color,
|
||||
description: cmdOpts.description,
|
||||
});
|
||||
printOutput({ id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const id = await api.createTag({
|
||||
tag: cmdOpts.tag,
|
||||
color: cmdOpts.color,
|
||||
description: cmdOpts.description,
|
||||
});
|
||||
printOutput({ id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
tags
|
||||
@@ -63,14 +55,10 @@ export function registerTagsCommand(program: Command) {
|
||||
);
|
||||
}
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.updateTag(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.updateTag(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
tags
|
||||
@@ -78,13 +66,9 @@ export function registerTagsCommand(program: Command) {
|
||||
.description('Delete a tag')
|
||||
.action(async (id: string) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.deleteTag(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.deleteTag(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -18,18 +18,14 @@ export function registerTransactionsCommand(program: Command) {
|
||||
.requiredOption('--end <date>', 'End date (YYYY-MM-DD)')
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const result = await api.getTransactions(
|
||||
cmdOpts.account,
|
||||
cmdOpts.start,
|
||||
cmdOpts.end,
|
||||
);
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: false },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const result = await api.getTransactions(
|
||||
cmdOpts.account,
|
||||
cmdOpts.start,
|
||||
cmdOpts.end,
|
||||
);
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
transactions
|
||||
@@ -45,24 +41,20 @@ export function registerTransactionsCommand(program: Command) {
|
||||
.option('--run-transfers', 'Process transfers', false)
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const transactions = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.addTransactions
|
||||
>[1];
|
||||
const result = await api.addTransactions(
|
||||
cmdOpts.account,
|
||||
transactions,
|
||||
{
|
||||
learnCategories: cmdOpts.learnCategories,
|
||||
runTransfers: cmdOpts.runTransfers,
|
||||
},
|
||||
);
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const transactions = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.addTransactions
|
||||
>[1];
|
||||
const result = await api.addTransactions(
|
||||
cmdOpts.account,
|
||||
transactions,
|
||||
{
|
||||
learnCategories: cmdOpts.learnCategories,
|
||||
runTransfers: cmdOpts.runTransfers,
|
||||
},
|
||||
);
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
transactions
|
||||
@@ -77,24 +69,20 @@ export function registerTransactionsCommand(program: Command) {
|
||||
.option('--dry-run', 'Preview without importing', false)
|
||||
.action(async cmdOpts => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const transactions = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.importTransactions
|
||||
>[1];
|
||||
const result = await api.importTransactions(
|
||||
cmdOpts.account,
|
||||
transactions,
|
||||
{
|
||||
defaultCleared: true,
|
||||
dryRun: cmdOpts.dryRun,
|
||||
},
|
||||
);
|
||||
printOutput(result, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const transactions = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.importTransactions
|
||||
>[1];
|
||||
const result = await api.importTransactions(
|
||||
cmdOpts.account,
|
||||
transactions,
|
||||
{
|
||||
defaultCleared: true,
|
||||
dryRun: cmdOpts.dryRun,
|
||||
},
|
||||
);
|
||||
printOutput(result, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
transactions
|
||||
@@ -104,17 +92,13 @@ export function registerTransactionsCommand(program: Command) {
|
||||
.option('--file <path>', 'Read fields from JSON file (use - for stdin)')
|
||||
.action(async (id: string, cmdOpts) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
const fields = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.updateTransaction
|
||||
>[1];
|
||||
await api.updateTransaction(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
const fields = readJsonInput(cmdOpts) as Parameters<
|
||||
typeof api.updateTransaction
|
||||
>[1];
|
||||
await api.updateTransaction(id, fields);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
|
||||
transactions
|
||||
@@ -122,13 +106,9 @@ export function registerTransactionsCommand(program: Command) {
|
||||
.description('Delete a transaction')
|
||||
.action(async (id: string) => {
|
||||
const opts = program.opts();
|
||||
await withConnection(
|
||||
opts,
|
||||
async () => {
|
||||
await api.deleteTransaction(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
},
|
||||
{ mutates: true },
|
||||
);
|
||||
await withConnection(opts, async () => {
|
||||
await api.deleteTransaction(id);
|
||||
printOutput({ success: true, id }, opts.format);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -28,9 +28,6 @@ describe('resolveConfig', () => {
|
||||
'ACTUAL_SYNC_ID',
|
||||
'ACTUAL_DATA_DIR',
|
||||
'ACTUAL_ENCRYPTION_PASSWORD',
|
||||
'ACTUAL_CACHE_TTL',
|
||||
'ACTUAL_LOCK_TIMEOUT',
|
||||
'ACTUAL_NO_LOCK',
|
||||
];
|
||||
|
||||
beforeEach(() => {
|
||||
@@ -162,105 +159,6 @@ describe('resolveConfig', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('cache options', () => {
|
||||
beforeEach(() => {
|
||||
process.env.ACTUAL_SERVER_URL = 'http://test';
|
||||
process.env.ACTUAL_PASSWORD = 'pw';
|
||||
});
|
||||
|
||||
it('defaults cacheTtl to 60 seconds', async () => {
|
||||
const config = await resolveConfig({});
|
||||
expect(config.cacheTtl).toBe(60);
|
||||
});
|
||||
|
||||
it('reads cacheTtl from env', async () => {
|
||||
process.env.ACTUAL_CACHE_TTL = '300';
|
||||
const config = await resolveConfig({});
|
||||
expect(config.cacheTtl).toBe(300);
|
||||
});
|
||||
|
||||
it('prefers cacheTtl from CLI flag', async () => {
|
||||
process.env.ACTUAL_CACHE_TTL = '300';
|
||||
const config = await resolveConfig({ cacheTtl: 10 });
|
||||
expect(config.cacheTtl).toBe(10);
|
||||
});
|
||||
|
||||
it('rejects negative cacheTtl', async () => {
|
||||
await expect(resolveConfig({ cacheTtl: -1 })).rejects.toThrow(/cacheTtl/);
|
||||
});
|
||||
|
||||
it('rejects non-integer cacheTtl from env', async () => {
|
||||
process.env.ACTUAL_CACHE_TTL = 'banana';
|
||||
await expect(resolveConfig({})).rejects.toThrow(/ACTUAL_CACHE_TTL/);
|
||||
});
|
||||
|
||||
it('defaults lockTimeout to 10 seconds', async () => {
|
||||
const config = await resolveConfig({});
|
||||
expect(config.lockTimeout).toBe(10);
|
||||
});
|
||||
|
||||
it('reads lockTimeout from env', async () => {
|
||||
process.env.ACTUAL_LOCK_TIMEOUT = '30';
|
||||
const config = await resolveConfig({});
|
||||
expect(config.lockTimeout).toBe(30);
|
||||
});
|
||||
|
||||
it('defaults refresh to false', async () => {
|
||||
const config = await resolveConfig({});
|
||||
expect(config.refresh).toBe(false);
|
||||
});
|
||||
|
||||
it('sets refresh when provided on CLI opts', async () => {
|
||||
const config = await resolveConfig({ refresh: true });
|
||||
expect(config.refresh).toBe(true);
|
||||
});
|
||||
|
||||
it('sets refresh when noCache is true', async () => {
|
||||
const config = await resolveConfig({ noCache: true });
|
||||
expect(config.refresh).toBe(true);
|
||||
});
|
||||
|
||||
it('defaults noLock to false', async () => {
|
||||
const config = await resolveConfig({});
|
||||
expect(config.noLock).toBe(false);
|
||||
});
|
||||
|
||||
it('parses ACTUAL_NO_LOCK=1 as true', async () => {
|
||||
process.env.ACTUAL_NO_LOCK = '1';
|
||||
const config = await resolveConfig({});
|
||||
expect(config.noLock).toBe(true);
|
||||
});
|
||||
|
||||
it('parses ACTUAL_NO_LOCK=true as true', async () => {
|
||||
process.env.ACTUAL_NO_LOCK = 'true';
|
||||
const config = await resolveConfig({});
|
||||
expect(config.noLock).toBe(true);
|
||||
});
|
||||
|
||||
it('reads cacheTtl/lockTimeout/noLock from config file', async () => {
|
||||
mockConfigFile({
|
||||
serverUrl: 'http://file',
|
||||
password: 'pw',
|
||||
cacheTtl: 120,
|
||||
lockTimeout: 5,
|
||||
noLock: true,
|
||||
});
|
||||
const config = await resolveConfig({});
|
||||
expect(config.cacheTtl).toBe(120);
|
||||
expect(config.lockTimeout).toBe(5);
|
||||
expect(config.noLock).toBe(true);
|
||||
});
|
||||
|
||||
it('rejects non-number cacheTtl in config file', async () => {
|
||||
mockConfigFile({
|
||||
serverUrl: 'http://file',
|
||||
password: 'pw',
|
||||
cacheTtl: 'soon',
|
||||
});
|
||||
await expect(resolveConfig({})).rejects.toThrow(/cacheTtl/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cosmiconfig handling', () => {
|
||||
it('handles null result (no config file found)', async () => {
|
||||
mockConfigFile(null);
|
||||
|
||||
@@ -3,7 +3,7 @@ import { join } from 'path';
|
||||
|
||||
import { cosmiconfig } from 'cosmiconfig';
|
||||
|
||||
import { isRecord, parseBoolEnv, parseNonNegativeIntFlag } from './utils';
|
||||
import { isRecord } from './utils';
|
||||
|
||||
export type CliConfig = {
|
||||
serverUrl: string;
|
||||
@@ -12,10 +12,6 @@ export type CliConfig = {
|
||||
syncId?: string;
|
||||
dataDir: string;
|
||||
encryptionPassword?: string;
|
||||
cacheTtl: number;
|
||||
lockTimeout: number;
|
||||
refresh: boolean;
|
||||
noLock: boolean;
|
||||
};
|
||||
|
||||
export type CliGlobalOpts = {
|
||||
@@ -25,27 +21,10 @@ export type CliGlobalOpts = {
|
||||
syncId?: string;
|
||||
dataDir?: string;
|
||||
encryptionPassword?: string;
|
||||
cacheTtl?: number;
|
||||
lockTimeout?: number;
|
||||
refresh?: boolean;
|
||||
noCache?: boolean;
|
||||
noLock?: boolean;
|
||||
format?: 'json' | 'table' | 'csv';
|
||||
verbose?: boolean;
|
||||
};
|
||||
|
||||
const stringKeys = [
|
||||
'serverUrl',
|
||||
'password',
|
||||
'sessionToken',
|
||||
'syncId',
|
||||
'dataDir',
|
||||
'encryptionPassword',
|
||||
] as const;
|
||||
|
||||
const numberKeys = ['cacheTtl', 'lockTimeout'] as const;
|
||||
const booleanKeys = ['noLock'] as const;
|
||||
|
||||
type ConfigFileContent = {
|
||||
serverUrl?: string;
|
||||
password?: string;
|
||||
@@ -53,15 +32,15 @@ type ConfigFileContent = {
|
||||
syncId?: string;
|
||||
dataDir?: string;
|
||||
encryptionPassword?: string;
|
||||
cacheTtl?: number;
|
||||
lockTimeout?: number;
|
||||
noLock?: boolean;
|
||||
};
|
||||
|
||||
const configFileKeys: readonly string[] = [
|
||||
...stringKeys,
|
||||
...numberKeys,
|
||||
...booleanKeys,
|
||||
'serverUrl',
|
||||
'password',
|
||||
'sessionToken',
|
||||
'syncId',
|
||||
'dataDir',
|
||||
'encryptionPassword',
|
||||
];
|
||||
|
||||
function validateConfigFileContent(value: unknown): ConfigFileContent {
|
||||
@@ -75,30 +54,9 @@ function validateConfigFileContent(value: unknown): ConfigFileContent {
|
||||
if (!configFileKeys.includes(key)) {
|
||||
throw new Error(`Invalid config file: unknown key "${key}"`);
|
||||
}
|
||||
const v = value[key];
|
||||
if (v === undefined) continue;
|
||||
if (
|
||||
(stringKeys as readonly string[]).includes(key) &&
|
||||
typeof v !== 'string'
|
||||
) {
|
||||
if (value[key] !== undefined && typeof value[key] !== 'string') {
|
||||
throw new Error(
|
||||
`Invalid config file: key "${key}" must be a string, got ${typeof v}`,
|
||||
);
|
||||
}
|
||||
if (
|
||||
(numberKeys as readonly string[]).includes(key) &&
|
||||
(typeof v !== 'number' || !Number.isInteger(v) || v < 0)
|
||||
) {
|
||||
throw new Error(
|
||||
`Invalid config file: key "${key}" must be a non-negative integer`,
|
||||
);
|
||||
}
|
||||
if (
|
||||
(booleanKeys as readonly string[]).includes(key) &&
|
||||
typeof v !== 'boolean'
|
||||
) {
|
||||
throw new Error(
|
||||
`Invalid config file: key "${key}" must be a boolean, got ${typeof v}`,
|
||||
`Invalid config file: key "${key}" must be a string, got ${typeof value[key]}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -125,22 +83,6 @@ async function loadConfigFile(): Promise<ConfigFileContent> {
|
||||
return {};
|
||||
}
|
||||
|
||||
function parseNonNegativeIntEnv(
|
||||
raw: string | undefined,
|
||||
source: string,
|
||||
): number | undefined {
|
||||
return raw === undefined ? undefined : parseNonNegativeIntFlag(raw, source);
|
||||
}
|
||||
|
||||
function validateNonNegativeInt(value: number, name: string): number {
|
||||
if (!Number.isInteger(value) || value < 0) {
|
||||
throw new Error(
|
||||
`Invalid ${name}: expected a non-negative integer, got ${value}`,
|
||||
);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
export async function resolveConfig(
|
||||
cliOpts: CliGlobalOpts,
|
||||
): Promise<CliConfig> {
|
||||
@@ -186,36 +128,6 @@ export async function resolveConfig(
|
||||
);
|
||||
}
|
||||
|
||||
const cacheTtl = validateNonNegativeInt(
|
||||
cliOpts.cacheTtl ??
|
||||
parseNonNegativeIntEnv(
|
||||
process.env.ACTUAL_CACHE_TTL,
|
||||
'ACTUAL_CACHE_TTL',
|
||||
) ??
|
||||
fileConfig.cacheTtl ??
|
||||
60,
|
||||
'cacheTtl',
|
||||
);
|
||||
|
||||
const lockTimeout = validateNonNegativeInt(
|
||||
cliOpts.lockTimeout ??
|
||||
parseNonNegativeIntEnv(
|
||||
process.env.ACTUAL_LOCK_TIMEOUT,
|
||||
'ACTUAL_LOCK_TIMEOUT',
|
||||
) ??
|
||||
fileConfig.lockTimeout ??
|
||||
10,
|
||||
'lockTimeout',
|
||||
);
|
||||
|
||||
const refresh = cliOpts.refresh ?? cliOpts.noCache ?? false;
|
||||
|
||||
const noLock =
|
||||
cliOpts.noLock ??
|
||||
parseBoolEnv(process.env.ACTUAL_NO_LOCK) ??
|
||||
fileConfig.noLock ??
|
||||
false;
|
||||
|
||||
return {
|
||||
serverUrl,
|
||||
password,
|
||||
@@ -223,9 +135,5 @@ export async function resolveConfig(
|
||||
syncId,
|
||||
dataDir,
|
||||
encryptionPassword,
|
||||
cacheTtl,
|
||||
lockTimeout,
|
||||
refresh,
|
||||
noLock,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,44 +1,24 @@
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import * as api from '@actual-app/api';
|
||||
|
||||
import { getMetaDir, writeCacheState } from './cache';
|
||||
import { resolveConfig } from './config';
|
||||
import { withConnection } from './connection';
|
||||
|
||||
vi.mock('@actual-app/api', () => ({
|
||||
init: vi.fn().mockResolvedValue(undefined),
|
||||
downloadBudget: vi.fn().mockResolvedValue(undefined),
|
||||
loadBudget: vi.fn().mockResolvedValue(undefined),
|
||||
sync: vi.fn().mockResolvedValue(undefined),
|
||||
shutdown: vi.fn().mockResolvedValue(undefined),
|
||||
getBudgets: vi
|
||||
.fn()
|
||||
.mockResolvedValue([{ id: 'bud-disk-1', groupId: 'sync-1' }]),
|
||||
}));
|
||||
|
||||
vi.mock('./config', () => ({
|
||||
resolveConfig: vi.fn(),
|
||||
}));
|
||||
|
||||
let dataDir: string;
|
||||
|
||||
function metaDirFor(syncId: string) {
|
||||
return getMetaDir(dataDir, syncId);
|
||||
}
|
||||
|
||||
function setConfig(overrides: Record<string, unknown> = {}) {
|
||||
vi.mocked(resolveConfig).mockResolvedValue({
|
||||
serverUrl: 'http://test',
|
||||
password: 'pw',
|
||||
dataDir,
|
||||
syncId: 'sync-1',
|
||||
cacheTtl: 60,
|
||||
lockTimeout: 10,
|
||||
refresh: false,
|
||||
noLock: true,
|
||||
dataDir: '/tmp/data',
|
||||
syncId: 'budget-1',
|
||||
...overrides,
|
||||
});
|
||||
}
|
||||
@@ -51,182 +31,104 @@ describe('withConnection', () => {
|
||||
stderrSpy = vi
|
||||
.spyOn(process.stderr, 'write')
|
||||
.mockImplementation(() => true);
|
||||
dataDir = mkdtempSync(join(tmpdir(), 'actual-cli-conn-'));
|
||||
setConfig();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
stderrSpy.mockRestore();
|
||||
rmSync(dataDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('calls api.init with password when no sessionToken', async () => {
|
||||
await withConnection({}, async () => 'ok', { mutates: false });
|
||||
setConfig({ password: 'pw', sessionToken: undefined });
|
||||
|
||||
await withConnection({}, async () => 'ok');
|
||||
|
||||
expect(api.init).toHaveBeenCalledWith({
|
||||
serverURL: 'http://test',
|
||||
password: 'pw',
|
||||
dataDir,
|
||||
dataDir: '/tmp/data',
|
||||
verbose: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
it('calls api.init with sessionToken when present', async () => {
|
||||
setConfig({ sessionToken: 'tok', password: undefined });
|
||||
await withConnection({}, async () => 'ok', { mutates: false });
|
||||
|
||||
await withConnection({}, async () => 'ok');
|
||||
|
||||
expect(api.init).toHaveBeenCalledWith({
|
||||
serverURL: 'http://test',
|
||||
sessionToken: 'tok',
|
||||
dataDir,
|
||||
dataDir: '/tmp/data',
|
||||
verbose: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
it('first run: calls downloadBudget and writes cache state', async () => {
|
||||
await withConnection({}, async () => 'ok', { mutates: false });
|
||||
expect(api.downloadBudget).toHaveBeenCalledWith('sync-1', {
|
||||
it('calls api.downloadBudget when syncId is set', async () => {
|
||||
setConfig({ syncId: 'budget-1' });
|
||||
|
||||
await withConnection({}, async () => 'ok');
|
||||
|
||||
expect(api.downloadBudget).toHaveBeenCalledWith('budget-1', {
|
||||
password: undefined,
|
||||
});
|
||||
expect(api.sync).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('skips sync on a read inside the TTL', async () => {
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: Date.now(),
|
||||
lastDownloadedAt: Date.now(),
|
||||
});
|
||||
await withConnection({}, async () => 'ok', { mutates: false });
|
||||
expect(api.loadBudget).toHaveBeenCalledWith('bud-disk-1');
|
||||
expect(api.sync).not.toHaveBeenCalled();
|
||||
expect(api.downloadBudget).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('syncs on a read past the TTL', async () => {
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: Date.now() - 10 * 60_000,
|
||||
lastDownloadedAt: Date.now() - 10 * 60_000,
|
||||
});
|
||||
await withConnection({}, async () => 'ok', { mutates: false });
|
||||
expect(api.loadBudget).toHaveBeenCalled();
|
||||
expect(api.sync).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('write command syncs before and after the callback, even when fresh', async () => {
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: Date.now(),
|
||||
lastDownloadedAt: Date.now(),
|
||||
});
|
||||
await withConnection({}, async () => 'ok', { mutates: true });
|
||||
expect(api.loadBudget).toHaveBeenCalled();
|
||||
expect(api.sync).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('--refresh forces a sync on a read inside the TTL', async () => {
|
||||
setConfig({ refresh: true });
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: Date.now(),
|
||||
lastDownloadedAt: Date.now(),
|
||||
});
|
||||
await withConnection({}, async () => 'ok', { mutates: false });
|
||||
expect(api.sync).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('encrypted budget forces a sync on a read inside the TTL', async () => {
|
||||
setConfig({ encryptionPassword: 'secret' });
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: Date.now(),
|
||||
lastDownloadedAt: Date.now(),
|
||||
});
|
||||
await withConnection({}, async () => 'ok', { mutates: false });
|
||||
expect(api.sync).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('invalidates cache when syncId changes', async () => {
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'OTHER',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: Date.now(),
|
||||
lastDownloadedAt: Date.now(),
|
||||
});
|
||||
await withConnection({}, async () => 'ok', { mutates: false });
|
||||
expect(api.downloadBudget).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('skips budget work when skipBudget is true', async () => {
|
||||
await withConnection({}, async () => 'ok', {
|
||||
mutates: false,
|
||||
skipBudget: true,
|
||||
});
|
||||
expect(api.downloadBudget).not.toHaveBeenCalled();
|
||||
expect(api.loadBudget).not.toHaveBeenCalled();
|
||||
expect(api.sync).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('throws when syncId is missing and skipBudget is false', async () => {
|
||||
it('throws when loadBudget is true but syncId is not set', async () => {
|
||||
setConfig({ syncId: undefined });
|
||||
await expect(
|
||||
withConnection({}, async () => 'ok', { mutates: false }),
|
||||
).rejects.toThrow('Sync ID is required');
|
||||
|
||||
await expect(withConnection({}, async () => 'ok')).rejects.toThrow(
|
||||
'Sync ID is required',
|
||||
);
|
||||
});
|
||||
|
||||
it('returns the callback result', async () => {
|
||||
const result = await withConnection({}, async () => 42, {
|
||||
mutates: false,
|
||||
});
|
||||
it('skips budget download when loadBudget is false and syncId is not set', async () => {
|
||||
setConfig({ syncId: undefined });
|
||||
|
||||
await withConnection({}, async () => 'ok', { loadBudget: false });
|
||||
|
||||
expect(api.downloadBudget).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('does not call api.downloadBudget when loadBudget is false', async () => {
|
||||
setConfig({ syncId: 'budget-1' });
|
||||
|
||||
await withConnection({}, async () => 'ok', { loadBudget: false });
|
||||
|
||||
expect(api.downloadBudget).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('returns callback result', async () => {
|
||||
const result = await withConnection({}, async () => 42);
|
||||
expect(result).toBe(42);
|
||||
});
|
||||
|
||||
it('calls api.shutdown on success', async () => {
|
||||
await withConnection({}, async () => 'ok', { mutates: false });
|
||||
it('calls api.shutdown in finally block on success', async () => {
|
||||
await withConnection({}, async () => 'ok');
|
||||
expect(api.shutdown).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('calls api.shutdown on error', async () => {
|
||||
it('calls api.shutdown in finally block on error', async () => {
|
||||
await expect(
|
||||
withConnection(
|
||||
{},
|
||||
async () => {
|
||||
throw new Error('boom');
|
||||
},
|
||||
{ mutates: false },
|
||||
),
|
||||
withConnection({}, async () => {
|
||||
throw new Error('boom');
|
||||
}),
|
||||
).rejects.toThrow('boom');
|
||||
|
||||
expect(api.shutdown).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('propagates sync errors on a stale read', async () => {
|
||||
writeCacheState(metaDirFor('sync-1'), {
|
||||
version: 1,
|
||||
syncId: 'sync-1',
|
||||
budgetId: 'bud-disk-1',
|
||||
serverUrl: 'http://test',
|
||||
lastSyncedAt: Date.now() - 10 * 60_000,
|
||||
lastDownloadedAt: Date.now() - 10 * 60_000,
|
||||
});
|
||||
vi.mocked(api.sync).mockRejectedValueOnce(new Error('network'));
|
||||
await expect(
|
||||
withConnection({}, async () => 'ok', { mutates: false }),
|
||||
).rejects.toThrow('network');
|
||||
it('does not write to stderr by default', async () => {
|
||||
await withConnection({}, async () => 'ok');
|
||||
|
||||
expect(stderrSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('writes info to stderr when verbose', async () => {
|
||||
await withConnection({ verbose: true }, async () => 'ok');
|
||||
|
||||
expect(stderrSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Connecting to'),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,52 +1,30 @@
|
||||
import { mkdirSync } from 'fs';
|
||||
|
||||
import * as api from '@actual-app/api';
|
||||
|
||||
import type { CacheState } from './cache';
|
||||
import {
|
||||
CACHE_VERSION,
|
||||
decideSyncAction,
|
||||
getMetaDir,
|
||||
readCacheState,
|
||||
writeCacheState,
|
||||
} from './cache';
|
||||
import type { CliConfig, CliGlobalOpts } from './config';
|
||||
import { resolveConfig } from './config';
|
||||
import { acquireExclusive, acquireShared } from './lock';
|
||||
import type { Release } from './lock';
|
||||
|
||||
type ConnectionOptions = {
|
||||
mutates: boolean;
|
||||
skipBudget?: boolean;
|
||||
};
|
||||
import type { CliGlobalOpts } from './config';
|
||||
|
||||
function info(message: string, verbose?: boolean) {
|
||||
if (verbose) process.stderr.write(message + '\n');
|
||||
if (verbose) {
|
||||
process.stderr.write(message + '\n');
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveBudgetIdForSyncId(syncId: string): Promise<string> {
|
||||
const budgets = (await api.getBudgets()) as Array<{
|
||||
id?: string;
|
||||
groupId?: string;
|
||||
cloudFileId?: string;
|
||||
}>;
|
||||
const match = budgets.find(
|
||||
b =>
|
||||
b.id !== undefined && (b.groupId === syncId || b.cloudFileId === syncId),
|
||||
);
|
||||
if (!match?.id) {
|
||||
throw new Error(
|
||||
`Could not resolve on-disk budget id for syncId ${syncId} after download.`,
|
||||
);
|
||||
}
|
||||
return match.id;
|
||||
}
|
||||
type ConnectionOptions = {
|
||||
loadBudget?: boolean;
|
||||
};
|
||||
|
||||
export async function withConnection<T>(
|
||||
globalOpts: CliGlobalOpts,
|
||||
fn: (config: CliConfig) => Promise<T>,
|
||||
{ mutates, skipBudget = false }: ConnectionOptions,
|
||||
fn: () => Promise<T>,
|
||||
options: ConnectionOptions = {},
|
||||
): Promise<T> {
|
||||
const { loadBudget = true } = options;
|
||||
const config = await resolveConfig(globalOpts);
|
||||
|
||||
mkdirSync(config.dataDir, { recursive: true });
|
||||
|
||||
info(`Connecting to ${config.serverUrl}...`, globalOpts.verbose);
|
||||
|
||||
if (config.sessionToken) {
|
||||
@@ -70,87 +48,17 @@ export async function withConnection<T>(
|
||||
}
|
||||
|
||||
try {
|
||||
if (skipBudget) return await fn(config);
|
||||
if (!config.syncId) {
|
||||
if (loadBudget && config.syncId) {
|
||||
info(`Downloading budget ${config.syncId}...`, globalOpts.verbose);
|
||||
await api.downloadBudget(config.syncId, {
|
||||
password: config.encryptionPassword,
|
||||
});
|
||||
} else if (loadBudget && !config.syncId) {
|
||||
throw new Error(
|
||||
'Sync ID is required for this command. Set --sync-id or ACTUAL_SYNC_ID.',
|
||||
);
|
||||
}
|
||||
|
||||
const meta = getMetaDir(config.dataDir, config.syncId);
|
||||
let release: Release | null = null;
|
||||
if (!config.noLock) {
|
||||
release = mutates
|
||||
? await acquireExclusive(meta, {
|
||||
timeoutMs: config.lockTimeout * 1000,
|
||||
})
|
||||
: await acquireShared(meta, {
|
||||
timeoutMs: config.lockTimeout * 1000,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
const cachedState = readCacheState(meta);
|
||||
const decision = decideSyncAction({
|
||||
state: cachedState,
|
||||
config: { syncId: config.syncId, serverUrl: config.serverUrl },
|
||||
now: Date.now(),
|
||||
ttlMs: config.cacheTtl * 1000,
|
||||
mutates,
|
||||
refresh: config.refresh,
|
||||
encrypted: Boolean(config.encryptionPassword),
|
||||
});
|
||||
|
||||
let state: CacheState;
|
||||
if (decision.action === 'download') {
|
||||
info(
|
||||
cachedState === null
|
||||
? `Downloading budget ${config.syncId} for the first time...`
|
||||
: `Re-downloading budget ${config.syncId} (cache invalidated)...`,
|
||||
globalOpts.verbose,
|
||||
);
|
||||
await api.downloadBudget(config.syncId, {
|
||||
password: config.encryptionPassword,
|
||||
});
|
||||
const budgetId = await resolveBudgetIdForSyncId(config.syncId);
|
||||
const now = Date.now();
|
||||
state = {
|
||||
version: CACHE_VERSION,
|
||||
syncId: config.syncId,
|
||||
budgetId,
|
||||
serverUrl: config.serverUrl,
|
||||
lastSyncedAt: now,
|
||||
lastDownloadedAt: now,
|
||||
};
|
||||
writeCacheState(meta, state);
|
||||
} else if (decision.action === 'skip') {
|
||||
const age = Math.round(
|
||||
(Date.now() - decision.state.lastSyncedAt) / 1000,
|
||||
);
|
||||
info(`Using cached budget (synced ${age}s ago)...`, globalOpts.verbose);
|
||||
await api.loadBudget(decision.state.budgetId);
|
||||
state = decision.state;
|
||||
} else {
|
||||
info(`Syncing budget ${config.syncId}...`, globalOpts.verbose);
|
||||
await api.loadBudget(decision.state.budgetId);
|
||||
await api.sync();
|
||||
state = { ...decision.state, lastSyncedAt: Date.now() };
|
||||
writeCacheState(meta, state);
|
||||
}
|
||||
|
||||
const result = await fn(config);
|
||||
|
||||
if (mutates) {
|
||||
info(`Pushing changes for ${config.syncId}...`, globalOpts.verbose);
|
||||
await api.sync();
|
||||
state = { ...state, lastSyncedAt: Date.now() };
|
||||
writeCacheState(meta, state);
|
||||
}
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
if (release) await release();
|
||||
}
|
||||
return await fn();
|
||||
} finally {
|
||||
await api.shutdown();
|
||||
}
|
||||
|
||||
@@ -9,10 +9,8 @@ import { registerQueryCommand } from './commands/query';
|
||||
import { registerRulesCommand } from './commands/rules';
|
||||
import { registerSchedulesCommand } from './commands/schedules';
|
||||
import { registerServerCommand } from './commands/server';
|
||||
import { registerSyncCommand } from './commands/sync';
|
||||
import { registerTagsCommand } from './commands/tags';
|
||||
import { registerTransactionsCommand } from './commands/transactions';
|
||||
import { parseNonNegativeIntFlag } from './utils';
|
||||
|
||||
declare const __CLI_VERSION__: string;
|
||||
|
||||
@@ -34,23 +32,6 @@ program
|
||||
'--encryption-password <password>',
|
||||
'E2E encryption password (env: ACTUAL_ENCRYPTION_PASSWORD)',
|
||||
)
|
||||
.option(
|
||||
'--cache-ttl <seconds>',
|
||||
'Cache TTL in seconds (env: ACTUAL_CACHE_TTL; default: 60)',
|
||||
value => parseNonNegativeIntFlag(value, '--cache-ttl'),
|
||||
)
|
||||
.option('--refresh', 'Force a sync on this call, ignoring the cache', false)
|
||||
.option('--no-cache', 'Alias for --refresh', false)
|
||||
.option(
|
||||
'--lock-timeout <seconds>',
|
||||
'How long to wait for another CLI process to release the lock (env: ACTUAL_LOCK_TIMEOUT; default: 10)',
|
||||
value => parseNonNegativeIntFlag(value, '--lock-timeout'),
|
||||
)
|
||||
.option(
|
||||
'--no-lock',
|
||||
'Disable the budget directory lock (use with care, env: ACTUAL_NO_LOCK)',
|
||||
false,
|
||||
)
|
||||
.addOption(
|
||||
new Option('--format <format>', 'Output format: json, table, csv')
|
||||
.choices(['json', 'table', 'csv'] as const)
|
||||
@@ -69,7 +50,6 @@ registerRulesCommand(program);
|
||||
registerSchedulesCommand(program);
|
||||
registerQueryCommand(program);
|
||||
registerServerCommand(program);
|
||||
registerSyncCommand(program);
|
||||
|
||||
function normalizeThrownMessage(err: unknown): string {
|
||||
if (err instanceof Error) return err.message;
|
||||
|
||||
@@ -1,159 +0,0 @@
|
||||
import {
|
||||
existsSync,
|
||||
mkdirSync,
|
||||
mkdtempSync,
|
||||
readdirSync,
|
||||
rmSync,
|
||||
writeFileSync,
|
||||
} from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { acquireExclusive, acquireShared } from './lock';
|
||||
|
||||
// In-memory stand-in for proper-lockfile. The real library spins up a
|
||||
// setTimeout loop to refresh lockfile mtimes; on some CI filesystems that
|
||||
// timer keeps Node's event loop alive even after tests complete, wedging the
|
||||
// test run. The mock behaves identically from our wrapper's perspective
|
||||
// (acquire, detect contention with ELOCKED, release) without touching the
|
||||
// filesystem or scheduling timers.
|
||||
const mockHeld = new Set<string>();
|
||||
|
||||
vi.mock('proper-lockfile', () => ({
|
||||
default: {
|
||||
lock: vi.fn(
|
||||
async (
|
||||
file: string,
|
||||
opts?: { lockfilePath?: string },
|
||||
): Promise<() => Promise<void>> => {
|
||||
const key = opts?.lockfilePath ?? file;
|
||||
if (mockHeld.has(key)) {
|
||||
const err = new Error('Lock is already held') as Error & {
|
||||
code?: string;
|
||||
};
|
||||
err.code = 'ELOCKED';
|
||||
throw err;
|
||||
}
|
||||
mockHeld.add(key);
|
||||
return async () => {
|
||||
mockHeld.delete(key);
|
||||
};
|
||||
},
|
||||
),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('acquireExclusive', () => {
|
||||
let dir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
mockHeld.clear();
|
||||
dir = mkdtempSync(join(tmpdir(), 'actual-cli-lock-'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(dir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('creates the directory if it does not exist', async () => {
|
||||
const target = join(dir, 'nested', 'budget');
|
||||
const release = await acquireExclusive(target, { timeoutMs: 1000 });
|
||||
expect(existsSync(target)).toBe(true);
|
||||
await release();
|
||||
});
|
||||
|
||||
it('returns a release function that frees the lock', async () => {
|
||||
const release1 = await acquireExclusive(dir, { timeoutMs: 1000 });
|
||||
await release1();
|
||||
const release2 = await acquireExclusive(dir, { timeoutMs: 1000 });
|
||||
await release2();
|
||||
});
|
||||
|
||||
it('rejects with a user-friendly error when another holder has the lock', async () => {
|
||||
const release = await acquireExclusive(dir, { timeoutMs: 1000 });
|
||||
await expect(acquireExclusive(dir, { timeoutMs: 100 })).rejects.toThrow(
|
||||
/holding the budget/,
|
||||
);
|
||||
await release();
|
||||
});
|
||||
});
|
||||
|
||||
describe('acquireShared', () => {
|
||||
let dir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
mockHeld.clear();
|
||||
dir = mkdtempSync(join(tmpdir(), 'actual-cli-lock-'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(dir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('allows multiple concurrent shared holders', async () => {
|
||||
const r1 = await acquireShared(dir, { timeoutMs: 1000 });
|
||||
const r2 = await acquireShared(dir, { timeoutMs: 1000 });
|
||||
const readers = readdirSync(join(dir, 'readers'));
|
||||
expect(readers).toHaveLength(2);
|
||||
await r1();
|
||||
await r2();
|
||||
});
|
||||
|
||||
it('removes the reader marker on release', async () => {
|
||||
const release = await acquireShared(dir, { timeoutMs: 1000 });
|
||||
await release();
|
||||
const readers = readdirSync(join(dir, 'readers'));
|
||||
expect(readers).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('rejects when an exclusive lock is held', async () => {
|
||||
const releaseExclusive = await acquireExclusive(dir, { timeoutMs: 1000 });
|
||||
await expect(acquireShared(dir, { timeoutMs: 100 })).rejects.toThrow(
|
||||
/holding the budget/,
|
||||
);
|
||||
await releaseExclusive();
|
||||
});
|
||||
|
||||
it('sweeps stale reader markers whose PIDs no longer exist', async () => {
|
||||
const readersDir = join(dir, 'readers');
|
||||
mkdirSync(readersDir, { recursive: true });
|
||||
writeFileSync(join(readersDir, '-1-abc'), '');
|
||||
|
||||
const release = await acquireExclusive(dir, { timeoutMs: 1000 });
|
||||
expect(readdirSync(readersDir)).toHaveLength(0);
|
||||
await release();
|
||||
});
|
||||
});
|
||||
|
||||
describe('writer-reader interaction', () => {
|
||||
let dir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
mockHeld.clear();
|
||||
dir = mkdtempSync(join(tmpdir(), 'actual-cli-lock-'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(dir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('exclusive waits for active shared holders to release', async () => {
|
||||
const readerRelease = await acquireShared(dir, { timeoutMs: 500 });
|
||||
|
||||
let writerAcquired = false;
|
||||
const writerPromise = acquireExclusive(dir, { timeoutMs: 1000 }).then(
|
||||
release => {
|
||||
writerAcquired = true;
|
||||
return release;
|
||||
},
|
||||
);
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 150));
|
||||
expect(writerAcquired).toBe(false);
|
||||
|
||||
await readerRelease();
|
||||
const writerRelease = await writerPromise;
|
||||
expect(writerAcquired).toBe(true);
|
||||
await writerRelease();
|
||||
});
|
||||
});
|
||||
@@ -1,149 +0,0 @@
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { mkdirSync, readdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import lockfile from 'proper-lockfile';
|
||||
|
||||
export type Release = () => Promise<void>;
|
||||
|
||||
export type AcquireOptions = {
|
||||
timeoutMs: number;
|
||||
};
|
||||
|
||||
const LOCKFILE_NAME = 'lock';
|
||||
const READERS_DIR_NAME = 'readers';
|
||||
const READER_POLL_INTERVAL_MS = 100;
|
||||
|
||||
function lockfilePath(dir: string): string {
|
||||
return join(dir, LOCKFILE_NAME);
|
||||
}
|
||||
|
||||
function readersDir(dir: string): string {
|
||||
return join(dir, READERS_DIR_NAME);
|
||||
}
|
||||
|
||||
function ensureDir(dir: string) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
|
||||
function retriesForTimeout(timeoutMs: number) {
|
||||
return {
|
||||
retries: Math.max(1, Math.floor(timeoutMs / 200)),
|
||||
minTimeout: 100,
|
||||
maxTimeout: 500,
|
||||
factor: 1.5,
|
||||
};
|
||||
}
|
||||
|
||||
function errorCode(err: unknown): string | undefined {
|
||||
if (err instanceof Error && 'code' in err) {
|
||||
const { code } = err as { code?: unknown };
|
||||
if (typeof code === 'string') return code;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function isLockedError(err: unknown): boolean {
|
||||
return errorCode(err) === 'ELOCKED';
|
||||
}
|
||||
|
||||
function lockedMessage(timeoutMs: number): string {
|
||||
return `Another CLI process is holding the budget (waited ${Math.round(
|
||||
timeoutMs / 1000,
|
||||
)}s). Retry, or use a different --data-dir.`;
|
||||
}
|
||||
|
||||
function pidIsAlive(pid: number): boolean {
|
||||
if (pid <= 0) return false;
|
||||
try {
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch (err) {
|
||||
return errorCode(err) === 'EPERM';
|
||||
}
|
||||
}
|
||||
|
||||
function readReaderNames(readers: string): string[] {
|
||||
try {
|
||||
return readdirSync(readers);
|
||||
} catch (err) {
|
||||
if (errorCode(err) === 'ENOENT') return [];
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
function sweepStaleReaders(dir: string) {
|
||||
const readers = readersDir(dir);
|
||||
for (const name of readReaderNames(readers)) {
|
||||
const pid = Number(name.split('-')[0]);
|
||||
if (!Number.isFinite(pid) || !pidIsAlive(pid)) {
|
||||
rmSync(join(readers, name), { force: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function waitForReadersEmpty(dir: string, timeoutMs: number) {
|
||||
const readers = readersDir(dir);
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
while (Date.now() < deadline) {
|
||||
sweepStaleReaders(dir);
|
||||
if (readReaderNames(readers).length === 0) return;
|
||||
await new Promise(resolve => setTimeout(resolve, READER_POLL_INTERVAL_MS));
|
||||
}
|
||||
throw new Error(lockedMessage(timeoutMs));
|
||||
}
|
||||
|
||||
async function acquireGate(
|
||||
dir: string,
|
||||
timeoutMs: number,
|
||||
): Promise<() => Promise<void>> {
|
||||
ensureDir(dir);
|
||||
try {
|
||||
return await lockfile.lock(dir, {
|
||||
lockfilePath: lockfilePath(dir),
|
||||
retries: retriesForTimeout(timeoutMs),
|
||||
stale: 30_000,
|
||||
});
|
||||
} catch (err) {
|
||||
if (isLockedError(err)) throw new Error(lockedMessage(timeoutMs));
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export async function acquireExclusive(
|
||||
dir: string,
|
||||
{ timeoutMs }: AcquireOptions,
|
||||
): Promise<Release> {
|
||||
const start = Date.now();
|
||||
const release = await acquireGate(dir, timeoutMs);
|
||||
try {
|
||||
const remaining = Math.max(0, timeoutMs - (Date.now() - start));
|
||||
await waitForReadersEmpty(dir, remaining);
|
||||
} catch (err) {
|
||||
await release();
|
||||
throw err;
|
||||
}
|
||||
return () => release();
|
||||
}
|
||||
|
||||
export async function acquireShared(
|
||||
dir: string,
|
||||
{ timeoutMs }: AcquireOptions,
|
||||
): Promise<Release> {
|
||||
const gate = await acquireGate(dir, timeoutMs);
|
||||
let markerPath: string;
|
||||
try {
|
||||
const readers = readersDir(dir);
|
||||
ensureDir(readers);
|
||||
const markerName = `${process.pid}-${randomBytes(6).toString('hex')}`;
|
||||
markerPath = join(readers, markerName);
|
||||
writeFileSync(markerPath, '');
|
||||
} catch (err) {
|
||||
await gate();
|
||||
throw err;
|
||||
}
|
||||
await gate();
|
||||
return async () => {
|
||||
rmSync(markerPath, { force: true });
|
||||
};
|
||||
}
|
||||
@@ -18,23 +18,3 @@ export function parseIntFlag(value: string, flagName: string): number {
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
export function parseNonNegativeIntFlag(
|
||||
value: string,
|
||||
flagName: string,
|
||||
): number {
|
||||
const parsed = parseIntFlag(value, flagName);
|
||||
if (parsed < 0) {
|
||||
throw new Error(
|
||||
`Invalid ${flagName}: "${value}". Expected a non-negative integer.`,
|
||||
);
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
export function parseBoolEnv(raw: string | undefined): boolean | undefined {
|
||||
if (raw === undefined) return undefined;
|
||||
if (raw === '1' || raw.toLowerCase() === 'true') return true;
|
||||
if (raw === '0' || raw.toLowerCase() === 'false') return false;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
@@ -32,8 +32,5 @@ export default defineConfig({
|
||||
plugins: [visualizer({ template: 'raw-data', filename: 'dist/stats.json' })],
|
||||
test: {
|
||||
globals: true,
|
||||
include: ['src/**/*.test.ts'],
|
||||
exclude: ['**/node_modules/**', '**/dist/**'],
|
||||
testTimeout: 10_000,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -139,7 +139,6 @@
|
||||
"@use-gesture/react": "^10.3.1",
|
||||
"@vitejs/plugin-basic-ssl": "^2.3.0",
|
||||
"@vitejs/plugin-react": "^6.0.1",
|
||||
"absurd-sql": "0.0.54",
|
||||
"auto-text-size": "^0.2.3",
|
||||
"babel-plugin-react-compiler": "^1.0.0",
|
||||
"cmdk": "^1.1.1",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { startBrowserBackend } from '@actual-app/core/platform/client/browser-preload';
|
||||
import * as Platform from '@actual-app/core/shared/platform';
|
||||
import { initBackend as initSQLBackend } from 'absurd-sql/dist/indexeddb-main-thread';
|
||||
import { registerSW } from 'virtual:pwa-register';
|
||||
|
||||
// oxlint-disable-next-line typescript-paths/absolute-parent-import
|
||||
@@ -22,256 +22,23 @@ const ACTUAL_VERSION = Platform.isPlaywright
|
||||
: packageJson.version;
|
||||
|
||||
// *** Start the backend ***
|
||||
|
||||
let worker = null;
|
||||
// The regular Worker running the backend, created only on the leader tab
|
||||
let localBackendWorker = null;
|
||||
|
||||
/**
|
||||
* WorkerBridge wraps a SharedWorker port and presents a Worker-like interface
|
||||
* (onmessage, postMessage, addEventListener, start) to the connection layer.
|
||||
*
|
||||
* The SharedWorker coordinator assigns each tab a role per budget:
|
||||
* - LEADER: this tab runs the backend in a dedicated Worker
|
||||
* - FOLLOWER: this tab routes messages through the SharedWorker to the leader
|
||||
*
|
||||
* Multiple budgets can be open simultaneously — each has its own leader.
|
||||
*/
|
||||
class WorkerBridge {
|
||||
constructor(sharedPort) {
|
||||
this._sharedPort = sharedPort;
|
||||
this._onmessage = null;
|
||||
this._listeners = [];
|
||||
this._started = false;
|
||||
|
||||
// Listen for all messages from the SharedWorker port
|
||||
sharedPort.addEventListener('message', e => this._onSharedMessage(e));
|
||||
}
|
||||
|
||||
set onmessage(handler) {
|
||||
this._onmessage = handler;
|
||||
// Setting onmessage on a real MessagePort implicitly starts it.
|
||||
// We need to do this explicitly on the underlying port.
|
||||
if (!this._started) {
|
||||
this._started = true;
|
||||
this._sharedPort.start();
|
||||
}
|
||||
}
|
||||
|
||||
get onmessage() {
|
||||
return this._onmessage;
|
||||
}
|
||||
|
||||
postMessage(msg) {
|
||||
// All messages go through the SharedWorker for coordination.
|
||||
// The SharedWorker forwards to the leader's Worker via __to-worker.
|
||||
this._sharedPort.postMessage(msg);
|
||||
}
|
||||
|
||||
addEventListener(type, handler) {
|
||||
this._listeners.push({ type, handler });
|
||||
}
|
||||
|
||||
start() {
|
||||
if (!this._started) {
|
||||
this._started = true;
|
||||
this._sharedPort.start();
|
||||
}
|
||||
}
|
||||
|
||||
_dispatch(event) {
|
||||
if (this._onmessage) this._onmessage(event);
|
||||
for (const { type, handler } of this._listeners) {
|
||||
if (type === 'message') handler(event);
|
||||
}
|
||||
}
|
||||
|
||||
_onSharedMessage(event) {
|
||||
const msg = event.data;
|
||||
|
||||
// Elected as leader: create the real backend Worker on this tab
|
||||
if (msg && msg.type === '__become-leader') {
|
||||
this._createLocalWorker(msg.initMsg, msg.budgetToRestore, msg.pendingMsg);
|
||||
return;
|
||||
}
|
||||
|
||||
// Forward requests from SharedWorker to our local Worker
|
||||
if (msg && msg.type === '__to-worker') {
|
||||
if (localBackendWorker) {
|
||||
localBackendWorker.postMessage(msg.msg);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Leadership transfer: this tab is closing the budget but other tabs
|
||||
// still need it. Terminate our Worker (don't actually close-budget on
|
||||
// the backend) and dispatch a synthetic reply so the UI navigates to
|
||||
// show-budgets normally.
|
||||
if (msg && msg.type === '__close-and-transfer') {
|
||||
console.log('[WorkerBridge] Leadership transferred — terminating Worker');
|
||||
if (localBackendWorker) {
|
||||
localBackendWorker.terminate();
|
||||
localBackendWorker = null;
|
||||
}
|
||||
// Only dispatch a synthetic reply if there's an actual close-budget
|
||||
// request to complete. When requestId is null the eviction was
|
||||
// triggered externally (e.g. another tab deleted this budget).
|
||||
if (msg.requestId) {
|
||||
this._dispatch({
|
||||
data: { type: 'reply', id: msg.requestId, data: {} },
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Role change notification
|
||||
if (msg && msg.type === '__role-change') {
|
||||
console.log(
|
||||
`[WorkerBridge] Role: ${msg.role}${msg.budgetId ? ` (budget: ${msg.budgetId})` : ''}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Surface SharedWorker console output in this tab's DevTools
|
||||
if (msg && msg.type === '__shared-worker-console') {
|
||||
const method = console[msg.level] || console.log;
|
||||
method(...msg.args);
|
||||
return;
|
||||
}
|
||||
|
||||
// Respond to heartbeat pings
|
||||
if (msg && msg.type === '__heartbeat-ping') {
|
||||
this._sharedPort.postMessage({ type: '__heartbeat-pong' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Everything else goes to the connection layer
|
||||
this._dispatch(event);
|
||||
}
|
||||
|
||||
_createLocalWorker(initMsg, budgetToRestore, pendingMsg) {
|
||||
if (localBackendWorker) {
|
||||
localBackendWorker.terminate();
|
||||
}
|
||||
localBackendWorker = new Worker(backendWorkerUrl);
|
||||
initSQLBackend(localBackendWorker);
|
||||
|
||||
const sharedPort = this._sharedPort;
|
||||
localBackendWorker.onmessage = workerEvent => {
|
||||
const workerMsg = workerEvent.data;
|
||||
// absurd-sql internal messages are handled by initSQLBackend
|
||||
if (
|
||||
workerMsg &&
|
||||
workerMsg.type &&
|
||||
workerMsg.type.startsWith('__absurd:')
|
||||
) {
|
||||
return;
|
||||
}
|
||||
// After the backend connects, automatically reload the budget that was
|
||||
// open before the leader left (e.g. page refresh). This lets other tabs
|
||||
// continue working without being sent to the budget list.
|
||||
if (workerMsg.type === 'connect') {
|
||||
if (budgetToRestore) {
|
||||
console.log(
|
||||
`[WorkerBridge] Backend connected, restoring budget "${budgetToRestore}"`,
|
||||
);
|
||||
const id = budgetToRestore;
|
||||
budgetToRestore = null;
|
||||
localBackendWorker.postMessage({
|
||||
id: '__restore-budget',
|
||||
name: 'load-budget',
|
||||
args: { id },
|
||||
catchErrors: true,
|
||||
});
|
||||
// Tell SharedWorker to track the restore request so
|
||||
// currentBudgetId gets updated when the reply arrives.
|
||||
sharedPort.postMessage({
|
||||
type: '__track-restore',
|
||||
requestId: '__restore-budget',
|
||||
budgetId: id,
|
||||
});
|
||||
} else if (pendingMsg) {
|
||||
const toSend = pendingMsg;
|
||||
pendingMsg = null;
|
||||
localBackendWorker.postMessage(toSend);
|
||||
}
|
||||
}
|
||||
sharedPort.postMessage({ type: '__from-worker', msg: workerMsg });
|
||||
};
|
||||
|
||||
localBackendWorker.postMessage(initMsg);
|
||||
}
|
||||
}
|
||||
|
||||
function createBackendWorker() {
|
||||
// Use SharedWorker as a coordinator for multi-tab, multi-budget support.
|
||||
// Each budget gets its own leader tab running a dedicated Worker. All other
|
||||
// tabs on the same budget are followers — their messages are routed through
|
||||
// the SharedWorker to the leader's Worker.
|
||||
// The SharedWorker never touches SharedArrayBuffer, so this works on all
|
||||
// platforms including iOS/Safari.
|
||||
if (typeof SharedWorker !== 'undefined' && !Platform.isPlaywright) {
|
||||
try {
|
||||
const sharedWorker = new SharedBrowserServerWorker({
|
||||
name: 'actual-backend',
|
||||
});
|
||||
|
||||
const sharedPort = sharedWorker.port;
|
||||
worker = new WorkerBridge(sharedPort);
|
||||
console.log('[WorkerBridge] Connected to SharedWorker coordinator');
|
||||
|
||||
// Don't call start() here. The port must remain un-started so that
|
||||
// messages (especially 'connect') are queued until connectWorker()
|
||||
// sets onmessage, which implicitly starts the port via the bridge.
|
||||
|
||||
if (window.SharedArrayBuffer) {
|
||||
localStorage.removeItem('SharedArrayBufferOverride');
|
||||
}
|
||||
|
||||
sharedPort.postMessage({
|
||||
type: 'init',
|
||||
version: ACTUAL_VERSION,
|
||||
isDev: IS_DEV,
|
||||
publicUrl: process.env.PUBLIC_URL,
|
||||
hash: process.env.REACT_APP_BACKEND_WORKER_HASH,
|
||||
isSharedArrayBufferOverrideEnabled: localStorage.getItem(
|
||||
'SharedArrayBufferOverride',
|
||||
),
|
||||
});
|
||||
|
||||
window.addEventListener('beforeunload', () => {
|
||||
sharedPort.postMessage({ type: 'tab-closing' });
|
||||
});
|
||||
|
||||
return;
|
||||
} catch (e) {
|
||||
console.log('SharedWorker failed, falling back to Worker:', e);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: regular Worker (Playwright, no SharedWorker support, or failure)
|
||||
console.log('[WorkerBridge] No SharedWorker available, using direct Worker');
|
||||
worker = new Worker(backendWorkerUrl);
|
||||
initSQLBackend(worker);
|
||||
|
||||
if (window.SharedArrayBuffer) {
|
||||
localStorage.removeItem('SharedArrayBufferOverride');
|
||||
}
|
||||
|
||||
worker.postMessage({
|
||||
type: 'init',
|
||||
//
|
||||
// The multi-tab coordinator (leader/follower over SharedWorker), direct
|
||||
// Worker fallback, and sqlite worker bridge now all live in loot-core
|
||||
// (packages/loot-core/src/platform/client/browser-preload). We only
|
||||
// hand it the desktop-specific inputs.
|
||||
const worker = startBrowserBackend({
|
||||
backendWorkerUrl,
|
||||
initPayload: {
|
||||
version: ACTUAL_VERSION,
|
||||
isDev: IS_DEV,
|
||||
publicUrl: process.env.PUBLIC_URL,
|
||||
hash: process.env.REACT_APP_BACKEND_WORKER_HASH,
|
||||
hasSharedArrayBuffer: !!window.SharedArrayBuffer,
|
||||
isSharedArrayBufferOverrideEnabled: localStorage.getItem(
|
||||
'SharedArrayBufferOverride',
|
||||
),
|
||||
});
|
||||
}
|
||||
|
||||
createBackendWorker();
|
||||
},
|
||||
createSharedWorker: () =>
|
||||
new SharedBrowserServerWorker({ name: 'actual-backend' }),
|
||||
forceDirectWorker: Platform.isPlaywright,
|
||||
});
|
||||
|
||||
let isUpdateReadyForDownload = false;
|
||||
let markUpdateReadyForDownload;
|
||||
|
||||
@@ -128,16 +128,7 @@ export function SelectLinkedAccountsModal({
|
||||
const localAccounts = allAccounts.filter(a => a.closed === 0);
|
||||
const [draftLinkAccounts, setDraftLinkAccounts] = useState<
|
||||
Map<string, 'linking' | 'unlinking'>
|
||||
>(() => {
|
||||
const externalAccountIds = new Set(externalAccounts.map(a => a.account_id));
|
||||
const initial = new Map<string, 'linking' | 'unlinking'>();
|
||||
for (const acc of localAccounts) {
|
||||
if (acc.account_id && externalAccountIds.has(acc.account_id)) {
|
||||
initial.set(acc.account_id, 'linking');
|
||||
}
|
||||
}
|
||||
return initial;
|
||||
});
|
||||
>(new Map());
|
||||
const [chosenAccounts, setChosenAccounts] = useState<Record<string, string>>(
|
||||
() => {
|
||||
return Object.fromEntries(
|
||||
|
||||
@@ -122,7 +122,10 @@ export default defineConfig(async ({ mode }) => {
|
||||
base: '/',
|
||||
envPrefix: 'REACT_APP_',
|
||||
build: {
|
||||
minify: false,
|
||||
terserOptions: {
|
||||
compress: false,
|
||||
mangle: false,
|
||||
},
|
||||
target: 'es2022',
|
||||
sourcemap: true,
|
||||
outDir: mode === 'desktop' ? 'build-electron' : 'build',
|
||||
|
||||
@@ -60,6 +60,8 @@
|
||||
"default": "./src/shared/platform.ts"
|
||||
},
|
||||
"#mocks": "./src/mocks/index.ts",
|
||||
"#platform/client/backend-worker": "./src/platform/client/backend-worker/index.ts",
|
||||
"#platform/client/browser-preload": "./src/platform/client/browser-preload/index.ts",
|
||||
"#platform/client/undo": "./src/platform/client/undo/index.ts",
|
||||
"#platform/exceptions": "./src/platform/exceptions/index.ts",
|
||||
"#platform/server/indexeddb": "./src/platform/server/indexeddb/index.ts",
|
||||
@@ -104,6 +106,8 @@
|
||||
"./client/transfer": "./src/client/transfer.ts",
|
||||
"./client/undo": "./src/client/undo.ts",
|
||||
"./mocks": "./src/mocks/index.ts",
|
||||
"./platform/client/backend-worker": "./src/platform/client/backend-worker/index.ts",
|
||||
"./platform/client/browser-preload": "./src/platform/client/browser-preload/index.ts",
|
||||
"./platform/client/connection": {
|
||||
"electron-renderer": "./src/platform/client/connection/index.electron.ts",
|
||||
"default": "./src/platform/client/connection/index.ts"
|
||||
|
||||
@@ -0,0 +1,56 @@
|
||||
// Shared main-thread bootstrap for the browser backend Web Worker.
|
||||
//
|
||||
// The same absurd-sql plumbing is needed by:
|
||||
// - packages/desktop-client/src/browser-preload.js (full multi-tab setup)
|
||||
// - packages/api/browser/rpc.ts (thin api consumer)
|
||||
//
|
||||
// Both need: initSQLBackend(worker) so absurd-sql's __absurd:spawn-idb-worker
|
||||
// messages are handled, and a way to ignore those internal messages when
|
||||
// consuming the channel for loot-core's {id, name, args} protocol.
|
||||
|
||||
import { initBackend as initSQLBackend } from 'absurd-sql/dist/indexeddb-main-thread';
|
||||
|
||||
export type BackendWorker = {
|
||||
worker: Worker;
|
||||
/** Register a listener for non-internal messages from the worker. */
|
||||
onMessage: (handler: (data: unknown) => void) => () => void;
|
||||
/** Send a message to the worker (loot-core request shape or handshake ack). */
|
||||
postMessage: (msg: unknown) => void;
|
||||
/** Terminate the worker and drop all listeners. */
|
||||
terminate: () => void;
|
||||
};
|
||||
|
||||
export function createBackendWorker(worker: Worker): BackendWorker {
|
||||
// Hooks __absurd:spawn-idb-worker; without this any sqlite write inside
|
||||
// the worker hangs on Atomics.wait because the IDB helper never spawns.
|
||||
initSQLBackend(worker);
|
||||
|
||||
const listeners = new Set<(data: unknown) => void>();
|
||||
|
||||
worker.addEventListener('message', event => {
|
||||
const data = (event as MessageEvent).data;
|
||||
if (isAbsurdMessage(data)) return;
|
||||
for (const listener of listeners) listener(data);
|
||||
});
|
||||
|
||||
return {
|
||||
worker,
|
||||
onMessage(handler) {
|
||||
listeners.add(handler);
|
||||
return () => listeners.delete(handler);
|
||||
},
|
||||
postMessage(msg) {
|
||||
worker.postMessage(msg);
|
||||
},
|
||||
terminate() {
|
||||
worker.terminate();
|
||||
listeners.clear();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function isAbsurdMessage(data: unknown): boolean {
|
||||
if (!data || typeof data !== 'object') return false;
|
||||
const type = (data as { type?: unknown }).type;
|
||||
return typeof type === 'string' && type.startsWith('__absurd:');
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
export { WorkerBridge, type WorkerLike } from './worker-bridge';
|
||||
export {
|
||||
startBrowserBackend,
|
||||
type StartBackendOptions,
|
||||
type StartBackendInit,
|
||||
type StartBackendHandle,
|
||||
} from './start';
|
||||
143
packages/loot-core/src/platform/client/browser-preload/start.ts
Normal file
143
packages/loot-core/src/platform/client/browser-preload/start.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
// @ts-strict-ignore
|
||||
// Parameterized backend bootstrap moved out of desktop-client's
|
||||
// browser-preload.js. Picks between a SharedWorker-coordinated multi-tab
|
||||
// setup and a direct Worker fallback. Consumers hand in the concrete URLs,
|
||||
// init payload, and (if they want multi-tab coordination) a SharedWorker
|
||||
// factory — keeping loot-core free of Vite-specific asset imports.
|
||||
|
||||
import { createBackendWorker as initSQLBackend } from '#platform/client/backend-worker';
|
||||
import { logger } from '#platform/server/log';
|
||||
|
||||
import { WorkerBridge } from './worker-bridge';
|
||||
|
||||
export type StartBackendInit = {
|
||||
version: string;
|
||||
isDev: boolean;
|
||||
publicUrl?: string;
|
||||
hash?: string;
|
||||
};
|
||||
|
||||
export type StartBackendOptions = {
|
||||
/** URL of the backend Worker script to spawn. */
|
||||
backendWorkerUrl: URL;
|
||||
/** Payload posted to the worker (or shared coordinator) as its init msg. */
|
||||
initPayload: StartBackendInit;
|
||||
/**
|
||||
* Optional factory returning a SharedWorker instance. When provided, the
|
||||
* backend runs through loot-core's multi-tab coordinator (leader/follower).
|
||||
* Omit to always spawn a direct Worker on this page.
|
||||
*/
|
||||
createSharedWorker?: () => SharedWorker;
|
||||
/**
|
||||
* Skip the SharedWorker path even if `createSharedWorker` is provided.
|
||||
* Typically wired to a platform flag (e.g. Playwright tests).
|
||||
*/
|
||||
forceDirectWorker?: boolean;
|
||||
};
|
||||
|
||||
export type StartBackendHandle = Worker | WorkerBridge;
|
||||
|
||||
export function startBrowserBackend(
|
||||
opts: StartBackendOptions,
|
||||
): StartBackendHandle {
|
||||
const {
|
||||
backendWorkerUrl,
|
||||
initPayload,
|
||||
createSharedWorker,
|
||||
forceDirectWorker,
|
||||
} = opts;
|
||||
|
||||
// Use SharedWorker as a coordinator for multi-tab, multi-budget support.
|
||||
// Each budget gets its own leader tab running a dedicated Worker. All other
|
||||
// tabs on the same budget are followers — their messages are routed through
|
||||
// the SharedWorker to the leader's Worker.
|
||||
// The SharedWorker never touches SharedArrayBuffer, so this works on all
|
||||
// platforms including iOS/Safari.
|
||||
if (
|
||||
!forceDirectWorker &&
|
||||
typeof SharedWorker !== 'undefined' &&
|
||||
createSharedWorker
|
||||
) {
|
||||
try {
|
||||
const sharedWorker = createSharedWorker();
|
||||
|
||||
const sharedPort = sharedWorker.port;
|
||||
const bridge = new WorkerBridge(sharedPort, backendWorkerUrl);
|
||||
logger.log('[WorkerBridge] Connected to SharedWorker coordinator');
|
||||
|
||||
// Don't call start() here. The port must remain un-started so that
|
||||
// messages (especially 'connect') are queued until connectWorker()
|
||||
// sets onmessage, which implicitly starts the port via the bridge.
|
||||
|
||||
if (
|
||||
(globalThis as unknown as { SharedArrayBuffer?: unknown })
|
||||
.SharedArrayBuffer
|
||||
) {
|
||||
try {
|
||||
localStorage.removeItem('SharedArrayBufferOverride');
|
||||
} catch {
|
||||
// localStorage may be unavailable in some embeddings; ignore.
|
||||
}
|
||||
}
|
||||
|
||||
let isSharedArrayBufferOverrideEnabled: string | null = null;
|
||||
try {
|
||||
isSharedArrayBufferOverrideEnabled = localStorage.getItem(
|
||||
'SharedArrayBufferOverride',
|
||||
);
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
sharedPort.postMessage({
|
||||
type: 'init',
|
||||
...initPayload,
|
||||
isSharedArrayBufferOverrideEnabled,
|
||||
});
|
||||
|
||||
window.addEventListener('beforeunload', () => {
|
||||
sharedPort.postMessage({ type: 'tab-closing' });
|
||||
});
|
||||
|
||||
return bridge;
|
||||
} catch (e) {
|
||||
logger.log('SharedWorker failed, falling back to Worker:', e);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: regular Worker (Playwright, no SharedWorker support, or the
|
||||
// consumer opted out by omitting createSharedWorker).
|
||||
logger.log('[WorkerBridge] No SharedWorker available, using direct Worker');
|
||||
const worker = new Worker(backendWorkerUrl);
|
||||
initSQLBackend(worker);
|
||||
|
||||
if (
|
||||
(globalThis as unknown as { SharedArrayBuffer?: unknown }).SharedArrayBuffer
|
||||
) {
|
||||
try {
|
||||
localStorage.removeItem('SharedArrayBufferOverride');
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
let isSharedArrayBufferOverrideEnabled: string | null = null;
|
||||
try {
|
||||
isSharedArrayBufferOverrideEnabled = localStorage.getItem(
|
||||
'SharedArrayBufferOverride',
|
||||
);
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
worker.postMessage({
|
||||
type: 'init',
|
||||
...initPayload,
|
||||
hasSharedArrayBuffer: !!(
|
||||
globalThis as unknown as { SharedArrayBuffer?: unknown }
|
||||
).SharedArrayBuffer,
|
||||
isSharedArrayBufferOverrideEnabled,
|
||||
});
|
||||
|
||||
return worker;
|
||||
}
|
||||
@@ -0,0 +1,202 @@
|
||||
// @ts-strict-ignore
|
||||
// Moved verbatim from packages/desktop-client/src/browser-preload.js — this
|
||||
// is the SharedWorker-port → Worker-like adapter loot-core's client
|
||||
// connection layer consumes. Works identically for any browser consumer
|
||||
// that opts into multi-tab coordination.
|
||||
|
||||
import { createBackendWorker as initSQLBackend } from '#platform/client/backend-worker';
|
||||
import { logger } from '#platform/server/log';
|
||||
|
||||
export type WorkerLike = {
|
||||
onmessage: ((e: MessageEvent) => void) | null;
|
||||
postMessage: (msg: unknown) => void;
|
||||
addEventListener: (type: string, handler: (e: MessageEvent) => void) => void;
|
||||
start?: () => void;
|
||||
terminate?: () => void;
|
||||
};
|
||||
|
||||
/**
|
||||
* WorkerBridge wraps a SharedWorker port and presents a Worker-like interface
|
||||
* (onmessage, postMessage, addEventListener, start) to the connection layer.
|
||||
*
|
||||
* The SharedWorker coordinator assigns each tab a role per budget:
|
||||
* - LEADER: this tab runs the backend in a dedicated Worker
|
||||
* - FOLLOWER: this tab routes messages through the SharedWorker to the leader
|
||||
*
|
||||
* Multiple budgets can be open simultaneously — each has its own leader.
|
||||
*/
|
||||
export class WorkerBridge {
|
||||
_sharedPort: MessagePort;
|
||||
_onmessage: ((e: MessageEvent) => void) | null;
|
||||
_listeners: Array<{ type: string; handler: (e: MessageEvent) => void }>;
|
||||
_started: boolean;
|
||||
localBackendWorker: Worker | null;
|
||||
backendWorkerUrl: URL;
|
||||
|
||||
constructor(sharedPort: MessagePort, backendWorkerUrl: URL) {
|
||||
this._sharedPort = sharedPort;
|
||||
this._onmessage = null;
|
||||
this._listeners = [];
|
||||
this._started = false;
|
||||
this.localBackendWorker = null;
|
||||
this.backendWorkerUrl = backendWorkerUrl;
|
||||
|
||||
// Listen for all messages from the SharedWorker port
|
||||
sharedPort.addEventListener('message', e => this._onSharedMessage(e));
|
||||
}
|
||||
|
||||
set onmessage(handler) {
|
||||
this._onmessage = handler;
|
||||
// Setting onmessage on a real MessagePort implicitly starts it.
|
||||
// We need to do this explicitly on the underlying port.
|
||||
if (!this._started) {
|
||||
this._started = true;
|
||||
this._sharedPort.start();
|
||||
}
|
||||
}
|
||||
|
||||
get onmessage() {
|
||||
return this._onmessage;
|
||||
}
|
||||
|
||||
postMessage(msg) {
|
||||
// All messages go through the SharedWorker for coordination.
|
||||
// The SharedWorker forwards to the leader's Worker via __to-worker.
|
||||
this._sharedPort.postMessage(msg);
|
||||
}
|
||||
|
||||
addEventListener(type, handler) {
|
||||
this._listeners.push({ type, handler });
|
||||
}
|
||||
|
||||
start() {
|
||||
if (!this._started) {
|
||||
this._started = true;
|
||||
this._sharedPort.start();
|
||||
}
|
||||
}
|
||||
|
||||
_dispatch(event) {
|
||||
if (this._onmessage) this._onmessage(event);
|
||||
for (const { type, handler } of this._listeners) {
|
||||
if (type === 'message') handler(event);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Handle a message arriving from the SharedWorker port.
 *
 * Control messages (all prefixed `__`) are consumed here; anything else is
 * passed through to `_dispatch` for the connection layer. Each branch
 * returns early, so at most one control action runs per message.
 */
_onSharedMessage(event) {
  const msg = event.data;

  // Elected as leader: create the real backend Worker on this tab.
  // The SharedWorker supplies the init message, the budget to restore,
  // and any request that was pending when leadership moved.
  if (msg && msg.type === '__become-leader') {
    this._createLocalWorker(msg.initMsg, msg.budgetToRestore, msg.pendingMsg);
    return;
  }

  // Forward requests from SharedWorker to our local Worker.
  // Silently dropped if this tab has no Worker (e.g. not the leader yet).
  if (msg && msg.type === '__to-worker') {
    if (this.localBackendWorker) {
      this.localBackendWorker.postMessage(msg.msg);
    }
    return;
  }

  // Leadership transfer: this tab is closing the budget but other tabs
  // still need it. Terminate our Worker (don't actually close-budget on
  // the backend) and dispatch a synthetic reply so the UI navigates to
  // show-budgets normally.
  if (msg && msg.type === '__close-and-transfer') {
    logger.log('[WorkerBridge] Leadership transferred — terminating Worker');
    if (this.localBackendWorker) {
      this.localBackendWorker.terminate();
      this.localBackendWorker = null;
    }
    // Only dispatch a synthetic reply if there's an actual close-budget
    // request to complete. When requestId is null the eviction was
    // triggered externally (e.g. another tab deleted this budget).
    if (msg.requestId) {
      this._dispatch({
        data: { type: 'reply', id: msg.requestId, data: {} },
      } as MessageEvent);
    }
    return;
  }

  // Role change notification — informational only; logged for debugging.
  if (msg && msg.type === '__role-change') {
    logger.log(
      `[WorkerBridge] Role: ${msg.role}${msg.budgetId ? ` (budget: ${msg.budgetId})` : ''}`,
    );
    return;
  }

  // Surface SharedWorker console output in this tab's DevTools.
  // Falls back to console.log when msg.level names no console method.
  if (msg && msg.type === '__shared-worker-console') {
    const method = console[msg.level] || console.log;
    method(...msg.args);
    return;
  }

  // Respond to heartbeat pings so the SharedWorker knows this tab is alive.
  if (msg && msg.type === '__heartbeat-ping') {
    this._sharedPort.postMessage({ type: '__heartbeat-pong' });
    return;
  }

  // Everything else goes to the connection layer.
  this._dispatch(event);
}
|
||||
|
||||
/**
 * Spin up the real backend Worker on this tab (called when this tab is
 * elected leader).
 *
 * Terminates any previous local Worker, creates a fresh one from
 * `this.backendWorkerUrl`, wires it to absurd-sql via `initSQLBackend`,
 * installs an onmessage bridge that relays every backend message to the
 * SharedWorker, and finally sends `initMsg` to kick off the backend.
 *
 * `budgetToRestore` and `pendingMsg` are captured by the onmessage closure
 * and consumed (set to null) at most once, on the first 'connect' message.
 */
_createLocalWorker(initMsg, budgetToRestore, pendingMsg) {
  if (this.localBackendWorker) {
    this.localBackendWorker.terminate();
  }
  this.localBackendWorker = new Worker(this.backendWorkerUrl);
  initSQLBackend(this.localBackendWorker);

  // Capture stable references for the closure below; this.localBackendWorker
  // may be replaced/terminated later by a subsequent leadership change.
  const sharedPort = this._sharedPort;
  const localWorker = this.localBackendWorker;
  localWorker.onmessage = workerEvent => {
    const workerMsg = workerEvent.data;
    // absurd-sql internal messages are handled by initSQLBackend
    if (
      workerMsg &&
      workerMsg.type &&
      workerMsg.type.startsWith('__absurd:')
    ) {
      return;
    }
    // NOTE(review): workerMsg.type is read without a null check below;
    // presumably the backend never posts a null/undefined payload past the
    // absurd-sql filter — confirm against the worker protocol.
    //
    // After the backend connects, automatically reload the budget that was
    // open before the leader left (e.g. page refresh). This lets other tabs
    // continue working without being sent to the budget list.
    if (workerMsg.type === 'connect') {
      if (budgetToRestore) {
        logger.log(
          `[WorkerBridge] Backend connected, restoring budget "${budgetToRestore}"`,
        );
        // Consume budgetToRestore so the restore only happens once.
        const id = budgetToRestore;
        budgetToRestore = null;
        localWorker.postMessage({
          id: '__restore-budget',
          name: 'load-budget',
          args: { id },
          catchErrors: true,
        });
        // Tell SharedWorker to track the restore request so
        // currentBudgetId gets updated when the reply arrives.
        sharedPort.postMessage({
          type: '__track-restore',
          requestId: '__restore-budget',
          budgetId: id,
        });
      } else if (pendingMsg) {
        // Replay the request that was in flight when leadership moved.
        const toSend = pendingMsg;
        pendingMsg = null;
        localWorker.postMessage(toSend);
      }
    }
    // Relay every non-absurd message (including 'connect' itself) to the
    // SharedWorker so all tabs see backend replies and events.
    sharedPort.postMessage({ type: '__from-worker', msg: workerMsg });
  };

  localWorker.postMessage(initMsg);
}
|
||||
}
|
||||
3
packages/loot-core/typings/absurd-sql.ts
Normal file
3
packages/loot-core/typings/absurd-sql.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
declare module 'absurd-sql/dist/indexeddb-main-thread' {
|
||||
export function initBackend(worker: Worker): void;
|
||||
}
|
||||
@@ -51,7 +51,13 @@ export default defineConfig(({ mode }) => {
|
||||
external: [],
|
||||
},
|
||||
sourcemap: true,
|
||||
minify: false,
|
||||
minify: isDev ? false : 'terser',
|
||||
terserOptions: {
|
||||
compress: {
|
||||
drop_debugger: false,
|
||||
},
|
||||
mangle: false,
|
||||
},
|
||||
},
|
||||
define: {
|
||||
'process.env': '{}',
|
||||
|
||||
@@ -19,7 +19,13 @@ export default defineConfig(({ mode }) => {
|
||||
fileName: () => `plugin-sw.js`,
|
||||
},
|
||||
sourcemap: true,
|
||||
minify: false,
|
||||
minify: isDev ? false : 'terser',
|
||||
terserOptions: {
|
||||
compress: {
|
||||
drop_debugger: false,
|
||||
},
|
||||
mangle: false,
|
||||
},
|
||||
},
|
||||
define: {
|
||||
'process.env': '{}',
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
---
|
||||
category: Bugfixes
|
||||
authors: [matt-fidd]
|
||||
---
|
||||
|
||||
Fix bank sync account linking modal being disabled when relinking existing accounts
|
||||
@@ -1,6 +0,0 @@
|
||||
---
|
||||
category: Maintenance
|
||||
authors: [actualbudget]
|
||||
---
|
||||
|
||||
Disable bundle minification so production error messages and stack traces are human-readable
|
||||
@@ -1,6 +0,0 @@
|
||||
---
|
||||
category: Enhancements
|
||||
authors: [MatissJanis]
|
||||
---
|
||||
|
||||
CLI: cache the downloaded budget between invocations so read commands within a 60-second TTL skip the server sync, reducing sync-server rate-limit pressure on scripted workflows. New `actual sync` command (with `--status` and `--clear` modes) plus `--cache-ttl`, `--refresh`, `--lock-timeout`, and `--no-lock` flags. Write commands still sync before and after every operation.
|
||||
6
upcoming-release-notes/7545.md
Normal file
6
upcoming-release-notes/7545.md
Normal file
@@ -0,0 +1,6 @@
|
||||
---
|
||||
category: Features
|
||||
authors: [MatissJanis]
|
||||
---
|
||||
|
||||
api: support using in browser context
|
||||
@@ -1,6 +0,0 @@
|
||||
---
|
||||
category: Maintenance
|
||||
authors: [MatissJanis]
|
||||
---
|
||||
|
||||
Disable fail-fast in Electron build workflows to allow all matrix jobs to complete independently.
|
||||
17
yarn.lock
17
yarn.lock
@@ -26,11 +26,16 @@ __metadata:
|
||||
"@actual-app/core": "workspace:*"
|
||||
"@actual-app/crdt": "workspace:*"
|
||||
"@typescript/native-preview": "npm:^7.0.0-dev.20260404.1"
|
||||
absurd-sql: "npm:0.0.54"
|
||||
better-sqlite3: "npm:^12.8.0"
|
||||
compare-versions: "npm:^6.1.1"
|
||||
fake-indexeddb: "npm:^6.2.5"
|
||||
jsdom: "npm:^27.4.0"
|
||||
npm-run-all: "npm:^4.1.5"
|
||||
rollup-plugin-visualizer: "npm:^7.0.1"
|
||||
typescript-strict-plugin: "npm:^2.4.4"
|
||||
vite: "npm:^8.0.5"
|
||||
vite-plugin-node-polyfills: "npm:^0.26.0"
|
||||
vite-plugin-peggy-loader: "npm:^2.0.1"
|
||||
vitest: "npm:^4.1.2"
|
||||
languageName: unknown
|
||||
@@ -55,12 +60,10 @@ __metadata:
|
||||
dependencies:
|
||||
"@actual-app/api": "workspace:*"
|
||||
"@types/node": "npm:^22.19.17"
|
||||
"@types/proper-lockfile": "npm:^4"
|
||||
"@typescript/native-preview": "npm:^7.0.0-dev.20260404.1"
|
||||
cli-table3: "npm:^0.6.5"
|
||||
commander: "npm:^14.0.3"
|
||||
cosmiconfig: "npm:^9.0.1"
|
||||
proper-lockfile: "npm:^4.1.2"
|
||||
rollup-plugin-visualizer: "npm:^7.0.1"
|
||||
vite: "npm:^8.0.5"
|
||||
vitest: "npm:^4.1.2"
|
||||
@@ -246,7 +249,6 @@ __metadata:
|
||||
"@use-gesture/react": "npm:^10.3.1"
|
||||
"@vitejs/plugin-basic-ssl": "npm:^2.3.0"
|
||||
"@vitejs/plugin-react": "npm:^6.0.1"
|
||||
absurd-sql: "npm:0.0.54"
|
||||
auto-text-size: "npm:^0.2.3"
|
||||
babel-plugin-react-compiler: "npm:^1.0.0"
|
||||
cmdk: "npm:^1.1.1"
|
||||
@@ -9962,15 +9964,6 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@types/proper-lockfile@npm:^4":
|
||||
version: 4.1.4
|
||||
resolution: "@types/proper-lockfile@npm:4.1.4"
|
||||
dependencies:
|
||||
"@types/retry": "npm:*"
|
||||
checksum: 10/b0d1b8e84a563b2c5f869f7ff7542b1d83dec03d1c9d980847cbb189865f44b4a854673cdde59767e41bcb8c31932e613ac43822d358a6f8eede6b79ccfceb1d
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@types/qs@npm:*":
|
||||
version: 6.14.0
|
||||
resolution: "@types/qs@npm:6.14.0"
|
||||
|
||||
Reference in New Issue
Block a user