Compare commits

...

32 Commits

Author SHA1 Message Date
Guillaume Jacquart
8908516934 Fix test for repo 2025-12-05 11:28:49 +01:00
Guillaume Jacquart
0e4deabf7e add resolver fields to the credentials 2025-12-05 11:28:28 +01:00
Suguru Inoue
132f9c6f70 fix(editor): Improve table rendering in chat history (no-changelog) (#22738) 2025-12-05 11:19:41 +01:00
Iván Ovejero
064f90ce1e fix: Add HOME env var to distroless runners image (#22796) 2025-12-05 11:15:29 +01:00
Andreas Fitzek
e78250f94c chore(core): Implement generic credential storage provider (#22662) 2025-12-05 11:04:26 +01:00
renovate[bot]
fcc6d86326 chore: Update peter-evans/create-pull-request digest to 84ae59a (#22376)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Declan Carroll <declan@n8n.io>
2025-12-05 10:03:18 +00:00
Jaakko Husso
480d1e609b feat(core): Put Chat users behind license checks (no-changelog) (#22781) 2025-12-05 11:38:12 +02:00
Nikhil Kuriakose
b22654709a feat(editor): Rename columns in data tables (#21747) 2025-12-05 10:06:54 +01:00
Suguru Inoue
8d7f438e1f fix(editor): Fix chat telemetry (no-changelog) (#22793) 2025-12-05 10:04:20 +01:00
Milorad FIlipović
829135ceee feat(editor): Open template setup modal automatically (no-changelog) (#22596) 2025-12-05 09:54:05 +01:00
Declan Carroll
3f382a0369 test: Fixing flaky/failing workflow action test (#22792) 2025-12-05 08:37:55 +00:00
Jaakko Husso
54ca0c1abc fix(core): Filter out workflows from custom agents that use too old agents (no-changelog) (#22752) 2025-12-05 00:53:02 +02:00
Artem Sorokin
e219e7e915 test: Move auth tests to separate folder (#22726) 2025-12-04 23:01:55 +01:00
Declan Carroll
6e77f0eb81 ci: GH bot has a bypass for our CLA (#22773) 2025-12-04 21:23:59 +00:00
Artem Sorokin
813d33372c test: Move AI features tests to separate folder (#22727) 2025-12-04 21:43:07 +01:00
Artem Sorokin
bcfc95b08f test: Move workflow executions to separate folder (#22723) 2025-12-04 21:01:23 +01:00
Artem Sorokin
ba1ac9e1a8 test: Move credentials tests to separate folder (#22724) 2025-12-04 21:01:09 +01:00
Artem Sorokin
8928522991 test: Reorganize Playwright settings tests to match UI structure (#22618) 2025-12-04 21:00:52 +01:00
Guillaume Jacquart
ad56240013 fix(core): Hide migration rule issues not relevant to cloud (#22749) 2025-12-04 20:20:35 +01:00
Guillaume Jacquart
b8d045b050 feat(core): Add credential resolver service for CRUD operations (#22653) 2025-12-04 20:20:24 +01:00
mfsiega
803ab42164 fix(core): During partial execution don't include loop as start node if the loop isn't closed (#22555)
Co-authored-by: Danny Martini <danny@n8n.io>
2025-12-04 18:20:33 +01:00
mfsiega
3026a813b0 fix(core, editor): Move single webhook trigger check to the backend (#22450)
Co-authored-by: Danny Martini <danny@n8n.io>
2025-12-04 18:20:13 +01:00
Declan Carroll
5851265ded test: Fixing task runner test (#22756) 2025-12-04 16:07:20 +01:00
Jaakko Husso
f3fa3f9c30 fix(core): Address chat feedback items (no-changelog) (#22725) 2025-12-04 15:17:59 +02:00
Michael Drury
0866f644b1 chore(ai-builder): Telemetry updates for pinned data (#22625) 2025-12-04 12:54:36 +00:00
Iván Ovejero
727a12da56 refactor(core): Mark native Python as stable (#22737) 2025-12-04 13:34:48 +01:00
Declan Carroll
70aad196d6 ci: Use multimain as default for e2e (#22255) 2025-12-04 12:23:11 +00:00
Declan Carroll
31e5b4590b ci: Add consolidated check job so we can change branch protection rules (#22732) 2025-12-04 12:01:52 +00:00
Benjamin Schroth
599c6ebe98 fix(editor): Don't create duplicate placeholder nodes on agent failure (#22715) 2025-12-04 12:32:05 +01:00
Raúl Gómez Morales
f2eb85dc08 fix(editor): Fix correct $fromAi expression showing as error (#22711) 2025-12-04 11:52:45 +01:00
Suguru Inoue
5fba6c9f2e fix(editor): Syntax highlighting in chat message not working (no-changelog) (#22721) 2025-12-04 11:51:32 +01:00
Guillaume Jacquart
9913991d20 fix(core): Do not prevent credential save if property has default value (#22720) 2025-12-04 11:09:50 +01:00
172 changed files with 7067 additions and 1991 deletions

View File

@@ -5,3 +5,4 @@ self-hosted-runner:
- blacksmith-2vcpu-ubuntu-2204-arm
- blacksmith-4vcpu-ubuntu-2204-arm
- blacksmith-8vcpu-ubuntu-2204
- ubuntu-slim

View File

@@ -123,24 +123,26 @@ jobs:
with:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
e2e-test:
e2e-tests:
name: E2E Tests
needs: [install-and-build, unit-test, typecheck, lint]
if: |
always() &&
needs.install-and-build.result == 'success' &&
needs.unit-test.result != 'failure' &&
needs.typecheck.result != 'failure' &&
needs.lint.result != 'failure'
uses: ./.github/workflows/playwright-test-reusable.yml
needs: [install-and-build]
if: needs.install-and-build.outputs.non_python_changed == 'true'
uses: ./.github/workflows/playwright-test-ci.yml
secrets: inherit
e2e-checks:
name: E2E - Checks
runs-on: ubuntu-latest
needs: [e2e-test]
# This job is required by GitHub branch protection rules.
# PRs cannot be merged unless this job passes.
# If you add/remove jobs that should block merging, update the 'needs' array below.
required-checks:
name: Required Checks
needs: [install-and-build, unit-test, typecheck, lint, e2e-tests]
if: always()
runs-on: ubuntu-slim
steps:
- name: Fail if E2E tests failed
if: needs.e2e-test.result == 'failure'
- name: Fail if any required job failed or was skipped unexpectedly
# The non_python_changed check allows jobs to be skipped for python-only changes,
# since those jobs don't run when only python files are modified.
if: |
contains(needs.*.result, 'failure') ||
(needs.install-and-build.outputs.non_python_changed == 'true' && contains(needs.*.result, 'skipped'))
run: exit 1

View File

@@ -42,18 +42,6 @@ on:
required: false
type: string
pull_request:
types:
- opened
- ready_for_review
paths:
- '.github/workflows/docker-build-push.yml'
- '.github/scripts/docker/docker-config.mjs'
- '.github/scripts/docker/docker-tags.mjs'
- 'docker/images/n8n/Dockerfile'
- 'docker/images/runners/Dockerfile'
- 'docker/images/runners/Dockerfile.distroless'
jobs:
determine-build-context:
name: Determine Build Context

View File

@@ -1,29 +0,0 @@
name: Playwright Tests - Nightly
on:
schedule:
- cron: '0 4 * * *'
workflow_dispatch:
inputs:
image:
description: 'Docker image to test against'
required: false
default: 'n8nio/n8n:nightly'
type: string
push:
branches:
- ci-containers-nightly
jobs:
test-configurations:
strategy:
fail-fast: false
matrix:
config: [standard, postgres]
name: Test ${{ matrix.config }}
uses: ./.github/workflows/playwright-test-reusable.yml
with:
test-mode: docker-pull
docker-image: ${{ github.event.inputs.image || 'n8nio/n8n:nightly' }}
test-command: pnpm --filter=n8n-playwright test:container:${{ matrix.config }}
secrets: inherit

View File

@@ -0,0 +1,52 @@
name: E2E Tests for CI
on:
workflow_call:
jobs:
# Multi-main: postgres + redis + caddy + 2 mains + 1 worker
multi-main-ui:
name: 'Multi-Main: UI'
uses: ./.github/workflows/playwright-test-reusable.yml
with:
test-mode: docker-build
test-command: pnpm --filter=n8n-playwright test:container:multi-main:ui
shards: '[1, 2, 3, 4, 5, 6, 7, 8]'
runner: blacksmith-4vcpu-ubuntu-2204
workers: '1'
secrets: inherit
multi-main-isolated:
name: 'Multi-Main: Isolated'
uses: ./.github/workflows/playwright-test-reusable.yml
with:
test-mode: docker-build
test-command: pnpm --filter=n8n-playwright test:container:multi-main:isolated
shards: '[1]'
runner: blacksmith-4vcpu-ubuntu-2204
workers: '1'
secrets: inherit
# Standard: Single n8n instance with SQLite
# TODO: Enable after confirmed costs with currents/blacksmith
# standard-ui:
# name: 'Standard: UI'
# uses: ./.github/workflows/playwright-test-reusable.yml
# with:
# test-mode: docker-build
# test-command: pnpm --filter=n8n-playwright test:container:standard:ui
# shards: '[1, 2, 3, 4, 5]'
# runner: blacksmith-2vcpu-ubuntu-2204
# workers: '2'
# secrets: inherit
# standard-isolated:
# name: 'Standard: Isolated'
# uses: ./.github/workflows/playwright-test-reusable.yml
# with:
# test-mode: docker-build
# test-command: pnpm --filter=n8n-playwright test:container:standard:isolated
# shards: '[1]'
# runner: blacksmith-2vcpu-ubuntu-2204
# workers: '1'
# secrets: inherit

View File

@@ -1,14 +0,0 @@
name: Run Playwright Tests (Docker Build)
# This workflow is used to run Playwright tests in a Docker container built from the current branch
on:
workflow_call:
workflow_dispatch:
jobs:
build-and-test:
uses: ./.github/workflows/playwright-test-reusable.yml
with:
test-mode: docker-build
test-command: pnpm --filter=n8n-playwright test:container:standard
secrets: inherit

View File

@@ -32,6 +32,11 @@ on:
required: false
default: ''
type: string
runner:
description: 'GitHub runner to use'
required: false
default: 'blacksmith-2vcpu-ubuntu-2204'
type: string
secrets:
CURRENTS_RECORD_KEY:
@@ -42,6 +47,10 @@ on:
required: false
QA_PERFORMANCE_METRICS_WEBHOOK_PASSWORD:
required: false
N8N_LICENSE_ACTIVATION_KEY:
required: false
N8N_ENCRYPTION_KEY:
required: false
env:
PLAYWRIGHT_BROWSERS_PATH: packages/testing/playwright/ms-playwright-cache
@@ -49,10 +58,13 @@ env:
# Disable Ryuk to avoid issues with Docker since it needs privileged access, containers are cleaned on teardown anyway
TESTCONTAINERS_RYUK_DISABLED: true
PLAYWRIGHT_WORKERS: ${{ inputs.workers || '2' }} # Configurable workers, defaults to 2 to reduce resource contention
# Must match CI's COVERAGE_ENABLED to ensure Turbo cache hits (it's a globalEnv in turbo.json)
COVERAGE_ENABLED: 'true'
jobs:
test:
runs-on: blacksmith-2vcpu-ubuntu-2204
runs-on: ${{ inputs.runner }}
timeout-minutes: 20
strategy:
fail-fast: false
matrix:
@@ -101,3 +113,4 @@ jobs:
QA_PERFORMANCE_METRICS_WEBHOOK_URL: ${{ secrets.QA_PERFORMANCE_METRICS_WEBHOOK_URL }}
QA_PERFORMANCE_METRICS_WEBHOOK_USER: ${{ secrets.QA_PERFORMANCE_METRICS_WEBHOOK_USER }}
QA_PERFORMANCE_METRICS_WEBHOOK_PASSWORD: ${{ secrets.QA_PERFORMANCE_METRICS_WEBHOOK_PASSWORD }}
N8N_LICENSE_ACTIVATION_KEY: ${{ secrets.N8N_LICENSE_ACTIVATION_KEY }}

View File

@@ -137,7 +137,7 @@ jobs:
if: steps.verify_file.outputs.file_exists == 'true'
# Pin v7.0.8
uses: peter-evans/create-pull-request@18e469570b1cf0dfc11d60ec121099f8ff3e617a
uses: peter-evans/create-pull-request@84ae59a2cdc2258d6fa0732dd66352dddae2a412
with:
token: ${{ steps.generate_token.outputs.token }}

View File

@@ -56,5 +56,5 @@ jobs:
branch: update-node-popularity
base: master
delete-branch: true
author: n8n Bot <191478365+n8n-bot@users.noreply.github.com>
committer: n8n Bot <191478365+n8n-bot@users.noreply.github.com>
author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>

View File

@@ -172,7 +172,8 @@ ARG N8N_VERSION=snapshot
ARG N8N_RELEASE_TYPE=dev
ENV NODE_ENV=production \
N8N_RELEASE_TYPE=${N8N_RELEASE_TYPE}
N8N_RELEASE_TYPE=${N8N_RELEASE_TYPE} \
HOME=/home/runner
# Copy everything from the prepared runtime filesystem
COPY --from=runtime-prep --chown=root:root /runtime/ /

View File

@@ -267,6 +267,27 @@ export const chatAttachmentSchema = z.object({
fileName: z.string(),
});
export const isValidTimeZone = (tz: string): boolean => {
try {
// Throws if invalid timezone
new Intl.DateTimeFormat('en-US', { timeZone: tz });
return true;
} catch {
return false;
}
};
export const StrictTimeZoneSchema = z
.string()
.min(1)
.max(50)
.regex(/^[A-Za-z0-9_/+-]+$/)
.refine(isValidTimeZone, {
message: 'Unknown or invalid time zone',
});
export const TimeZoneSchema = StrictTimeZoneSchema.optional().catch(undefined);
export type ChatAttachment = z.infer<typeof chatAttachmentSchema>;
export class ChatHubSendMessageRequest extends Z.class({
@@ -283,7 +304,8 @@ export class ChatHubSendMessageRequest extends Z.class({
),
tools: z.array(INodeSchema),
attachments: z.array(chatAttachmentSchema),
agentName: z.string(),
agentName: z.string().optional(),
timeZone: TimeZoneSchema,
}) {}
export class ChatHubRegenerateMessageRequest extends Z.class({
@@ -294,6 +316,7 @@ export class ChatHubRegenerateMessageRequest extends Z.class({
name: z.string(),
}),
),
timeZone: TimeZoneSchema,
}) {}
export class ChatHubEditMessageRequest extends Z.class({
@@ -306,6 +329,7 @@ export class ChatHubEditMessageRequest extends Z.class({
name: z.string(),
}),
),
timeZone: TimeZoneSchema,
}) {}
export class ChatHubUpdateConversationRequest extends Z.class({

View File

@@ -0,0 +1,7 @@
import { Z } from 'zod-class';
import { dataTableColumnNameSchema } from '../../schemas/data-table.schema';
export class RenameDataTableColumnDto extends Z.class({
name: dataTableColumnNameSchema,
}) {}

View File

@@ -99,6 +99,7 @@ export { CreateDataTableColumnDto } from './data-table/create-data-table-column.
export { AddDataTableRowsDto } from './data-table/add-data-table-rows.dto';
export { AddDataTableColumnDto } from './data-table/add-data-table-column.dto';
export { MoveDataTableColumnDto } from './data-table/move-data-table-column.dto';
export { RenameDataTableColumnDto } from './data-table/rename-data-table-column.dto';
export {
OAuthClientResponseDto,

View File

@@ -87,7 +87,9 @@ type EntityName =
| 'AuthorizationCode'
| 'AccessToken'
| 'RefreshToken'
| 'UserConsent';
| 'UserConsent'
| 'DynamicCredentialEntry'
| 'DynamicCredentialResolver';
/**
* Truncate specific DB tables in a test DB.

View File

@@ -28,6 +28,7 @@ export const LOG_SCOPES = [
'chat-hub',
'breaking-changes',
'circuit-breaker',
'dynamic-credentials',
] as const;
export type LogScope = (typeof LOG_SCOPES)[number];

View File

@@ -42,6 +42,25 @@ export class CredentialsEntity extends WithTimestampsAndStringId implements ICre
@Column({ default: false })
isGlobal: boolean;
/**
* Whether the credential can be dynamically resolved by a resolver.
*/
@Column({ default: false })
isResolvable: boolean;
/**
* Whether the credential resolver should allow falling back to static credentials
* if dynamic resolution fails.
*/
@Column({ default: false })
resolvableAllowFallback: boolean;
/**
* ID of the dynamic credential resolver associated with this credential.
*/
@Column({ type: 'varchar', nullable: true })
resolverId?: string;
toJSON() {
const { shared, ...rest } = this;
return rest;

View File

@@ -0,0 +1,31 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
const tableName = 'dynamic_credential_entry';
export class AddDynamicCredentialEntryTable1764689388394 implements ReversibleMigration {
async up({ schemaBuilder: { createTable, column } }: MigrationContext) {
await createTable(tableName)
.withColumns(
column('credential_id').varchar(16).primary.notNull,
column('subject_id').varchar(16).primary.notNull,
column('resolver_id').varchar(16).primary.notNull,
column('data').text.notNull,
)
.withTimestamps.withForeignKey('credential_id', {
tableName: 'credentials_entity',
columnName: 'id',
onDelete: 'CASCADE',
})
.withForeignKey('resolver_id', {
tableName: 'dynamic_credential_resolver',
columnName: 'id',
onDelete: 'CASCADE',
})
.withIndexOn(['subject_id'])
.withIndexOn(['resolver_id']);
}
async down({ schemaBuilder: { dropTable } }: MigrationContext) {
await dropTable(tableName);
}
}

View File

@@ -0,0 +1,42 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
const credentialsTableName = 'credentials_entity';
const resolverTableName = 'dynamic_credential_resolver';
const FOREIGN_KEY_NAME = 'credentials_entity_resolverId_foreign';
export class AddResolvableFieldsToCredentials1764689448000 implements ReversibleMigration {
async up({ schemaBuilder: { addColumns, addForeignKey, column } }: MigrationContext) {
// Add isResolvable, resolvableAllowFallback, and resolverId columns to credentials_entity
await addColumns(credentialsTableName, [
column('isResolvable').bool.notNull.default(false),
column('resolvableAllowFallback').bool.notNull.default(false),
column('resolverId').varchar(16),
]);
// Add foreign key constraint
await addForeignKey(
credentialsTableName,
'resolverId',
[resolverTableName, 'id'],
FOREIGN_KEY_NAME,
'SET NULL',
);
}
async down({ schemaBuilder: { dropColumns, dropForeignKey } }: MigrationContext) {
// Drop foreign key constraint
await dropForeignKey(
credentialsTableName,
'resolverId',
[resolverTableName, 'id'],
FOREIGN_KEY_NAME,
);
// Drop columns from credentials_entity
await dropColumns(credentialsTableName, [
'isResolvable',
'resolvableAllowFallback',
'resolverId',
]);
}
}

View File

@@ -121,6 +121,8 @@ import { CreateBinaryDataTable1763716655000 } from '../common/1763716655000-Crea
import { CreateWorkflowPublishHistoryTable1764167920585 } from '../common/1764167920585-CreateWorkflowPublishHistoryTable';
import { AddCreatorIdToProjectTable1764276827837 } from '../common/1764276827837-AddCreatorIdToProjectTable';
import { CreateDynamicCredentialResolverTable1764682447000 } from '../common/1764682447000-CreateCredentialResolverTable';
import { AddDynamicCredentialEntryTable1764689388394 } from '../common/1764689388394-AddDynamicCredentialEntryTable';
import { AddResolvableFieldsToCredentials1764689448000 } from '../common/1764689448000-AddResolvableFieldsToCredentials';
import type { Migration } from '../migration-types';
export const mysqlMigrations: Migration[] = [
@@ -247,4 +249,6 @@ export const mysqlMigrations: Migration[] = [
CreateWorkflowPublishHistoryTable1764167920585,
AddCreatorIdToProjectTable1764276827837,
CreateDynamicCredentialResolverTable1764682447000,
AddDynamicCredentialEntryTable1764689388394,
AddResolvableFieldsToCredentials1764689448000,
];

View File

@@ -121,6 +121,8 @@ import { CreateBinaryDataTable1763716655000 } from '../common/1763716655000-Crea
import { CreateWorkflowPublishHistoryTable1764167920585 } from '../common/1764167920585-CreateWorkflowPublishHistoryTable';
import { AddCreatorIdToProjectTable1764276827837 } from '../common/1764276827837-AddCreatorIdToProjectTable';
import { CreateDynamicCredentialResolverTable1764682447000 } from '../common/1764682447000-CreateCredentialResolverTable';
import { AddDynamicCredentialEntryTable1764689388394 } from '../common/1764689388394-AddDynamicCredentialEntryTable';
import { AddResolvableFieldsToCredentials1764689448000 } from '../common/1764689448000-AddResolvableFieldsToCredentials';
import type { Migration } from '../migration-types';
export const postgresMigrations: Migration[] = [
@@ -247,4 +249,6 @@ export const postgresMigrations: Migration[] = [
CreateWorkflowPublishHistoryTable1764167920585,
AddCreatorIdToProjectTable1764276827837,
CreateDynamicCredentialResolverTable1764682447000,
AddDynamicCredentialEntryTable1764689388394,
AddResolvableFieldsToCredentials1764689448000,
];

View File

@@ -117,6 +117,8 @@ import { ChangeOAuthStateColumnToUnboundedVarchar1763572724000 } from '../common
import { CreateBinaryDataTable1763716655000 } from '../common/1763716655000-CreateBinaryDataTable';
import { CreateWorkflowPublishHistoryTable1764167920585 } from '../common/1764167920585-CreateWorkflowPublishHistoryTable';
import { CreateDynamicCredentialResolverTable1764682447000 } from '../common/1764682447000-CreateCredentialResolverTable';
import { AddDynamicCredentialEntryTable1764689388394 } from '../common/1764689388394-AddDynamicCredentialEntryTable';
import { AddResolvableFieldsToCredentials1764689448000 } from '../common/1764689448000-AddResolvableFieldsToCredentials';
import type { Migration } from '../migration-types';
const sqliteMigrations: Migration[] = [
@@ -239,6 +241,8 @@ const sqliteMigrations: Migration[] = [
CreateWorkflowPublishHistoryTable1764167920585,
AddCreatorIdToProjectTable1764276827837,
CreateDynamicCredentialResolverTable1764682447000,
AddDynamicCredentialEntryTable1764689388394,
AddResolvableFieldsToCredentials1764689448000,
];
export { sqliteMigrations };

View File

@@ -981,4 +981,16 @@ export class WorkflowRepository extends Repository<WorkflowEntity> {
return await qb.getMany();
}
/**
* Returns if the workflow is stored as `active`.
*
* @important Do not confuse with `ActiveWorkflows.isActive()`,
* which checks if the workflow is active in memory.
*/
async isActive(workflowId: string) {
const workflow = await this.findOne({ select: ['activeVersionId'], where: { id: workflowId } });
return !!workflow?.activeVersionId;
}
}

View File

@@ -16,13 +16,18 @@ export interface BaseEntity {
reload(): Promise<void>;
}
export interface TimestampedEntity {
export interface TimestampedIdEntity {
id: string;
createdAt: Date;
updatedAt: Date;
}
export type EntityClass = new () => BaseEntity | TimestampedEntity;
export interface TimestampedEntity {
createdAt: Date;
updatedAt: Date;
}
export type EntityClass = new () => BaseEntity | TimestampedIdEntity | TimestampedEntity;
export type ModuleSettings = Record<string, unknown>;
export type ModuleContext = Record<string, unknown>;

View File

@@ -323,7 +323,7 @@ export class ChatTrigger extends Node {
},
{
displayName:
'Chat will be live at the URL above once you activate this workflow. Live executions will show up in the executions tab',
'Chat will be live at the URL above once this workflow is published. Live executions will show up in the executions tab',
name: 'hostedChatNotice',
type: 'notice',
displayOptions: {
@@ -336,7 +336,7 @@ export class ChatTrigger extends Node {
},
{
displayName:
'Follow the instructions <a href="https://www.npmjs.com/package/@n8n/chat" target="_blank">here</a> to embed chat in a webpage (or just call the webhook URL at the top of this section). Chat will be live once you activate this workflow',
'Follow the instructions <a href="https://www.npmjs.com/package/@n8n/chat" target="_blank">here</a> to embed chat in a webpage (or just call the webhook URL at the top of this section). Chat will be live once you publish this workflow',
name: 'embeddedChatNotice',
type: 'notice',
displayOptions: {
@@ -406,6 +406,18 @@ export class ChatTrigger extends Node {
},
},
},
{
displayName:
'Your n8n users will be able to use this agent in <a href="/home/chat/" target="_blank">Chat</a> once this workflow is published. Make sure to share this workflow with at least viewer access to all users who should use it. Currently, only streaming response mode is supported.',
name: 'availableInChatNotice',
type: 'notice',
displayOptions: {
show: {
availableInChat: [true],
},
},
default: '',
},
{
displayName: 'Agent Name',
name: 'agentName',

View File

@@ -138,21 +138,6 @@ export class ActiveWorkflowManager {
return this.activeWorkflows.allActiveWorkflows();
}
/**
* Returns if the workflow is stored as `active`.
*
* @important Do not confuse with `ActiveWorkflows.isActive()`,
* which checks if the workflow is active in memory.
*/
async isActive(workflowId: WorkflowId) {
const workflow = await this.workflowRepository.findOne({
select: ['activeVersionId'],
where: { id: workflowId },
});
return !!workflow?.activeVersionId;
}
/**
* Register workflow-defined webhooks in the `workflow_entity` table.
*/

View File

@@ -3,6 +3,7 @@ import { Logger } from '@n8n/backend-common';
import type { BooleanLicenseFeature, NumericLicenseFeature } from '@n8n/constants';
import { LICENSE_FEATURES, LICENSE_QUOTAS, UNLIMITED_LICENSE_QUOTA } from '@n8n/constants';
import {
AuthRolesService,
GLOBAL_ADMIN_ROLE,
GLOBAL_MEMBER_ROLE,
GLOBAL_OWNER_ROLE,
@@ -191,6 +192,7 @@ export class E2EController {
await this.resetLogStreaming();
await this.removeActiveWorkflows();
await this.truncateAll();
await this.reseedRolesAndScopes();
await this.resetCache();
await this.setupUserManagement(req.body.owner, req.body.members, req.body.admin);
}
@@ -304,6 +306,13 @@ export class E2EController {
}
}
private async reseedRolesAndScopes() {
// Re-initialize scopes and roles after truncation so that foreign keys
// from users and project relations can be created safely, especially
// on databases that strictly enforce foreign keys like Postgres.
await Container.get(AuthRolesService).init();
}
private async setupUserManagement(
owner: UserSetupPayload,
members: UserSetupPayload[],

View File

@@ -1,14 +1,15 @@
import type { LicenseState } from '@n8n/backend-common';
import type { AuthenticatedRequest, SharedCredentialsRepository, CredentialsEntity } from '@n8n/db';
import { GLOBAL_OWNER_ROLE, GLOBAL_MEMBER_ROLE } from '@n8n/db';
import { mock } from 'jest-mock-extended';
import { createRawProjectData } from '@/__tests__/project.test-data';
import type { EventService } from '@/events/event.service';
import { createdCredentialsWithScopes, createNewCredentialsPayload } from './credentials.test-data';
import type { CredentialsFinderService } from '../credentials-finder.service';
import { CredentialsController } from '../credentials.controller';
import type { CredentialsService } from '../credentials.service';
import type { CredentialsFinderService } from '../credentials-finder.service';
import { createRawProjectData } from '@/__tests__/project.test-data';
import type { EventService } from '@/events/event.service';
import type { CredentialRequest } from '@/requests';
describe('CredentialsController', () => {
@@ -16,13 +17,14 @@ describe('CredentialsController', () => {
const credentialsService = mock<CredentialsService>();
const sharedCredentialsRepository = mock<SharedCredentialsRepository>();
const credentialsFinderService = mock<CredentialsFinderService>();
const licenseState = mock<LicenseState>();
const credentialsController = new CredentialsController(
mock(),
credentialsService,
mock(),
mock(),
mock(),
licenseState,
mock(),
mock(),
sharedCredentialsRepository,
@@ -126,7 +128,7 @@ describe('CredentialsController', () => {
] as any);
});
it('should allow owner to set isGlobal to true', async () => {
it('should not allow owner to set isGlobal to true if not licensed', async () => {
// ARRANGE
const ownerReq = {
user: { id: 'owner-id', role: GLOBAL_OWNER_ROLE },
@@ -139,6 +141,34 @@ describe('CredentialsController', () => {
},
} as unknown as CredentialRequest.Update;
licenseState.isSharingLicensed.mockReturnValue(false);
credentialsFinderService.findCredentialForUser.mockResolvedValue(existingCredential);
// ACT
await expect(credentialsController.updateCredentials(ownerReq)).rejects.toThrowError(
'You are not licensed for sharing credentials',
);
// ASSERT
expect(credentialsService.update).not.toHaveBeenCalled();
});
it('should allow owner to set isGlobal to true if licensed', async () => {
// ARRANGE
const ownerReq = {
user: { id: 'owner-id', role: GLOBAL_OWNER_ROLE },
params: { credentialId },
body: {
name: 'Updated Credential',
type: 'apiKey',
data: { apiKey: 'updated-key' },
isGlobal: true,
},
} as unknown as CredentialRequest.Update;
licenseState.isSharingLicensed.mockReturnValue(true);
credentialsFinderService.findCredentialForUser.mockResolvedValue(existingCredential);
credentialsService.update.mockResolvedValue({
...existingCredential,
@@ -163,7 +193,7 @@ describe('CredentialsController', () => {
});
});
it('should allow owner to set isGlobal to false', async () => {
it('should allow owner to set isGlobal to false if licensed', async () => {
// ARRANGE
const globalCredential = mock<CredentialsEntity>({
...existingCredential,
@@ -180,6 +210,8 @@ describe('CredentialsController', () => {
},
} as unknown as CredentialRequest.Update;
licenseState.isSharingLicensed.mockReturnValue(true);
credentialsFinderService.findCredentialForUser.mockResolvedValue(globalCredential);
credentialsService.update.mockResolvedValue({
...globalCredential,
@@ -198,7 +230,7 @@ describe('CredentialsController', () => {
);
});
it('should prevent non-owner from changing isGlobal', async () => {
it('should prevent non-owner from changing isGlobal if licensed', async () => {
// ARRANGE
const memberReq = {
user: { id: 'member-id', role: GLOBAL_MEMBER_ROLE },
@@ -211,6 +243,8 @@ describe('CredentialsController', () => {
},
} as unknown as CredentialRequest.Update;
licenseState.isSharingLicensed.mockReturnValue(true);
credentialsFinderService.findCredentialForUser.mockResolvedValue(existingCredential);
// ACT
@@ -235,6 +269,8 @@ describe('CredentialsController', () => {
},
} as unknown as CredentialRequest.Update;
licenseState.isSharingLicensed.mockReturnValue(true);
credentialsFinderService.findCredentialForUser.mockResolvedValue({
...existingCredential,
isGlobal: true,

View File

@@ -1479,6 +1479,75 @@ describe('CredentialsService', () => {
// ASSERT
expect(savedCredential.isGlobal).toBeUndefined();
});
it('should allow creating credential when required field has default value and is not provided', async () => {
// ARRANGE
// Mock credential properties with a required field that has a default value
credentialsHelper.getCredentialsProperties.mockReturnValue([
{
displayName: 'Host',
name: 'host',
type: 'string',
required: true,
default: 'https://generativelanguage.googleapis.com',
},
{
displayName: 'API Key',
name: 'apiKey',
type: 'string',
required: true,
default: '',
},
] as any);
// Payload without 'host' field - should use default value
const payload = {
name: 'Google Gemini Credential',
type: 'googlePalmApi',
data: { apiKey: 'test-api-key' }, // host is not provided
projectId: 'project-1',
};
// @ts-expect-error - Mocking manager for testing
credentialsRepository.manager = {
transaction: jest.fn().mockImplementation(async (callback) => {
const mockManager = {
save: jest.fn().mockImplementation(async (entity) => {
return { ...entity, id: 'new-cred-id' };
}),
};
return await callback(mockManager);
}),
};
// ACT & ASSERT
await expect(service.createUnmanagedCredential(payload, ownerUser)).resolves.toBeDefined();
});
it('should throw error when required field without default value is not provided', async () => {
// ARRANGE
credentialsHelper.getCredentialsProperties.mockReturnValue([
{
displayName: 'API Key',
name: 'apiKey',
type: 'string',
required: true,
default: '', // Empty default means no valid default
},
] as any);
const payload = {
name: 'Test Credential',
type: 'apiKey',
data: {}, // apiKey is missing
projectId: 'project-1',
};
// ACT & ASSERT
await expect(service.createUnmanagedCredential(payload, ownerUser)).rejects.toThrow(
'The field "apiKey" is mandatory for credentials of type "apiKey"',
);
});
});
describe('createManagedCredential', () => {

View File

@@ -4,7 +4,7 @@ import {
CredentialsGetOneRequestQuery,
GenerateCredentialNameRequestQuery,
} from '@n8n/api-types';
import { Logger } from '@n8n/backend-common';
import { LicenseState, Logger } from '@n8n/backend-common';
import { GlobalConfig } from '@n8n/config';
import {
SharedCredentials,
@@ -40,7 +40,6 @@ import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { EventService } from '@/events/event.service';
import { License } from '@/license';
import { listQueryMiddleware } from '@/middlewares';
import { CredentialRequest } from '@/requests';
import { NamingService } from '@/services/naming.service';
@@ -54,7 +53,7 @@ export class CredentialsController {
private readonly credentialsService: CredentialsService,
private readonly enterpriseCredentialsService: EnterpriseCredentialsService,
private readonly namingService: NamingService,
private readonly license: License,
private readonly licenseState: LicenseState,
private readonly logger: Logger,
private readonly userManagementMailer: UserManagementMailer,
private readonly sharedCredentialsRepository: SharedCredentialsRepository,
@@ -114,7 +113,7 @@ export class CredentialsController {
@Param('credentialId') credentialId: string,
@Query query: CredentialsGetOneRequestQuery,
) {
const { shared, ...credential } = this.license.isSharingEnabled()
const { shared, ...credential } = this.licenseState.isSharingLicensed()
? await this.enterpriseCredentialsService.getOne(
req.user,
credentialId,
@@ -246,6 +245,10 @@ export class CredentialsController {
// Update isGlobal if provided in the payload and user has permission
const isGlobal = body.isGlobal;
if (isGlobal !== undefined && isGlobal !== credential.isGlobal) {
if (!this.licenseState.isSharingLicensed()) {
throw new ForbiddenError('You are not licensed for sharing credentials');
}
const canShareGlobally = hasGlobalScope(req.user, 'credential:shareGlobally');
if (!canShareGlobally) {
throw new ForbiddenError(

View File

@@ -874,8 +874,11 @@ export class CredentialsService {
const credentialProperties = this.credentialsHelper.getCredentialsProperties(type);
for (const property of credentialProperties) {
if (property.required && displayParameter(data, property, null, null)) {
// Check if value is present in data, if not, check if default value exists
const value = data[property.name];
if (value === undefined || value === null || value === '') {
const hasDefault =
property.default !== undefined && property.default !== null && property.default !== '';
if ((value === undefined || value === null || value === '') && !hasDefault) {
throw new BadRequestError(
`The field "${property.name}" is mandatory for credentials of type "${type}"`,
);

View File

@@ -0,0 +1,10 @@
import { UserError } from 'n8n-workflow';
/**
 * Thrown when a trigger service only supports a single registered webhook,
 * so n8n cannot listen for a test execution while the production webhook
 * for the same workflow is still registered.
 *
 * Extends `UserError` because this is an actionable user-facing condition
 * (unpublish the workflow), not an internal failure.
 */
export class SingleWebhookTriggerError extends UserError {
	constructor(triggerName: string) {
		// Keep the trigger name in `extra` so error reporting can group by trigger type.
		const message = `Because of limitations in ${triggerName}, n8n can't listen for test executions at the same time as listening for production ones. Unpublish the workflow to execute.`;
		super(message, { extra: { triggerName } });
	}
}

View File

@@ -1,16 +1,20 @@
import type { InstanceSettingsConfig } from '@n8n/config';
import type { GlobalConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import { SettingsFilePermissionsRule } from '../settings-file-permissions.rule';
describe('SettingsFilePermissionsRule', () => {
let rule: SettingsFilePermissionsRule;
const instanceSettingsConfig = mock<InstanceSettingsConfig>({});
const mockGlobalConfig = mock<GlobalConfig>({
deployment: { type: 'default' },
});
let originalEnvValue: string | undefined;
beforeEach(() => {
rule = new SettingsFilePermissionsRule(instanceSettingsConfig);
rule = new SettingsFilePermissionsRule(mockGlobalConfig);
originalEnvValue = process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS;
// Clear env var before each test
delete process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS;
});
afterEach(() => {
@@ -22,8 +26,21 @@ describe('SettingsFilePermissionsRule', () => {
});
describe('detect()', () => {
it('should not be affected when enforceSettingsFilePermissions is set to false', async () => {
instanceSettingsConfig.enforceSettingsFilePermissions = false;
it('should not be affected on cloud deployments', async () => {
const cloudGlobalConfig = mock<GlobalConfig>({
deployment: { type: 'cloud' },
});
const cloudRule = new SettingsFilePermissionsRule(cloudGlobalConfig);
const result = await cloudRule.detect();
expect(result.isAffected).toBe(false);
expect(result.instanceIssues).toHaveLength(0);
expect(result.recommendations).toHaveLength(0);
});
it('should not be affected when N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS is explicitly set to false', async () => {
process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS = 'false';
const result = await rule.detect();
@@ -32,8 +49,18 @@ describe('SettingsFilePermissionsRule', () => {
expect(result.recommendations).toHaveLength(0);
});
it('should be affected when enforceSettingsFilePermissions is not set to false', async () => {
instanceSettingsConfig.enforceSettingsFilePermissions = true;
it('should not be affected when N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS is explicitly set to true', async () => {
process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS = 'true';
const result = await rule.detect();
expect(result.isAffected).toBe(false);
expect(result.instanceIssues).toHaveLength(0);
expect(result.recommendations).toHaveLength(0);
});
it('should be affected when N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS is not set (default behavior change)', async () => {
// Env var is not set (cleared in beforeEach)
const result = await rule.detect();
expect(result.isAffected).toBe(true);

View File

@@ -1,10 +1,16 @@
import type { GlobalConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import { TaskRunnerDockerImageRule } from '../task-runner-docker-image.rule';
describe('TaskRunnerDockerImageRule', () => {
let rule: TaskRunnerDockerImageRule;
beforeEach(() => {
rule = new TaskRunnerDockerImageRule();
const mockGlobalConfig = mock<GlobalConfig>({
deployment: { type: 'default' },
});
rule = new TaskRunnerDockerImageRule(mockGlobalConfig);
});
describe('getMetadata()', () => {
@@ -18,6 +24,19 @@ describe('TaskRunnerDockerImageRule', () => {
});
describe('detect()', () => {
it('should not be affected on cloud deployments', async () => {
const mockGlobalConfig = mock<GlobalConfig>({
deployment: { type: 'cloud' },
});
const cloudRule = new TaskRunnerDockerImageRule(mockGlobalConfig);
const result = await cloudRule.detect();
expect(result.isAffected).toBe(false);
expect(result.instanceIssues).toHaveLength(0);
expect(result.recommendations).toHaveLength(0);
});
it('should always be affected (informational)', async () => {
const result = await rule.detect();

View File

@@ -1,12 +1,35 @@
import type { TaskRunnersConfig } from '@n8n/config';
import type { GlobalConfig, TaskRunnersConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import { TaskRunnersRule } from '../task-runners.rule';
describe('TaskRunnersRule', () => {
let mockGlobalConfig: GlobalConfig;
beforeEach(() => {
mockGlobalConfig = mock<GlobalConfig>({
deployment: { type: 'default' },
});
});
describe('detect()', () => {
it('should not be affected on cloud deployments', async () => {
const mockConfig = { enabled: false } as TaskRunnersConfig;
const cloudGlobalConfig = mock<GlobalConfig>({
deployment: { type: 'cloud' },
});
const rule = new TaskRunnersRule(mockConfig, cloudGlobalConfig);
const result = await rule.detect();
expect(result.isAffected).toBe(false);
expect(result.instanceIssues).toHaveLength(0);
expect(result.recommendations).toHaveLength(0);
});
it('should not be affected when runners are already enabled', async () => {
const mockConfig = { enabled: true } as TaskRunnersConfig;
const rule = new TaskRunnersRule(mockConfig);
const rule = new TaskRunnersRule(mockConfig, mockGlobalConfig);
const result = await rule.detect();
@@ -16,18 +39,19 @@ describe('TaskRunnersRule', () => {
it('should be affected when runners are not enabled', async () => {
const mockConfig = { enabled: false } as TaskRunnersConfig;
const rule = new TaskRunnersRule(mockConfig);
const rule = new TaskRunnersRule(mockConfig, mockGlobalConfig);
const result = await rule.detect();
expect(result.isAffected).toBe(true);
expect(result.instanceIssues).toHaveLength(1);
expect(result.instanceIssues[0].title).toBe('Task Runners will be enabled by default');
expect(result.recommendations).toHaveLength(3);
});
it('should be affected when runners are explicitly disabled', async () => {
const mockConfig = { enabled: false } as TaskRunnersConfig;
const rule = new TaskRunnersRule(mockConfig);
const rule = new TaskRunnersRule(mockConfig, mockGlobalConfig);
const result = await rule.detect();

View File

@@ -1,4 +1,4 @@
import { InstanceSettingsConfig } from '@n8n/config';
import { GlobalConfig } from '@n8n/config';
import { Service } from '@n8n/di';
import type {
@@ -10,7 +10,7 @@ import { BreakingChangeCategory } from '../../types';
@Service()
export class SettingsFilePermissionsRule implements IBreakingChangeInstanceRule {
constructor(private readonly instanceSettingsConfig: InstanceSettingsConfig) {}
constructor(private readonly globalConfig: GlobalConfig) {}
id: string = 'settings-file-permissions-v2';
@@ -28,9 +28,18 @@ export class SettingsFilePermissionsRule implements IBreakingChangeInstanceRule
}
async detect(): Promise<InstanceDetectionReport> {
// If enforceSettingsFilePermissions is explicitly set to 'false', users are not affected
// because they've configured the system to not enforce file permissions
if (!this.instanceSettingsConfig.enforceSettingsFilePermissions) {
// Not relevant for cloud deployments - cloud manages infrastructure and file permissions
if (this.globalConfig.deployment.type === 'cloud') {
return {
isAffected: false,
instanceIssues: [],
recommendations: [],
};
}
// If N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS is explicitly set to any value, users are not affected
// because they've already handled the configuration and are aware of this setting.
if (process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS) {
return {
isAffected: false,
instanceIssues: [],

View File

@@ -1,3 +1,4 @@
import { GlobalConfig } from '@n8n/config';
import { Service } from '@n8n/di';
import type {
@@ -9,6 +10,8 @@ import { BreakingChangeCategory } from '../../types';
@Service()
export class TaskRunnerDockerImageRule implements IBreakingChangeInstanceRule {
constructor(private readonly globalConfig: GlobalConfig) {}
id: string = 'task-runner-docker-image-v2';
getMetadata(): BreakingChangeRuleMetadata {
@@ -25,6 +28,15 @@ export class TaskRunnerDockerImageRule implements IBreakingChangeInstanceRule {
}
async detect(): Promise<InstanceDetectionReport> {
// Not relevant for cloud deployments - cloud manages Docker images
if (this.globalConfig.deployment.type === 'cloud') {
return {
isAffected: false,
instanceIssues: [],
recommendations: [],
};
}
const result: InstanceDetectionReport = {
isAffected: true,
instanceIssues: [

View File

@@ -1,4 +1,4 @@
import { TaskRunnersConfig } from '@n8n/config';
import { GlobalConfig, TaskRunnersConfig } from '@n8n/config';
import { Service } from '@n8n/di';
import type {
@@ -10,7 +10,10 @@ import { BreakingChangeCategory } from '../../types';
@Service()
export class TaskRunnersRule implements IBreakingChangeInstanceRule {
constructor(private readonly taskRunnersConfig: TaskRunnersConfig) {}
constructor(
private readonly taskRunnersConfig: TaskRunnersConfig,
private readonly globalConfig: GlobalConfig,
) {}
id: string = 'task-runners-v2';
@@ -27,6 +30,15 @@ export class TaskRunnersRule implements IBreakingChangeInstanceRule {
}
async detect(): Promise<InstanceDetectionReport> {
// Not relevant for cloud deployments - cloud manages task runner infrastructure
if (this.globalConfig.deployment.type === 'cloud') {
return {
isAffected: false,
instanceIssues: [],
recommendations: [],
};
}
const result: InstanceDetectionReport = {
isAffected: false,
instanceIssues: [],

View File

@@ -9,6 +9,7 @@ import {
} from '@n8n/db';
import { Service } from '@n8n/di';
import { EntityManager } from '@n8n/typeorm';
import { DateTime } from 'luxon';
import {
AGENT_LANGCHAIN_NODE_TYPE,
CHAT_TRIGGER_NODE_TYPE,
@@ -52,6 +53,7 @@ export class ChatHubWorkflowService {
model: ChatHubConversationModel,
systemMessage: string | undefined,
tools: INode[],
timeZone: string,
trx?: EntityManager,
): Promise<{ workflowData: IWorkflowBase; executionData: IRunExecutionData }> {
return await withTransaction(this.workflowRepository.manager, trx, async (em) => {
@@ -69,6 +71,7 @@ export class ChatHubWorkflowService {
model,
systemMessage,
tools,
timeZone,
});
const newWorkflow = new WorkflowEntity();
@@ -258,6 +261,7 @@ export class ChatHubWorkflowService {
model,
systemMessage,
tools,
timeZone,
}: {
userId: string;
sessionId: ChatSessionId;
@@ -268,9 +272,10 @@ export class ChatHubWorkflowService {
model: ChatHubConversationModel;
systemMessage?: string;
tools: INode[];
timeZone: string;
}) {
const chatTriggerNode = this.buildChatTriggerNode();
const toolsAgentNode = this.buildToolsAgentNode(model, systemMessage);
const toolsAgentNode = this.buildToolsAgentNode(model, timeZone, systemMessage);
const modelNode = this.buildModelNode(credentials, model);
const memoryNode = this.buildMemoryNode(20);
const restoreMemoryNode = this.buildRestoreMemoryNode(history);
@@ -463,7 +468,24 @@ export class ChatHubWorkflowService {
};
}
private buildToolsAgentNode(model: ChatHubConversationModel, systemMessage?: string): INode {
getSystemMessageMetadata(timeZone: string) {
const now = DateTime.now().setZone(timeZone).toISO({
includeOffset: true,
});
return `The user's current local date and time is: ${now} (timezone: ${timeZone}).
When you need to reference “now”, use this date and time.`;
}
private getBaseSystemMessage(timeZone: string) {
return 'You are a helpful assistant.\n' + this.getSystemMessageMetadata(timeZone);
}
private buildToolsAgentNode(
model: ChatHubConversationModel,
timeZone: string,
systemMessage?: string,
): INode {
return {
parameters: {
promptType: 'define',
@@ -474,7 +496,7 @@ export class ChatHubWorkflowService {
model.provider !== 'n8n' && model.provider !== 'custom-agent'
? getMaxContextWindowTokens(model.provider, model.model)
: undefined,
systemMessage,
systemMessage: systemMessage ?? this.getBaseSystemMessage(timeZone),
},
},
type: AGENT_LANGCHAIN_NODE_TYPE,

View File

@@ -23,10 +23,12 @@ import { Logger } from '@n8n/backend-common';
import { ExecutionRepository, IExecutionResponse, User, WorkflowRepository, In } from '@n8n/db';
import { Service } from '@n8n/di';
import type { EntityManager } from '@n8n/typeorm';
import { GlobalConfig } from '@n8n/config';
import type { Response } from 'express';
import { ErrorReporter, InstanceSettings } from 'n8n-core';
import {
CHAT_TRIGGER_NODE_TYPE,
AGENT_LANGCHAIN_NODE_TYPE,
OperationalError,
ManualExecutionCancelledError,
type INodeCredentials,
@@ -104,6 +106,7 @@ export class ChatHubService {
private readonly chatHubSettingsService: ChatHubSettingsService,
private readonly chatHubAttachmentService: ChatHubAttachmentService,
private readonly instanceSettings: InstanceSettings,
private readonly globalConfig: GlobalConfig,
) {}
async getModels(
@@ -938,7 +941,6 @@ export class ChatHubService {
}
const chatTrigger = activeVersion.nodes?.find((node) => node.type === CHAT_TRIGGER_NODE_TYPE);
if (!chatTrigger) {
continue;
}
@@ -948,6 +950,15 @@ export class ChatHubService {
continue;
}
const agentNodes = activeVersion.nodes?.filter(
(node) => node.type === AGENT_LANGCHAIN_NODE_TYPE,
);
// Agents older than this can't do streaming
if (agentNodes.some((node) => node.typeVersion < 2.1)) {
continue;
}
const inputModalities = this.chatHubWorkflowService.parseInputModalities(
chatTriggerParams.options,
);
@@ -1029,7 +1040,9 @@ export class ChatHubService {
previousMessageId,
tools,
attachments,
timeZone,
} = payload;
const tz = timeZone ?? this.globalConfig.generic.timezone;
const credentialId = this.getModelCredential(model, credentials);
@@ -1079,6 +1092,7 @@ export class ChatHubService {
message,
tools,
processedAttachments,
tz,
trx,
);
});
@@ -1125,7 +1139,8 @@ export class ChatHubService {
}
async editMessage(res: Response, user: User, payload: EditMessagePayload) {
const { sessionId, editId, messageId, message, model, credentials } = payload;
const { sessionId, editId, messageId, message, model, credentials, timeZone } = payload;
const tz = timeZone ?? this.globalConfig.generic.timezone;
const workflow = await this.messageRepository.manager.transaction(async (trx) => {
const session = await this.getChatSession(user, sessionId, trx);
@@ -1170,6 +1185,7 @@ export class ChatHubService {
message,
session.tools,
attachments,
tz,
trx,
);
}
@@ -1195,7 +1211,8 @@ export class ChatHubService {
}
async regenerateAIMessage(res: Response, user: User, payload: RegenerateMessagePayload) {
const { sessionId, retryId, model, credentials } = payload;
const { sessionId, retryId, model, credentials, timeZone } = payload;
const tz = timeZone ?? this.globalConfig.generic.timezone;
const {
workflow: { workflowData, executionData },
@@ -1242,6 +1259,7 @@ export class ChatHubService {
message,
session.tools,
attachments,
tz,
trx,
);
@@ -1273,6 +1291,7 @@ export class ChatHubService {
message: string,
tools: INode[],
attachments: IBinaryData[],
timeZone: string,
trx: EntityManager,
) {
if (model.provider === 'n8n') {
@@ -1293,6 +1312,7 @@ export class ChatHubService {
history,
message,
attachments,
timeZone,
trx,
);
}
@@ -1307,6 +1327,7 @@ export class ChatHubService {
undefined,
tools,
attachments,
timeZone,
trx,
);
}
@@ -1321,6 +1342,7 @@ export class ChatHubService {
systemMessage: string | undefined,
tools: INode[],
attachments: IBinaryData[],
timeZone: string,
trx: EntityManager,
) {
await this.chatHubSettingsService.ensureModelIsAllowed(model);
@@ -1342,6 +1364,7 @@ export class ChatHubService {
model,
systemMessage,
tools,
timeZone,
trx,
);
}
@@ -1353,6 +1376,7 @@ export class ChatHubService {
history: ChatHubMessage[],
message: string,
attachments: IBinaryData[],
timeZone: string,
trx: EntityManager,
) {
const agent = await this.chatHubAgentService.getAgentById(agentId, user.id);
@@ -1370,7 +1394,8 @@ export class ChatHubService {
throw new BadRequestError('Credentials not set for agent');
}
const systemMessage = agent.systemPrompt;
const systemMessage =
agent.systemPrompt + '\n' + this.chatHubWorkflowService.getSystemMessageMetadata(timeZone);
const model: ChatHubBaseLLMModel = {
provider: agent.provider,
@@ -1396,6 +1421,7 @@ export class ChatHubService {
systemMessage,
tools,
attachments,
timeZone,
trx,
);
}
@@ -2074,7 +2100,7 @@ export class ChatHubService {
model: ChatHubConversationModel,
credentialId: string | null,
tools: INode[],
agentName: string,
agentName?: string,
trx?: EntityManager,
) {
await this.ensureValidModel(user, model);

View File

@@ -22,6 +22,7 @@ export interface BaseMessagePayload {
sessionId: ChatSessionId;
model: ChatHubConversationModel;
credentials: INodeCredentials;
timeZone?: string;
}
export interface HumanMessagePayload extends BaseMessagePayload {
@@ -30,7 +31,7 @@ export interface HumanMessagePayload extends BaseMessagePayload {
previousMessageId: ChatMessageId | null;
attachments: ChatAttachment[];
tools: INode[];
agentName: string;
agentName?: string;
}
export interface RegenerateMessagePayload extends BaseMessagePayload {
retryId: ChatMessageId;

View File

@@ -0,0 +1,352 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import {
createTeamProject,
getPersonalProject,
linkUserToProject,
testDb,
} from '@n8n/backend-test-utils';
import type { Project, User } from '@n8n/db';
import { Container } from '@n8n/di';
import { createDataTable } from '@test-integration/db/data-tables';
import { createOwner, createMember, createAdmin } from '@test-integration/db/users';
import type { SuperAgentTest } from '@test-integration/types';
import * as utils from '@test-integration/utils';
import { DataTableColumnRepository } from '../data-table-column.repository';
import { mockDataTableSizeValidator } from './test-helpers';
// Users exercised by the suite, one per global role.
let owner: User;
let member: User;
let admin: User;

// Pre-authenticated supertest agents, one per user above.
let authOwnerAgent: SuperAgentTest;
let authMemberAgent: SuperAgentTest;
let authAdminAgent: SuperAgentTest;

// Personal projects belonging to `owner` and `member`, used for the
// personal-project access-control tests.
let ownerProject: Project;
let memberProject: Project;

// Boots a test server with only the data-table endpoints/module enabled.
const testServer = utils.setupTestServer({
	endpointGroups: ['data-table'],
	modules: ['data-table'],
});

// Used to read column state back from the DB and verify (non-)renames.
let dataTableColumnRepository: DataTableColumnRepository;

beforeAll(async () => {
	// Stub out the size validator so tests don't depend on real size checks.
	mockDataTableSizeValidator();

	dataTableColumnRepository = Container.get(DataTableColumnRepository);

	// Create one user per role and an authenticated agent for each.
	owner = await createOwner();
	member = await createMember();
	admin = await createAdmin();

	authOwnerAgent = testServer.authAgentFor(owner);
	authMemberAgent = testServer.authAgentFor(member);
	authAdminAgent = testServer.authAgentFor(admin);

	ownerProject = await getPersonalProject(owner);
	memberProject = await getPersonalProject(member);
});

beforeEach(async () => {
	// Each test creates its own tables/columns; wipe them between tests.
	await testDb.truncate(['DataTable', 'DataTableColumn']);
});
// Integration tests for the column-rename endpoint. The suite covers
// 404s for missing resources, 403s for insufficient project roles,
// 409/400 for name conflicts and invalid names, and the success paths.
// After each request the column is re-read from the DB to confirm the
// rename did (or did not) persist.
describe('PATCH /projects/:projectId/data-tables/:dataTableId/columns/:columnId/rename', () => {
	test('should not rename column when project does not exist', async () => {
		const payload = {
			name: 'new_column_name',
		};

		await authOwnerAgent
			.patch(
				'/projects/non-existing-id/data-tables/some-data-table-id/columns/some-column-id/rename',
			)
			.send(payload)
			.expect(404);
	});

	test('should not rename column when data table does not exist', async () => {
		const project = await createTeamProject('test project', owner);

		const payload = {
			name: 'new_column_name',
		};

		await authOwnerAgent
			.patch(
				`/projects/${project.id}/data-tables/non-existing-data-table/columns/some-column-id/rename`,
			)
			.send(payload)
			.expect(404);
	});

	test('should not rename column when column does not exist', async () => {
		const project = await createTeamProject('test project', owner);
		const dataTable = await createDataTable(project, {
			columns: [
				{
					name: 'test_column',
					type: 'string',
				},
			],
		});

		const payload = {
			name: 'new_column_name',
		};

		await authOwnerAgent
			.patch(
				`/projects/${project.id}/data-tables/${dataTable.id}/columns/non-existing-column-id/rename`,
			)
			.send(payload)
			.expect(404);
	});

	test("should not rename column in another user's personal project data table", async () => {
		// Table lives in the owner's personal project; the member must be denied.
		const dataTable = await createDataTable(ownerProject, {
			columns: [
				{
					name: 'test_column',
					type: 'string',
				},
			],
		});

		await authMemberAgent
			.patch(
				`/projects/${ownerProject.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: 'new_name' })
			.expect(403);

		// Verify the rejected request left the column untouched.
		const columnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(columnInDb?.name).toBe('test_column');
	});

	test('should not rename column if user has project:viewer role in team project', async () => {
		const project = await createTeamProject('test project', owner);
		await linkUserToProject(member, project, 'project:viewer');
		const dataTable = await createDataTable(project, {
			columns: [
				{
					name: 'test_column',
					type: 'string',
				},
			],
		});

		await authMemberAgent
			.patch(
				`/projects/${project.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: 'new_name' })
			.expect(403);

		const columnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(columnInDb?.name).toBe('test_column');
	});

	test('should rename column if user has project:editor role in team project', async () => {
		const project = await createTeamProject('test project', owner);
		await linkUserToProject(member, project, 'project:editor');
		const dataTable = await createDataTable(project, {
			columns: [
				{
					name: 'test_column',
					type: 'string',
				},
			],
		});

		await authMemberAgent
			.patch(
				`/projects/${project.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: 'renamed_column' })
			.expect(200);

		const columnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(columnInDb?.name).toBe('renamed_column');
	});

	test('should rename column if user has project:admin role in team project', async () => {
		const project = await createTeamProject('test project', owner);
		await linkUserToProject(admin, project, 'project:admin');
		const dataTable = await createDataTable(project, {
			columns: [
				{
					name: 'test_column',
					type: 'string',
				},
			],
		});

		await authAdminAgent
			.patch(
				`/projects/${project.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: 'renamed_column' })
			.expect(200);

		const columnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(columnInDb?.name).toBe('renamed_column');
	});

	test('should rename column if user is owner in team project', async () => {
		const project = await createTeamProject('test project', owner);
		const dataTable = await createDataTable(project, {
			columns: [
				{
					name: 'test_column',
					type: 'string',
				},
			],
		});

		await authOwnerAgent
			.patch(
				`/projects/${project.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: 'renamed_column' })
			.expect(200);

		const columnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(columnInDb?.name).toBe('renamed_column');
	});

	test('should rename column in personal project', async () => {
		// A user can always rename columns in their own personal project.
		const dataTable = await createDataTable(memberProject, {
			columns: [
				{
					name: 'test_column',
					type: 'string',
				},
			],
		});

		await authMemberAgent
			.patch(
				`/projects/${memberProject.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: 'renamed_column' })
			.expect(200);

		const columnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(columnInDb?.name).toBe('renamed_column');
	});

	test('should not rename column to an existing column name', async () => {
		const project = await createTeamProject('test project', owner);
		const dataTable = await createDataTable(project, {
			columns: [
				{
					name: 'first_column',
					type: 'string',
				},
				{
					name: 'second_column',
					type: 'string',
				},
			],
		});

		// Renaming first_column to second_column collides -> 409 Conflict.
		await authOwnerAgent
			.patch(
				`/projects/${project.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: 'second_column' })
			.expect(409);

		const firstColumnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(firstColumnInDb?.name).toBe('first_column');
	});

	test('should not rename column with invalid column name', async () => {
		const project = await createTeamProject('test project', owner);
		const dataTable = await createDataTable(project, {
			columns: [
				{
					name: 'test_column',
					type: 'string',
				},
			],
		});

		// Spaces are rejected by payload validation -> 400 Bad Request.
		await authOwnerAgent
			.patch(
				`/projects/${project.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: 'invalid name with spaces' })
			.expect(400);

		const columnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(columnInDb?.name).toBe('test_column');
	});

	test('should not rename column with empty name', async () => {
		const project = await createTeamProject('test project', owner);
		const dataTable = await createDataTable(project, {
			columns: [
				{
					name: 'test_column',
					type: 'string',
				},
			],
		});

		await authOwnerAgent
			.patch(
				`/projects/${project.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: '' })
			.expect(400);

		const columnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(columnInDb?.name).toBe('test_column');
	});

	test('should rename column successfully', async () => {
		const project = await createTeamProject('test project', owner);
		const dataTable = await createDataTable(project, {
			columns: [
				{
					name: 'original_name',
					type: 'string',
				},
			],
		});

		await authOwnerAgent
			.patch(
				`/projects/${project.id}/data-tables/${dataTable.id}/columns/${dataTable.columns[0].id}/rename`,
			)
			.send({ name: 'updated_name' })
			.expect(200);

		// Verify column name changed
		const columnInDb = await dataTableColumnRepository.findOneBy({
			id: dataTable.columns[0].id,
		});
		expect(columnInDb?.name).toBe('updated_name');
	});
});

View File

@@ -0,0 +1,250 @@
import { testModules } from '@n8n/backend-test-utils';
import type { DataSource, EntityManager } from '@n8n/typeorm';
import { mock } from 'jest-mock-extended';
import { DataTableColumn } from '../data-table-column.entity';
import { DataTableColumnRepository } from '../data-table-column.repository';
import type { DataTableDDLService } from '../data-table-ddl.service';
import { DataTable } from '../data-table.entity';
import { DataTableColumnNameConflictError } from '../errors/data-table-column-name-conflict.error';
import { DataTableSystemColumnNameConflictError } from '../errors/data-table-system-column-name-conflict.error';
describe('DataTableColumnRepository', () => {
let repository: DataTableColumnRepository;
let mockDataSource: DataSource;
let mockDDLService: jest.Mocked<DataTableDDLService>;
let mockEntityManager: jest.Mocked<EntityManager>;
beforeAll(async () => {
await testModules.loadModules(['data-table']);
});
beforeEach(() => {
mockDDLService = mock<DataTableDDLService>();
mockEntityManager = mock<EntityManager>({
connection: {
options: { type: 'postgres' },
} as any,
});
// Mock the transaction method to execute the callback immediately
(mockEntityManager.transaction as jest.Mock) = jest.fn(
async (callback: (em: EntityManager) => Promise<any>) => {
return await callback(mockEntityManager);
},
);
mockDataSource = mock<DataSource>({
manager: mockEntityManager,
});
repository = new DataTableColumnRepository(mockDataSource, mockDDLService);
});
describe('renameColumn', () => {
const dataTableId = 'test-table-id';
const mockColumn: DataTableColumn = {
id: 'column-id',
name: 'old_name',
type: 'string',
index: 0,
dataTableId,
} as DataTableColumn;
describe('validateUniqueColumnName', () => {
it('should throw DataTableColumnNameConflictError when column name already exists', async () => {
// Arrange
const newName = 'duplicate_name';
const dataTable = { id: dataTableId, name: 'Test Table' } as DataTable;
mockEntityManager.existsBy.mockResolvedValue(true);
mockEntityManager.findOneBy.mockResolvedValue(dataTable);
// Act & Assert
await expect(repository.renameColumn(dataTableId, mockColumn, newName)).rejects.toThrow(
DataTableColumnNameConflictError,
);
await expect(repository.renameColumn(dataTableId, mockColumn, newName)).rejects.toThrow(
`Data table column with name '${newName}' already exists in data table '${dataTable.name}'`,
);
expect(mockEntityManager.existsBy).toHaveBeenCalledWith(DataTableColumn, {
name: newName,
dataTableId,
});
expect(mockEntityManager.findOneBy).toHaveBeenCalledWith(DataTable, { id: dataTableId });
});
it('should not throw when column name is unique', async () => {
// Arrange
const newName = 'unique_name';
mockEntityManager.existsBy.mockResolvedValue(false);
mockEntityManager.update.mockResolvedValue({ affected: 1 } as any);
Object.defineProperty(mockEntityManager, 'connection', {
value: {
options: { type: 'postgres' },
},
configurable: true,
});
mockDDLService.renameColumn.mockResolvedValue(undefined);
// Act
const result = await repository.renameColumn(dataTableId, mockColumn, newName);
// Assert
expect(mockEntityManager.existsBy).toHaveBeenCalledWith(DataTableColumn, {
name: newName,
dataTableId,
});
expect(result.name).toBe(newName);
});
});
describe('validateNotSystemColumn', () => {
it('should throw DataTableSystemColumnNameConflictError for system column names', async () => {
// Arrange - system columns: id, createdAt, updatedAt
const systemColumnNames = ['id', 'createdAt', 'updatedAt'];
for (const systemColumnName of systemColumnNames) {
mockEntityManager.existsBy.mockResolvedValue(false);
// Act & Assert
await expect(
repository.renameColumn(dataTableId, mockColumn, systemColumnName),
).rejects.toThrow(DataTableSystemColumnNameConflictError);
await expect(
repository.renameColumn(dataTableId, mockColumn, systemColumnName),
).rejects.toThrow(
`Column name "${systemColumnName}" is reserved as a system column name.`,
);
}
});
it('should throw DataTableSystemColumnNameConflictError for testing column name', async () => {
// Arrange
const testingColumnName = 'dryRunState';
mockEntityManager.existsBy.mockResolvedValue(false);
// Act & Assert
await expect(
repository.renameColumn(dataTableId, mockColumn, testingColumnName),
).rejects.toThrow(DataTableSystemColumnNameConflictError);
await expect(
repository.renameColumn(dataTableId, mockColumn, testingColumnName),
).rejects.toThrow(
`Column name "${testingColumnName}" is reserved as a testing column name.`,
);
});
});
// Happy path: the metadata update and the DDL rename must both be issued.
describe('successful rename', () => {
it('should successfully rename column when all validations pass', async () => {
// Arrange
const newName = 'new_valid_name';
mockEntityManager.existsBy.mockResolvedValue(false);
mockEntityManager.update.mockResolvedValue({ affected: 1 } as any);
// The repository reads em.connection.options.type to pick the SQL dialect,
// so the connection has to be stubbed onto the mocked entity manager.
Object.defineProperty(mockEntityManager, 'connection', {
value: {
options: { type: 'postgres' },
},
configurable: true,
});
mockDDLService.renameColumn.mockResolvedValue(undefined);
// Act
const result = await repository.renameColumn(dataTableId, mockColumn, newName);
// Assert
expect(result).toEqual({
...mockColumn,
name: newName,
});
expect(mockEntityManager.update).toHaveBeenCalledWith(
DataTableColumn,
{ id: mockColumn.id },
{ name: newName },
);
expect(mockDDLService.renameColumn).toHaveBeenCalledWith(
dataTableId,
mockColumn.name,
newName,
'postgres',
mockEntityManager,
);
});
it('should call DDL service with correct database type', async () => {
// Arrange
const newName = 'new_valid_name';
const dbTypes = ['postgres', 'mysql', 'sqlite'] as const;
// NOTE(review): mocks are not cleared between loop iterations, so the
// toHaveBeenCalledWith assertion below can also match calls made for an
// earlier dbType — confirm whether a per-iteration clear is intended.
for (const dbType of dbTypes) {
mockEntityManager.existsBy.mockResolvedValue(false);
mockEntityManager.update.mockResolvedValue({ affected: 1 } as any);
Object.defineProperty(mockEntityManager, 'connection', {
value: {
options: { type: dbType },
},
configurable: true,
});
mockDDLService.renameColumn.mockResolvedValue(undefined);
// Act
await repository.renameColumn(dataTableId, mockColumn, newName);
// Assert
expect(mockDDLService.renameColumn).toHaveBeenCalledWith(
dataTableId,
mockColumn.name,
newName,
dbType,
mockEntityManager,
);
}
});
});
// Ensures validations run in a fixed order: system-name check first (cheap,
// no DB access), then the uniqueness check which hits the entity manager.
describe('validation order', () => {
it('should validate system column name before checking uniqueness', async () => {
// Arrange
const systemColumnName = 'id';
mockEntityManager.existsBy.mockResolvedValue(false);
// Act & Assert
await expect(
repository.renameColumn(dataTableId, mockColumn, systemColumnName),
).rejects.toThrow(DataTableSystemColumnNameConflictError);
// existsBy should not be called because system column validation happens first
expect(mockEntityManager.existsBy).not.toHaveBeenCalled();
});
it('should check uniqueness after system column validation passes', async () => {
// Arrange
const newName = 'valid_name';
const dataTable = { id: dataTableId, name: 'Test Table' } as DataTable;
// existsBy -> true triggers the conflict path; findOneBy supplies the
// table used to build the conflict error message.
mockEntityManager.existsBy.mockResolvedValue(true);
mockEntityManager.findOneBy.mockResolvedValue(dataTable);
// Act & Assert
await expect(repository.renameColumn(dataTableId, mockColumn, newName)).rejects.toThrow(
DataTableColumnNameConflictError,
);
// Both validations should have been called in order
expect(mockEntityManager.existsBy).toHaveBeenCalledWith(DataTableColumn, {
name: newName,
dataTableId,
});
});
});
});
});

View File

@@ -0,0 +1,384 @@
import { testModules } from '@n8n/backend-test-utils';
import type { DataSource, DataSourceOptions, EntityManager } from '@n8n/typeorm';
import { mock } from 'jest-mock-extended';
import { DataTableDDLService } from '../data-table-ddl.service';
import * as sqlUtils from '../utils/sql-utils';
// Mock the sql-utils module
jest.mock('../utils/sql-utils', () => ({
...jest.requireActual('../utils/sql-utils'),
renameColumnQuery: jest.fn(),
toTableName: jest.fn(),
}));
// Unit tests for DataTableDDLService.renameColumn. SQL generation is mocked
// through sql-utils, so these tests verify orchestration (table-name
// resolution, query building, transaction usage) rather than actual SQL.
describe('DataTableDDLService', () => {
let ddlService: DataTableDDLService;
let mockDataSource: DataSource;
let mockEntityManager: jest.Mocked<EntityManager>;
beforeAll(async () => {
await testModules.loadModules(['data-table']);
});
beforeEach(() => {
mockEntityManager = mock<EntityManager>({
connection: {
options: { type: 'postgres' },
} as any,
});
// Mock the transaction method to execute the callback immediately
(mockEntityManager.transaction as jest.Mock) = jest.fn(
async (callback: (em: EntityManager) => Promise<any>) => {
return await callback(mockEntityManager);
},
);
// Mock the query method
mockEntityManager.query = jest.fn().mockResolvedValue(undefined);
mockDataSource = mock<DataSource>({
manager: mockEntityManager,
});
ddlService = new DataTableDDLService(mockDataSource);
// Reset all mocks
// NOTE(review): clearAllMocks() clears call records but keeps the
// implementations assigned above (unlike resetAllMocks) — confirm this
// ordering is intentional.
jest.clearAllMocks();
});
describe('renameColumn', () => {
const dataTableId = 'test-table-id';
const oldColumnName = 'old_column';
const newColumnName = 'new_column';
const tableName = 'n8n_data_table_user_test-table-id';
beforeEach(() => {
(sqlUtils.toTableName as jest.Mock).mockReturnValue(tableName);
});
describe('successful rename', () => {
it('should execute rename column query for PostgreSQL', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'postgres';
const expectedQuery =
'ALTER TABLE "n8n_data_table_user_test-table-id" RENAME COLUMN "old_column" TO "new_column"';
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType);
// Assert
expect(sqlUtils.toTableName).toHaveBeenCalledWith(dataTableId);
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledWith(
tableName,
oldColumnName,
newColumnName,
dbType,
);
expect(mockEntityManager.query).toHaveBeenCalledWith(expectedQuery);
});
it('should execute rename column query for MySQL', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'mysql';
const expectedQuery =
'ALTER TABLE `n8n_data_table_user_test-table-id` RENAME COLUMN `old_column` TO `new_column`';
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType);
// Assert
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledWith(
tableName,
oldColumnName,
newColumnName,
dbType,
);
expect(mockEntityManager.query).toHaveBeenCalledWith(expectedQuery);
});
it('should execute rename column query for SQLite', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'sqlite';
const expectedQuery =
'ALTER TABLE "n8n_data_table_user_test-table-id" RENAME COLUMN "old_column" TO "new_column"';
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType);
// Assert
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledWith(
tableName,
oldColumnName,
newColumnName,
dbType,
);
expect(mockEntityManager.query).toHaveBeenCalledWith(expectedQuery);
});
it('should call methods in correct order', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'postgres';
const expectedQuery =
'ALTER TABLE "n8n_data_table_user_test-table-id" RENAME COLUMN "old_column" TO "new_column"';
// Each mock records its invocation so the sequence can be asserted.
const callOrder: string[] = [];
(sqlUtils.toTableName as jest.Mock).mockImplementation(() => {
callOrder.push('toTableName');
return tableName;
});
(sqlUtils.renameColumnQuery as jest.Mock).mockImplementation(() => {
callOrder.push('renameColumnQuery');
return expectedQuery;
});
mockEntityManager.query = jest.fn().mockImplementation(async () => {
callOrder.push('query');
return undefined;
});
// Act
await ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType);
// Assert
expect(callOrder).toEqual(['toTableName', 'renameColumnQuery', 'query']);
});
});
describe('with transaction parameter', () => {
it('should use provided transaction manager', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'postgres';
const expectedQuery =
'ALTER TABLE "n8n_data_table_user_test-table-id" RENAME COLUMN "old_column" TO "new_column"';
const customTrx = mock<EntityManager>();
customTrx.query = jest.fn().mockResolvedValue(undefined) as any;
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType, customTrx);
// Assert: the query must run on the caller-supplied manager, not the default one.
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledWith(
tableName,
oldColumnName,
newColumnName,
dbType,
);
expect(customTrx.query).toHaveBeenCalledWith(expectedQuery);
});
it('should execute within transaction when no transaction manager is provided', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'postgres';
const expectedQuery =
'ALTER TABLE "n8n_data_table_user_test-table-id" RENAME COLUMN "old_column" TO "new_column"';
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType);
// Assert
expect(mockEntityManager.transaction).toHaveBeenCalled();
expect(mockEntityManager.query).toHaveBeenCalledWith(expectedQuery);
});
});
describe('error handling', () => {
it('should propagate errors from query execution', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'postgres';
const expectedQuery =
'ALTER TABLE "n8n_data_table_user_test-table-id" RENAME COLUMN "old_column" TO "new_column"';
const queryError = new Error('Database query failed');
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
mockEntityManager.query = jest.fn().mockRejectedValue(queryError);
// Act & Assert
await expect(
ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType),
).rejects.toThrow(queryError);
expect(mockEntityManager.query).toHaveBeenCalledWith(expectedQuery);
});
it('should propagate errors from renameColumnQuery', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'postgres';
const queryError = new Error('Invalid column name');
(sqlUtils.renameColumnQuery as jest.Mock).mockImplementation(() => {
throw queryError;
});
// Act & Assert: a failure while building the SQL must prevent execution.
await expect(
ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType),
).rejects.toThrow(queryError);
expect(sqlUtils.renameColumnQuery).toHaveBeenCalled();
expect(mockEntityManager.query).not.toHaveBeenCalled();
});
});
describe('parameter handling', () => {
it('should handle special characters in column names', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'postgres';
const oldNameWithSpecialChars = 'old_column_2024';
const newNameWithSpecialChars = 'new_column_v2';
const expectedQuery =
'ALTER TABLE "n8n_data_table_user_test-table-id" RENAME COLUMN "old_column_2024" TO "new_column_v2"';
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(
dataTableId,
oldNameWithSpecialChars,
newNameWithSpecialChars,
dbType,
);
// Assert
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledWith(
tableName,
oldNameWithSpecialChars,
newNameWithSpecialChars,
dbType,
);
expect(mockEntityManager.query).toHaveBeenCalledWith(expectedQuery);
});
it('should handle different data table IDs', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'postgres';
const differentTableId = 'different-table-id';
const differentTableName = 'n8n_data_table_user_different-table-id';
const expectedQuery =
'ALTER TABLE "n8n_data_table_user_different-table-id" RENAME COLUMN "old_column" TO "new_column"';
(sqlUtils.toTableName as jest.Mock).mockReturnValue(differentTableName);
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(differentTableId, oldColumnName, newColumnName, dbType);
// Assert
expect(sqlUtils.toTableName).toHaveBeenCalledWith(differentTableId);
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledWith(
differentTableName,
oldColumnName,
newColumnName,
dbType,
);
});
});
describe('database type specific behavior', () => {
// Table-driven cases: quoting style differs per dialect (double quotes for
// postgres/sqlite, backticks for mysql/mariadb) per the mocked queries.
const testCases: Array<{
dbType: DataSourceOptions['type'];
expectedQuery: string;
}> = [
{
dbType: 'postgres',
expectedQuery:
'ALTER TABLE "n8n_data_table_user_test-table-id" RENAME COLUMN "old_column" TO "new_column"',
},
{
dbType: 'mysql',
expectedQuery:
'ALTER TABLE `n8n_data_table_user_test-table-id` RENAME COLUMN `old_column` TO `new_column`',
},
{
dbType: 'mariadb',
expectedQuery:
'ALTER TABLE `n8n_data_table_user_test-table-id` RENAME COLUMN `old_column` TO `new_column`',
},
{
dbType: 'sqlite',
expectedQuery:
'ALTER TABLE "n8n_data_table_user_test-table-id" RENAME COLUMN "old_column" TO "new_column"',
},
];
testCases.forEach(({ dbType, expectedQuery }) => {
it(`should generate correct query for ${dbType}`, async () => {
// Arrange
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType);
// Assert
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledWith(
tableName,
oldColumnName,
newColumnName,
dbType,
);
expect(mockEntityManager.query).toHaveBeenCalledWith(expectedQuery);
});
});
});
describe('integration with utilities', () => {
it('should properly convert dataTableId to table name', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'postgres';
const customTableId = 'custom-uuid-1234';
const expectedTableName = 'n8n_data_table_user_custom-uuid-1234';
const expectedQuery =
'ALTER TABLE "n8n_data_table_user_custom-uuid-1234" RENAME COLUMN "old_column" TO "new_column"';
(sqlUtils.toTableName as jest.Mock).mockReturnValue(expectedTableName);
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(customTableId, oldColumnName, newColumnName, dbType);
// Assert
expect(sqlUtils.toTableName).toHaveBeenCalledTimes(1);
expect(sqlUtils.toTableName).toHaveBeenCalledWith(customTableId);
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledWith(
expectedTableName,
oldColumnName,
newColumnName,
dbType,
);
});
it('should pass all parameters to renameColumnQuery utility', async () => {
// Arrange
const dbType: DataSourceOptions['type'] = 'mysql';
const expectedQuery = 'ALTER TABLE query';
(sqlUtils.renameColumnQuery as jest.Mock).mockReturnValue(expectedQuery);
// Act
await ddlService.renameColumn(dataTableId, oldColumnName, newColumnName, dbType);
// Assert
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledWith(
tableName,
oldColumnName,
newColumnName,
dbType,
);
expect(sqlUtils.renameColumnQuery).toHaveBeenCalledTimes(1);
});
});
});
});

View File

@@ -0,0 +1,365 @@
import { mockInstance, testModules } from '@n8n/backend-test-utils';
import type { RenameDataTableColumnDto } from '@n8n/api-types';
import { Logger } from '@n8n/backend-common';
import { ProjectRelationRepository } from '@n8n/db';
import { CsvParserService } from '../csv-parser.service';
import type { DataTableColumn } from '../data-table-column.entity';
import { DataTableColumnRepository } from '../data-table-column.repository';
import { DataTableFileCleanupService } from '../data-table-file-cleanup.service';
import { DataTableRowsRepository } from '../data-table-rows.repository';
import { DataTableSizeValidator } from '../data-table-size-validator.service';
import type { DataTable } from '../data-table.entity';
import { DataTableRepository } from '../data-table.repository';
import { DataTableService } from '../data-table.service';
import { DataTableColumnNotFoundError } from '../errors/data-table-column-not-found.error';
import { DataTableNotFoundError } from '../errors/data-table-not-found.error';
import { RoleService } from '@/services/role.service';
// Unit tests for DataTableService.renameColumn: all collaborators are mocked
// via mockInstance, so these tests verify validation ordering and delegation
// to the column repository rather than persistence behavior.
describe('DataTableService', () => {
let dataTableService: DataTableService;
let mockDataTableRepository: jest.Mocked<DataTableRepository>;
let mockDataTableColumnRepository: jest.Mocked<DataTableColumnRepository>;
let mockDataTableRowsRepository: jest.Mocked<DataTableRowsRepository>;
let mockLogger: jest.Mocked<Logger>;
let mockDataTableSizeValidator: jest.Mocked<DataTableSizeValidator>;
let mockProjectRelationRepository: jest.Mocked<ProjectRelationRepository>;
let mockRoleService: jest.Mocked<RoleService>;
let mockCsvParserService: jest.Mocked<CsvParserService>;
let mockFileCleanupService: jest.Mocked<DataTableFileCleanupService>;
beforeAll(async () => {
await testModules.loadModules(['data-table']);
});
beforeEach(() => {
mockDataTableRepository = mockInstance(DataTableRepository);
mockDataTableColumnRepository = mockInstance(DataTableColumnRepository);
mockDataTableRowsRepository = mockInstance(DataTableRowsRepository);
mockLogger = mockInstance(Logger);
mockDataTableSizeValidator = mockInstance(DataTableSizeValidator);
mockProjectRelationRepository = mockInstance(ProjectRelationRepository);
mockRoleService = mockInstance(RoleService);
mockCsvParserService = mockInstance(CsvParserService);
mockFileCleanupService = mockInstance(DataTableFileCleanupService);
// Mock the logger.scoped method to return the logger itself
mockLogger.scoped = jest.fn().mockReturnValue(mockLogger);
dataTableService = new DataTableService(
mockDataTableRepository,
mockDataTableColumnRepository,
mockDataTableRowsRepository,
mockLogger,
mockDataTableSizeValidator,
mockProjectRelationRepository,
mockRoleService,
mockCsvParserService,
mockFileCleanupService,
);
// NOTE(review): clearAllMocks() clears call records but keeps the
// scoped() implementation set above (unlike resetAllMocks) — confirm
// this ordering is intentional.
jest.clearAllMocks();
});
describe('renameColumn', () => {
// Shared fixtures used across the rename test cases.
const projectId = 'test-project-id';
const dataTableId = 'test-data-table-id';
const columnId = 'test-column-id';
const mockDataTable: DataTable = {
id: dataTableId,
name: 'Test Table',
projectId,
} as DataTable;
const mockColumn: DataTableColumn = {
id: columnId,
name: 'old_column_name',
type: 'string',
index: 0,
dataTableId,
} as DataTableColumn;
const renameDto: RenameDataTableColumnDto = {
name: 'new_column_name',
};
describe('successful rename', () => {
it('should rename column when data table and column exist', async () => {
// Arrange
const renamedColumn = { ...mockColumn, name: renameDto.name };
mockDataTableRepository.findOneBy.mockResolvedValue(mockDataTable);
mockDataTableColumnRepository.findOneBy.mockResolvedValue(mockColumn);
mockDataTableColumnRepository.renameColumn.mockResolvedValue(renamedColumn);
// Act
const result = await dataTableService.renameColumn(
dataTableId,
projectId,
columnId,
renameDto,
);
// Assert
expect(result).toEqual(renamedColumn);
expect(mockDataTableRepository.findOneBy).toHaveBeenCalledWith({
id: dataTableId,
project: {
id: projectId,
},
});
expect(mockDataTableColumnRepository.findOneBy).toHaveBeenCalledWith({
id: columnId,
dataTableId,
});
expect(mockDataTableColumnRepository.renameColumn).toHaveBeenCalledWith(
dataTableId,
mockColumn,
renameDto.name,
);
});
it('should call repository methods in correct order', async () => {
// Arrange
const renamedColumn = { ...mockColumn, name: renameDto.name };
// Each mock records its invocation so the sequence can be asserted.
const callOrder: string[] = [];
mockDataTableRepository.findOneBy.mockImplementation(async () => {
callOrder.push('validateDataTableExists');
return mockDataTable;
});
mockDataTableColumnRepository.findOneBy.mockImplementation(async () => {
callOrder.push('validateColumnExists');
return mockColumn;
});
mockDataTableColumnRepository.renameColumn.mockImplementation(async () => {
callOrder.push('renameColumn');
return renamedColumn;
});
// Act
await dataTableService.renameColumn(dataTableId, projectId, columnId, renameDto);
// Assert
expect(callOrder).toEqual([
'validateDataTableExists',
'validateColumnExists',
'renameColumn',
]);
});
});
describe('validation errors', () => {
it('should throw DataTableNotFoundError when data table does not exist', async () => {
// Arrange
mockDataTableRepository.findOneBy.mockResolvedValue(null);
// Act & Assert
await expect(
dataTableService.renameColumn(dataTableId, projectId, columnId, renameDto),
).rejects.toThrow(DataTableNotFoundError);
await expect(
dataTableService.renameColumn(dataTableId, projectId, columnId, renameDto),
).rejects.toThrow(`Could not find the data table: '${dataTableId}'`);
// Verify that column validation and rename were not called
expect(mockDataTableColumnRepository.findOneBy).not.toHaveBeenCalled();
expect(mockDataTableColumnRepository.renameColumn).not.toHaveBeenCalled();
});
it('should throw DataTableNotFoundError when data table exists but belongs to different project', async () => {
// Arrange
const differentProjectId = 'different-project-id';
mockDataTableRepository.findOneBy.mockResolvedValue(null);
// Act & Assert
await expect(
dataTableService.renameColumn(dataTableId, differentProjectId, columnId, renameDto),
).rejects.toThrow(DataTableNotFoundError);
// Verify that the repository was called with the correct project filter
expect(mockDataTableRepository.findOneBy).toHaveBeenCalledWith({
id: dataTableId,
project: {
id: differentProjectId,
},
});
});
it('should throw DataTableColumnNotFoundError when column does not exist', async () => {
// Arrange
mockDataTableRepository.findOneBy.mockResolvedValue(mockDataTable);
mockDataTableColumnRepository.findOneBy.mockResolvedValue(null);
// Act & Assert
await expect(
dataTableService.renameColumn(dataTableId, projectId, columnId, renameDto),
).rejects.toThrow(DataTableColumnNotFoundError);
await expect(
dataTableService.renameColumn(dataTableId, projectId, columnId, renameDto),
).rejects.toThrow(
`Could not find the column '${columnId}' in the data table: ${dataTableId}`,
);
// Verify that data table validation was called but rename was not
expect(mockDataTableRepository.findOneBy).toHaveBeenCalled();
expect(mockDataTableColumnRepository.renameColumn).not.toHaveBeenCalled();
});
it('should throw DataTableColumnNotFoundError when column exists but belongs to different data table', async () => {
// Arrange
const differentDataTableId = 'different-table-id';
mockDataTableRepository.findOneBy.mockResolvedValue(mockDataTable);
mockDataTableColumnRepository.findOneBy.mockResolvedValue(null);
// Act & Assert
await expect(
dataTableService.renameColumn(differentDataTableId, projectId, columnId, renameDto),
).rejects.toThrow(DataTableColumnNotFoundError);
// Verify that the repository was called with the correct table filter
expect(mockDataTableColumnRepository.findOneBy).toHaveBeenCalledWith({
id: columnId,
dataTableId: differentDataTableId,
});
});
});
describe('validation order', () => {
it('should validate data table existence before validating column existence', async () => {
// Arrange
mockDataTableRepository.findOneBy.mockResolvedValue(null);
mockDataTableColumnRepository.findOneBy.mockResolvedValue(mockColumn);
// Act & Assert
await expect(
dataTableService.renameColumn(dataTableId, projectId, columnId, renameDto),
).rejects.toThrow(DataTableNotFoundError);
// Column validation should not be called if table validation fails
expect(mockDataTableRepository.findOneBy).toHaveBeenCalled();
expect(mockDataTableColumnRepository.findOneBy).not.toHaveBeenCalled();
});
it('should validate column existence before calling rename', async () => {
// Arrange
mockDataTableRepository.findOneBy.mockResolvedValue(mockDataTable);
mockDataTableColumnRepository.findOneBy.mockResolvedValue(null);
// Act & Assert
await expect(
dataTableService.renameColumn(dataTableId, projectId, columnId, renameDto),
).rejects.toThrow(DataTableColumnNotFoundError);
// Rename should not be called if column validation fails
expect(mockDataTableColumnRepository.renameColumn).not.toHaveBeenCalled();
});
});
describe('error propagation from repository', () => {
it('should propagate errors from dataTableColumnRepository.renameColumn', async () => {
// Arrange
const repositoryError = new Error('Database constraint violation');
mockDataTableRepository.findOneBy.mockResolvedValue(mockDataTable);
mockDataTableColumnRepository.findOneBy.mockResolvedValue(mockColumn);
mockDataTableColumnRepository.renameColumn.mockRejectedValue(repositoryError);
// Act & Assert
await expect(
dataTableService.renameColumn(dataTableId, projectId, columnId, renameDto),
).rejects.toThrow(repositoryError);
// Verify that all validations were performed before the error
expect(mockDataTableRepository.findOneBy).toHaveBeenCalled();
expect(mockDataTableColumnRepository.findOneBy).toHaveBeenCalled();
expect(mockDataTableColumnRepository.renameColumn).toHaveBeenCalled();
});
});
describe('edge cases', () => {
// The service passes names through unvalidated here; name-format rules
// are presumably enforced by the DTO/repository layer — TODO confirm.
it('should handle empty column name in DTO', async () => {
// Arrange
const emptyNameDto: RenameDataTableColumnDto = { name: '' };
const renamedColumn = { ...mockColumn, name: '' };
mockDataTableRepository.findOneBy.mockResolvedValue(mockDataTable);
mockDataTableColumnRepository.findOneBy.mockResolvedValue(mockColumn);
mockDataTableColumnRepository.renameColumn.mockResolvedValue(renamedColumn);
// Act
const result = await dataTableService.renameColumn(
dataTableId,
projectId,
columnId,
emptyNameDto,
);
// Assert
expect(mockDataTableColumnRepository.renameColumn).toHaveBeenCalledWith(
dataTableId,
mockColumn,
'',
);
expect(result.name).toBe('');
});
it('should handle renaming to same name', async () => {
// Arrange
const sameNameDto: RenameDataTableColumnDto = { name: mockColumn.name };
const renamedColumn = { ...mockColumn, name: mockColumn.name };
mockDataTableRepository.findOneBy.mockResolvedValue(mockDataTable);
mockDataTableColumnRepository.findOneBy.mockResolvedValue(mockColumn);
mockDataTableColumnRepository.renameColumn.mockResolvedValue(renamedColumn);
// Act
const result = await dataTableService.renameColumn(
dataTableId,
projectId,
columnId,
sameNameDto,
);
// Assert
expect(mockDataTableColumnRepository.renameColumn).toHaveBeenCalledWith(
dataTableId,
mockColumn,
mockColumn.name,
);
expect(result.name).toBe(mockColumn.name);
});
it('should handle special characters in new column name', async () => {
// Arrange
const specialCharDto: RenameDataTableColumnDto = { name: 'column_with_special@chars!' };
const renamedColumn = { ...mockColumn, name: specialCharDto.name };
mockDataTableRepository.findOneBy.mockResolvedValue(mockDataTable);
mockDataTableColumnRepository.findOneBy.mockResolvedValue(mockColumn);
mockDataTableColumnRepository.renameColumn.mockResolvedValue(renamedColumn);
// Act
const result = await dataTableService.renameColumn(
dataTableId,
projectId,
columnId,
specialCharDto,
);
// Assert
expect(mockDataTableColumnRepository.renameColumn).toHaveBeenCalledWith(
dataTableId,
mockColumn,
specialCharDto.name,
);
expect(result.name).toBe(specialCharDto.name);
});
});
});
});

View File

@@ -24,6 +24,40 @@ export class DataTableColumnRepository extends Repository<DataTableColumn> {
super(DataTableColumn, dataSource.manager);
}
/**
* Validates that a column name is not reserved as a system column
*/
private validateNotSystemColumn(columnName: string): void {
if (DATA_TABLE_SYSTEM_COLUMNS.includes(columnName)) {
throw new DataTableSystemColumnNameConflictError(columnName);
}
if (columnName === DATA_TABLE_SYSTEM_TESTING_COLUMN) {
throw new DataTableSystemColumnNameConflictError(columnName, 'testing');
}
}
/**
* Validates that a column name is unique within a data table
*/
private async validateUniqueColumnName(
columnName: string,
dataTableId: string,
em: EntityManager,
): Promise<void> {
const existingColumnMatch = await em.existsBy(DataTableColumn, {
name: columnName,
dataTableId,
});
if (existingColumnMatch) {
const dataTable = await em.findOneBy(DataTable, { id: dataTableId });
if (!dataTable) {
throw new UnexpectedError('Data table not found');
}
throw new DataTableColumnNameConflictError(columnName, dataTable.name);
}
}
async getColumns(dataTableId: string, trx?: EntityManager) {
return await withTransaction(
this.manager,
@@ -46,25 +80,8 @@ export class DataTableColumnRepository extends Repository<DataTableColumn> {
async addColumn(dataTableId: string, schema: DataTableCreateColumnSchema, trx?: EntityManager) {
return await withTransaction(this.manager, trx, async (em) => {
if (DATA_TABLE_SYSTEM_COLUMNS.includes(schema.name)) {
throw new DataTableSystemColumnNameConflictError(schema.name);
}
if (schema.name === DATA_TABLE_SYSTEM_TESTING_COLUMN) {
throw new DataTableSystemColumnNameConflictError(schema.name, 'testing');
}
const existingColumnMatch = await em.existsBy(DataTableColumn, {
name: schema.name,
dataTableId,
});
if (existingColumnMatch) {
const dataTable = await em.findOneBy(DataTable, { id: dataTableId });
if (!dataTable) {
throw new UnexpectedError('Data table not found');
}
throw new DataTableColumnNameConflictError(schema.name, dataTable.name);
}
this.validateNotSystemColumn(schema.name);
await this.validateUniqueColumnName(schema.name, dataTableId, em);
if (schema.index === undefined) {
const columns = await this.getColumns(dataTableId, em);
@@ -125,6 +142,32 @@ export class DataTableColumnRepository extends Repository<DataTableColumn> {
});
}
/**
 * Renames a data table column: validates the new name, updates the column
 * metadata record, then issues the matching DDL rename on the user table.
 * Everything runs inside one transaction (the caller's `trx` if provided).
 *
 * @returns the column object with its name replaced by `newName`.
 */
async renameColumn(
	dataTableId: string,
	column: DataTableColumn,
	newName: string,
	trx?: EntityManager,
) {
	return await withTransaction(this.manager, trx, async (em) => {
		// Name checks first: reserved system names, then uniqueness in this table.
		this.validateNotSystemColumn(newName);
		await this.validateUniqueColumnName(newName, dataTableId, em);

		const previousName = column.name;

		// Update the metadata record, then mirror the change in the table schema.
		await em.update(DataTableColumn, { id: column.id }, { name: newName });

		const dbType = em.connection.options.type;
		await this.ddlService.renameColumn(dataTableId, previousName, newName, dbType, em);

		return { ...column, name: newName };
	});
}
async shiftColumns(dataTableId: string, lowestIndex: number, delta: -1 | 1, trx?: EntityManager) {
await withTransaction(this.manager, trx, async (em) => {
await em

View File

@@ -4,7 +4,13 @@ import { DataSource, DataSourceOptions, EntityManager } from '@n8n/typeorm';
import { UnexpectedError } from 'n8n-workflow';
import { DataTableColumn } from './data-table-column.entity';
import { addColumnQuery, deleteColumnQuery, toDslColumns, toTableName } from './utils/sql-utils';
import {
addColumnQuery,
deleteColumnQuery,
renameColumnQuery,
toDslColumns,
toTableName,
} from './utils/sql-utils';
/**
* Manages database schema operations for data tables (DDL).
@@ -63,4 +69,18 @@ export class DataTableDDLService {
await em.query(deleteColumnQuery(toTableName(dataTableId), columnName, dbType));
});
}
/**
 * Executes the dialect-specific `ALTER TABLE … RENAME COLUMN` statement on
 * the physical table backing `dataTableId`.
 *
 * @param trx optional transaction manager; when omitted, a new transaction
 *   is opened on the service's data source.
 */
async renameColumn(
	dataTableId: string,
	oldColumnName: string,
	newColumnName: string,
	dbType: DataSourceOptions['type'],
	trx?: EntityManager,
) {
	await withTransaction(this.dataSource.manager, trx, async (em) => {
		const tableName = toTableName(dataTableId);
		const sql = renameColumnQuery(tableName, oldColumnName, newColumnName, dbType);
		await em.query(sql);
	});
}
}

View File

@@ -6,6 +6,7 @@ import {
ListDataTableContentQueryDto,
ListDataTableQueryDto,
MoveDataTableColumnDto,
RenameDataTableColumnDto,
UpdateDataTableDto,
UpdateDataTableRowDto,
UpsertDataTableRowDto,
@@ -26,6 +27,7 @@ import {
import { NextFunction, Response } from 'express';
import { DataTableRowReturn } from 'n8n-workflow';
import { ResponseError } from '@/errors/response-errors/abstract/response.error';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { ConflictError } from '@/errors/response-errors/conflict.error';
import { InternalServerError } from '@/errors/response-errors/internal-server.error';
@@ -33,7 +35,6 @@ import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { DataTableService } from './data-table.service';
import { DataTableColumnNameConflictError } from './errors/data-table-column-name-conflict.error';
import { DataTableColumnNotFoundError } from './errors/data-table-column-not-found.error';
import { DataTableNameConflictError } from './errors/data-table-name-conflict.error';
import { DataTableNotFoundError } from './errors/data-table-not-found.error';
import { DataTableSystemColumnNameConflictError } from './errors/data-table-system-column-name-conflict.error';
@@ -47,6 +48,26 @@ export class DataTableController {
private readonly projectService: ProjectService,
) {}
/**
 * Translates domain-level data table errors into HTTP response errors.
 * Always throws; never returns (hence the `never` return type).
 */
private handleDataTableColumnOperationError(e: unknown): never {
	// Name collisions (column, system column, table name) map to HTTP 409.
	const isNameConflict =
		e instanceof DataTableColumnNameConflictError ||
		e instanceof DataTableSystemColumnNameConflictError ||
		e instanceof DataTableNameConflictError;
	if (isNameConflict) {
		throw new ConflictError(e.message);
	}

	// Invalid user input maps to HTTP 400.
	if (e instanceof DataTableValidationError) {
		throw new BadRequestError(e.message);
	}

	// Errors already shaped for HTTP responses pass through untouched.
	if (e instanceof ResponseError) {
		throw e;
	}

	// Any other Error becomes a 500, preserving the original as the cause.
	if (e instanceof Error) {
		throw new InternalServerError(e.message, e);
	}

	// Non-Error throwables are re-thrown as-is.
	throw e;
}
@Middleware()
async validateProjectExists(
req: AuthenticatedRequest<{ projectId: string }>,
@@ -171,18 +192,7 @@ export class DataTableController {
try {
return await this.dataTableService.addColumn(dataTableId, req.params.projectId, dto);
} catch (e: unknown) {
if (e instanceof DataTableNotFoundError) {
throw new NotFoundError(e.message);
} else if (
e instanceof DataTableColumnNameConflictError ||
e instanceof DataTableSystemColumnNameConflictError
) {
throw new ConflictError(e.message);
} else if (e instanceof Error) {
throw new InternalServerError(e.message, e);
} else {
throw e;
}
this.handleDataTableColumnOperationError(e);
}
}
@@ -197,13 +207,7 @@ export class DataTableController {
try {
return await this.dataTableService.deleteColumn(dataTableId, req.params.projectId, columnId);
} catch (e: unknown) {
if (e instanceof DataTableNotFoundError || e instanceof DataTableColumnNotFoundError) {
throw new NotFoundError(e.message);
} else if (e instanceof Error) {
throw new InternalServerError(e.message, e);
} else {
throw e;
}
this.handleDataTableColumnOperationError(e);
}
}
@@ -224,15 +228,28 @@ export class DataTableController {
dto,
);
} catch (e: unknown) {
if (e instanceof DataTableNotFoundError || e instanceof DataTableColumnNotFoundError) {
throw new NotFoundError(e.message);
} else if (e instanceof DataTableValidationError) {
throw new BadRequestError(e.message);
} else if (e instanceof Error) {
throw new InternalServerError(e.message, e);
} else {
throw e;
}
this.handleDataTableColumnOperationError(e);
}
}
@Patch('/:dataTableId/columns/:columnId/rename')
@ProjectScope('dataTable:update')
async renameColumn(
	req: AuthenticatedRequest<{ projectId: string }>,
	_res: Response,
	@Param('dataTableId') dataTableId: string,
	@Param('columnId') columnId: string,
	@Body dto: RenameDataTableColumnDto,
) {
	// Delegate to the service; any domain error is translated to an HTTP error.
	try {
		const renamed = await this.dataTableService.renameColumn(
			dataTableId,
			req.params.projectId,
			columnId,
			dto,
		);
		return renamed;
	} catch (e: unknown) {
		this.handleDataTableColumnOperationError(e);
	}
}

View File

@@ -4,6 +4,7 @@ import type {
DeleteDataTableRowsDto,
ListDataTableContentQueryDto,
MoveDataTableColumnDto,
RenameDataTableColumnDto,
DataTableListOptions,
UpsertDataTableRowDto,
UpdateDataTableDto,
@@ -203,6 +204,18 @@ export class DataTableService {
return true;
}
/**
 * Renames a user-defined column on a data table.
 * Both validation helpers throw their own not-found errors when the
 * table or column does not exist, so no extra checks are needed here.
 */
async renameColumn(
	dataTableId: string,
	projectId: string,
	columnId: string,
	dto: RenameDataTableColumnDto,
) {
	await this.validateDataTableExists(dataTableId, projectId);
	const column = await this.validateColumnExists(dataTableId, columnId);
	return await this.dataTableColumnRepository.renameColumn(dataTableId, column, dto.name);
}
/** Lists data tables matching the given options, together with the total count. */
async getManyAndCount(options: DataTableListOptions) {
	const result = await this.dataTableRepository.getManyAndCount(options);
	return result;
}

View File

@@ -1,9 +1,7 @@
import { UserError } from 'n8n-workflow';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
export class DataTableColumnNotFoundError extends UserError {
export class DataTableColumnNotFoundError extends NotFoundError {
constructor(dataTableId: string, columnId: string) {
super(`Could not find the column '${columnId}' in the data table: ${dataTableId}`, {
level: 'warning',
});
super(`Could not find the column '${columnId}' in the data table: ${dataTableId}`);
}
}

View File

@@ -1,9 +1,7 @@
import { UserError } from 'n8n-workflow';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
export class DataTableNotFoundError extends UserError {
export class DataTableNotFoundError extends NotFoundError {
constructor(dataTableId: string) {
super(`Could not find the data table: '${dataTableId}'`, {
level: 'warning',
});
super(`Could not find the data table: '${dataTableId}'`);
}
}

View File

@@ -106,6 +106,23 @@ export function deleteColumnQuery(
return `ALTER TABLE ${quotedTableName} DROP COLUMN ${quoteIdentifier(column, dbType)}`;
}
/**
 * Builds the `ALTER TABLE ... RENAME COLUMN` statement for a data table.
 * Both column names are validated first so no unvetted identifier ever
 * reaches the generated SQL.
 *
 * @throws UnexpectedError when either column name fails validation
 */
export function renameColumnQuery(
	tableName: DataTableUserTableName,
	oldColumnName: string,
	newColumnName: string,
	dbType: DataSourceOptions['type'],
): string {
	for (const columnName of [oldColumnName, newColumnName]) {
		if (!isValidColumnName(columnName)) {
			throw new UnexpectedError(DATA_TABLE_COLUMN_ERROR_MESSAGE);
		}
	}
	const table = quoteIdentifier(tableName, dbType);
	const from = quoteIdentifier(oldColumnName, dbType);
	const to = quoteIdentifier(newColumnName, dbType);
	return `ALTER TABLE ${table} RENAME COLUMN ${from} TO ${to}`;
}
export function quoteIdentifier(name: string, dbType: DataSourceOptions['type']): string {
switch (dbType) {
case 'mysql':

View File

@@ -0,0 +1,64 @@
import { Service } from '@n8n/di';
import { ICredentialEntriesStorage } from './storage-interface';
import { DynamicCredentialEntry } from '../../database/entities/dynamic-credential-entry';
import { DynamicCredentialEntryRepository } from '../../database/repositories/dynamic-credential-entry.repository';
/**
 * Database-backed implementation of ICredentialEntriesStorage.
 * Each payload is keyed by the (credentialId, subjectId, resolverId) triple;
 * the storageOptions argument is accepted for interface compatibility but unused.
 */
@Service()
export class DynamicCredentialEntryStorage implements ICredentialEntriesStorage {
	constructor(
		private readonly dynamicCredentialEntryRepository: DynamicCredentialEntryRepository,
	) {}

	/** Returns the stored payload for the triple, or null when no row exists. */
	async getCredentialData(
		credentialId: string,
		subjectId: string,
		resolverId: string,
		_: Record<string, unknown>,
	): Promise<string | null> {
		const where = { credentialId, subjectId, resolverId };
		const entry = await this.dynamicCredentialEntryRepository.findOne({ where });
		return entry === null ? null : entry.data;
	}

	/**
	 * Creates or updates the payload for the triple.
	 * Implemented as read-then-save; not atomic under concurrent writers.
	 */
	async setCredentialData(
		credentialId: string,
		subjectId: string,
		resolverId: string,
		data: string,
		_: Record<string, unknown>,
	): Promise<void> {
		const where = { credentialId, subjectId, resolverId };
		const existing = await this.dynamicCredentialEntryRepository.findOne({ where });
		const entry = existing ?? Object.assign(new DynamicCredentialEntry(), where);
		entry.data = data;
		await this.dynamicCredentialEntryRepository.save(entry);
	}

	/** Removes the payload for the triple; a no-op when no row matches. */
	async deleteCredentialData(
		credentialId: string,
		subjectId: string,
		resolverId: string,
		_: Record<string, unknown>,
	): Promise<void> {
		await this.dynamicCredentialEntryRepository.delete({ credentialId, subjectId, resolverId });
	}
}

View File

@@ -0,0 +1,38 @@
/**
 * Pluggable backend for per-subject credential payloads.
 * Implementations key each payload by (credentialId, subjectId, resolverId);
 * storageOptions carries implementation-specific settings and may be ignored.
 */
export interface ICredentialEntriesStorage {
	/**
	 * Retrieves credential data for a specific entity from storage.
	 *
	 * @param credentialId - ID of the owning credential
	 * @param subjectId - ID of the subject (e.g. end user) the payload belongs to
	 * @param resolverId - ID of the resolver that produced the payload
	 * @param storageOptions - implementation-specific options
	 * @returns The credential data object, or null if not found
	 * @throws {Error} When storage operation fails
	 */
	getCredentialData(
		credentialId: string,
		subjectId: string,
		resolverId: string,
		storageOptions: Record<string, unknown>,
	): Promise<string | null>;
	/**
	 * Stores credential data for a specific entity in storage.
	 * Implementations are expected to overwrite any existing payload for the same key.
	 * @throws {Error} When storage operation fails
	 */
	setCredentialData(
		credentialId: string,
		subjectId: string,
		resolverId: string,
		data: string,
		storageOptions: Record<string, unknown>,
	): Promise<void>;
	/**
	 * Deletes credential data for a specific entity from storage.
	 * Optional - not all storage implementations support deletion.
	 * @throws {Error} When deletion operation fails
	 */
	deleteCredentialData?(
		credentialId: string,
		subjectId: string,
		resolverId: string,
		storageOptions: Record<string, unknown>,
	): Promise<void>;
}

View File

@@ -1,5 +1,6 @@
import { Column, Entity } from '@n8n/typeorm';
import { WithTimestampsAndStringId } from '@n8n/db';
import type { CredentialResolverConfiguration } from '@n8n/decorators';
import { Column, Entity } from '@n8n/typeorm';
@Entity()
export class DynamicCredentialResolver extends WithTimestampsAndStringId {
@@ -11,4 +12,7 @@ export class DynamicCredentialResolver extends WithTimestampsAndStringId {
@Column({ type: 'text' })
config: string;
/** Decrypted config, not persisted to the database */
decryptedConfig?: CredentialResolverConfiguration;
}

View File

@@ -0,0 +1,39 @@
import { CredentialsEntity, WithTimestamps } from '@n8n/db';
import { Column, Entity, JoinColumn, ManyToOne, PrimaryColumn } from '@n8n/typeorm';
import { DynamicCredentialResolver } from './credential-resolver';
/**
 * One stored credential payload, keyed by the composite primary key
 * (credential_id, subject_id, resolver_id).
 * Rows are deleted automatically (ON DELETE CASCADE) when the owning
 * credential or resolver is removed.
 */
@Entity({
	name: 'dynamic_credential_entry',
})
export class DynamicCredentialEntry extends WithTimestamps {
	// NOTE: the previous explicit `constructor() { super(); }` was a no-op
	// identical to the implicit default constructor, so it was removed.

	@PrimaryColumn({
		name: 'credential_id',
	})
	credentialId: string;

	@PrimaryColumn({
		name: 'subject_id',
	})
	subjectId: string;

	@PrimaryColumn({
		name: 'resolver_id',
	})
	resolverId: string;

	// Opaque payload; stored as text (encryption, if any, happens upstream).
	@Column('text')
	data: string;

	@ManyToOne(() => CredentialsEntity, { onDelete: 'CASCADE' })
	@JoinColumn({ name: 'credential_id' })
	credential: CredentialsEntity;

	@ManyToOne(() => DynamicCredentialResolver, { onDelete: 'CASCADE' })
	@JoinColumn({ name: 'resolver_id' })
	resolver: DynamicCredentialResolver;
}

View File

@@ -0,0 +1,112 @@
import { testDb, testModules } from '@n8n/backend-test-utils';
import { CredentialsRepository } from '@n8n/db';
import { Container } from '@n8n/di';
import { DynamicCredentialResolverRepository } from '../credential-resolver.repository';
import type { DynamicCredentialResolver } from '../../entities/credential-resolver';
// Integration tests: run against a real test database, so the hook ordering
// (loadModules -> testDb.init -> truncate after each -> terminate) is load-bearing.
describe('DynamicCredentialResolverRepository', () => {
	let resolverRepository: DynamicCredentialResolverRepository;
	let credentialsRepository: CredentialsRepository;
	beforeAll(async () => {
		await testModules.loadModules(['dynamic-credentials']);
		await testDb.init();
		resolverRepository = Container.get(DynamicCredentialResolverRepository);
		credentialsRepository = Container.get(CredentialsRepository);
	});
	afterEach(async () => {
		// Wipe both tables so each test starts from an empty database.
		await testDb.truncate(['CredentialsEntity', 'DynamicCredentialResolver']);
	});
	afterAll(async () => {
		await testDb.terminate();
	});
	// Basic round-trip: save then find by generated ID.
	it('should create and find a resolver', async () => {
		const resolver = resolverRepository.create({
			name: 'Test Resolver',
			type: 'oauth2',
			config: JSON.stringify({ clientId: 'test' }),
		});
		const saved = await resolverRepository.save(resolver);
		const found = await resolverRepository.findOne({ where: { id: saved.id } });
		expect(found).toMatchObject({
			id: saved.id,
			name: 'Test Resolver',
			type: 'oauth2',
		});
	});
	// Verifies the FK relationship between credentials and their resolver.
	describe('relationship with CredentialsEntity', () => {
		let resolver: DynamicCredentialResolver;
		beforeEach(async () => {
			// Fresh resolver per test; afterEach truncation removes it again.
			resolver = await resolverRepository.save(
				resolverRepository.create({
					name: 'Test Resolver',
					type: 'oauth2',
					config: JSON.stringify({}),
				}),
			);
		});
		it('should link credentials to resolver and query them', async () => {
			await credentialsRepository.save([
				credentialsRepository.create({
					name: 'Cred 1',
					type: 'oauth2',
					data: '',
					isResolvable: true,
					resolverId: resolver.id,
				}),
				credentialsRepository.create({
					name: 'Cred 2',
					type: 'oauth2',
					data: '',
					isResolvable: true,
					resolvableAllowFallback: true,
					resolverId: resolver.id,
				}),
			]);
			const linked = await credentialsRepository.find({ where: { resolverId: resolver.id } });
			expect(linked).toHaveLength(2);
			expect(linked[0].resolverId).toBe(resolver.id);
			expect(linked[1].resolvableAllowFallback).toBe(true);
		});
		it('should handle nullable resolverId', async () => {
			// A credential without a resolver must persist with resolverId = NULL.
			const credential = await credentialsRepository.save(
				credentialsRepository.create({
					name: 'Standalone',
					type: 'apiKey',
					data: '',
					isResolvable: false,
				}),
			);
			expect(credential.resolverId).toBeNull();
		});
		it('should set resolverId to null on resolver deletion (CASCADE SET NULL)', async () => {
			const credential = await credentialsRepository.save(
				credentialsRepository.create({
					name: 'Linked',
					type: 'oauth2',
					data: '',
					resolverId: resolver.id,
				}),
			);
			await resolverRepository.delete(resolver.id);
			// The credential row survives; only the FK is nulled by the DB.
			const orphaned = await credentialsRepository.findOne({ where: { id: credential.id } });
			expect(orphaned?.resolverId).toBeNull();
		});
	});
});

View File

@@ -0,0 +1,11 @@
import { Service } from '@n8n/di';
import { DataSource, Repository } from '@n8n/typeorm';
import { DynamicCredentialEntry } from '../entities/dynamic-credential-entry';
// Thin TypeORM repository for DynamicCredentialEntry rows.
// No custom query methods yet; exists so the repository can be DI-injected.
@Service()
export class DynamicCredentialEntryRepository extends Repository<DynamicCredentialEntry> {
	constructor(dataSource: DataSource) {
		super(DynamicCredentialEntry, dataSource.manager);
	}
}

View File

@@ -14,8 +14,9 @@ export class DynamicCredentialsModule implements ModuleInterface {
/**
 * Lazily imports this module's entities so TypeORM only registers them
 * when the dynamic-credentials module is actually enabled.
 */
async entities() {
	const { DynamicCredentialResolver } = await import('./database/entities/credential-resolver');
	const { DynamicCredentialEntry } = await import('./database/entities/dynamic-credential-entry');
	return [DynamicCredentialResolver, DynamicCredentialEntry];
}
@OnShutdown()

View File

@@ -0,0 +1,7 @@
import { UserError } from 'n8n-workflow';
/** Raised when a resolver ID does not match any persisted DynamicCredentialResolver. */
export class DynamicCredentialResolverNotFoundError extends UserError {
	constructor(resolverId: string) {
		const message = `Credential resolver with ID "${resolverId}" does not exist.`;
		super(message);
	}
}

View File

@@ -0,0 +1,308 @@
import type { Logger } from '@n8n/backend-common';
import {
CredentialResolverValidationError,
type CredentialResolverConfiguration,
type ICredentialResolver,
} from '@n8n/decorators';
import type { Cipher } from 'n8n-core';
import { UnexpectedError } from 'n8n-workflow';
import { DynamicCredentialResolver } from '../../database/entities/credential-resolver';
import type { DynamicCredentialResolverRepository } from '../../database/repositories/credential-resolver.repository';
import { DynamicCredentialResolverNotFoundError } from '../../errors/credential-resolver-not-found.error';
import type { DynamicCredentialResolverRegistry } from '../credential-resolver-registry.service';
import { DynamicCredentialResolverService } from '../credential-resolver.service';
// Pure unit tests: every dependency of the service is a jest mock, so the
// assertions pin the exact call sequence (validate -> encrypt -> create -> save).
describe('DynamicCredentialResolverService', () => {
	let service: DynamicCredentialResolverService;
	let mockLogger: jest.Mocked<Logger>;
	let mockRepository: jest.Mocked<DynamicCredentialResolverRepository>;
	let mockRegistry: jest.Mocked<DynamicCredentialResolverRegistry>;
	let mockCipher: jest.Mocked<Cipher>;
	// Shared resolver-implementation stub returned by the registry mock.
	const mockResolverImplementation: jest.Mocked<ICredentialResolver> = {
		metadata: {
			name: 'test.resolver',
			description: 'A test resolver',
		},
		getSecret: jest.fn(),
		setSecret: jest.fn(),
		validateOptions: jest.fn(),
	};
	// Builds a persisted-looking entity; overrides let each test tweak fields.
	const createMockEntity = (
		overrides: Partial<DynamicCredentialResolver> = {},
	): DynamicCredentialResolver => {
		const entity = new DynamicCredentialResolver();
		entity.id = 'resolver-id-123';
		entity.name = 'Test Resolver';
		entity.type = 'test.resolver';
		entity.config = 'encrypted-config-data';
		entity.createdAt = new Date('2024-01-01');
		entity.updatedAt = new Date('2024-01-01');
		Object.assign(entity, overrides);
		return entity;
	};
	beforeEach(() => {
		// Reset call history on the shared mocks between tests.
		jest.clearAllMocks();
		mockLogger = {
			debug: jest.fn(),
			info: jest.fn(),
			warn: jest.fn(),
			error: jest.fn(),
			scoped: jest.fn().mockReturnThis(),
		} as unknown as jest.Mocked<Logger>;
		mockRepository = {
			create: jest.fn(),
			save: jest.fn(),
			find: jest.fn(),
			findOneBy: jest.fn(),
			remove: jest.fn(),
		} as unknown as jest.Mocked<DynamicCredentialResolverRepository>;
		mockRegistry = {
			getResolverByName: jest.fn(),
		} as unknown as jest.Mocked<DynamicCredentialResolverRegistry>;
		mockCipher = {
			encrypt: jest.fn(),
			decrypt: jest.fn(),
		} as unknown as jest.Mocked<Cipher>;
		// Service under test, wired with the mocks above.
		service = new DynamicCredentialResolverService(
			mockLogger,
			mockRepository,
			mockRegistry,
			mockCipher,
		);
	});
	describe('create', () => {
		it('should create a resolver with encrypted config', async () => {
			const config: CredentialResolverConfiguration = { prefix: 'test-prefix' };
			const savedEntity = createMockEntity();
			mockRegistry.getResolverByName.mockReturnValue(mockResolverImplementation);
			mockResolverImplementation.validateOptions.mockResolvedValue(undefined);
			mockCipher.encrypt.mockReturnValue('encrypted-config-data');
			mockRepository.create.mockReturnValue(savedEntity);
			mockRepository.save.mockResolvedValue(savedEntity);
			mockCipher.decrypt.mockReturnValue(JSON.stringify(config));
			const result = await service.create({
				name: 'Test Resolver',
				type: 'test.resolver',
				config,
			});
			expect(mockRegistry.getResolverByName).toHaveBeenCalledWith('test.resolver');
			expect(mockResolverImplementation.validateOptions).toHaveBeenCalledWith(config);
			expect(mockCipher.encrypt).toHaveBeenCalledWith(config);
			expect(mockRepository.create).toHaveBeenCalledWith({
				name: 'Test Resolver',
				type: 'test.resolver',
				config: 'encrypted-config-data',
			});
			expect(mockRepository.save).toHaveBeenCalledWith(savedEntity);
			expect(mockLogger.debug).toHaveBeenCalledWith(
				expect.stringContaining('Created credential resolver'),
			);
			expect(result).toBeDefined();
		});
		it('should throw CredentialResolverValidationError for unknown resolver type', async () => {
			// Registry returns undefined -> service must bail before touching the repo.
			mockRegistry.getResolverByName.mockReturnValue(undefined);
			await expect(
				service.create({
					name: 'Test Resolver',
					type: 'unknown.resolver',
					config: {},
				}),
			).rejects.toThrow(CredentialResolverValidationError);
			expect(mockRepository.create).not.toHaveBeenCalled();
			expect(mockRepository.save).not.toHaveBeenCalled();
		});
		it('should throw CredentialResolverValidationError when config validation fails', async () => {
			const config: CredentialResolverConfiguration = { invalidOption: 'value' };
			mockRegistry.getResolverByName.mockReturnValue(mockResolverImplementation);
			mockResolverImplementation.validateOptions.mockRejectedValue(
				new CredentialResolverValidationError('Invalid option'),
			);
			await expect(
				service.create({
					name: 'Test Resolver',
					type: 'test.resolver',
					config,
				}),
			).rejects.toThrow(CredentialResolverValidationError);
			expect(mockRepository.create).not.toHaveBeenCalled();
			expect(mockRepository.save).not.toHaveBeenCalled();
		});
	});
	describe('findAll', () => {
		it('should return all resolvers with decryptedConfig populated', async () => {
			const entities = [
				createMockEntity({ id: 'id-1', name: 'Resolver 1' }),
				createMockEntity({ id: 'id-2', name: 'Resolver 2' }),
			];
			const decryptedConfig = { prefix: 'test' };
			mockRepository.find.mockResolvedValue(entities);
			mockCipher.decrypt.mockReturnValue(JSON.stringify(decryptedConfig));
			const result = await service.findAll();
			expect(mockRepository.find).toHaveBeenCalled();
			// One decrypt per returned entity.
			expect(mockCipher.decrypt).toHaveBeenCalledTimes(2);
			expect(result).toHaveLength(2);
			expect(result[0].decryptedConfig).toEqual(decryptedConfig);
			expect(result[1].decryptedConfig).toEqual(decryptedConfig);
		});
		it('should return empty array when no resolvers exist', async () => {
			mockRepository.find.mockResolvedValue([]);
			const result = await service.findAll();
			expect(result).toEqual([]);
		});
	});
	describe('findById', () => {
		it('should return resolver with decryptedConfig populated', async () => {
			const entity = createMockEntity();
			const decryptedConfig = { prefix: 'test' };
			mockRepository.findOneBy.mockResolvedValue(entity);
			mockCipher.decrypt.mockReturnValue(JSON.stringify(decryptedConfig));
			const result = await service.findById('resolver-id-123');
			expect(mockRepository.findOneBy).toHaveBeenCalledWith({ id: 'resolver-id-123' });
			expect(mockCipher.decrypt).toHaveBeenCalledWith('encrypted-config-data');
			expect(result.decryptedConfig).toEqual(decryptedConfig);
		});
		it('should throw DynamicCredentialResolverNotFoundError when resolver not found', async () => {
			mockRepository.findOneBy.mockResolvedValue(null);
			await expect(service.findById('non-existent-id')).rejects.toThrow(
				DynamicCredentialResolverNotFoundError,
			);
		});
		it('should throw UnexpectedError when decryption fails', async () => {
			// Decrypt succeeds but yields malformed JSON -> jsonParse failure path.
			const entity = createMockEntity();
			mockRepository.findOneBy.mockResolvedValue(entity);
			mockCipher.decrypt.mockReturnValue('invalid-json{');
			await expect(service.findById('resolver-id-123')).rejects.toThrow(UnexpectedError);
		});
	});
	describe('update', () => {
		it('should update resolver name', async () => {
			const entity = createMockEntity();
			const updatedEntity = createMockEntity({ name: 'Updated Name' });
			const decryptedConfig = { prefix: 'test' };
			mockRepository.findOneBy.mockResolvedValue(entity);
			mockRepository.save.mockResolvedValue(updatedEntity);
			mockCipher.decrypt.mockReturnValue(JSON.stringify(decryptedConfig));
			const result = await service.update('resolver-id-123', { name: 'Updated Name' });
			expect(mockRepository.findOneBy).toHaveBeenCalledWith({ id: 'resolver-id-123' });
			expect(mockRepository.save).toHaveBeenCalled();
			expect(mockLogger.debug).toHaveBeenCalledWith(
				expect.stringContaining('Updated credential resolver'),
			);
			expect(result.name).toBe('Updated Name');
		});
		it('should update resolver config with encryption and validation', async () => {
			const entity = createMockEntity();
			const newConfig: CredentialResolverConfiguration = { prefix: 'new-prefix' };
			const updatedEntity = createMockEntity({ config: 'new-encrypted-config' });
			mockRepository.findOneBy.mockResolvedValue(entity);
			mockRegistry.getResolverByName.mockReturnValue(mockResolverImplementation);
			mockResolverImplementation.validateOptions.mockResolvedValue(undefined);
			mockCipher.encrypt.mockReturnValue('new-encrypted-config');
			mockRepository.save.mockResolvedValue(updatedEntity);
			mockCipher.decrypt.mockReturnValue(JSON.stringify(newConfig));
			await service.update('resolver-id-123', { config: newConfig });
			// Validation runs against the *existing* entity's type, not a caller-supplied one.
			expect(mockRegistry.getResolverByName).toHaveBeenCalledWith('test.resolver');
			expect(mockResolverImplementation.validateOptions).toHaveBeenCalledWith(newConfig);
			expect(mockCipher.encrypt).toHaveBeenCalledWith(newConfig);
			expect(mockRepository.save).toHaveBeenCalled();
		});
		it('should throw DynamicCredentialResolverNotFoundError when resolver not found', async () => {
			mockRepository.findOneBy.mockResolvedValue(null);
			await expect(service.update('non-existent-id', { name: 'New Name' })).rejects.toThrow(
				DynamicCredentialResolverNotFoundError,
			);
			expect(mockRepository.save).not.toHaveBeenCalled();
		});
		it('should throw CredentialResolverValidationError when config validation fails on update', async () => {
			const entity = createMockEntity();
			const invalidConfig: CredentialResolverConfiguration = { badOption: 'value' };
			mockRepository.findOneBy.mockResolvedValue(entity);
			mockRegistry.getResolverByName.mockReturnValue(mockResolverImplementation);
			mockResolverImplementation.validateOptions.mockRejectedValue(
				new CredentialResolverValidationError('Invalid config'),
			);
			await expect(service.update('resolver-id-123', { config: invalidConfig })).rejects.toThrow(
				CredentialResolverValidationError,
			);
			expect(mockRepository.save).not.toHaveBeenCalled();
		});
	});
	describe('delete', () => {
		it('should delete an existing resolver', async () => {
			const entity = createMockEntity();
			mockRepository.findOneBy.mockResolvedValue(entity);
			mockRepository.remove.mockResolvedValue(entity);
			await service.delete('resolver-id-123');
			expect(mockRepository.findOneBy).toHaveBeenCalledWith({ id: 'resolver-id-123' });
			expect(mockRepository.remove).toHaveBeenCalledWith(entity);
			expect(mockLogger.debug).toHaveBeenCalledWith(
				expect.stringContaining('Deleted credential resolver'),
			);
		});
		it('should throw DynamicCredentialResolverNotFoundError when resolver not found', async () => {
			mockRepository.findOneBy.mockResolvedValue(null);
			await expect(service.delete('non-existent-id')).rejects.toThrow(
				DynamicCredentialResolverNotFoundError,
			);
			expect(mockRepository.remove).not.toHaveBeenCalled();
		});
	});
});

View File

@@ -0,0 +1,170 @@
import { Logger } from '@n8n/backend-common';
import {
CredentialResolverConfiguration,
CredentialResolverValidationError,
} from '@n8n/decorators';
import { Service } from '@n8n/di';
import { Cipher } from 'n8n-core';
import { jsonParse, UnexpectedError } from 'n8n-workflow';
import { DynamicCredentialResolverRegistry } from './credential-resolver-registry.service';
import { DynamicCredentialResolver } from '../database/entities/credential-resolver';
import { DynamicCredentialResolverRepository } from '../database/repositories/credential-resolver.repository';
import { DynamicCredentialResolverNotFoundError } from '../errors/credential-resolver-not-found.error';
/** Input for DynamicCredentialResolverService.create. */
export interface CreateResolverParams {
	// Human-readable display name.
	name: string;
	// Resolver type key; must match a resolver registered in the registry.
	type: string;
	// Plaintext configuration; encrypted by the service before persistence.
	config: CredentialResolverConfiguration;
}

/** Partial patch for DynamicCredentialResolverService.update; omitted fields are left unchanged. */
export interface UpdateResolverParams {
	name?: string;
	config?: CredentialResolverConfiguration;
}
/**
 * Service layer for managing DynamicCredentialResolver entities.
 * Provides CRUD operations with:
 * - Config encryption at rest
 * - Validation against resolver type's config schema
 */
@Service()
export class DynamicCredentialResolverService {
	constructor(
		private readonly logger: Logger,
		private readonly repository: DynamicCredentialResolverRepository,
		private readonly registry: DynamicCredentialResolverRegistry,
		private readonly cipher: Cipher,
	) {
		// Scope all log lines from this service under the module's log scope.
		this.logger = this.logger.scoped('dynamic-credentials');
	}
	/**
	 * Creates a new credential resolver.
	 * NOTE(review): unlike findAll/findById/update, the returned entity does NOT
	 * have `decryptedConfig` populated (its `config` is the encrypted string) —
	 * confirm this asymmetry is intended by callers.
	 * @throws {CredentialResolverValidationError} When the resolver type is unknown or config is invalid
	 */
	async create(params: CreateResolverParams): Promise<DynamicCredentialResolver> {
		await this.validateConfig(params.type, params.config);
		const encryptedConfig = this.encryptConfig(params.config);
		const resolver = this.repository.create({
			name: params.name,
			type: params.type,
			config: encryptedConfig,
		});
		const saved = await this.repository.save(resolver);
		this.logger.debug(`Created credential resolver "${saved.name}" (${saved.id})`);
		return saved;
	}
	/**
	 * Retrieves all credential resolvers.
	 * Config is returned decrypted (on the `decryptedConfig` field).
	 */
	async findAll(): Promise<DynamicCredentialResolver[]> {
		const resolvers = await this.repository.find();
		return resolvers.map((resolver) => this.withDecryptedConfig(resolver));
	}
	/**
	 * Retrieves a credential resolver by ID.
	 * Config is returned decrypted (on the `decryptedConfig` field).
	 * @throws {DynamicCredentialResolverNotFoundError} When resolver is not found
	 */
	async findById(id: string): Promise<DynamicCredentialResolver> {
		const resolver = await this.repository.findOneBy({ id });
		if (!resolver) {
			throw new DynamicCredentialResolverNotFoundError(id);
		}
		return this.withDecryptedConfig(resolver);
	}
	/**
	 * Updates an existing credential resolver.
	 * A new config is validated against the *existing* entity's type (the type
	 * itself cannot be changed through this method).
	 * @throws {DynamicCredentialResolverNotFoundError} When resolver is not found
	 * @throws {CredentialResolverValidationError} When the config is invalid for the resolver type
	 */
	async update(id: string, params: UpdateResolverParams): Promise<DynamicCredentialResolver> {
		const existing = await this.repository.findOneBy({ id });
		if (!existing) {
			throw new DynamicCredentialResolverNotFoundError(id);
		}
		if (params.config !== undefined) {
			await this.validateConfig(existing.type, params.config);
			existing.config = this.encryptConfig(params.config);
		}
		if (params.name !== undefined) {
			existing.name = params.name;
		}
		const saved = await this.repository.save(existing);
		this.logger.debug(`Updated credential resolver "${saved.name}" (${saved.id})`);
		return this.withDecryptedConfig(saved);
	}
	/**
	 * Deletes a credential resolver by ID.
	 * @throws {DynamicCredentialResolverNotFoundError} When resolver is not found
	 */
	async delete(id: string): Promise<void> {
		const existing = await this.repository.findOneBy({ id });
		if (!existing) {
			throw new DynamicCredentialResolverNotFoundError(id);
		}
		await this.repository.remove(existing);
		this.logger.debug(`Deleted credential resolver "${existing.name}" (${id})`);
	}
	/**
	 * Validates the config against the resolver type's schema.
	 * @throws {CredentialResolverValidationError} When the resolver type is unknown or config is invalid
	 */
	private async validateConfig(
		type: string,
		config: CredentialResolverConfiguration,
	): Promise<void> {
		const resolverImplementation = this.registry.getResolverByName(type);
		if (!resolverImplementation) {
			throw new CredentialResolverValidationError(`Unknown resolver type: ${type}`);
		}
		await resolverImplementation.validateOptions(config);
	}
	/**
	 * Encrypts the config for storage.
	 * NOTE(review): passes the config object directly to Cipher.encrypt —
	 * assumes the cipher serializes non-string input; confirm against n8n-core's Cipher.
	 */
	private encryptConfig(config: CredentialResolverConfiguration): string {
		return this.cipher.encrypt(config);
	}
	/**
	 * Decrypts the config from storage.
	 */
	private decryptConfig(encryptedConfig: string): CredentialResolverConfiguration {
		const decryptedData = this.cipher.decrypt(encryptedConfig);
		try {
			return jsonParse<CredentialResolverConfiguration>(decryptedData);
		} catch {
			// Decrypt "succeeded" but produced garbage: almost certainly a key mismatch.
			throw new UnexpectedError(
				'Credential resolver config could not be decrypted. The likely reason is that a different "encryptionKey" was used to encrypt the data.',
			);
		}
	}
	/**
	 * Populates the decryptedConfig field on the resolver.
	 * Mutates the given entity in place and returns the same instance.
	 */
	private withDecryptedConfig(resolver: DynamicCredentialResolver): DynamicCredentialResolver {
		resolver.decryptedConfig = this.decryptConfig(resolver.config);
		return resolver;
	}
}

View File

@@ -1 +1,2 @@
export * from './credential-resolver-registry.service';
export * from './credential-resolver.service';

View File

@@ -25,6 +25,8 @@ const mockCredentialsService = (
shared: [] as SharedCredentials[],
isManaged: false,
isGlobal: false,
isResolvable: false,
resolvableAllowFallback: false,
id,
// Methods present on entities via WithTimestampsAndStringId mixin
generateId() {},

View File

@@ -384,6 +384,64 @@ describe('Telemetry', () => {
}),
);
});
test('should include instance_id, version_cli, and user_id in track properties', () => {
const eventName = 'Test Event';
const properties = { user_id: '1234', custom_prop: 'value' };
telemetry.track(eventName, properties);
expect(mockRudderStack.track).toHaveBeenCalledWith(
expect.objectContaining({
event: eventName,
properties: expect.objectContaining({
instance_id: instanceId,
user_id: '1234',
version_cli: expect.any(String),
custom_prop: 'value',
}),
}),
);
});
test('should format userId with user_id when provided', () => {
const eventName = 'Test Event';
const properties = { user_id: '5678' };
telemetry.track(eventName, properties);
expect(mockRudderStack.track).toHaveBeenCalledWith(
expect.objectContaining({
userId: `${instanceId}#5678`,
}),
);
});
test('should format userId without user_id when not provided', () => {
const eventName = 'Test Event';
telemetry.track(eventName, {});
expect(mockRudderStack.track).toHaveBeenCalledWith(
expect.objectContaining({
userId: instanceId,
}),
);
});
test('should set user_id to undefined when not provided in properties', () => {
const eventName = 'Test Event';
telemetry.track(eventName, {});
expect(mockRudderStack.track).toHaveBeenCalledWith(
expect.objectContaining({
properties: expect.objectContaining({
user_id: undefined,
}),
}),
);
});
});
});

View File

@@ -225,6 +225,7 @@ export class Telemetry {
const updatedProperties = {
...properties,
instance_id: instanceId,
user_id: user_id ?? undefined,
version_cli: N8N_VERSION,
};

View File

@@ -149,6 +149,72 @@ describe('TestWebhooks', () => {
expect(webhookService.createWebhookIfNotExists.mock.calls[0][1].node).toBe(webhook2.node);
expect(needsWebhook).toBe(true);
});
test.each([
{ published: true, withSingleWebhookTrigger: true, shouldThrow: true },
{ published: true, withSingleWebhookTrigger: false, shouldThrow: false },
{ published: false, withSingleWebhookTrigger: true, shouldThrow: false },
{ published: false, withSingleWebhookTrigger: false, shouldThrow: false },
] satisfies Array<{
published: boolean;
withSingleWebhookTrigger: boolean;
shouldThrow: boolean;
}>)(
'handles single webhook trigger when workflowIsActive=%s',
async ({ published: workflowIsActive, withSingleWebhookTrigger, shouldThrow }) => {
const workflow = mock<Workflow>();
const regularWebhook = mock<IWebhookData>({
node: 'Webhook',
httpMethod,
path: 'regular-path',
workflowId: workflowEntity.id,
userId,
});
const telegramWebhook = mock<IWebhookData>({
node: 'Telegram Trigger',
httpMethod,
path: 'telegram-path',
workflowId: workflowEntity.id,
userId,
});
const webhookNode = mock<IWorkflowBase['nodes'][number]>({
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
});
const telegramNode = mock<IWorkflowBase['nodes'][number]>({
name: 'Telegram Trigger',
type: 'n8n-nodes-base.telegramTrigger',
});
jest.spyOn(testWebhooks, 'toWorkflow').mockReturnValueOnce(workflow);
jest
.spyOn(WebhookHelpers, 'getWorkflowWebhooks')
.mockReturnValue([regularWebhook, telegramWebhook]);
jest.spyOn(workflow, 'getNode').mockImplementation((name: string) => {
if (name === 'Webhook') return webhookNode;
if (name === 'Telegram Trigger' && withSingleWebhookTrigger) return telegramNode;
return null;
});
if (shouldThrow) {
const promise = testWebhooks.needsWebhook({
...args,
workflowIsActive,
});
await expect(promise).rejects.toThrow(
"Because of limitations in Telegram Trigger, n8n can't listen for test executions at the same time as listening for production ones. Unpublish the workflow to execute.",
);
} else {
const needsWebhook = await testWebhooks.needsWebhook({
...args,
workflowIsActive,
});
expect(needsWebhook).toBe(true);
}
},
);
});
describe('executeWebhook()', () => {

View File

@@ -25,6 +25,7 @@ import type {
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { WebhookNotFoundError } from '@/errors/response-errors/webhook-not-found.error';
import { SingleWebhookTriggerError } from '@/errors/single-webhook-trigger.error';
import { WorkflowMissingIdError } from '@/errors/workflow-missing-id.error';
import { NodeTypes } from '@/node-types';
import { Push } from '@/push';
@@ -36,6 +37,12 @@ import * as WebhookHelpers from '@/webhooks/webhook-helpers';
import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data';
import type { WorkflowRequest } from '@/workflows/workflow.request';
const SINGLE_WEBHOOK_TRIGGERS = [
'n8n-nodes-base.telegramTrigger',
'n8n-nodes-base.slackTrigger',
'n8n-nodes-base.facebookLeadAdsTrigger',
];
/**
* Service for handling the execution of webhooks of manual executions
* that use the [Test URL](https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.webhook/#webhook-urls).
@@ -272,6 +279,7 @@ export class TestWebhooks implements IWebhookManager {
pushRef?: string;
destinationNode?: IDestinationNode;
triggerToStartFrom?: WorkflowRequest.FullManualExecutionFromKnownTriggerPayload['triggerToStartFrom'];
workflowIsActive?: boolean;
}) {
const {
userId,
@@ -281,6 +289,7 @@ export class TestWebhooks implements IWebhookManager {
pushRef,
destinationNode,
triggerToStartFrom,
workflowIsActive,
} = options;
if (!workflowEntity.id) throw new WorkflowMissingIdError(workflowEntity);
@@ -310,6 +319,18 @@ export class TestWebhooks implements IWebhookManager {
return false; // no webhooks found to start a workflow
}
// Check if any webhook is a single webhook trigger and workflow is active
if (workflowIsActive) {
const singleWebhookTrigger = webhooks.find((w) =>
SINGLE_WEBHOOK_TRIGGERS.includes(workflow.getNode(w.node)?.type ?? ''),
);
if (singleWebhookTrigger) {
throw new SingleWebhookTriggerError(
workflow.getNode(singleWebhookTrigger.node)?.name ?? '',
);
}
}
const timeout = setTimeout(
async () => await this.cancelWebhook(workflow.id),
TEST_WEBHOOK_TIMEOUT,

View File

@@ -15,6 +15,7 @@ import {
import type { IWorkflowErrorData } from '@/interfaces';
import type { NodeTypes } from '@/node-types';
import type { TestWebhooks } from '@/webhooks/test-webhooks';
import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data';
import type { WorkflowRunner } from '@/workflow-runner';
import { WorkflowExecutionService } from '@/workflows/workflow-execution.service';
@@ -336,6 +337,66 @@ describe('WorkflowExecutionService', () => {
expect(callArgs.executionMode).toBe('manual');
expect(result).toEqual({ executionId });
});
test('should pass workflowIsActive to testWebhooks.needsWebhook', async () => {
	// ARRANGE — a workflow that is active in storage and starts from a
	// Telegram trigger (a "single webhook" trigger type).
	const userId = 'user-id';
	const user = mock<User>({ id: userId });
	const testWebhooks = mock<TestWebhooks>();
	const workflowRepositoryMock = mock<WorkflowRepository>();
	const telegramTrigger: INode = {
		id: '1',
		typeVersion: 1,
		position: [1, 2],
		parameters: {},
		name: 'Telegram Trigger',
		type: 'n8n-nodes-base.telegramTrigger',
	};
	const activeWorkflowData = {
		id: 'workflow-id',
		name: 'Test Workflow',
		active: true,
		activeVersionId: 'version-123',
		isArchived: false,
		nodes: [telegramTrigger],
		connections: {},
		createdAt: new Date(),
		updatedAt: new Date(),
	};
	// The service consults the repository (not the payload) for active state.
	workflowRepositoryMock.isActive.mockResolvedValue(true);
	const service = new WorkflowExecutionService(
		mock(),
		mock(),
		mock(),
		workflowRepositoryMock,
		nodeTypes,
		testWebhooks,
		workflowRunner,
		mock(),
		mock(),
		mock(),
	);
	const runPayload: WorkflowRequest.FullManualExecutionFromKnownTriggerPayload = {
		workflowData: activeWorkflowData,
		triggerToStartFrom: { name: telegramTrigger.name },
	};
	// Simulate needsWebhook rejecting because the workflow is active.
	testWebhooks.needsWebhook.mockRejectedValue(
		new Error(
			'Cannot test webhook for node "Telegram Trigger" while workflow is active. Please deactivate the workflow first.',
		),
	);

	// ACT + ASSERT — the rejection propagates out of executeManually …
	await expect(service.executeManually(runPayload, user)).rejects.toThrow(
		'Cannot test webhook for node "Telegram Trigger" while workflow is active. Please deactivate the workflow first.',
	);
	// … and needsWebhook received the repository-derived active flag.
	expect(testWebhooks.needsWebhook).toHaveBeenCalledWith(
		expect.objectContaining({
			workflowIsActive: true,
		}),
	);
});
});
describe('selectPinnedTrigger()', () => {

View File

@@ -101,6 +101,9 @@ export class WorkflowExecutionService {
user: User,
pushRef?: string,
): Promise<{ executionId: string } | { waitingForWebhook: boolean }> {
// Check whether this workflow is active.
const workflowIsActive = await this.workflowRepository.isActive(payload.workflowData.id);
// For manual testing always set to not active
payload.workflowData.active = false;
payload.workflowData.activeVersionId = null;
@@ -146,6 +149,7 @@ export class WorkflowExecutionService {
pushRef,
triggerToStartFrom: payload.triggerToStartFrom,
destinationNode: payload.destinationNode,
workflowIsActive,
}))
) {
return { waitingForWebhook: true };
@@ -182,6 +186,7 @@ export class WorkflowExecutionService {
}),
pushRef,
destinationNode: payload.destinationNode,
workflowIsActive,
}))
) {
return { waitingForWebhook: true };

View File

@@ -123,24 +123,6 @@ describe('init()', () => {
});
});
describe('isActive()', () => {
it('should return `true` for active workflow in storage', async () => {
const dbWorkflow = await createActiveWorkflow();
await activeWorkflowManager.init();
await expect(activeWorkflowManager.isActive(dbWorkflow.id)).resolves.toBe(true);
});
it('should return `false` for inactive workflow in storage', async () => {
const dbWorkflow = await createInactiveWorkflow();
await activeWorkflowManager.init();
await expect(activeWorkflowManager.isActive(dbWorkflow.id)).resolves.toBe(false);
});
});
describe('add()', () => {
describe('in single-main mode', () => {
test.each(['activate', 'update'])(

View File

@@ -44,6 +44,7 @@ const { any } = expect;
const testServer = setupTestServer({
endpointGroups: ['credentials'],
enabledFeatures: ['feat:sharing'],
});
let owner: User;

View File

@@ -6,6 +6,7 @@ import {
createWorkflow,
testDb,
getWorkflowById,
setActiveVersion,
} from '@n8n/backend-test-utils';
import { GlobalConfig } from '@n8n/config';
import { WorkflowRepository, WorkflowDependencyRepository, WorkflowDependencies } from '@n8n/db';
@@ -262,6 +263,25 @@ describe('WorkflowRepository', () => {
});
});
describe('isActive()', () => {
it('should return `true` for active workflow in storage', async () => {
const workflowRepository = Container.get(WorkflowRepository);
const workflow = await createWorkflowWithHistory();
await setActiveVersion(workflow.id, workflow.versionId);
await expect(workflowRepository.isActive(workflow.id)).resolves.toBe(true);
});
it('should return `false` for inactive workflow in storage', async () => {
const workflowRepository = Container.get(WorkflowRepository);
const workflow = await createWorkflowWithHistory();
await expect(workflowRepository.isActive(workflow.id)).resolves.toBe(false);
});
});
// NOTE: these tests use the workflow dependency repository, which is not enabled
// on legacy Sqlite.
const globalConfig = Container.get(GlobalConfig);

View File

@@ -0,0 +1,263 @@
import { testDb, testModules } from '@n8n/backend-test-utils';
import { Container } from '@n8n/di';
import { DynamicCredentialEntryStorage } from '@/modules/dynamic-credentials.ee/credential-resolvers/storage/dynamic-credential-entry-storage';
import { createCredentials } from '../shared/db/credentials';
import { createDynamicCredentialResolver } from './shared/db-helpers';
describe('DynamicCredentialEntryStorage', () => {
let storage: DynamicCredentialEntryStorage;
beforeAll(async () => {
await testModules.loadModules(['dynamic-credentials']);
await testDb.init();
storage = Container.get(DynamicCredentialEntryStorage);
});
afterAll(async () => {
await testDb.terminate();
});
beforeEach(async () => {
await testDb.truncate([
'DynamicCredentialEntry',
'DynamicCredentialResolver',
'CredentialsEntity',
]);
});
it('should store and retrieve credential data', async () => {
// ARRANGE
const credential = await createCredentials({
name: 'Test Credential',
type: 'testType',
data: 'test-data',
});
const resolver = await createDynamicCredentialResolver({
name: 'test-resolver',
type: 'test',
config: 'test-data',
});
const testData = 'encrypted-credential-data';
// ACT - Store
await storage.setCredentialData(credential.id, 'test-subject', resolver.id, testData, {});
// ACT - Retrieve
const retrievedData = await storage.getCredentialData(
credential.id,
'test-subject',
resolver.id,
{},
);
// ASSERT
expect(retrievedData).toBe(testData);
});
it('should update existing credential data (upsert)', async () => {
// ARRANGE
const credential = await createCredentials({
name: 'Test Credential',
type: 'testType',
data: 'test-data',
});
const resolver = await createDynamicCredentialResolver({
name: 'test-resolver',
type: 'test',
config: 'test-data',
});
// ACT - Insert
await storage.setCredentialData(
credential.id,
'upsert-subject',
resolver.id,
'original-data',
{},
);
// ACT - Update
await storage.setCredentialData(
credential.id,
'upsert-subject',
resolver.id,
'updated-data',
{},
);
// ACT - Retrieve
const data = await storage.getCredentialData(credential.id, 'upsert-subject', resolver.id, {});
// ASSERT
expect(data).toBe('updated-data');
});
it('should delete credential data', async () => {
// ARRANGE
const credential = await createCredentials({
name: 'Test Credential',
type: 'testType',
data: 'test-data',
});
const resolver = await createDynamicCredentialResolver({
name: 'test-resolver',
type: 'test',
config: 'test-data',
});
const testData = 'data-to-delete';
// Store data first
await storage.setCredentialData(credential.id, 'delete-subject', resolver.id, testData, {});
// Verify it exists
const beforeDelete = await storage.getCredentialData(
credential.id,
'delete-subject',
resolver.id,
{},
);
expect(beforeDelete).toBe(testData);
// ACT - Delete
await storage.deleteCredentialData(credential.id, 'delete-subject', resolver.id, {});
// ASSERT - Verify it's gone
const afterDelete = await storage.getCredentialData(
credential.id,
'delete-subject',
resolver.id,
{},
);
expect(afterDelete).toBeNull();
});
it('should isolate entries by composite key (multiple entries do not affect each other)', async () => {
// ARRANGE
const credential1 = await createCredentials({
name: 'Credential 1',
type: 'testType',
data: 'test-data-1',
});
const credential2 = await createCredentials({
name: 'Credential 2',
type: 'testType',
data: 'test-data-2',
});
const resolver1 = await createDynamicCredentialResolver({
name: 'resolver-1',
type: 'test',
config: 'test-config-1',
});
const resolver2 = await createDynamicCredentialResolver({
name: 'resolver-2',
type: 'test',
config: 'test-config-2',
});
// ACT - Create multiple entries with different combinations
// Same credential, different subjects
await storage.setCredentialData(
credential1.id,
'subject-A',
resolver1.id,
'data-cred1-subjA-res1',
{},
);
await storage.setCredentialData(
credential1.id,
'subject-B',
resolver1.id,
'data-cred1-subjB-res1',
{},
);
// Same credential and subject, different resolver
await storage.setCredentialData(
credential1.id,
'subject-A',
resolver2.id,
'data-cred1-subjA-res2',
{},
);
// Different credential, same subject and resolver
await storage.setCredentialData(
credential2.id,
'subject-A',
resolver1.id,
'data-cred2-subjA-res1',
{},
);
// ASSERT - Each entry should be isolated and return correct data
const data1 = await storage.getCredentialData(credential1.id, 'subject-A', resolver1.id, {});
expect(data1).toBe('data-cred1-subjA-res1');
const data2 = await storage.getCredentialData(credential1.id, 'subject-B', resolver1.id, {});
expect(data2).toBe('data-cred1-subjB-res1');
const data3 = await storage.getCredentialData(credential1.id, 'subject-A', resolver2.id, {});
expect(data3).toBe('data-cred1-subjA-res2');
const data4 = await storage.getCredentialData(credential2.id, 'subject-A', resolver1.id, {});
expect(data4).toBe('data-cred2-subjA-res1');
// ACT - Update one entry
await storage.setCredentialData(
credential1.id,
'subject-A',
resolver1.id,
'updated-data-cred1-subjA-res1',
{},
);
// ASSERT - Only the updated entry should change, others remain unchanged
const updatedData1 = await storage.getCredentialData(
credential1.id,
'subject-A',
resolver1.id,
{},
);
expect(updatedData1).toBe('updated-data-cred1-subjA-res1');
const unchangedData2 = await storage.getCredentialData(
credential1.id,
'subject-B',
resolver1.id,
{},
);
expect(unchangedData2).toBe('data-cred1-subjB-res1');
const unchangedData3 = await storage.getCredentialData(
credential1.id,
'subject-A',
resolver2.id,
{},
);
expect(unchangedData3).toBe('data-cred1-subjA-res2');
// ACT - Delete one entry
await storage.deleteCredentialData(credential1.id, 'subject-A', resolver1.id, {});
// ASSERT - Deleted entry should be gone, others remain
const deletedData = await storage.getCredentialData(
credential1.id,
'subject-A',
resolver1.id,
{},
);
expect(deletedData).toBeNull();
const stillExistingData = await storage.getCredentialData(
credential1.id,
'subject-B',
resolver1.id,
{},
);
expect(stillExistingData).toBe('data-cred1-subjB-res1');
});
});

View File

@@ -0,0 +1,459 @@
import { testDb, testModules } from '@n8n/backend-test-utils';
import { CredentialsRepository } from '@n8n/db';
import { Container } from '@n8n/di';
import { DynamicCredentialEntryRepository } from '@/modules/dynamic-credentials.ee/database/repositories/dynamic-credential-entry.repository';
import { DynamicCredentialEntry } from '@/modules/dynamic-credentials.ee/database/entities/dynamic-credential-entry';
import { DynamicCredentialResolverRepository } from '@/modules/dynamic-credentials.ee/database/repositories/credential-resolver.repository';
import { createCredentials } from '../shared/db/credentials';
import { createDynamicCredentialResolver } from './shared/db-helpers';
describe('DynamicCredentialEntryRepository', () => {
let repository: DynamicCredentialEntryRepository;
beforeAll(async () => {
await testModules.loadModules(['dynamic-credentials']);
await testDb.init();
repository = Container.get(DynamicCredentialEntryRepository);
});
afterAll(async () => {
await testDb.terminate();
});
beforeEach(async () => {
await testDb.truncate([
'DynamicCredentialEntry',
'DynamicCredentialResolver',
'CredentialsEntity',
]);
});
it('should save and retrieve a dynamic credential entry', async () => {
// ARRANGE
const credential = await createCredentials({
name: 'Test Credential',
type: 'testType',
data: 'test-data',
});
const resolver = await createDynamicCredentialResolver({
name: 'test-resolver',
type: 'test',
config: 'test-data',
});
const entry = new DynamicCredentialEntry();
entry.credentialId = credential.id;
entry.subjectId = 'subject-123';
entry.resolverId = resolver.id;
entry.data = 'encrypted-test-data';
// ACT
const savedEntry = await repository.save(entry);
// Retrieve it back
const foundEntry = await repository.findOne({
where: {
credentialId: credential.id,
subjectId: 'subject-123',
resolverId: resolver.id,
},
});
// ASSERT
expect(savedEntry).toBeDefined();
expect(savedEntry.credentialId).toBe(credential.id);
expect(savedEntry.subjectId).toBe('subject-123');
expect(savedEntry.resolverId).toBe(resolver.id);
expect(savedEntry.data).toBe('encrypted-test-data');
expect(savedEntry.createdAt).toBeInstanceOf(Date);
expect(savedEntry.updatedAt).toBeInstanceOf(Date);
expect(foundEntry).toBeDefined();
expect(foundEntry?.data).toBe('encrypted-test-data');
});
it('should cascade delete entries when credential is deleted', async () => {
// ARRANGE
const credential = await createCredentials({
name: 'Test Credential',
type: 'testType',
data: 'test-data',
});
const resolver = await createDynamicCredentialResolver({
name: 'test-resolver',
type: 'test',
config: 'test-data',
});
// Create multiple entries for the same credential
const entry1 = new DynamicCredentialEntry();
entry1.credentialId = credential.id;
entry1.subjectId = 'subject-1';
entry1.resolverId = resolver.id;
entry1.data = 'data-1';
const entry2 = new DynamicCredentialEntry();
entry2.credentialId = credential.id;
entry2.subjectId = 'subject-2';
entry2.resolverId = resolver.id;
entry2.data = 'data-2';
await repository.save(entry1);
await repository.save(entry2);
// Verify entries exist
const entriesBeforeDelete = await repository.find({
where: {
credentialId: credential.id,
},
});
expect(entriesBeforeDelete).toHaveLength(2);
// ACT - Delete the credential
const credentialsRepository = Container.get(CredentialsRepository);
await credentialsRepository.delete({ id: credential.id });
// ASSERT - All entries for this credential should be cascade deleted
const entriesAfterDelete = await repository.find({
where: {
credentialId: credential.id,
},
});
expect(entriesAfterDelete).toHaveLength(0);
});
it('should cascade delete entries when resolver is deleted', async () => {
// ARRANGE
const credential1 = await createCredentials({
name: 'Credential 1',
type: 'testType',
data: 'test-data-1',
});
const credential2 = await createCredentials({
name: 'Credential 2',
type: 'testType',
data: 'test-data-2',
});
const resolver = await createDynamicCredentialResolver({
name: 'test-resolver',
type: 'test',
config: 'test-data',
});
// Create entries for multiple credentials using the same resolver
const entry1 = new DynamicCredentialEntry();
entry1.credentialId = credential1.id;
entry1.subjectId = 'subject-1';
entry1.resolverId = resolver.id;
entry1.data = 'data-1';
const entry2 = new DynamicCredentialEntry();
entry2.credentialId = credential2.id;
entry2.subjectId = 'subject-2';
entry2.resolverId = resolver.id;
entry2.data = 'data-2';
await repository.save(entry1);
await repository.save(entry2);
// Verify entries exist
const entriesBeforeDelete = await repository.find({
where: {
resolverId: resolver.id,
},
});
expect(entriesBeforeDelete).toHaveLength(2);
// ACT - Delete the resolver
const resolverRepository = Container.get(DynamicCredentialResolverRepository);
await resolverRepository.delete({ id: resolver.id });
// ASSERT - All entries for this resolver should be cascade deleted
const entriesAfterDelete = await repository.find({
where: {
resolverId: resolver.id,
},
});
expect(entriesAfterDelete).toHaveLength(0);
});
it('should fetch CredentialsEntity through ManyToOne relationship', async () => {
// ARRANGE
const credential = await createCredentials({
name: 'Test Credential for Relationship',
type: 'testType',
data: 'test-data',
});
const resolver = await createDynamicCredentialResolver({
name: 'test-resolver',
type: 'test',
config: 'test-data',
});
const entry = new DynamicCredentialEntry();
entry.credentialId = credential.id;
entry.subjectId = 'subject-123';
entry.resolverId = resolver.id;
entry.data = 'encrypted-test-data';
await repository.save(entry);
// ACT - Fetch entry with credential relationship loaded
const foundEntry = await repository.findOne({
where: {
credentialId: credential.id,
subjectId: 'subject-123',
resolverId: resolver.id,
},
relations: ['credential'],
});
// ASSERT
expect(foundEntry).toBeDefined();
expect(foundEntry?.credential).toBeDefined();
expect(foundEntry?.credential.id).toBe(credential.id);
expect(foundEntry?.credential.name).toBe('Test Credential for Relationship');
expect(foundEntry?.credential.type).toBe('testType');
});
it('should fetch DynamicCredentialResolver through ManyToOne relationship', async () => {
// ARRANGE
const credential = await createCredentials({
name: 'Test Credential',
type: 'testType',
data: 'test-data',
});
const resolver = await createDynamicCredentialResolver({
name: 'test-resolver-for-relationship',
type: 'test-type',
config: 'test-config-data',
});
const entry = new DynamicCredentialEntry();
entry.credentialId = credential.id;
entry.subjectId = 'subject-456';
entry.resolverId = resolver.id;
entry.data = 'encrypted-test-data';
await repository.save(entry);
// ACT - Fetch entry with resolver relationship loaded
const foundEntry = await repository.findOne({
where: {
credentialId: credential.id,
subjectId: 'subject-456',
resolverId: resolver.id,
},
relations: ['resolver'],
});
// ASSERT
expect(foundEntry).toBeDefined();
expect(foundEntry?.resolver).toBeDefined();
expect(foundEntry?.resolver.id).toBe(resolver.id);
expect(foundEntry?.resolver.name).toBe('test-resolver-for-relationship');
expect(foundEntry?.resolver.type).toBe('test-type');
});
it('should filter entries by credential type using find method', async () => {
// ARRANGE
const credential1 = await createCredentials({
name: 'OAuth Credential',
type: 'oAuth2Api',
data: 'oauth-data',
});
const credential2 = await createCredentials({
name: 'API Key Credential',
type: 'apiKeyAuth',
data: 'api-key-data',
});
const credential3 = await createCredentials({
name: 'Another OAuth Credential',
type: 'oAuth2Api',
data: 'oauth-data-2',
});
const resolver = await createDynamicCredentialResolver({
name: 'test-resolver',
type: 'test',
config: 'test-data',
});
// Create entries for different credential types
const entry1 = new DynamicCredentialEntry();
entry1.credentialId = credential1.id;
entry1.subjectId = 'subject-1';
entry1.resolverId = resolver.id;
entry1.data = 'data-1';
const entry2 = new DynamicCredentialEntry();
entry2.credentialId = credential2.id;
entry2.subjectId = 'subject-2';
entry2.resolverId = resolver.id;
entry2.data = 'data-2';
const entry3 = new DynamicCredentialEntry();
entry3.credentialId = credential3.id;
entry3.subjectId = 'subject-3';
entry3.resolverId = resolver.id;
entry3.data = 'data-3';
await repository.save([entry1, entry2, entry3]);
// ACT - Query entries where credential type is 'oAuth2Api'
const oauthEntries = await repository.find({
where: {
credential: {
type: 'oAuth2Api',
},
},
relations: ['credential'],
});
// ASSERT
expect(oauthEntries).toHaveLength(2);
expect(oauthEntries.every((entry) => entry.credential.type === 'oAuth2Api')).toBe(true);
expect(oauthEntries.map((e) => e.subjectId).sort()).toEqual(['subject-1', 'subject-3']);
});
it('should filter entries by resolver type using find method', async () => {
// ARRANGE
const credential = await createCredentials({
name: 'Test Credential',
type: 'testType',
data: 'test-data',
});
const resolver1 = await createDynamicCredentialResolver({
name: 'AWS Resolver',
type: 'aws-secrets-manager',
config: 'aws-config',
});
const resolver2 = await createDynamicCredentialResolver({
name: 'Azure Resolver',
type: 'azure-key-vault',
config: 'azure-config',
});
const resolver3 = await createDynamicCredentialResolver({
name: 'Another AWS Resolver',
type: 'aws-secrets-manager',
config: 'aws-config-2',
});
// Create entries for different resolver types
const entry1 = new DynamicCredentialEntry();
entry1.credentialId = credential.id;
entry1.subjectId = 'subject-1';
entry1.resolverId = resolver1.id;
entry1.data = 'data-1';
const entry2 = new DynamicCredentialEntry();
entry2.credentialId = credential.id;
entry2.subjectId = 'subject-2';
entry2.resolverId = resolver2.id;
entry2.data = 'data-2';
const entry3 = new DynamicCredentialEntry();
entry3.credentialId = credential.id;
entry3.subjectId = 'subject-3';
entry3.resolverId = resolver3.id;
entry3.data = 'data-3';
await repository.save([entry1, entry2, entry3]);
// ACT - Query entries where resolver type is 'aws-secrets-manager'
const awsEntries = await repository.find({
where: {
resolver: {
type: 'aws-secrets-manager',
},
},
relations: ['resolver'],
});
// ASSERT
expect(awsEntries).toHaveLength(2);
expect(awsEntries.every((entry) => entry.resolver.type === 'aws-secrets-manager')).toBe(true);
expect(awsEntries.map((e) => e.subjectId).sort()).toEqual(['subject-1', 'subject-3']);
});
it('should filter entries by both credential type and resolver type using find method', async () => {
// ARRANGE
const credential1 = await createCredentials({
name: 'OAuth Credential 1',
type: 'oAuth2Api',
data: 'oauth-data-1',
});
const credential2 = await createCredentials({
name: 'OAuth Credential 2',
type: 'oAuth2Api',
data: 'oauth-data-2',
});
const credential3 = await createCredentials({
name: 'API Key Credential',
type: 'apiKeyAuth',
data: 'api-key-data',
});
const resolver1 = await createDynamicCredentialResolver({
name: 'AWS Resolver',
type: 'aws-secrets-manager',
config: 'aws-config',
});
const resolver2 = await createDynamicCredentialResolver({
name: 'Azure Resolver',
type: 'azure-key-vault',
config: 'azure-config',
});
// Create entries with various combinations
const entry1 = new DynamicCredentialEntry();
entry1.credentialId = credential1.id;
entry1.subjectId = 'subject-1';
entry1.resolverId = resolver1.id;
entry1.data = 'data-1';
const entry2 = new DynamicCredentialEntry();
entry2.credentialId = credential1.id;
entry2.subjectId = 'subject-2';
entry2.resolverId = resolver2.id;
entry2.data = 'data-2';
const entry3 = new DynamicCredentialEntry();
entry3.credentialId = credential2.id;
entry3.subjectId = 'subject-3';
entry3.resolverId = resolver1.id;
entry3.data = 'data-3';
const entry4 = new DynamicCredentialEntry();
entry4.credentialId = credential3.id;
entry4.subjectId = 'subject-4';
entry4.resolverId = resolver1.id;
entry4.data = 'data-4';
await repository.save([entry1, entry2, entry3, entry4]);
// ACT - Query entries where credential type is 'oAuth2Api' AND resolver type is 'aws-secrets-manager'
const filteredEntries = await repository.find({
where: {
credential: {
type: 'oAuth2Api',
},
resolver: {
type: 'aws-secrets-manager',
},
},
relations: ['credential', 'resolver'],
});
// ASSERT - Should only return entries with both OAuth credentials and AWS resolver
expect(filteredEntries).toHaveLength(2);
expect(
filteredEntries.every(
(entry) =>
entry.credential.type === 'oAuth2Api' && entry.resolver.type === 'aws-secrets-manager',
),
).toBe(true);
expect(filteredEntries.map((e) => e.subjectId).sort()).toEqual(['subject-1', 'subject-3']);
});
});

View File

@@ -0,0 +1,22 @@
import { Container } from '@n8n/di';
import type { DynamicCredentialResolver } from '@/modules/dynamic-credentials.ee/database/entities/credential-resolver';
import { DynamicCredentialResolverRepository } from '@/modules/dynamic-credentials.ee/database/repositories/credential-resolver.repository';
/**
 * Creates and persists a `DynamicCredentialResolver` for testing.
 *
 * Any attribute the caller does not supply (or supplies as `undefined`)
 * falls back to a sensible default, so tests can override only the fields
 * they care about.
 */
export async function createDynamicCredentialResolver(
	attributes: Partial<DynamicCredentialResolver>,
): Promise<DynamicCredentialResolver> {
	const repository = Container.get(DynamicCredentialResolverRepository);
	const resolver = repository.create({
		...attributes,
		// Defaults are applied AFTER the spread so an explicitly-undefined
		// attribute (e.g. `{ name: undefined }`) cannot clobber them. The
		// previous order (defaults first, spread last) made the defaults dead
		// whenever the key was present on `attributes`.
		name: attributes.name ?? 'test-resolver',
		type: attributes.type ?? 'test-type',
		config: attributes.config ?? '{}',
	});
	return await repository.save(resolver);
}

View File

@@ -9,7 +9,12 @@ import {
} from '@n8n/backend-test-utils';
import { GlobalConfig } from '@n8n/config';
import type { Project, TagEntity, User, WorkflowHistory } from '@n8n/db';
import { ProjectRepository, WorkflowHistoryRepository, SharedWorkflowRepository } from '@n8n/db';
import {
WorkflowRepository,
ProjectRepository,
WorkflowHistoryRepository,
SharedWorkflowRepository,
} from '@n8n/db';
import { Container } from '@n8n/di';
import { Not } from '@n8n/typeorm';
import { InstanceSettings } from 'n8n-core';
@@ -37,6 +42,7 @@ let memberPersonalProject: Project;
let authOwnerAgent: SuperAgentTest;
let authMemberAgent: SuperAgentTest;
let activeWorkflowManager: ActiveWorkflowManager;
let workflowRepository: WorkflowRepository;
const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] });
const license = testServer.license;
@@ -61,6 +67,7 @@ beforeAll(async () => {
await utils.initNodeTypes();
activeWorkflowManager = Container.get(ActiveWorkflowManager);
workflowRepository = Container.get(WorkflowRepository);
await activeWorkflowManager.init();
});
@@ -898,7 +905,7 @@ describe('POST /workflows/:id/activate', () => {
expect(sharedWorkflow?.workflow.activeVersionId).toBe(workflow.versionId);
// check whether the workflow is on the active workflow runner
expect(await activeWorkflowManager.isActive(workflow.id)).toBe(true);
expect(await workflowRepository.isActive(workflow.id)).toBe(true);
});
test('should set activeVersionId when activating workflow', async () => {
@@ -974,7 +981,7 @@ describe('POST /workflows/:id/activate', () => {
expect(sharedWorkflow?.workflow.activeVersionId).toBe(workflow.versionId);
// check whether the workflow is on the active workflow runner
expect(await activeWorkflowManager.isActive(workflow.id)).toBe(true);
expect(await workflowRepository.isActive(workflow.id)).toBe(true);
});
});
@@ -1039,7 +1046,7 @@ describe('POST /workflows/:id/deactivate', () => {
// check whether the workflow is deactivated in the database
expect(sharedWorkflow?.workflow.activeVersionId).toBeNull();
expect(await activeWorkflowManager.isActive(workflow.id)).toBe(false);
expect(await workflowRepository.isActive(workflow.id)).toBe(false);
});
test('should clear activeVersionId when deactivating workflow', async () => {
@@ -1126,7 +1133,7 @@ describe('POST /workflows/:id/deactivate', () => {
expect(sharedWorkflow?.workflow.activeVersionId).toBeNull();
expect(await activeWorkflowManager.isActive(workflow.id)).toBe(false);
expect(await workflowRepository.isActive(workflow.id)).toBe(false);
});
});

View File

@@ -624,5 +624,46 @@ describe('findStartNodes', () => {
expect(startNodes.size).toBe(1);
expect(startNodes).toContainEqual(afterLoop);
});
// done (empty)
//                      ┌────►
// ┌─────────┐1  ┌────┐1│
// │ trigger ┼──►│loop┼─┬┘          ►►
// └─────────┘   └────┘ │  ┌─────────┐
//                      └─►│ in loop │
//                         └─────────┘
test('if a loop node does not actually form a loop in the graph, it uses loop output instead of done output', () => {
	// ARRANGE
	const trigger = createNodeData({ name: 'trigger' });
	const loop = createNodeData({ name: 'loop', type: 'n8n-nodes-base.splitInBatches' });
	const inLoop = createNodeData({ name: 'inLoop' });
	const graph = new DirectedGraph().addNodes(trigger, loop, inLoop).addConnections(
		{ from: trigger, to: loop },
		// Note: loop connects to inLoop via output 1 ('loop'), but there is no
		// connection from inLoop back to loop, so the graph contains no cycle.
		{ from: loop, outputIndex: 1, to: inLoop },
	);
	const runData: IRunData = {
		[trigger.name]: [toITaskData([{ data: { name: 'trigger' } }])],
		// The loop node has data on output 1 (the 'loop' output), but not on
		// output 0 (the 'done' output).
		[loop.name]: [toITaskData([{ outputIndex: 1, data: { name: 'loop' } }])],
	};

	// ACT
	const startNodes = findStartNodes({
		graph,
		trigger,
		destination: inLoop,
		runData,
		pinData: {},
	});

	// ASSERT
	// Because the loop node doesn't actually sit on a cycle, findStartNodes
	// should check output 1 (not output 0) for run data. Since output 1 has
	// data, the loop node is not a start node and the search continues on to
	// inLoop, which becomes the single start node.
	expect(startNodes.size).toBe(1);
	expect(startNodes).toContainEqual(inLoop);
});
});
});

View File

@@ -83,7 +83,11 @@ function findStartNodesRecursive(
// last run
-1,
NodeConnectionTypes.Main,
0,
// Although this is a Loop node, the graph may not actually have a loop here e.g.,
// while the workflow is under development. If there's not a loop, we treat the loop
// node as a normal node and take the data from the first output at index 1.
// If there *is* a loop, we take the data from the `done` output at index 0.
isALoop(graph, current) ? 0 : 1,
);
if (nodeRunData === null || nodeRunData.length === 0) {
@@ -130,6 +134,10 @@ function findStartNodesRecursive(
return startNodes;
}
/**
 * Returns true if `node` sits on a cycle that leads back to itself, i.e. the
 * node appears among its own children in the graph.
 *
 * NOTE(review): this assumes `DirectedGraph.getChildren` returns descendants
 * transitively — confirm, since a direct-children-only implementation would
 * miss loops spanning more than one intermediate node.
 */
function isALoop(graph: DirectedGraph, node: INode): boolean {
	return graph.getChildren(node).has(node);
}
/**
* The start node is the node from which a partial execution starts. The start
* node will be executed or re-executed.

View File

@@ -3083,7 +3083,6 @@
"workflowPreview.showError.previewError.title": "Preview error",
"workflowRun.noActiveConnectionToTheServer": "Lost connection to the server",
"workflowRun.showError.deactivate": "Deactivate workflow to execute",
"workflowRun.showError.productionActive": "Because of limitations in {nodeName}, n8n can't listen for test executions at the same time as listening for production ones",
"workflowRun.showError.title": "Problem running workflow",
"workflowRun.showError.payloadTooLarge": "Please execute the whole workflow, rather than just the node. (Existing execution data is too large.)",
"workflowRun.showError.resolveOutstandingIssues": "Please resolve outstanding issues before you activate it",
@@ -3471,13 +3470,15 @@
"dataTable.addColumn.nameInput.placeholder": "Enter column name",
"dataTable.addColumn.typeInput.label": "@:_reusableBaseText.type",
"dataTable.addColumn.error": "Error adding column",
"dataTable.addColumn.alreadyExistsError": "This column already exists",
"dataTable.column.alreadyExistsError": "This column already exists",
"dataTable.moveColumn.error": "Error moving column",
"dataTable.deleteColumn.error": "Error deleting column",
"dataTable.deleteColumn.confirm.title": "Delete column",
"dataTable.deleteColumn.confirm.message": "Are you sure you want to delete the column '{name}'? This action cannot be undone.",
"dataTable.addColumn.invalidName.error": "Invalid column name",
"dataTable.addColumn.invalidName.description": "Column names must begin with a letter and can only include letters, numbers, or underscores",
"dataTable.renameColumn.label": "Rename column",
"dataTable.renameColumn.error": "Error renaming column",
"dataTable.fetchContent.error": "Error fetching data table content",
"dataTable.addRow.label": "Add Row",
"dataTable.addRow.error": "Error adding row",

View File

@@ -147,4 +147,52 @@ Object.defineProperty(HTMLElement.prototype, 'scrollTo', {
value: vi.fn(),
});
// Test-environment stub for the Web Speech API's SpeechSynthesisUtterance.
// Only the fields and event-handler slots the code under test may touch are
// modelled; the event methods are inert vi.fn() spies.
class SpeechSynthesisUtterance {
text = '';
lang = '';
voice = null;
volume = 1;
rate = 1;
pitch = 1;
onstart = null;
onend = null;
onerror = null;
onpause = null;
onresume = null;
onmark = null;
onboundary = null;
// Mirrors the real constructor shape: optional initial utterance text.
constructor(text?: string) {
if (text) {
this.text = text;
}
}
addEventListener = vi.fn();
removeEventListener = vi.fn();
dispatchEvent = vi.fn(() => true);
}
// Install the stub constructor on window so browser-style lookups resolve
// during tests; `writable` lets individual tests replace it if needed.
Object.defineProperty(window, 'SpeechSynthesisUtterance', {
writable: true,
value: SpeechSynthesisUtterance,
});
// Stub of window.speechSynthesis: spy methods, no voices, and idle state
// flags (nothing pending, speaking, or paused).
Object.defineProperty(window, 'speechSynthesis', {
writable: true,
value: {
cancel: vi.fn(),
speak: vi.fn(),
pause: vi.fn(),
resume: vi.fn(),
getVoices: vi.fn(() => []),
pending: false,
speaking: false,
paused: false,
addEventListener: vi.fn(),
removeEventListener: vi.fn(),
dispatchEvent: vi.fn(() => true),
},
});
loadLanguage('en', englishBaseText as LocaleMessages);

View File

@@ -102,16 +102,35 @@ describe('usePinnedData', () => {
});
describe('onSetDataSuccess()', () => {
it('should trigger telemetry on successful data setting', async () => {
it('should trigger telemetry on successful data setting with correct payload values', async () => {
const workflowsStore = useWorkflowsStore();
workflowsStore.workflow.id = 'test-workflow-id';
const telemetry = useTelemetry();
const spy = vi.spyOn(telemetry, 'track');
const pinnedData = usePinnedData(ref({ name: 'testNode', type: 'someType' } as INodeUi), {
const node = ref({
name: 'testNode',
type: 'n8n-nodes-base.httpRequest',
id: 'test-node-id',
} as INodeUi);
const pinnedData = usePinnedData(node, {
displayMode: ref('json'),
runIndex: ref(0),
runIndex: ref(2),
});
pinnedData.onSetDataSuccess({ source: 'pin-icon-click' });
expect(spy).toHaveBeenCalled();
expect(spy).toHaveBeenCalledWith(
'Ndv data pinning success',
expect.objectContaining({
pinning_source: 'pin-icon-click',
node_type: 'n8n-nodes-base.httpRequest',
view: 'json',
run_index: 2,
workflow_id: 'test-workflow-id',
node_id: 'test-node-id',
}),
);
});
});

View File

@@ -215,6 +215,8 @@ export function usePinnedData(
data_size: stringSizeInBytes(data.value),
view: displayMode,
run_index: runIndex,
workflow_id: workflowsStore.workflowId,
node_id: targetNode?.id,
};
void externalHooks.run('runData.onDataPinningSuccess', telemetryPayload);

View File

@@ -25,14 +25,11 @@ import type { WorkflowData } from '@n8n/rest-api-client/api/workflows';
import { useWorkflowsStore } from '@/app/stores/workflows.store';
import { useUIStore } from '@/app/stores/ui.store';
import { useWorkflowHelpers } from '@/app/composables/useWorkflowHelpers';
import { useToast } from './useToast';
import { useI18n } from '@n8n/i18n';
import { captor, mock } from 'vitest-mock-extended';
import { usePushConnectionStore } from '@/app/stores/pushConnection.store';
import { createTestNode, createTestWorkflow } from '@/__tests__/mocks';
import { waitFor } from '@testing-library/vue';
import { useAgentRequestStore } from '@n8n/stores/useAgentRequestStore';
import { SLACK_TRIGGER_NODE_TYPE, MANUAL_TRIGGER_NODE_TYPE } from '@/app/constants';
vi.mock('@/app/stores/workflows.store', () => {
const storeState: Partial<ReturnType<typeof useWorkflowsStore>> & {
@@ -250,177 +247,6 @@ describe('useRunWorkflow({ router })', () => {
});
describe('runWorkflow()', () => {
it('should prevent execution and show error message when workflow is active with single webhook trigger', async () => {
const pinia = createTestingPinia({ stubActions: false });
setActivePinia(pinia);
const toast = useToast();
const i18n = useI18n();
const { runWorkflow } = useRunWorkflow({ router });
vi.mocked(workflowsStore).isWorkflowActive = true;
vi.mocked(useWorkflowHelpers()).getWorkflowDataToSave.mockResolvedValue({
nodes: [
{
name: 'Slack',
type: SLACK_TRIGGER_NODE_TYPE,
disabled: false,
},
],
} as unknown as WorkflowData);
const result = await runWorkflow({});
expect(result).toBeUndefined();
expect(toast.showMessage).toHaveBeenCalledWith({
title: i18n.baseText('workflowRun.showError.deactivate'),
message: i18n.baseText('workflowRun.showError.productionActive', {
interpolate: { nodeName: 'Webhook' },
}),
type: 'error',
});
});
it('should execute the workflow if the single webhook trigger has pin data', async () => {
const pinia = createTestingPinia({ stubActions: false });
setActivePinia(pinia);
const toast = useToast();
const i18n = useI18n();
const { runWorkflow } = useRunWorkflow({ router });
vi.mocked(workflowsStore).isWorkflowActive = true;
vi.mocked(useWorkflowHelpers()).getWorkflowDataToSave.mockResolvedValue({
nodes: [
{
name: 'Slack',
type: SLACK_TRIGGER_NODE_TYPE,
disabled: false,
},
],
pinData: {
Slack: [{ json: { value: 'data2' } }],
},
} as unknown as WorkflowData);
const mockExecutionResponse = { executionId: '123' };
vi.mocked(uiStore).activeActions = [''];
vi.mocked(workflowsStore).workflowObject = {
name: 'Test Workflow',
} as unknown as Workflow;
vi.mocked(workflowsStore).runWorkflow.mockResolvedValue(mockExecutionResponse);
vi.mocked(workflowsStore).nodesIssuesExist = true;
vi.mocked(workflowsStore).getWorkflowRunData = {
NodeName: [],
};
const result = await runWorkflow({});
expect(result).toEqual(mockExecutionResponse);
expect(toast.showMessage).not.toHaveBeenCalledWith({
title: i18n.baseText('workflowRun.showError.deactivate'),
message: i18n.baseText('workflowRun.showError.productionActive', {
interpolate: { nodeName: 'Webhook' },
}),
type: 'error',
});
});
it('should execute the workflow if there is a single webhook trigger, but another trigger is chosen', async () => {
// ARRANGE
const pinia = createTestingPinia({ stubActions: false });
setActivePinia(pinia);
const toast = useToast();
const i18n = useI18n();
const { runWorkflow } = useRunWorkflow({ router });
const mockExecutionResponse = { executionId: '123' };
const triggerNode = 'Manual';
vi.mocked(workflowsStore).isWorkflowActive = true;
vi.mocked(useWorkflowHelpers()).getWorkflowDataToSave.mockResolvedValue({
nodes: [
{
name: 'Slack',
type: SLACK_TRIGGER_NODE_TYPE,
disabled: false,
},
{
name: triggerNode,
type: MANUAL_TRIGGER_NODE_TYPE,
disabled: false,
},
],
} as unknown as WorkflowData);
vi.mocked(uiStore).activeActions = [''];
vi.mocked(workflowsStore).workflowObject = {
name: 'Test Workflow',
} as unknown as Workflow;
vi.mocked(workflowsStore).runWorkflow.mockResolvedValue(mockExecutionResponse);
vi.mocked(workflowsStore).nodesIssuesExist = true;
vi.mocked(workflowsStore).getWorkflowRunData = { NodeName: [] };
// ACT
const result = await runWorkflow({ triggerNode });
// ASSERT
expect(result).toEqual(mockExecutionResponse);
expect(toast.showMessage).not.toHaveBeenCalledWith({
title: i18n.baseText('workflowRun.showError.deactivate'),
message: i18n.baseText('workflowRun.showError.productionActive', {
interpolate: { nodeName: 'Webhook' },
}),
type: 'error',
});
});
it('should prevent execution and show error message when workflow is active with multiple triggers and a single webhook trigger is chosen', async () => {
// ARRANGE
const pinia = createTestingPinia({ stubActions: false });
setActivePinia(pinia);
const toast = useToast();
const i18n = useI18n();
const { runWorkflow } = useRunWorkflow({ router });
const mockExecutionResponse = { executionId: '123' };
const triggerNode = 'Slack';
vi.mocked(workflowsStore).isWorkflowActive = true;
vi.mocked(useWorkflowHelpers()).getWorkflowDataToSave.mockResolvedValue({
nodes: [
{
name: triggerNode,
type: SLACK_TRIGGER_NODE_TYPE,
disabled: false,
},
{
name: 'Manual',
type: MANUAL_TRIGGER_NODE_TYPE,
disabled: false,
},
],
} as unknown as WorkflowData);
vi.mocked(uiStore).activeActions = [''];
vi.mocked(workflowsStore).workflowObject = {
name: 'Test Workflow',
} as unknown as Workflow;
vi.mocked(workflowsStore).runWorkflow.mockResolvedValue(mockExecutionResponse);
vi.mocked(workflowsStore).nodesIssuesExist = true;
vi.mocked(workflowsStore).getWorkflowRunData = { NodeName: [] };
// ACT
const result = await runWorkflow({ triggerNode });
// ASSERT
expect(result).toBeUndefined();
expect(toast.showMessage).toHaveBeenCalledWith({
title: i18n.baseText('workflowRun.showError.deactivate'),
message: i18n.baseText('workflowRun.showError.productionActive', {
interpolate: { nodeName: 'Webhook' },
}),
type: 'error',
});
});
it('should return undefined if UI action "workflowRunning" is active', async () => {
const { runWorkflow } = useRunWorkflow({ router });
workflowState.setActiveExecutionId('123');

View File

@@ -22,11 +22,7 @@ import { retry } from '@n8n/utils/retry';
import { useToast } from '@/app/composables/useToast';
import { useNodeHelpers } from '@/app/composables/useNodeHelpers';
import {
CHAT_TRIGGER_NODE_TYPE,
IN_PROGRESS_EXECUTION_ID,
SINGLE_WEBHOOK_TRIGGERS,
} from '@/app/constants';
import { CHAT_TRIGGER_NODE_TYPE, IN_PROGRESS_EXECUTION_ID } from '@/app/constants';
import { useRootStore } from '@n8n/stores/useRootStore';
import { useWorkflowsStore } from '@/app/stores/workflows.store';
@@ -287,31 +283,6 @@ export function useRunWorkflow(useRunWorkflowOpts: {
return true;
});
const singleWebhookTrigger =
options.triggerNode === undefined
? // if there is no chosen trigger we check all triggers
triggers.find((node) => SINGLE_WEBHOOK_TRIGGERS.includes(node.type))
: // if there is a chosen trigger we check this one only
workflowData.nodes.find(
(node) =>
node.name === options.triggerNode && SINGLE_WEBHOOK_TRIGGERS.includes(node.type),
);
if (
singleWebhookTrigger &&
workflowsStore.isWorkflowActive &&
!workflowData.pinData?.[singleWebhookTrigger.name]
) {
toast.showMessage({
title: i18n.baseText('workflowRun.showError.deactivate'),
message: i18n.baseText('workflowRun.showError.productionActive', {
interpolate: { nodeName: singleWebhookTrigger.name },
}),
type: 'error',
});
return undefined;
}
const startRunData: IStartRunData = {
workflowData,
runData: isPartialExecution

View File

@@ -143,12 +143,6 @@ export const OPEN_URL_PANEL_TRIGGER_NODE_TYPES = [
MCP_TRIGGER_NODE_TYPE,
];
export const SINGLE_WEBHOOK_TRIGGERS = [
TELEGRAM_TRIGGER_NODE_TYPE,
SLACK_TRIGGER_NODE_TYPE,
FACEBOOK_LEAD_ADS_TRIGGER_NODE_TYPE,
];
export const LIST_LIKE_NODE_OPERATIONS = ['getAll', 'getMany', 'read', 'search'];
export const PRODUCTION_ONLY_TRIGGER_NODE_TYPES = [CHAT_TRIGGER_NODE_TYPE];

View File

@@ -32,7 +32,7 @@ import {
type ChatModelDto,
} from '@n8n/api-types';
import { N8nIconButton, N8nScrollArea, N8nText } from '@n8n/design-system';
import { useLocalStorage, useMediaQuery, useScroll } from '@vueuse/core';
import { useElementSize, useLocalStorage, useMediaQuery, useScroll } from '@vueuse/core';
import { v4 as uuidv4 } from 'uuid';
import { computed, nextTick, ref, useTemplateRef, watch } from 'vue';
import { useRoute, useRouter } from 'vue-router';
@@ -62,12 +62,15 @@ const i18n = useI18n();
const headerRef = useTemplateRef('headerRef');
const inputRef = useTemplateRef('inputRef');
const scrollableRef = useTemplateRef('scrollable');
const scrollableSize = useElementSize(scrollableRef);
const sessionId = computed<string>(() =>
typeof route.params.id === 'string' ? route.params.id : uuidv4(),
);
const isResponding = computed(() => chatStore.isResponding(sessionId.value));
const isNewSession = computed(() => sessionId.value !== route.params.id);
const scrollableRef = useTemplateRef('scrollable');
const scrollContainerRef = computed(() => scrollableRef.value?.parentElement ?? null);
const currentConversation = computed(() =>
sessionId.value ? chatStore.sessions.byId[sessionId.value] : undefined,
@@ -566,6 +569,7 @@ function onFilesDropped(files: File[]) {
:selected-model="selectedModel"
:credentials="credentialsByProvider"
:ready-to-show-model-selector="isNewSession || !!currentConversation"
:is-new-session="isNewSession"
@select-model="handleSelectModel"
@edit-custom-agent="handleEditAgent"
@create-custom-agent="openNewAgentCreator"
@@ -604,6 +608,7 @@ function onFilesDropped(files: File[]) {
? scrollContainerRef.offsetHeight - 30 /* padding-top */ - 200 /* padding-bottom */
: undefined
"
:container-width="scrollableSize.width.value ?? 0"
@start-edit="handleStartEditMessage(message.id)"
@cancel-edit="handleCancelEditMessage"
@regenerate="handleRegenerateMessage"

View File

@@ -0,0 +1,27 @@
import type {} from '@n8n/api-types';
import type { ChatMessage } from '../chat.types';
/**
 * Test fixture factory for `ChatMessage` objects.
 *
 * Produces a message populated with deterministic placeholder values
 * (timestamps are generated at call time). Callers may override any subset
 * of fields via `overrides`, which is spread last and therefore wins.
 */
export function createTestChatMessage(overrides: Partial<ChatMessage> = {}): ChatMessage {
	const defaults: ChatMessage = {
		id: 'm0',
		type: 'ai',
		content: 'test message',
		status: 'success',
		sessionId: 's0',
		createdAt: new Date().toISOString(),
		model: 'm0',
		provider: 'anthropic',
		alternatives: [],
		attachments: [],
		responses: [],
		name: '',
		workflowId: null,
		agentId: null,
		executionId: null,
		updatedAt: new Date().toISOString(),
		previousMessageId: null,
		retryOfMessageId: null,
		revisionOfMessageId: null,
	};
	return { ...defaults, ...overrides };
}

View File

@@ -548,6 +548,7 @@ export const useChatStore = defineStore(CHAT_STORE, () => {
tools,
attachments,
agentName,
timeZone: Intl.DateTimeFormat().resolvedOptions().timeZone,
},
onStreamMessage,
onStreamDone,
@@ -618,11 +619,19 @@ export const useChatStore = defineStore(CHAT_STORE, () => {
messageId: promptId,
message: content,
credentials,
timeZone: Intl.DateTimeFormat().resolvedOptions().timeZone,
},
onStreamMessage,
onStreamDone,
onStreamError,
);
telemetry.track('User edited chat hub message', {
...flattenModel(model),
is_custom: model.provider === 'custom-agent',
chat_session_id: sessionId,
chat_message_id: editId,
});
}
function regenerateMessage(
@@ -654,11 +663,19 @@ export const useChatStore = defineStore(CHAT_STORE, () => {
{
model,
credentials,
timeZone: Intl.DateTimeFormat().resolvedOptions().timeZone,
},
onStreamMessage,
onStreamDone,
onStreamError,
);
telemetry.track('User regenerated chat hub message', {
...flattenModel(model),
is_custom: model.provider === 'custom-agent',
chat_session_id: sessionId,
chat_message_id: retryId,
});
}
async function stopStreamingMessage(sessionId: ChatSessionId) {

View File

@@ -17,7 +17,8 @@ import { useI18n } from '@n8n/i18n';
import { computed, useTemplateRef } from 'vue';
import { useRouter } from 'vue-router';
const { selectedModel, credentials, readyToShowModelSelector } = defineProps<{
const { isNewSession, selectedModel, credentials, readyToShowModelSelector } = defineProps<{
isNewSession: boolean;
selectedModel: ChatModelDto | null;
credentials: CredentialsMap | null;
readyToShowModelSelector: boolean;
@@ -79,6 +80,7 @@ defineExpose({
text
icon-size="large"
:aria-label="i18n.baseText('chatHub.chat.header.button.newChat')"
:disabled="isNewSession"
@click="onNewChat"
/>
<ModelSelector

View File

@@ -0,0 +1,41 @@
import { describe, it, expect, beforeEach } from 'vitest';
import { createComponentRenderer } from '@/__tests__/render';
import { createTestingPinia } from '@pinia/testing';
import ChatMessage from './ChatMessage.vue';
import type { ChatMessage as ChatMessageType } from '../chat.types';
import { waitFor } from '@testing-library/vue';
import { createTestChatMessage } from '../__test__/data';
// Shared renderer bound to the ChatMessage SFC under test.
const renderComponent = createComponentRenderer(ChatMessage);
describe('ChatMessage', () => {
let pinia: ReturnType<typeof createTestingPinia>;
beforeEach(() => {
// Fresh testing Pinia per test so store state does not leak between cases.
pinia = createTestingPinia();
});
it('should render syntax highlighting for code blocks', async () => {
// Fenced JS code block in the message content; the renderer should emit
// highlight.js markup for it.
const message: ChatMessageType = createTestChatMessage({
content: '```javascript\nconst foo = "bar";\nfunction test() {\n return true;\n}\n```',
});
const { container } = renderComponent({
props: {
message,
compact: false,
isEditing: false,
isStreaming: false,
cachedAgentDisplayName: null,
},
pinia,
});
// Wait for highlight.js to load and apply syntax highlighting
// (highlighting is applied asynchronously, hence waitFor).
await waitFor(() => {
const highlightedElements = container.querySelectorAll('.hljs-keyword');
expect(highlightedElements.length).toBeGreaterThan(0);
});
});
});

View File

@@ -18,18 +18,26 @@ import { useDeviceSupport } from '@n8n/composables/useDeviceSupport';
import { useI18n } from '@n8n/i18n';
import CopyButton from '@/features/ai/chatHub/components/CopyButton.vue';
const { message, compact, isEditing, isStreaming, minHeight, cachedAgentDisplayName } =
defineProps<{
message: ChatMessage;
compact: boolean;
isEditing: boolean;
isStreaming: boolean;
cachedAgentDisplayName: string | null;
/**
* minHeight allows scrolling agent's response to the top while it is being generated
*/
minHeight?: number;
}>();
const {
message,
compact,
isEditing,
isStreaming,
minHeight,
cachedAgentDisplayName,
containerWidth,
} = defineProps<{
message: ChatMessage;
compact: boolean;
isEditing: boolean;
isStreaming: boolean;
cachedAgentDisplayName: string | null;
/**
* minHeight allows scrolling agent's response to the top while it is being generated
*/
minHeight?: number;
containerWidth: number;
}>();
const emit = defineEmits<{
startEdit: [];
@@ -48,7 +56,7 @@ const styles = useCssModule();
const editedText = ref('');
const hoveredCodeBlockActions = ref<HTMLElement | null>(null);
const textareaRef = useTemplateRef('textarea');
const markdown = useChatHubMarkdownOptions(styles.codeBlockActions);
const markdown = useChatHubMarkdownOptions(styles.codeBlockActions, styles.tableContainer);
const messageContent = computed(() => message.content);
const speech = useSpeechSynthesis(messageContent, {
@@ -181,7 +189,10 @@ onBeforeMount(() => {
[$style.compact]: compact,
},
]"
:style="minHeight ? { minHeight: `${minHeight}px` } : undefined"
:style="{
minHeight: minHeight ? `${minHeight}px` : undefined,
'--container--width': `${containerWidth}px`,
}"
:data-message-id="message.id"
>
<div :class="$style.avatar">
@@ -240,7 +251,7 @@ onBeforeMount(() => {
<div v-if="message.type === 'human'">{{ message.content }}</div>
<VueMarkdown
v-else
:key="markdown.forceReRenderKey"
:key="markdown.forceReRenderKey.value"
:class="[$style.chatMessageMarkdown, 'chat-message-markdown']"
:source="
message.status === 'error' && !message.content
@@ -404,6 +415,7 @@ onBeforeMount(() => {
}
pre {
width: 100%;
font-family: inherit;
font-size: inherit;
margin: 0;
@@ -430,18 +442,27 @@ onBeforeMount(() => {
}
}
.tableContainer {
width: var(--container--width);
padding-bottom: 1em;
padding-left: calc((var(--container--width) - 100%) / 2);
padding-right: var(--spacing--lg);
margin-left: calc(-1 * (var(--container--width) - 100%) / 2);
overflow-x: auto;
}
table {
width: 100%;
width: fit-content;
border-bottom: var(--border);
border-top: var(--border);
border-width: 2px;
margin-bottom: 1em;
border-color: var(--color--text--shade-1);
}
th,
td {
padding: 0.25em 1em 0.25em 0;
min-width: 12em;
}
th {

View File

@@ -8,6 +8,7 @@ import { providerDisplayNames } from '@/features/ai/chatHub/constants';
import CredentialIcon from '@/features/credentials/components/CredentialIcon.vue';
import CredentialPicker from '@/features/credentials/components/CredentialPicker/CredentialPicker.vue';
import { useI18n } from '@n8n/i18n';
import { useTelemetry } from '@/app/composables/useTelemetry';
const props = defineProps<{
modalName: string;
@@ -15,11 +16,12 @@ const props = defineProps<{
provider: ChatHubLLMProvider;
initialValue: string | null;
onSelect: (provider: ChatHubLLMProvider, credentialId: string | null) => void;
onCreateNew: (provider: ChatHubLLMProvider) => void;
};
}>();
const i18n = useI18n();
const telemetry = useTelemetry();
const modalBus = ref(createEventBus());
const selectedCredentialId = ref<string | null>(props.data.initialValue);
@@ -45,6 +47,15 @@ function onDeleteCredential(credentialId: string) {
}
}
function onCredentialModalOpened(credentialId?: string) {
telemetry.track('User opened Credential modal', {
credential_type: credentialType.value,
source: 'chat',
new_credential: !credentialId,
workflow_id: null,
});
}
function onConfirm() {
if (selectedCredentialId.value) {
props.data.onSelect(props.data.provider, selectedCredentialId.value);
@@ -106,6 +117,7 @@ function onCancel() {
@credential-selected="onCredentialSelect"
@credential-deselected="onCredentialDeselect"
@credential-deleted="onDeleteCredential"
@credential-modal-opened="onCredentialModalOpened"
/>
</div>
</div>

View File

@@ -156,13 +156,13 @@ const menu = computed(() => {
id: provider,
title: providerDisplayNames[provider],
submenu: [
configureMenu,
{ isDivider: true as const, id: 'divider' },
{
id: `${provider}::loading`,
title: i18n.baseText('generic.loadingEllipsis'),
disabled: true,
},
{ isDivider: true as const, id: 'divider' },
configureMenu,
],
});
continue;
@@ -234,9 +234,13 @@ const menu = computed(() => {
} as const,
]
: []),
configureMenu,
]);
submenu.unshift(
configureMenu,
...(submenu.length > 1 ? [{ isDivider: true as const, id: 'divider' }] : []),
);
menuItems.push({
id: provider,
title: providerDisplayNames[provider],
@@ -270,7 +274,6 @@ function openCredentialsSelectorOrCreate(provider: ChatHubLLMProvider) {
provider,
initialValue: credentials?.[provider] ?? null,
onSelect: handleSelectCredentials,
onCreateNew: handleCreateNewCredential,
},
});
}
@@ -317,19 +320,6 @@ function onSelect(id: string) {
emit('change', parsedModel);
}
function handleCreateNewCredential(provider: ChatHubLLMProvider) {
const credentialType = PROVIDER_CREDENTIAL_TYPE_MAP[provider];
telemetry.track('User opened Credential modal', {
credential_type: credentialType,
source: 'chat',
new_credential: true,
workflow_id: null,
});
uiStore.openNewCredential(credentialType);
}
onClickOutside(
computed(() => dropdownRef.value?.$el),
() => dropdownRef.value?.close(),

View File

@@ -23,6 +23,7 @@ import { useNodeTypesStore } from '@/app/stores/nodeTypes.store';
import { useI18n } from '@n8n/i18n';
import { useProjectsStore } from '@/features/collaboration/projects/projects.store';
import { getResourcePermissions } from '@n8n/permissions';
import { useTelemetry } from '@/app/composables/useTelemetry';
const props = defineProps<{
modalName: string;
@@ -38,6 +39,7 @@ const credentialsStore = useCredentialsStore();
const nodeTypesStore = useNodeTypesStore();
const projectStore = useProjectsStore();
const uiStore = useUIStore();
const telemetry = useTelemetry();
const canCreateCredentials = computed(() => {
return getResourcePermissions(projectStore.personalProject?.scopes).credential.create;
@@ -141,6 +143,13 @@ function onCreateNewCredential(providerKey: ChatHubAgentTool) {
const provider = AVAILABLE_TOOLS[providerKey];
if (!provider.credentialType) return;
telemetry.track('User opened Credential modal', {
credential_type: provider.credentialType,
source: 'chat',
new_credential: true,
workflow_id: null,
});
uiStore.openNewCredential(provider.credentialType);
}

View File

@@ -13,7 +13,10 @@ let asyncImport:
| { status: 'uninitialized' }
| { status: 'done' } = { status: 'uninitialized' };
export function useChatHubMarkdownOptions(codeBlockActionsClassName: string) {
export function useChatHubMarkdownOptions(
codeBlockActionsClassName: string,
tableContainerClassName: string,
) {
const forceReRenderKey = ref(0);
const codeBlockContents = ref<Map<string, string>>();
@@ -100,7 +103,28 @@ export function useChatHubMarkdownOptions(codeBlockActionsClassName: string) {
);
};
};
return [linksNewTabPlugin, codeBlockPlugin];
const tablePlugin = (vueMarkdownItInstance: MarkdownIt) => {
const defaultTableOpenRenderer = vueMarkdownItInstance.renderer.rules.table_open;
const defaultTableCloseRenderer = vueMarkdownItInstance.renderer.rules.table_close;
vueMarkdownItInstance.renderer.rules.table_open = (tokens, idx, options, env, self) => {
const defaultRendered =
defaultTableOpenRenderer?.(tokens, idx, options, env, self) ??
self.renderToken(tokens, idx, options);
return defaultRendered.replace('<table', `<div class="${tableContainerClassName}"><table`);
};
vueMarkdownItInstance.renderer.rules.table_close = (tokens, idx, options, env, self) => {
const defaultRendered =
defaultTableCloseRenderer?.(tokens, idx, options, env, self) ??
self.renderToken(tokens, idx, options);
return defaultRendered.replace('</table>', '</table></div>');
};
};
return [linksNewTabPlugin, codeBlockPlugin, tablePlugin];
});
return { options, forceReRenderKey, plugins, codeBlockContents };

View File

@@ -57,7 +57,6 @@ export const ChatModule: FrontendModuleDescription = {
provider: null,
initialValue: null,
onSelect: () => {},
onCreateNew: () => {},
},
},
},

View File

@@ -45,7 +45,7 @@ vi.mock('@n8n/i18n', async (importOriginal) => ({
'dataTable.addColumn.invalidName.description':
'Column names must start with a letter and contain only letters, numbers, and hyphens',
'dataTable.addColumn.error': 'Error adding column',
'dataTable.addColumn.alreadyExistsError': `Column "${options?.interpolate?.name}" already exists`,
'dataTable.column.alreadyExistsError': `Column "${options?.interpolate?.name}" already exists`,
'dataTable.addColumn.systemColumnDescription': 'This is a system column',
'dataTable.addColumn.testingColumnDescription': 'This is a testing column',
'dataTable.addColumn.alreadyExistsDescription': 'Column already exists',

View File

@@ -80,7 +80,7 @@ const onAddButtonClicked = async () => {
let errorDescription = response.errorMessage;
// Provide custom error message for conflict (column already exists)
if (response.httpStatus === 409) {
errorMessage = i18n.baseText('dataTable.addColumn.alreadyExistsError', {
errorMessage = i18n.baseText('dataTable.column.alreadyExistsError', {
interpolate: { name: columnName.value },
});
errorDescription = response.errorMessage?.includes('system')

View File

@@ -80,4 +80,200 @@ describe('ColumnHeader', () => {
await userEvent.click(getByTestId('action-delete'));
expect(onDeleteMock).toHaveBeenCalledWith('col-1');
});
describe('onNameSubmit', () => {
it('should call onRename when valid new name is provided', async () => {
const onRenameMock = vi.fn();
const { container } = renderComponent({
props: {
params: {
displayName: 'Original Name',
column: {
getColId: () => 'col-1',
getColDef: () => ({ cellDataType: 'string' }),
getSort: () => null,
},
onRename: onRenameMock,
onDelete: onDeleteMock,
allowMenuActions: true,
api: {
getFilterModel: vi.fn().mockReturnValue({}),
addEventListener: vi.fn(),
removeEventListener: vi.fn(),
},
} as unknown as HeaderParamsWithDelete,
},
});
// Find the actual input element within N8nInlineTextEdit
const input = container.querySelector('input') as HTMLInputElement;
expect(input).toBeTruthy();
await userEvent.clear(input);
await userEvent.type(input, 'New Name{Enter}');
expect(onRenameMock).toHaveBeenCalledWith('col-1', 'New Name');
});
it('should trim whitespace before calling onRename', async () => {
const onRenameMock = vi.fn();
const { container } = renderComponent({
props: {
params: {
displayName: 'Original Name',
column: {
getColId: () => 'col-1',
getColDef: () => ({ cellDataType: 'string' }),
getSort: () => null,
},
onRename: onRenameMock,
onDelete: onDeleteMock,
allowMenuActions: true,
api: {
getFilterModel: vi.fn().mockReturnValue({}),
addEventListener: vi.fn(),
removeEventListener: vi.fn(),
},
} as unknown as HeaderParamsWithDelete,
},
});
const input = container.querySelector('input') as HTMLInputElement;
expect(input).toBeTruthy();
await userEvent.clear(input);
await userEvent.type(input, ' Trimmed Name {Enter}');
expect(onRenameMock).toHaveBeenCalledWith('col-1', 'Trimmed Name');
});
it('should not call onRename when name is empty', async () => {
const onRenameMock = vi.fn();
const { container } = renderComponent({
props: {
params: {
displayName: 'Original Name',
column: {
getColId: () => 'col-1',
getColDef: () => ({ cellDataType: 'string' }),
getSort: () => null,
},
onRename: onRenameMock,
onDelete: onDeleteMock,
allowMenuActions: true,
api: {
getFilterModel: vi.fn().mockReturnValue({}),
addEventListener: vi.fn(),
removeEventListener: vi.fn(),
},
} as unknown as HeaderParamsWithDelete,
},
});
const input = container.querySelector('input') as HTMLInputElement;
expect(input).toBeTruthy();
await userEvent.clear(input);
await fireEvent.blur(input);
expect(onRenameMock).not.toHaveBeenCalled();
});
it('should not call onRename when name is only whitespace', async () => {
const onRenameMock = vi.fn();
const { container } = renderComponent({
props: {
params: {
displayName: 'Original Name',
column: {
getColId: () => 'col-1',
getColDef: () => ({ cellDataType: 'string' }),
getSort: () => null,
},
onRename: onRenameMock,
onDelete: onDeleteMock,
allowMenuActions: true,
api: {
getFilterModel: vi.fn().mockReturnValue({}),
addEventListener: vi.fn(),
removeEventListener: vi.fn(),
},
} as unknown as HeaderParamsWithDelete,
},
});
const input = container.querySelector('input') as HTMLInputElement;
expect(input).toBeTruthy();
await userEvent.clear(input);
await userEvent.type(input, ' ');
await fireEvent.blur(input);
expect(onRenameMock).not.toHaveBeenCalled();
});
it('should not call onRename when name is unchanged', async () => {
const onRenameMock = vi.fn();
const { container } = renderComponent({
props: {
params: {
displayName: 'Original Name',
column: {
getColId: () => 'col-1',
getColDef: () => ({ cellDataType: 'string' }),
getSort: () => null,
},
onRename: onRenameMock,
onDelete: onDeleteMock,
allowMenuActions: true,
api: {
getFilterModel: vi.fn().mockReturnValue({}),
addEventListener: vi.fn(),
removeEventListener: vi.fn(),
},
} as unknown as HeaderParamsWithDelete,
},
});
const input = container.querySelector('input') as HTMLInputElement;
expect(input).toBeTruthy();
await userEvent.clear(input);
await userEvent.type(input, 'Original Name');
await fireEvent.blur(input);
expect(onRenameMock).not.toHaveBeenCalled();
});
it('should not call onRename when onRename callback is not provided', async () => {
const { container } = renderComponent({
props: {
params: {
displayName: 'Original Name',
column: {
getColId: () => 'col-1',
getColDef: () => ({ cellDataType: 'string' }),
getSort: () => null,
},
onDelete: onDeleteMock,
allowMenuActions: true,
api: {
getFilterModel: vi.fn().mockReturnValue({}),
addEventListener: vi.fn(),
removeEventListener: vi.fn(),
},
} as unknown as HeaderParamsWithDelete,
},
});
const input = container.querySelector('input') as HTMLInputElement;
expect(input).toBeTruthy();
await userEvent.clear(input);
await userEvent.type(input, 'New Name');
// Should not throw an error
await expect(fireEvent.blur(input)).resolves.not.toThrow();
});
});
});

View File

@@ -1,13 +1,15 @@
<script setup lang="ts">
import type { IHeaderParams, SortDirection } from 'ag-grid-community';
import { useDataTableTypes } from '@/features/core/dataTable/composables/useDataTableTypes';
import { ref, computed, onMounted, onUnmounted } from 'vue';
import { ref, computed, onMounted, onUnmounted, useTemplateRef } from 'vue';
import { useI18n } from '@n8n/i18n';
import { isAGGridCellType } from '@/features/core/dataTable/typeGuards';
import { N8nActionDropdown, N8nIcon, N8nIconButton } from '@n8n/design-system';
import { N8nActionDropdown, N8nIcon, N8nIconButton, N8nInlineTextEdit } from '@n8n/design-system';
import { DATA_TABLE_SYSTEM_COLUMNS } from 'n8n-workflow';
export type HeaderParamsWithDelete = IHeaderParams & {
onDelete?: (columnId: string) => void;
onRename?: (columnId: string, newName: string) => void;
allowMenuActions: boolean;
showTypeIcon?: boolean;
};
@@ -19,6 +21,7 @@ const props = defineProps<{
const { getIconForType, mapToDataTableColumnType } = useDataTableTypes();
const i18n = useI18n();
const renameInput = useTemplateRef<InstanceType<typeof N8nInlineTextEdit>>('renameInput');
const isHovered = ref(false);
const isDropdownOpen = ref(false);
const isFilterOpen = ref(false);
@@ -28,13 +31,32 @@ const shouldShowTypeIcon = computed(() => props.params.showTypeIcon !== false);
const isFilterable = computed(() => props.params.column.getColDef().filter !== false);
const enum ItemAction {
Rename = 'rename',
Delete = 'delete',
}
const onNameSubmit = (newName: string) => {
const trimmed = newName.trim();
if (!trimmed || trimmed === props.params.displayName) {
renameInput.value?.forceCancel();
return;
}
props.params.onRename?.(props.params.column.getColId(), trimmed);
};
/**
 * Puts the column name into inline-edit mode.
 *
 * Stops the triggering event from propagating (so the grid does not treat
 * the click as a sort toggle), then focuses the rename input — but never
 * for system columns, which cannot be renamed.
 */
const onNameToggle = (e?: Event) => {
	e?.stopPropagation();
	if (isSystemColumn.value) {
		return;
	}
	renameInput.value?.forceFocus?.();
};
/**
 * Handles a selection from the column header actions dropdown,
 * dispatching to the matching delete/rename handler.
 */
const onItemClick = (action: string) => {
	switch (action as ItemAction) {
		case ItemAction.Delete:
			props.params.onDelete?.(props.params.column.getColId());
			break;
		case ItemAction.Rename:
			onNameToggle();
			break;
	}
};
@@ -87,14 +109,42 @@ const typeIcon = computed(() => {
return getIconForType(mapToDataTableColumnType(cellDataType));
});
const columnActionItems = [
{
const isSystemColumn = computed(() => {
const columnId = props.params.column.getColId();
return DATA_TABLE_SYSTEM_COLUMNS.includes(columnId);
});
// Constants for width calculation
const CHAR_WIDTH_PX = 7; // Average character width
const PADDING_PX = 16; // Padding and cursor space
const MIN_WIDTH_PX = 50; // Minimum width for short names
const MAX_WIDTH_PX = 250; // Maximum width to prevent overflow
const columnWidth = computed(() => {
const textLength = (props.params.displayName || '').length;
const calculatedWidth = textLength * CHAR_WIDTH_PX + PADDING_PX;
return Math.min(Math.max(calculatedWidth, MIN_WIDTH_PX), MAX_WIDTH_PX);
});
const columnActionItems = computed(() => {
const items = [];
items.push({
id: ItemAction.Rename,
label: i18n.baseText('dataTable.renameColumn.label'),
icon: 'pen',
customClass: 'data-table-column-header-action-item',
} as const);
items.push({
id: ItemAction.Delete,
label: i18n.baseText('dataTable.deleteColumn.confirm.title'),
icon: 'trash-2',
customClass: 'data-table-column-header-action-item',
} as const,
];
} as const);
return items;
});
const isSortable = computed(() => {
return props.params.column.getColDef().sortable;
@@ -158,9 +208,22 @@ onUnmounted(() => {
>
<div class="data-table-column-header-icon-wrapper">
<N8nIcon v-if="typeIcon" :icon="typeIcon" />
<span class="ag-header-cell-text" data-test-id="data-table-column-header-text">{{
props.params.displayName
}}</span>
<N8nInlineTextEdit
v-if="!isSystemColumn"
ref="renameInput"
:model-value="props.params.displayName"
:max-width="columnWidth"
:read-only="false"
:disabled="false"
class="ag-header-cell-text"
data-test-id="data-table-column-header-text"
@update:model-value="onNameSubmit"
@click="onNameToggle"
@keydown.stop
/>
<span v-else class="ag-header-cell-text" data-test-id="data-table-column-header-text">
{{ props.params.displayName }}
</span>
<div v-if="showSortIndicator" class="sort-indicator">
<N8nIcon v-if="currentSort === 'asc'" icon="arrow-up" class="sort-icon-active" />
@@ -216,6 +279,15 @@ onUnmounted(() => {
align-items: center;
gap: var(--spacing--2xs);
min-width: 0;
.n8n-icon,
.n8n-inline-text-edit,
.ag-header-cell-text {
display: inline-flex;
align-items: center;
vertical-align: middle;
line-height: 1;
}
}
.data-table-column-header-icon-wrapper .n8n-icon {
@@ -225,6 +297,12 @@ onUnmounted(() => {
.ag-header-cell-text {
@include mixins.utils-ellipsis;
min-width: 0;
// Remove overflow hidden when inline edit is active to show border
&.n8n-inline-text-edit--active,
&:focus-within {
overflow: visible;
}
}
.sort-indicator {

Some files were not shown because too many files have changed in this diff Show More