chore: Update langchain (#22500)

This commit is contained in:
Benjamin Schroth
2025-12-01 16:38:55 +01:00
committed by GitHub
parent 0999726caa
commit 143136fb02
100 changed files with 2066 additions and 1029 deletions

View File

@@ -19,6 +19,8 @@ const esmDependencies = [
'openid-client',
'oauth4webapi',
'jose',
'p-retry',
'is-network-error',
// Add other ESM dependencies that need to be transformed here
];

View File

@@ -118,9 +118,12 @@
"tmp": "0.2.4",
"nodemailer": "7.0.10",
"validator": "13.15.20",
"zod": "3.25.67",
"js-yaml": "4.1.1",
"node-forge": "1.3.2",
"body-parser": "2.2.1"
"body-parser": "2.2.1",
"glob@10": "10.5.0",
"glob@7": "7.2.3"
},
"patchedDependencies": {
"bull@4.16.4": "patches/bull@4.16.4.patch",

View File

@@ -50,7 +50,7 @@
"dependencies": {
"@langchain/anthropic": "catalog:",
"@langchain/core": "catalog:",
"@langchain/langgraph": "0.2.74",
"@langchain/langgraph": "1.0.2",
"@langchain/openai": "catalog:",
"@n8n/backend-common": "workspace:*",
"@n8n/config": "workspace:*",

View File

@@ -1,4 +1,5 @@
import { ChatAnthropic } from '@langchain/anthropic';
import type { BaseMessage } from '@langchain/core/messages';
import { MemorySaver } from '@langchain/langgraph';
import type { Logger } from '@n8n/backend-common';
import type { AiAssistantClient } from '@n8n_io/ai-assistant-sdk';
@@ -13,9 +14,27 @@ import { SessionManagerService } from '@/session-manager.service';
import { formatMessages } from '@/utils/stream-processor';
import { WorkflowBuilderAgent, type ChatPayload } from '@/workflow-builder-agent';
// Types for mock
type Messages = BaseMessage[] | BaseMessage;
type StateDefinition = Record<string, unknown>;
// Mock dependencies
jest.mock('@langchain/anthropic');
jest.mock('@langchain/langgraph');
jest.mock('@langchain/langgraph', () => {
const mockAnnotation = Object.assign(
jest.fn(<T>(config: T) => config),
{
Root: jest.fn(<S extends StateDefinition>(config: S) => config),
},
);
return {
MemorySaver: jest.fn(),
Annotation: mockAnnotation,
messagesStateReducer: jest.fn((messages: Messages, newMessages: Messages): BaseMessage[] =>
Array.isArray(messages) && Array.isArray(newMessages) ? [...messages, ...newMessages] : [],
),
};
});
jest.mock('langsmith');
jest.mock('@/workflow-builder-agent');
jest.mock('@/session-manager.service');

View File

@@ -8,7 +8,9 @@ import { SessionManagerService } from '../session-manager.service';
import { getBuilderToolsForDisplay } from '../tools/builder-tools';
import * as streamProcessor from '../utils/stream-processor';
jest.mock('@langchain/langgraph');
jest.mock('@langchain/langgraph', () => ({
MemorySaver: jest.fn(),
}));
jest.mock('../utils/stream-processor');
jest.mock('../tools/builder-tools', () => ({
getBuilderToolsForDisplay: jest.fn().mockReturnValue([]),

View File

@@ -33,7 +33,7 @@ type MessageContent = { content: string | Array<{ type: string; text: string }>
/** Stream event types from LangGraph */
type SubgraphEvent = [string[], string, unknown];
type ParentEvent = [string, unknown];
type StreamEvent = SubgraphEvent | ParentEvent;
export type StreamEvent = SubgraphEvent | ParentEvent;
// ============================================================================
// CONFIGURATION
@@ -316,7 +316,7 @@ function processEvent(event: StreamEvent): StreamOutput | null {
* - Subgraph events: [namespace[], streamMode, data]
*/
export async function* createStreamProcessor(
stream: AsyncGenerator<StreamEvent, void, unknown>,
stream: AsyncIterable<StreamEvent>,
): AsyncGenerator<StreamOutput> {
for await (const event of stream) {
const result = processEvent(event);

View File

@@ -48,7 +48,12 @@ function concatenateMessageContent(messages: BaseMessage[]): string {
return (
acc +
message.content.reduce((innerAcc: string, item) => {
if (typeof item === 'object' && item !== null && 'text' in item) {
if (
typeof item === 'object' &&
item !== null &&
'text' in item &&
typeof item.text === 'string'
) {
return innerAcc + item.text;
}
return innerAcc;

View File

@@ -36,7 +36,11 @@ import {
} from './utils/cache-control/helpers';
import { cleanupDanglingToolCallMessages } from './utils/cleanup-dangling-tool-call-messages';
import { processOperations } from './utils/operations-processor';
import { createStreamProcessor, type BuilderTool } from './utils/stream-processor';
import {
createStreamProcessor,
type BuilderTool,
type StreamEvent,
} from './utils/stream-processor';
import { estimateTokenCountFromMessages } from './utils/token-usage';
import { executeToolsInParallel } from './utils/tool-executor';
import { WorkflowState } from './workflow-state';
@@ -284,7 +288,10 @@ export class WorkflowBuilderAgent {
};
const shouldContinue = ({ messages }: typeof WorkflowState.State) => {
const lastMessage: AIMessage = messages[messages.length - 1];
const lastMessage = messages[messages.length - 1];
if (!(lastMessage instanceof AIMessage)) {
throw new WorkflowStateError('Expected last message to be generated by the AI agent');
}
if (lastMessage.tool_calls?.length) {
return 'tools';
@@ -331,7 +338,7 @@ export class WorkflowBuilderAgent {
}
const { messages, previousSummary } = state;
const lastHumanMessage = messages[messages.length - 1] satisfies HumanMessage;
const lastHumanMessage = messages[messages.length - 1] as HumanMessage;
const isAutoCompact = lastHumanMessage.content !== '/compact';
const compactedMessages = await conversationCompactChain(
@@ -518,8 +525,8 @@ export class WorkflowBuilderAgent {
payload: ChatPayload,
streamConfig: RunnableConfig,
agent: ReturnType<typeof this.createWorkflow>,
) {
return await agent.stream(
): Promise<AsyncIterable<StreamEvent>> {
const stream = await agent.stream(
{
messages: [new HumanMessage({ content: payload.message })],
workflowJSON: this.getDefaultWorkflowJSON(payload),
@@ -528,6 +535,12 @@ export class WorkflowBuilderAgent {
},
streamConfig,
);
// LangGraph's stream has a complex type that doesn't match our StreamEvent definition,
// but at runtime it produces the correct shape based on streamMode configuration.
// With streamMode: ['updates', 'custom'] and subgraphs enabled, events are:
// - Subgraph events: [namespace[], streamMode, data]
// - Parent events: [streamMode, data]
return stream as AsyncIterable<StreamEvent>;
}
private handleStreamError(error: unknown): never {
@@ -540,7 +553,7 @@ export class WorkflowBuilderAgent {
}
private async *processAgentStream(
stream: AsyncGenerator<[string, unknown], void, unknown>,
stream: Awaited<ReturnType<typeof this.createAgentStream>>,
agent: ReturnType<typeof this.createWorkflow>,
threadConfig: RunnableConfig,
) {

View File

@@ -1,7 +1,10 @@
const baseConfig = require('../../../jest.config');
/** @type {import('jest').Config} */
module.exports = {
...require('../../../jest.config'),
...baseConfig,
transform: {
...baseConfig.transform,
'^.+\\.ts$': ['ts-jest', { isolatedModules: false }],
},
};

View File

@@ -95,12 +95,12 @@ async function runLLM(
// FIXME: https://github.com/langchain-ai/langchainjs/issues/9012
// This is a manual fix to extract the text from the response.
// Replace with const chain = chatPrompt.pipe(model).pipe(outputParser); when the issue is fixed.
const extractText = (content: MessageContent) => {
const extractText = (content: MessageContent): string => {
if (typeof content === 'string') {
return content;
}
if (content[0].type === 'text') {
return content[0].text;
return content[0].text as string;
}
throw new Error('Invalid content type');
};

View File

@@ -1,5 +1,5 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { AgentExecutor } from 'langchain/agents';
import type { AgentExecutor } from '@langchain/classic/agents';
import type { IExecuteFunctions } from 'n8n-workflow';
import { NodeConnectionTypes } from 'n8n-workflow';
@@ -21,7 +21,7 @@ jest.mock('@langchain/core/prompts', () => ({
},
}));
jest.mock('langchain/agents', () => ({
jest.mock('@langchain/classic/agents', () => ({
AgentExecutor: jest.fn().mockImplementation(() => ({
invoke: jest.fn(),
})),
@@ -101,7 +101,7 @@ describe('model helper', () => {
};
jest
.mocked((await import('langchain/agents')).AgentExecutor)
.mocked((await import('@langchain/classic/agents')).AgentExecutor)
.mockImplementation(() => mockAgentExecutor as unknown as AgentExecutor);
const result = await runLLMValidation('test-guardrail', 'Test input', {
@@ -128,7 +128,7 @@ describe('model helper', () => {
};
jest
.mocked((await import('langchain/agents')).AgentExecutor)
.mocked((await import('@langchain/classic/agents')).AgentExecutor)
.mockImplementation(() => mockAgentExecutor as unknown as AgentExecutor);
const result = await runLLMValidation('test-guardrail', 'Test input', {

View File

@@ -1,5 +1,5 @@
import { Tool, StructuredTool } from '@langchain/core/tools';
import type { Toolkit } from 'langchain/agents';
import type { Toolkit } from '@langchain/classic/agents';
import type {
IExecuteFunctions,
INodeExecutionData,

View File

@@ -1,6 +1,6 @@
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import { PromptTemplate } from '@langchain/core/prompts';
import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import { initializeAgentExecutorWithOptions } from '@langchain/classic/agents';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';

View File

@@ -1,122 +1,20 @@
import { PromptTemplate } from '@langchain/core/prompts';
import { ChatOpenAI } from '@langchain/openai';
import type { AgentExecutorInput } from 'langchain/agents';
import { AgentExecutor, OpenAIAgent } from 'langchain/agents';
import { BufferMemory, type BaseChatMemory } from 'langchain/memory';
import {
type IExecuteFunctions,
type INodeExecutionData,
NodeConnectionTypes,
NodeOperationError,
} from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { getConnectedTools, getPromptInputByType } from '@utils/helpers';
import { getOptionalOutputParser } from '@utils/output_parsers/N8nOutputParser';
import { getTracingConfig } from '@utils/tracing';
import { extractParsedOutput } from '../utils';
import { toolsAgentExecute } from '../ToolsAgent/V1/execute';
/**
* OpenAI Functions Agent (legacy) - redirects to Tools Agent
*
* The OpenAI Functions Agent uses the legacy @langchain/classic API which has
* compatibility issues with langchain 1.0. The Tools Agent uses the modern
* createToolCallingAgent API which works correctly.
*
* Since both agents provide similar functionality (calling tools/functions),
* we redirect to the Tools Agent implementation for better compatibility.
*/
export async function openAiFunctionsAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
_nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing OpenAi Functions Agent');
const model = (await this.getInputConnectionData(
NodeConnectionTypes.AiLanguageModel,
0,
)) as ChatOpenAI;
if (!(model instanceof ChatOpenAI)) {
throw new NodeOperationError(
this.getNode(),
'OpenAI Functions Agent requires OpenAI Chat Model',
);
}
const memory = (await this.getInputConnectionData(NodeConnectionTypes.AiMemory, 0)) as
| BaseChatMemory
| undefined;
const tools = await getConnectedTools(this, nodeVersion >= 1.5, false);
const outputParser = await getOptionalOutputParser(this);
const options = this.getNodeParameter('options', 0, {}) as {
systemMessage?: string;
maxIterations?: number;
returnIntermediateSteps?: boolean;
};
const agentConfig: AgentExecutorInput = {
tags: ['openai-functions'],
agent: OpenAIAgent.fromLLMAndTools(model, tools, {
prefix: options.systemMessage,
}),
tools,
maxIterations: options.maxIterations ?? 10,
returnIntermediateSteps: options?.returnIntermediateSteps === true,
memory:
memory ??
new BufferMemory({
returnMessages: true,
memoryKey: 'chat_history',
inputKey: 'input',
outputKey: 'output',
}),
};
const agentExecutor = AgentExecutor.fromAgentAndTools(agentConfig);
const returnData: INodeExecutionData[] = [];
let prompt: PromptTemplate | undefined;
if (outputParser) {
const formatInstructions = outputParser.getFormatInstructions();
prompt = new PromptTemplate({
template: '{input}\n{formatInstructions}',
inputVariables: ['input'],
partialVariables: { formatInstructions },
});
}
const items = this.getInputData();
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try {
let input;
if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('text', itemIndex) as string;
} else {
input = getPromptInputByType({
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
}
if (input === undefined) {
throw new NodeOperationError(this.getNode(), 'The text parameter is empty.');
}
if (prompt) {
input = (await prompt.invoke({ input })).value;
}
const response = await agentExecutor
.withConfig(getTracingConfig(this))
.invoke({ input, outputParser });
if (outputParser) {
response.output = await extractParsedOutput(this, outputParser, response.output as string);
}
returnData.push({ json: response });
} catch (error) {
if (this.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue;
}
throw error;
}
}
return [returnData];
return await toolsAgentExecute.call(this);
}

View File

@@ -1,6 +1,6 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { PromptTemplate } from '@langchain/core/prompts';
import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute';
import { PlanAndExecuteAgentExecutor } from '@langchain/classic/experimental/plan_and_execute';
import {
type IExecuteFunctions,
type INodeExecutionData,

View File

@@ -1,7 +1,7 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { PromptTemplate } from '@langchain/core/prompts';
import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents';
import { AgentExecutor, ChatAgent, ZeroShotAgent } from '@langchain/classic/agents';
import {
type IExecuteFunctions,
type INodeExecutionData,

View File

@@ -1,9 +1,9 @@
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { DataSource } from '@n8n/typeorm';
import type { SqlCreatePromptArgs } from 'langchain/agents/toolkits/sql';
import { SqlToolkit, createSqlAgent } from 'langchain/agents/toolkits/sql';
import { SqlDatabase } from 'langchain/sql_db';
import type { SqlCreatePromptArgs } from '@langchain/classic/agents/toolkits/sql';
import { SqlToolkit, createSqlAgent } from '@langchain/classic/agents/toolkits/sql';
import { SqlDatabase } from '@langchain/classic/sql_db';
import {
type IExecuteFunctions,
type INodeExecutionData,

View File

@@ -1,6 +1,6 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { RunnableSequence } from '@langchain/core/runnables';
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents';
import { AgentExecutor, createToolCallingAgent } from '@langchain/classic/agents';
import omit from 'lodash/omit';
import { jsonParse, NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';

View File

@@ -8,9 +8,9 @@ import {
AgentExecutor,
type AgentRunnableSequence,
createToolCallingAgent,
} from 'langchain/agents';
import type { BaseChatMemory } from 'langchain/memory';
import type { DynamicStructuredTool, Tool } from 'langchain/tools';
} from '@langchain/classic/agents';
import type { BaseChatMemory } from '@langchain/classic/memory';
import type { DynamicStructuredTool, Tool } from '@langchain/classic/tools';
import omit from 'lodash/omit';
import { jsonParse, NodeOperationError, sleep } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData, ISupplyDataFunctions } from 'n8n-workflow';

View File

@@ -1,5 +1,5 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseChatMemory } from '@langchain/classic/memory';
import { NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, ISupplyDataFunctions, INodeExecutionData } from 'n8n-workflow';
import assert from 'node:assert';

View File

@@ -1,9 +1,9 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { ChatPromptTemplate } from '@langchain/core/prompts';
import { RunnableSequence } from '@langchain/core/runnables';
import { type AgentRunnableSequence, createToolCallingAgent } from 'langchain/agents';
import type { BaseChatMemory } from 'langchain/memory';
import type { DynamicStructuredTool, Tool } from 'langchain/tools';
import { type AgentRunnableSequence, createToolCallingAgent } from '@langchain/classic/agents';
import type { BaseChatMemory } from '@langchain/classic/memory';
import type { DynamicStructuredTool, Tool } from '@langchain/classic/tools';
import type { N8nOutputParser } from '@utils/output_parsers/N8nOutputParser';

View File

@@ -1,6 +1,6 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { AgentRunnableSequence } from 'langchain/agents';
import type { BaseChatMemory } from 'langchain/memory';
import type { AgentRunnableSequence } from '@langchain/classic/agents';
import type { BaseChatMemory } from '@langchain/classic/memory';
import { NodeOperationError } from 'n8n-workflow';
import type {
IExecuteFunctions,

View File

@@ -1,4 +1,4 @@
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseChatMemory } from '@langchain/classic/memory';
import omit from 'lodash/omit';
import { jsonParse } from 'n8n-workflow';
import type { INodeExecutionData } from 'n8n-workflow';

View File

@@ -1,5 +1,5 @@
import type { ChatPromptTemplate } from '@langchain/core/prompts';
import type { DynamicStructuredTool, Tool } from 'langchain/tools';
import type { DynamicStructuredTool, Tool } from '@langchain/classic/tools';
import { NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, ISupplyDataFunctions, EngineResponse } from 'n8n-workflow';

View File

@@ -1,6 +1,6 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { AgentRunnableSequence } from 'langchain/agents';
import type { BaseChatMemory } from 'langchain/memory';
import type { AgentRunnableSequence } from '@langchain/classic/agents';
import type { BaseChatMemory } from '@langchain/classic/memory';
import type {
IExecuteFunctions,
ISupplyDataFunctions,

View File

@@ -2,13 +2,13 @@ import type { BaseChatModel } from '@langchain/core/language_models/chat_models'
import type { ChatPromptTemplate } from '@langchain/core/prompts';
import { RunnableSequence } from '@langchain/core/runnables';
import { mock } from 'jest-mock-extended';
import { createToolCallingAgent } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import { createToolCallingAgent } from '@langchain/classic/agents';
import type { Tool } from '@langchain/classic/tools';
import * as commonHelpers from '../../../common';
import { createAgentSequence } from '../createAgentSequence';
jest.mock('langchain/agents', () => ({
jest.mock('@langchain/classic/agents', () => ({
createToolCallingAgent: jest.fn(),
}));

View File

@@ -1,5 +1,5 @@
import { mock } from 'jest-mock-extended';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseChatMemory } from '@langchain/classic/memory';
import type { N8nOutputParser } from '@utils/output_parsers/N8nOutputParser';

View File

@@ -1,6 +1,6 @@
import type { ChatPromptTemplate } from '@langchain/core/prompts';
import { mock } from 'jest-mock-extended';
import type { Tool } from 'langchain/tools';
import type { Tool } from '@langchain/classic/tools';
import type { IExecuteFunctions, INode } from 'n8n-workflow';
import * as helpers from '@utils/helpers';

View File

@@ -1,7 +1,7 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { mock } from 'jest-mock-extended';
import type { AgentRunnableSequence } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import type { AgentRunnableSequence } from '@langchain/classic/agents';
import type { Tool } from '@langchain/classic/tools';
import type { IExecuteFunctions, INode, EngineResponse } from 'n8n-workflow';
import * as agentExecution from '@utils/agent-execution';

View File

@@ -2,10 +2,10 @@ import type { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { HumanMessage } from '@langchain/core/messages';
import type { BaseMessage } from '@langchain/core/messages';
import { ChatPromptTemplate, type BaseMessagePromptTemplateLike } from '@langchain/core/prompts';
import type { AgentAction, AgentFinish } from 'langchain/agents';
import type { ToolsAgentAction } from 'langchain/dist/agents/tool_calling/output_parser';
import type { BaseChatMemory } from 'langchain/memory';
import { DynamicStructuredTool, type Tool } from 'langchain/tools';
import type { AgentAction, AgentFinish } from '@langchain/classic/agents';
import type { ToolsAgentAction } from '@langchain/classic/dist/agents/tool_calling/output_parser';
import type { BaseChatMemory } from '@langchain/classic/memory';
import { DynamicStructuredTool, type Tool } from '@langchain/classic/tools';
import { BINARY_ENCODING, jsonParse, NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, ISupplyDataFunctions } from 'n8n-workflow';
import type { ZodObject } from 'zod';

View File

@@ -1,5 +1,5 @@
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { DynamicStructuredTool, Tool } from 'langchain/tools';
import type { DynamicStructuredTool, Tool } from '@langchain/classic/tools';
import { NodeOperationError, type IExecuteFunctions, type INode } from 'n8n-workflow';
import type { ZodObjectAny } from '../../../../types/types';

View File

@@ -1,7 +1,7 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { mock } from 'jest-mock-extended';
import { AgentExecutor } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import { AgentExecutor } from '@langchain/classic/agents';
import type { Tool } from '@langchain/classic/tools';
import type { IExecuteFunctions, INode } from 'n8n-workflow';
import * as helpers from '../../../../../utils/helpers';

View File

@@ -1,7 +1,7 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { mock } from 'jest-mock-extended';
import { AgentExecutor } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import { AgentExecutor } from '@langchain/classic/agents';
import type { Tool } from '@langchain/classic/tools';
import type { ISupplyDataFunctions, IExecuteFunctions, INode } from 'n8n-workflow';
import * as helpers from '../../../../../utils/helpers';

View File

@@ -20,7 +20,7 @@ jest.mock('../../agents/ToolsAgent/V3/helpers', () => ({
}));
// Mock langchain modules
jest.mock('langchain/agents', () => ({
jest.mock('@langchain/classic/agents', () => ({
createToolCallingAgent: jest.fn(),
}));

View File

@@ -5,9 +5,9 @@ import type { BaseMessagePromptTemplateLike } from '@langchain/core/prompts';
import { FakeLLM, FakeStreamingChatModel } from '@langchain/core/utils/testing';
import { Buffer } from 'buffer';
import { mock } from 'jest-mock-extended';
import type { AgentAction, AgentFinish } from 'langchain/agents';
import type { ToolsAgentAction } from 'langchain/dist/agents/tool_calling/output_parser';
import type { Tool } from 'langchain/tools';
import type { AgentAction, AgentFinish } from '@langchain/classic/agents';
import type { ToolsAgentAction } from '@langchain/classic/dist/agents/tool_calling/output_parser';
import type { Tool } from '@langchain/classic/tools';
import type { IExecuteFunctions, INode } from 'n8n-workflow';
import { NodeOperationError, BINARY_ENCODING, NodeConnectionTypes } from 'n8n-workflow';
import type { ZodType } from 'zod';
@@ -225,8 +225,10 @@ describe('fixEmptyContentMessage', () => {
const messageContent = fixed?.[0]?.messageLog?.[0].content;
// Type assertion needed since we're extending MessageContentComplex
expect((messageContent?.[0] as { input: unknown })?.input).toEqual({});
expect((messageContent?.[1] as { input: unknown })?.input).toEqual({ already: 'object' });
expect((messageContent?.[0] as unknown as { input: unknown })?.input).toEqual({});
expect((messageContent?.[1] as unknown as { input: unknown })?.input).toEqual({
already: 'object',
});
});
});

View File

@@ -1,5 +1,5 @@
import type { Tool } from 'langchain/tools';
import { DynamicStructuredTool } from 'langchain/tools';
import type { Tool } from '@langchain/classic/tools';
import { DynamicStructuredTool } from '@langchain/classic/tools';
import { NodeOperationError } from 'n8n-workflow';
import type { INode } from 'n8n-workflow';
import { z } from 'zod';

View File

@@ -1,6 +1,6 @@
import { AgentExecutor } from 'langchain/agents';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import { AgentExecutor } from '@langchain/classic/agents';
import type { OpenAIToolType } from '@langchain/classic/dist/experimental/openai_assistant/schema';
import { OpenAIAssistantRunnable } from '@langchain/classic/experimental/openai_assistant';
import { NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';
import type {
IExecuteFunctions,

View File

@@ -6,8 +6,8 @@ import {
SystemMessagePromptTemplate,
} from '@langchain/core/prompts';
import type { BaseRetriever } from '@langchain/core/retrievers';
import { createStuffDocumentsChain } from 'langchain/chains/combine_documents';
import { createRetrievalChain } from 'langchain/chains/retrieval';
import { createStuffDocumentsChain } from '@langchain/classic/chains/combine_documents';
import { createRetrievalChain } from '@langchain/classic/chains/retrieval';
import { type IExecuteFunctions, NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';
import { getPromptInputByType, isChatInstance } from '@utils/helpers';

View File

@@ -1,8 +1,8 @@
import type { Document } from '@langchain/core/documents';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { PromptTemplate } from '@langchain/core/prompts';
import type { SummarizationChainParams } from 'langchain/chains';
import { loadSummarizationChain } from 'langchain/chains';
import type { SummarizationChainParams } from '@langchain/classic/chains';
import { loadSummarizationChain } from '@langchain/classic/chains';
import {
NodeConnectionTypes,
type INodeTypeBaseDescription,

View File

@@ -2,7 +2,7 @@ import type { Document } from '@langchain/core/documents';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { ChainValues } from '@langchain/core/utils/types';
import { RecursiveCharacterTextSplitter, type TextSplitter } from '@langchain/textsplitters';
import { loadSummarizationChain } from 'langchain/chains';
import { loadSummarizationChain } from '@langchain/classic/chains';
import { type IExecuteFunctions, type INodeExecutionData, NodeConnectionTypes } from 'n8n-workflow';
import { N8nBinaryLoader } from '@utils/N8nBinaryLoader';

View File

@@ -1,5 +1,5 @@
import { PromptTemplate } from '@langchain/core/prompts';
import type { SummarizationChainParams } from 'langchain/chains';
import type { SummarizationChainParams } from '@langchain/classic/chains';
interface ChainTypeOptions {
combineMapPrompt?: string;
prompt?: string;

View File

@@ -1,6 +1,6 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { JSONSchema7 } from 'json-schema';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { OutputFixingParser, StructuredOutputParser } from '@langchain/classic/output_parsers';
import { jsonParse, NodeConnectionTypes, NodeOperationError, sleep } from 'n8n-workflow';
import type {
INodeType,

View File

@@ -1,7 +1,7 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { HumanMessage } from '@langchain/core/messages';
import { ChatPromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts';
import type { OutputFixingParser } from 'langchain/output_parsers';
import type { OutputFixingParser } from '@langchain/classic/output_parsers';
import { NodeOperationError, type IExecuteFunctions } from 'n8n-workflow';
import { getTracingConfig } from '@utils/tracing';

View File

@@ -1,5 +1,5 @@
import { FakeLLM, FakeListChatModel } from '@langchain/core/utils/testing';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { OutputFixingParser, StructuredOutputParser } from '@langchain/classic/output_parsers';
import { NodeOperationError } from 'n8n-workflow';
import { makeZodSchemaFromAttributes } from '../helpers';

View File

@@ -1,7 +1,7 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { HumanMessage } from '@langchain/core/messages';
import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { OutputFixingParser, StructuredOutputParser } from '@langchain/classic/output_parsers';
import { NodeConnectionTypes, NodeOperationError, sleep } from 'n8n-workflow';
import type {
IDataObject,

View File

@@ -1,5 +1,5 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { OutputFixingParser, StructuredOutputParser } from '@langchain/classic/output_parsers';
import { NodeOperationError, NodeConnectionTypes, sleep } from 'n8n-workflow';
import type {
IDataObject,

View File

@@ -1,7 +1,7 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { HumanMessage } from '@langchain/core/messages';
import { ChatPromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts';
import type { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import type { OutputFixingParser, StructuredOutputParser } from '@langchain/classic/output_parsers';
import { NodeOperationError, type IExecuteFunctions, type INodeExecutionData } from 'n8n-workflow';
import { getTracingConfig } from '@utils/tracing';

View File

@@ -0,0 +1,118 @@
import { transformLegacyLangchainImport } from './Code.node';
describe('Code.node', () => {
describe('transformLegacyLangchainImport', () => {
describe('transforms legacy langchain imports to @langchain/classic', () => {
it('should transform langchain/chains to @langchain/classic/chains', () => {
const result = transformLegacyLangchainImport('langchain/chains');
expect(result).toBe('@langchain/classic/chains');
});
it('should transform langchain/agents to @langchain/classic/agents', () => {
const result = transformLegacyLangchainImport('langchain/agents');
expect(result).toBe('@langchain/classic/agents');
});
it('should transform langchain/memory to @langchain/classic/memory', () => {
const result = transformLegacyLangchainImport('langchain/memory');
expect(result).toBe('@langchain/classic/memory');
});
it('should transform langchain/retrievers to @langchain/classic/retrievers', () => {
const result = transformLegacyLangchainImport('langchain/retrievers');
expect(result).toBe('@langchain/classic/retrievers');
});
it('should transform langchain/tools to @langchain/classic/tools', () => {
const result = transformLegacyLangchainImport('langchain/tools');
expect(result).toBe('@langchain/classic/tools');
});
it('should transform langchain/output_parsers to @langchain/classic/output_parsers', () => {
const result = transformLegacyLangchainImport('langchain/output_parsers');
expect(result).toBe('@langchain/classic/output_parsers');
});
it('should transform nested paths like langchain/chains/combine_documents', () => {
const result = transformLegacyLangchainImport('langchain/chains/combine_documents');
expect(result).toBe('@langchain/classic/chains/combine_documents');
});
it('should transform langchain/embeddings/cache_backed', () => {
const result = transformLegacyLangchainImport('langchain/embeddings/cache_backed');
expect(result).toBe('@langchain/classic/embeddings/cache_backed');
});
it('should transform langchain/document_loaders/fs/text', () => {
const result = transformLegacyLangchainImport('langchain/document_loaders/fs/text');
expect(result).toBe('@langchain/classic/document_loaders/fs/text');
});
it('should transform langchain/text_splitter', () => {
const result = transformLegacyLangchainImport('langchain/text_splitter');
expect(result).toBe('@langchain/classic/text_splitter');
});
it('should transform langchain/experimental/autogpt', () => {
const result = transformLegacyLangchainImport('langchain/experimental/autogpt');
expect(result).toBe('@langchain/classic/experimental/autogpt');
});
});
describe('does not transform non-classic imports', () => {
it('should not transform @langchain/core imports', () => {
const result = transformLegacyLangchainImport('@langchain/core/prompts');
expect(result).toBe('@langchain/core/prompts');
});
it('should not transform @langchain/community imports', () => {
const result = transformLegacyLangchainImport(
'@langchain/community/tools/wikipedia_query_run',
);
expect(result).toBe('@langchain/community/tools/wikipedia_query_run');
});
it('should not transform @langchain/openai imports', () => {
const result = transformLegacyLangchainImport('@langchain/openai');
expect(result).toBe('@langchain/openai');
});
it('should not transform already correct @langchain/classic imports', () => {
const result = transformLegacyLangchainImport('@langchain/classic/chains');
expect(result).toBe('@langchain/classic/chains');
});
it('should return original module name for non-langchain imports', () => {
const result = transformLegacyLangchainImport('lodash');
expect(result).toBe('lodash');
});
});
describe('handles edge cases', () => {
it('should handle langchain/hub imports', () => {
const result = transformLegacyLangchainImport('langchain/hub');
expect(result).toBe('@langchain/classic/hub');
});
it('should handle langchain/indexes imports', () => {
const result = transformLegacyLangchainImport('langchain/indexes');
expect(result).toBe('@langchain/classic/indexes');
});
it('should handle langchain/sql_db imports', () => {
const result = transformLegacyLangchainImport('langchain/sql_db');
expect(result).toBe('@langchain/classic/sql_db');
});
it('should handle langchain/storage/in_memory', () => {
const result = transformLegacyLangchainImport('langchain/storage/in_memory');
expect(result).toBe('@langchain/classic/storage/in_memory');
});
it('should handle langchain/stores/message/in_memory', () => {
const result = transformLegacyLangchainImport('langchain/stores/message/in_memory');
expect(result).toBe('@langchain/classic/stores/message/in_memory');
});
});
});
});

View File

@@ -48,11 +48,100 @@ const prompt = PromptTemplate.fromTemplate(query);
const llm = await this.getInputConnectionData('ai_languageModel', 0);
let chain = prompt.pipe(llm);
const output = await chain.invoke();
return [ {json: { output } } ];`;
return [ {json: { output } } ];
// NOTE: Old langchain imports (e.g., 'langchain/chains') are automatically
// converted to '@langchain/classic' imports for backwards compatibility.`;
const defaultCodeSupplyData = `const { WikipediaQueryRun } = require( '@langchain/community/tools/wikipedia_query_run');
return new WikipediaQueryRun();`;
/**
* Transforms old langchain import paths to @langchain/classic for backwards compatibility.
* Only transforms paths that actually moved to the classic package.
*
* @param moduleName - The original module name from the import statement
* @returns The transformed module name, or the original if no transformation is needed
*/
export function transformLegacyLangchainImport(moduleName: string): string {
// List of langchain submodules that moved to @langchain/classic
// Based on https://www.npmjs.com/package/@langchain/classic exports
const classicModules = [
'agents',
'callbacks',
'chains',
'chat_models/universal',
'document',
'document_loaders',
'document_transformers',
'embeddings/cache_backed',
'embeddings/fake',
'evaluation',
'experimental',
'hub',
'indexes',
'load',
'memory',
'output_parsers',
'retrievers',
'schema',
'smith',
'sql_db',
'storage',
'stores',
'text_splitter',
'tools',
'util',
'vectorstores',
];
// Check if this is a langchain/ import (old style)
if (moduleName.startsWith('langchain/')) {
const subpath = moduleName.substring('langchain/'.length);
// Check if this subpath or any parent path is in the classic modules list
for (const classicModule of classicModules) {
if (subpath === classicModule || subpath.startsWith(classicModule + '/')) {
// Transform to @langchain/classic
return `@langchain/classic/${subpath}`;
}
}
}
return moduleName;
}
/**
* Transforms user code to replace old langchain require/import statements
* with @langchain/classic equivalents.
*
* @param code - The user's code string
* @returns The transformed code with updated import paths
*/
function transformLegacyLangchainCode(code: string): string {
// Transform require statements: require('langchain/...')
let transformedCode = code.replace(
/require\s*\(\s*['"]langchain\/([\w/_]+)['"]\s*\)/g,
(match, subpath) => {
const oldPath = `langchain/${subpath}`;
const newPath = transformLegacyLangchainImport(oldPath);
return newPath === oldPath ? match : `require('${newPath}')`;
},
);
// Transform import statements: from 'langchain/...'
transformedCode = transformedCode.replace(
/from\s+['"]langchain\/([\w/_]+)['"]/g,
(match, subpath) => {
const oldPath = `langchain/${subpath}`;
const newPath = transformLegacyLangchainImport(oldPath);
return newPath === oldPath ? match : `from '${newPath}'`;
},
);
return transformedCode;
}
const langchainModules = ['langchain', '@langchain/*'];
export const vmResolver = makeResolverFromLegacyOptions({
external: {
@@ -79,6 +168,9 @@ function getSandbox(
const node = this.getNode();
const workflowMode = this.getMode();
// Transform legacy langchain imports to @langchain/classic
const transformedCode = transformLegacyLangchainCode(code);
const context = getSandboxContext.call(this, itemIndex);
context.addInputData = this.addInputData.bind(this);
context.addOutputData = this.addOutputData.bind(this);
@@ -95,7 +187,7 @@ function getSandbox(
context.items = context.$input.all();
}
const sandbox = new JavaScriptSandbox(context, code, this.helpers, {
const sandbox = new JavaScriptSandbox(context, transformedCode, this.helpers, {
resolver: vmResolver,
});

View File

@@ -1,4 +1,4 @@
import { Ollama } from '@langchain/community/llms/ollama';
import { Ollama } from '@langchain/ollama';
import {
NodeConnectionTypes,
type INodeType,

View File

@@ -1,7 +1,7 @@
import { DynamicStructuredTool, type DynamicStructuredToolInput } from '@langchain/core/tools';
import type { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { CompatibilityCallToolResultSchema } from '@modelcontextprotocol/sdk/types.js';
import { Toolkit } from 'langchain/agents';
import { Toolkit } from '@langchain/classic/agents';
import { type IDataObject } from 'n8n-workflow';
import { z } from 'zod';

View File

@@ -1,5 +1,5 @@
import type { BufferWindowMemoryInput } from 'langchain/memory';
import { BufferWindowMemory } from 'langchain/memory';
import type { BufferWindowMemoryInput } from '@langchain/classic/memory';
import { BufferWindowMemory } from '@langchain/classic/memory';
import {
NodeConnectionTypes,
type INodeType,

View File

@@ -1,5 +1,5 @@
import { MongoDBChatMessageHistory } from '@langchain/mongodb';
import { BufferWindowMemory } from 'langchain/memory';
import { BufferWindowMemory } from '@langchain/classic/memory';
import { MongoClient } from 'mongodb';
import type {
ISupplyDataFunctions,

View File

@@ -1,5 +1,5 @@
import { PostgresChatMessageHistory } from '@langchain/community/stores/message/postgres';
import { BufferMemory, BufferWindowMemory } from 'langchain/memory';
import { BufferMemory, BufferWindowMemory } from '@langchain/classic/memory';
import { configurePostgres } from 'n8n-nodes-base/dist/nodes/Postgres/transport/index';
import type { PostgresNodeCredentials } from 'n8n-nodes-base/dist/nodes/Postgres/v2/helpers/interfaces';
import { postgresConnectionTest } from 'n8n-nodes-base/dist/nodes/Postgres/v2/methods/credentialTest';

View File

@@ -1,6 +1,6 @@
import type { RedisChatMessageHistoryInput } from '@langchain/redis';
import { RedisChatMessageHistory } from '@langchain/redis';
import { BufferMemory, BufferWindowMemory } from 'langchain/memory';
import { BufferMemory, BufferWindowMemory } from '@langchain/classic/memory';
import {
NodeOperationError,
type INodeType,

View File

@@ -1,6 +1,6 @@
import { XataChatMessageHistory } from '@langchain/community/stores/message/xata';
import { BaseClient } from '@xata.io/client';
import { BufferMemory, BufferWindowMemory } from 'langchain/memory';
import { BufferMemory, BufferWindowMemory } from '@langchain/classic/memory';
import { NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';
import type {
ISupplyDataFunctions,

View File

@@ -1,7 +1,7 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseRetriever } from '@langchain/core/retrievers';
import { ContextualCompressionRetriever } from 'langchain/retrievers/contextual_compression';
import { LLMChainExtractor } from 'langchain/retrievers/document_compressors/chain_extract';
import { ContextualCompressionRetriever } from '@langchain/classic/retrievers/contextual_compression';
import { LLMChainExtractor } from '@langchain/classic/retrievers/document_compressors/chain_extract';
import {
NodeConnectionTypes,
type INodeType,

View File

@@ -1,6 +1,6 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseRetriever } from '@langchain/core/retrievers';
import { MultiQueryRetriever } from 'langchain/retrievers/multi_query';
import { MultiQueryRetriever } from '@langchain/classic/retrievers/multi_query';
import {
NodeConnectionTypes,
type INodeType,

View File

@@ -1,6 +1,6 @@
import type { BaseDocumentCompressor } from '@langchain/core/retrievers/document_compressors';
import { VectorStore } from '@langchain/core/vectorstores';
import { ContextualCompressionRetriever } from 'langchain/retrievers/contextual_compression';
import { ContextualCompressionRetriever } from '@langchain/classic/retrievers/contextual_compression';
import {
NodeConnectionTypes,
type INodeType,

View File

@@ -1,6 +1,6 @@
import type { BaseDocumentCompressor } from '@langchain/core/retrievers/document_compressors';
import { VectorStore } from '@langchain/core/vectorstores';
import { ContextualCompressionRetriever } from 'langchain/retrievers/contextual_compression';
import { ContextualCompressionRetriever } from '@langchain/classic/retrievers/contextual_compression';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionTypes } from 'n8n-workflow';

View File

@@ -1,5 +1,5 @@
import { mock } from 'jest-mock-extended';
import { DynamicTool } from 'langchain/tools';
import { DynamicTool } from '@langchain/classic/tools';
import {
type IExecuteFunctions,
type INode,

View File

@@ -1,4 +1,4 @@
import { DynamicTool } from 'langchain/tools';
import { DynamicTool } from '@langchain/classic/tools';
import {
type IExecuteFunctions,
NodeConnectionTypes,

View File

@@ -1,5 +1,5 @@
import { mock } from 'jest-mock-extended';
import { DynamicTool } from 'langchain/tools';
import { DynamicTool } from '@langchain/classic/tools';
import type {
IExecuteFunctions,
INodeExecutionData,

View File

@@ -1,5 +1,5 @@
import { mock } from 'jest-mock-extended';
import { VectorStoreQATool } from 'langchain/tools';
import { VectorStoreQATool } from '@langchain/classic/tools';
import {
NodeConnectionTypes,
type IExecuteFunctions,

View File

@@ -1,7 +1,7 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { VectorStore } from '@langchain/core/vectorstores';
import { VectorDBQAChain } from 'langchain/chains';
import { VectorStoreQATool } from 'langchain/tools';
import { VectorDBQAChain } from '@langchain/classic/chains';
import { VectorStoreQATool } from '@langchain/classic/tools';
import type {
IExecuteFunctions,
INodeExecutionData,

View File

@@ -1,5 +1,5 @@
import { mock } from 'jest-mock-extended';
import { DynamicTool } from 'langchain/tools';
import { DynamicTool } from '@langchain/classic/tools';
import {
type INode,
type ISupplyDataFunctions,

View File

@@ -1,5 +1,5 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseChatMemory } from '@langchain/classic/memory';
import {
CHAT_TRIGGER_NODE_TYPE,
CHAT_WAIT_USER_REPLY,
@@ -291,7 +291,7 @@ export class Chat implements INodeType {
| undefined;
if (memory) {
await memory.chatHistory.addAIChatMessage(message);
await memory.chatHistory.addAIMessage(message);
}
}

View File

@@ -67,12 +67,12 @@ describe('Test Chat Node', () => {
} as any,
]);
const memory = { chatHistory: { addAIChatMessage: jest.fn() } };
const memory = { chatHistory: { addAIMessage: jest.fn() } };
mockExecuteFunctions.getInputConnectionData.mockResolvedValueOnce(memory);
await chat.execute.call(mockExecuteFunctions);
expect(memory.chatHistory.addAIChatMessage).toHaveBeenCalledWith('message');
expect(memory.chatHistory.addAIMessage).toHaveBeenCalledWith('message');
});
it('should execute without memory connection', async () => {

View File

@@ -1,5 +1,5 @@
import type { Embeddings } from '@langchain/core/embeddings';
import type { MemoryVectorStore } from 'langchain/vectorstores/memory';
import type { MemoryVectorStore } from '@langchain/classic/vectorstores/memory';
import {
type INodeProperties,
type ILoadOptionsFunctions,

View File

@@ -1,5 +1,5 @@
import type { Embeddings } from '@langchain/core/embeddings';
import type { Document } from 'langchain/document';
import type { Document } from '@langchain/classic/document';
import {
NodeConnectionTypes,
type INodeExecutionData,

View File

@@ -1,5 +1,5 @@
import type { Document } from '@langchain/core/documents';
import type { MemoryVectorStore } from 'langchain/vectorstores/memory';
import type { MemoryVectorStore } from '@langchain/classic/vectorstores/memory';
import type { IMemoryCalculator } from './types';

View File

@@ -1,7 +1,7 @@
import type { Document } from '@langchain/core/documents';
import type { Embeddings } from '@langchain/core/embeddings';
import type { OpenAIEmbeddings, AzureOpenAIEmbeddings } from '@langchain/openai';
import { MemoryVectorStore } from 'langchain/vectorstores/memory';
import { MemoryVectorStore } from '@langchain/classic/vectorstores/memory';
import type { Logger } from 'n8n-workflow';
import { getConfig, mbToBytes, hoursToMs } from './config';

View File

@@ -1,4 +1,4 @@
import type { MemoryVectorStore } from 'langchain/vectorstores/memory';
import type { MemoryVectorStore } from '@langchain/classic/vectorstores/memory';
import type { VectorStoreMetadata, IStoreCleanupService } from './types';

View File

@@ -1,6 +1,6 @@
import { Document } from '@langchain/core/documents';
import { mock } from 'jest-mock-extended';
import type { MemoryVectorStore } from 'langchain/vectorstores/memory';
import type { MemoryVectorStore } from '@langchain/classic/vectorstores/memory';
import { MemoryCalculator } from '../MemoryCalculator';

View File

@@ -1,7 +1,7 @@
import { Document } from '@langchain/core/documents';
import type { OpenAIEmbeddings } from '@langchain/openai';
import { mock } from 'jest-mock-extended';
import type { MemoryVectorStore } from 'langchain/vectorstores/memory';
import type { MemoryVectorStore } from '@langchain/classic/vectorstores/memory';
import type { Logger } from 'n8n-workflow';
import * as configModule from '../config';
@@ -11,7 +11,7 @@ function createTestEmbedding(dimensions = 1536, initialValue = 0.1, multiplier =
return new Array(dimensions).fill(initialValue).map((value) => value * multiplier);
}
jest.mock('langchain/vectorstores/memory', () => {
jest.mock('@langchain/classic/vectorstores/memory', () => {
return {
MemoryVectorStore: {
fromExistingIndex: jest.fn().mockImplementation(() => {

View File

@@ -1,5 +1,5 @@
import { mock } from 'jest-mock-extended';
import type { MemoryVectorStore } from 'langchain/vectorstores/memory';
import type { MemoryVectorStore } from '@langchain/classic/vectorstores/memory';
import { StoreCleanupService } from '../StoreCleanupService';
import type { VectorStoreMetadata } from '../types';

View File

@@ -1,5 +1,5 @@
import type { Document } from '@langchain/core/documents';
import type { MemoryVectorStore } from 'langchain/vectorstores/memory';
import type { MemoryVectorStore } from '@langchain/classic/vectorstores/memory';
/**
* Configuration options for the memory vector store

View File

@@ -4,7 +4,7 @@ import type { DocumentInterface } from '@langchain/core/documents';
import type { Embeddings } from '@langchain/core/embeddings';
import type { VectorStore } from '@langchain/core/vectorstores';
import { mock } from 'jest-mock-extended';
import type { DynamicTool } from 'langchain/tools';
import type { DynamicTool } from '@langchain/classic/tools';
import type {
IExecuteFunctions,
ISupplyDataFunctions,

View File

@@ -1,12 +1,11 @@
/* eslint-disable @typescript-eslint/unbound-method */
import { type DynamicTool, DynamicStructuredTool } from '@langchain/classic/tools';
import type { Document } from '@langchain/core/documents';
import type { Embeddings } from '@langchain/core/embeddings';
import type { BaseDocumentCompressor } from '@langchain/core/retrievers/document_compressors';
import type { VectorStore } from '@langchain/core/vectorstores';
import type { MockProxy } from 'jest-mock-extended';
import { mock } from 'jest-mock-extended';
import type { DynamicTool } from 'langchain/tools';
import { DynamicStructuredTool } from 'langchain/tools';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionTypes } from 'n8n-workflow';

View File

@@ -1,7 +1,7 @@
import type { BaseMessage } from '@langchain/core/messages';
import type { Tool } from '@langchain/core/tools';
import type { OpenAIClient } from '@langchain/openai';
import type { BufferWindowMemory } from 'langchain/memory';
import type { BufferWindowMemory } from '@langchain/classic/memory';
import { isObjectEmpty } from 'n8n-workflow';
import { zodToJsonSchema } from 'zod-to-json-schema';

View File

@@ -1,6 +1,6 @@
import { AIMessage, HumanMessage } from '@langchain/core/messages';
import type { Tool } from '@langchain/core/tools';
import { BufferWindowMemory } from 'langchain/memory';
import { BufferWindowMemory } from '@langchain/classic/memory';
import { z } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';

View File

@@ -8,7 +8,7 @@ import * as transport from '../../../../transport';
import * as helpers from '../../../../v2/actions/text/helpers/responses';
import { execute } from '../../../../v2/actions/text/response.operation';
import { formatToOpenAIResponsesTool } from '../../../../helpers/utils';
import type { Tool } from 'langchain/tools';
import type { Tool } from '@langchain/classic/tools';
jest.mock('../../../../transport');
jest.mock('../../../../v2/actions/text/helpers/responses');

View File

@@ -1,8 +1,8 @@
import type { BaseMessage } from '@langchain/core/messages';
import { AgentExecutor } from 'langchain/agents';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import type { BufferWindowMemory } from 'langchain/memory';
import { AgentExecutor } from '@langchain/classic/agents';
import type { OpenAIToolType } from '@langchain/classic/dist/experimental/openai_assistant/schema';
import { OpenAIAssistantRunnable } from '@langchain/classic/experimental/openai_assistant';
import type { BufferWindowMemory } from '@langchain/classic/memory';
import omit from 'lodash/omit';
import type {
IDataObject,

View File

@@ -180,29 +180,29 @@
"@aws-sdk/client-sso-oidc": "3.808.0",
"@azure/identity": "4.3.0",
"@azure/search-documents": "12.1.0",
"@getzep/zep-cloud": "1.0.12",
"@getzep/zep-cloud": "1.0.6",
"@getzep/zep-js": "0.9.0",
"@google-cloud/resource-manager": "5.3.0",
"@google/generative-ai": "0.21.0",
"@google/genai": "1.19.0",
"@huggingface/inference": "4.0.5",
"@langchain/anthropic": "catalog:",
"@langchain/aws": "0.1.11",
"@langchain/cohere": "0.3.4",
"@langchain/aws": "1.0.3",
"@langchain/cohere": "1.0.1",
"@langchain/community": "catalog:",
"@langchain/core": "catalog:",
"@langchain/google-genai": "0.2.17",
"@langchain/google-vertexai": "0.2.18",
"@langchain/groq": "0.2.3",
"@langchain/mistralai": "0.2.3",
"@langchain/mongodb": "^0.1.0",
"@langchain/ollama": "0.2.3",
"@langchain/google-genai": "2.0.0",
"@langchain/google-vertexai": "2.0.0",
"@langchain/groq": "1.0.2",
"@langchain/mistralai": "1.0.1",
"@langchain/mongodb": "1.0.1",
"@langchain/ollama": "1.0.2",
"@langchain/openai": "catalog:",
"@langchain/pinecone": "0.2.0",
"@langchain/qdrant": "0.1.2",
"@langchain/redis": "0.1.1",
"@langchain/textsplitters": "0.1.0",
"@langchain/weaviate": "0.2.0",
"@langchain/pinecone": "1.0.1",
"@langchain/qdrant": "1.0.1",
"@langchain/redis": "1.0.1",
"@langchain/textsplitters": "1.0.1",
"@langchain/weaviate": "1.0.1",
"@modelcontextprotocol/sdk": "1.20.0",
"@mozilla/readability": "0.6.0",
"@n8n/client-oauth2": "workspace:*",
@@ -214,7 +214,7 @@
"@n8n/typescript-config": "workspace:*",
"@n8n/vm2": "3.9.25",
"@pinecone-database/pinecone": "^5.0.2",
"@qdrant/js-client-rest": "1.14.1",
"@qdrant/js-client-rest": "^1.15.0",
"@supabase/supabase-js": "2.49.9",
"@xata.io/client": "0.28.4",
"@zilliz/milvus2-sdk-node": "^2.5.7",
@@ -230,14 +230,15 @@
"ignore": "^5.2.0",
"js-tiktoken": "^1.0.12",
"jsdom": "23.0.1",
"langchain": "0.3.33",
"langchain": "1.1.1",
"@langchain/classic": "1.0.5",
"lodash": "catalog:",
"mammoth": "1.11.0",
"mime-types": "catalog:",
"mongodb": "6.11.0",
"mongodb": "^6.17.0",
"n8n-nodes-base": "workspace:*",
"n8n-workflow": "workspace:*",
"openai": "5.12.2",
"openai": "^6.9.0",
"pdf-parse": "1.1.1",
"pg": "8.12.0",
"proxy-from-env": "^1.1.0",

View File

@@ -5,8 +5,8 @@ import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
import type { Document } from '@langchain/core/documents';
import type { TextSplitter } from '@langchain/textsplitters';
import { createWriteStream } from 'fs';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { JSONLoader } from '@langchain/classic/document_loaders/fs/json';
import { TextLoader } from '@langchain/classic/document_loaders/fs/text';
import type {
IBinaryData,
IExecuteFunctions,

View File

@@ -1,7 +1,7 @@
import type { Document } from '@langchain/core/documents';
import type { TextSplitter } from '@langchain/textsplitters';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { JSONLoader } from '@langchain/classic/document_loaders/fs/json';
import { TextLoader } from '@langchain/classic/document_loaders/fs/text';
import {
type IExecuteFunctions,
type INodeExecutionData,

View File

@@ -1,6 +1,6 @@
import type { DynamicStructuredToolInput } from '@langchain/core/tools';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import { StructuredOutputParser } from 'langchain/output_parsers';
import { StructuredOutputParser } from '@langchain/classic/output_parsers';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
import { NodeConnectionTypes, jsonParse, NodeOperationError } from 'n8n-workflow';
import type { ZodTypeAny } from 'zod';

View File

@@ -45,16 +45,26 @@ export function buildSteps(
}
// Create a synthetic AI message for the messageLog
// This represents the AI's decision to call the tool
// Extract thought_signature from metadata if present (for Gemini 3)
const rawThoughtSignature = tool.action.metadata?.thoughtSignature;
const thoughtSignature =
typeof rawThoughtSignature === 'string' ? rawThoughtSignature : undefined;
// Build the tool call object with thought_signature if present
// The thought_signature must be part of the tool call itself for Gemini 3
const toolCall = {
id: typeof toolInput?.id === 'string' ? toolInput.id : 'reconstructed_call',
name: nodeNameToToolName(tool.action.nodeName),
args: toolInput,
type: 'tool_call' as const,
additional_kwargs: {
...(thoughtSignature && { thought_signature: thoughtSignature }),
},
};
const syntheticAIMessage = new AIMessage({
content: `Calling ${tool.action.nodeName} with input: ${JSON.stringify(toolInput)}`,
tool_calls: [
{
id: (toolInput?.id as string) ?? 'reconstructed_call',
name: nodeNameToToolName(tool.action.nodeName),
args: toolInput,
type: 'tool_call',
},
],
tool_calls: [toolCall],
});
const toolResult = {

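Gemini 3 expects the opaque thought_signature it returned alongside a tool call to be echoed back on the next turn, which is why the reconstruction above threads the signature from the action metadata into `additional_kwargs` on the tool call itself rather than onto the message. A condensed sketch of the reconstructed message, with illustrative values:

import { AIMessage } from '@langchain/core/messages';

// Values are illustrative; in the agent they come from the paused tool
// action and its metadata.thoughtSignature.
const syntheticAIMessage = new AIMessage({
	content: 'Calling HTTP Request with input: {"url":"https://example.com"}',
	tool_calls: [
		{
			id: 'call_123',
			name: 'http_request',
			args: { url: 'https://example.com' },
			type: 'tool_call' as const,
			additional_kwargs: { thought_signature: 'opaque-gemini-signature' },
		},
	],
});
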
View File

@@ -1,4 +1,4 @@
import type { DynamicStructuredTool, Tool } from 'langchain/tools';
import type { DynamicStructuredTool, Tool } from '@langchain/classic/tools';
import { NodeConnectionTypes } from 'n8n-workflow';
import type { EngineRequest, IDataObject } from 'n8n-workflow';
@@ -39,6 +39,28 @@ export async function createEngineRequests(
? { ...toolCall.toolInput, tool: toolCall.tool }
: toolCall.toolInput;
// Extract thought_signature from the AIMessage in messageLog (for Gemini 3)
let thoughtSignature: string | undefined;
if (toolCall.messageLog && Array.isArray(toolCall.messageLog)) {
for (const message of toolCall.messageLog) {
// Check if message has content that could contain thought_signature
if (message && typeof message === 'object' && 'content' in message) {
const content = message.content;
// Content can be string or array of content blocks
if (Array.isArray(content)) {
// Look for thought_signature in content blocks
for (const block of content) {
if (block && typeof block === 'object' && 'thoughtSignature' in block) {
thoughtSignature = block.thoughtSignature as string;
break;
}
}
}
if (thoughtSignature) break;
}
}
}
return {
actionType: 'ExecutionNodeAction' as const,
nodeName,
@@ -47,6 +69,7 @@ export async function createEngineRequests(
id: toolCall.toolCallId,
metadata: {
itemIndex,
...(thoughtSignature && { thoughtSignature }),
},
};
})

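The scan above is deliberately defensive because messageLog entries are untyped at this point. The same logic, restated as a standalone helper for clarity (a sketch, not part of the diff):

// Walks message content blocks and returns the first Gemini
// thoughtSignature found, mirroring the inline loop above.
function findThoughtSignature(messageLog: unknown[]): string | undefined {
	for (const message of messageLog) {
		if (message && typeof message === 'object' && 'content' in message) {
			const content = (message as { content: unknown }).content;
			if (Array.isArray(content)) {
				for (const block of content) {
					if (block && typeof block === 'object' && 'thoughtSignature' in block) {
						return (block as { thoughtSignature: string }).thoughtSignature;
					}
				}
			}
		}
	}
	return undefined;
}
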
View File

@@ -1,7 +1,7 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { BaseMessage } from '@langchain/core/messages';
import { trimMessages } from '@langchain/core/messages';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseChatMemory } from '@langchain/classic/memory';
import type { ToolCallData } from './types';

View File

@@ -1,4 +1,4 @@
import { DynamicStructuredTool } from 'langchain/tools';
import { DynamicStructuredTool } from '@langchain/classic/tools';
import { NodeConnectionTypes } from 'n8n-workflow';
import { z } from 'zod';

View File

@@ -1,7 +1,7 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { HumanMessage, AIMessage, SystemMessage, trimMessages } from '@langchain/core/messages';
import { mock } from 'jest-mock-extended';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseChatMemory } from '@langchain/classic/memory';
import { loadMemory, saveToMemory, buildToolContext } from '../memoryManagement';
import type { ToolCallData } from '../types';

View File

@@ -58,4 +58,6 @@ export type RequestResponseMetadata = {
previousRequests?: ToolCallData[];
/** Current iteration count (for max iterations enforcement) */
iterationCount?: number;
/** Thought signature for Gemini 3 tool calls */
thoughtSignature?: string;
};

View File

@@ -3,8 +3,8 @@ import type { BaseChatModel } from '@langchain/core/language_models/chat_models'
import type { BaseLLM } from '@langchain/core/language_models/llms';
import type { BaseMessage } from '@langchain/core/messages';
import type { Tool } from '@langchain/core/tools';
import { Toolkit } from 'langchain/agents';
import type { BaseChatMemory } from 'langchain/memory';
import { Toolkit } from '@langchain/classic/agents';
import type { BaseChatMemory } from '@langchain/classic/memory';
import { NodeConnectionTypes, NodeOperationError, jsonStringify } from 'n8n-workflow';
import type {
AiEvent,

View File

@@ -10,7 +10,7 @@ import { BaseDocumentCompressor } from '@langchain/core/retrievers/document_comp
import type { StructuredTool, Tool } from '@langchain/core/tools';
import { VectorStore } from '@langchain/core/vectorstores';
import { TextSplitter } from '@langchain/textsplitters';
import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base';
import type { BaseDocumentLoader } from '@langchain/classic/dist/document_loaders/base';
import { OpenAIEmbeddings, AzureOpenAIEmbeddings } from '@langchain/openai';
import type {
IDataObject,
@@ -138,7 +138,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [values],
})) as MemoryVariables;
@@ -161,7 +161,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [input, output],
})) as MemoryVariables;
@@ -189,7 +189,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [],
})) as BaseMessage[];
@@ -209,7 +209,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [message],
});
@@ -235,7 +235,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [query, config],
})) as Array<Document<Record<string, any>>>;
@@ -280,7 +280,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [documents],
})) as number[][];
@@ -301,7 +301,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [query],
})) as number[];
logAiEvent(executeFunctions, 'ai-query-embedded');
@@ -324,7 +324,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
// compressDocuments mutates the original object
// messing up the input data logging
arguments: [deepCopy(documents), query],
@@ -352,7 +352,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [items],
})) as number[];
@@ -371,7 +371,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [item, itemIndex],
})) as number[];
@@ -397,7 +397,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [text],
})) as string[];
@@ -429,7 +429,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [query],
})) as string;
@@ -460,7 +460,7 @@ export function logWrapper<
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
method: target[prop] as (...args: any[]) => Promise<unknown>,
arguments: [query, k, filter, _callbacks],
})) as Array<Document<Record<string, any>>>;

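The repeated `target[prop] as (...args: any[]) => Promise<unknown>` casts exist because the stricter v1 typings no longer let the Proxy hand the looked-up method to the shared logging helper as-is. A reduced sketch of the pattern, with names simplified; the real wrapper routes the call through callMethodAsync with logging and AI-event reporting:

// Look the method up on the wrapped instance, widen it to a generic
// async function, and invoke it with the original receiver.
function wrapCall(target: Record<string, unknown>, prop: string, args: unknown[]): Promise<unknown> {
	const method = target[prop] as (...args: any[]) => Promise<unknown>;
	return method.apply(target, args);
}
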
View File

@@ -1,5 +1,5 @@
import type { Callbacks } from '@langchain/core/callbacks/manager';
import { StructuredOutputParser } from 'langchain/output_parsers';
import { StructuredOutputParser } from '@langchain/classic/output_parsers';
import get from 'lodash/get';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';

View File

@@ -1,5 +1,5 @@
import { DynamicTool, type Tool } from '@langchain/core/tools';
import { Toolkit } from 'langchain/agents';
import { Toolkit } from '@langchain/classic/agents';
import { createMockExecuteFunction } from 'n8n-nodes-base/test/nodes/Helpers';
import { NodeOperationError } from 'n8n-workflow';
import type { ISupplyDataFunctions, IExecuteFunctions, INode } from 'n8n-workflow';

View File

@@ -242,7 +242,9 @@ export const metricHandlers = {
},
);
}
const chain = chatPrompt.pipe(llm.withStructuredOutput(responseSchema));
const chain = chatPrompt.pipe(
llm.withStructuredOutput<z.infer<typeof responseSchema>>(responseSchema),
);
try {
const response = await chain.invoke({
@@ -333,7 +335,9 @@ export const metricHandlers = {
},
);
}
const chain = chatPrompt.pipe(llm.withStructuredOutput(responseSchema));
const chain = chatPrompt.pipe(
llm.withStructuredOutput<z.infer<typeof responseSchema>>(responseSchema),
);
try {
const response = await chain.invoke({

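Under the v1 typings the output type is no longer inferred from the schema in this call position, which is presumably why the handlers now pass the generic explicitly. A minimal sketch of the typed call, assuming a schema shaped like the ones these handlers build:

import { z } from 'zod';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

const responseSchema = z.object({
	score: z.number(),
	reasoning: z.string(),
});

// The explicit generic keeps the result typed as
// { score: number; reasoning: string } instead of Record<string, any>.
async function runMetric(llm: BaseChatModel, input: string) {
	const structured = llm.withStructuredOutput<z.infer<typeof responseSchema>>(responseSchema);
	return await structured.invoke(input);
}
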
pnpm-lock.yaml (generated)

File diff suppressed because it is too large

View File

@@ -6,10 +6,10 @@ packages:
- packages/testing/**
catalog:
'@langchain/anthropic': 0.3.26
'@langchain/community': 0.3.50
'@langchain/core': 0.3.68
'@langchain/openai': 0.6.16
'@langchain/anthropic': 1.1.3
'@langchain/community': 1.0.5
'@langchain/core': 1.1.0
'@langchain/openai': 1.1.3
'@n8n/typeorm': 0.3.20-15
'@n8n_io/ai-assistant-sdk': 1.17.0
'@sentry/node': ^9.42.1
@@ -87,5 +87,10 @@ minimumReleaseAgeExclude:
- '@n8n_io/*'
- 'tsdown@0.16.5'
- eslint-plugin-storybook
- '@langchain/*'
- 'langchain'
- '@anthropic-ai/sdk'
- '@google/generative-ai'
- '@google/genai'
- body-parser
- node-forge
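
The `catalog:` protocol ties these versions together: every workspace package that declares a catalog dependency resolves it from the block above, so the langchain major bump is made in exactly one place. An illustrative package.json fragment showing the consuming side:

{
	"dependencies": {
		"@langchain/anthropic": "catalog:",
		"@langchain/core": "catalog:",
		"@langchain/openai": "catalog:"
	}
}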