Skip to content

Commit cd4c659

Browse files
authored
Feature/Mistral FunctionAgent (#1912)
* Add MistralAI Function Agent node and stream used tools to the client
* Fix AWS Bedrock imports
* Update pnpm lockfile
1 parent 58122e9 commit cd4c659

File tree

13 files changed

+30549
-29820
lines changed

13 files changed

+30549
-29820
lines changed

packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts

+31-2
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ import { RunnableSequence } from '@langchain/core/runnables'
99
import { ChatConversationalAgent } from 'langchain/agents'
1010
import { getBaseClasses } from '../../../src/utils'
1111
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
12-
import { IVisionChatModal, FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
12+
import { IVisionChatModal, FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
1313
import { AgentExecutor } from '../../../src/agents'
1414
import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils'
1515
import { checkInputs, Moderation } from '../../moderation/Moderation'
@@ -120,12 +120,28 @@ class ConversationalAgent_Agents implements INode {
120120
const callbacks = await additionalCallbacks(nodeData, options)
121121

122122
let res: ChainValues = {}
123+
let sourceDocuments: ICommonObject[] = []
124+
let usedTools: IUsedTool[] = []
123125

124126
if (options.socketIO && options.socketIOClientId) {
125127
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
126128
res = await executor.invoke({ input }, { callbacks: [loggerHandler, handler, ...callbacks] })
129+
if (res.sourceDocuments) {
130+
options.socketIO.to(options.socketIOClientId).emit('sourceDocuments', flatten(res.sourceDocuments))
131+
sourceDocuments = res.sourceDocuments
132+
}
133+
if (res.usedTools) {
134+
options.socketIO.to(options.socketIOClientId).emit('usedTools', res.usedTools)
135+
usedTools = res.usedTools
136+
}
127137
} else {
128138
res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
139+
if (res.sourceDocuments) {
140+
sourceDocuments = res.sourceDocuments
141+
}
142+
if (res.usedTools) {
143+
usedTools = res.usedTools
144+
}
129145
}
130146

131147
await memory.addChatMessages(
@@ -142,7 +158,20 @@ class ConversationalAgent_Agents implements INode {
142158
this.sessionId
143159
)
144160

145-
return res?.output
161+
let finalRes = res?.output
162+
163+
if (sourceDocuments.length || usedTools.length) {
164+
finalRes = { text: res?.output }
165+
if (sourceDocuments.length) {
166+
finalRes.sourceDocuments = flatten(sourceDocuments)
167+
}
168+
if (usedTools.length) {
169+
finalRes.usedTools = usedTools
170+
}
171+
return finalRes
172+
}
173+
174+
return finalRes
146175
}
147176
}
148177

packages/components/nodes/agents/ConversationalRetrievalAgent/ConversationalRetrievalAgent.ts

+2
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ class ConversationalRetrievalAgent_Agents implements INode {
2525
category: string
2626
baseClasses: string[]
2727
inputs: INodeParams[]
28+
badge?: string
2829
sessionId?: string
2930

3031
constructor(fields?: { sessionId?: string }) {
@@ -33,6 +34,7 @@ class ConversationalRetrievalAgent_Agents implements INode {
3334
this.version = 4.0
3435
this.type = 'AgentExecutor'
3536
this.category = 'Agents'
37+
this.badge = 'DEPRECATING'
3638
this.icon = 'agent.svg'
3739
this.description = `An agent optimized for retrieval during conversation, answering questions based on past dialogue, all using OpenAI's Function Calling`
3840
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
Loading
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,207 @@
1+
import { flatten } from 'lodash'
2+
import { BaseMessage } from '@langchain/core/messages'
3+
import { ChainValues } from '@langchain/core/utils/types'
4+
import { AgentStep } from '@langchain/core/agents'
5+
import { RunnableSequence } from '@langchain/core/runnables'
6+
import { ChatOpenAI } from '@langchain/openai'
7+
import { convertToOpenAITool } from '@langchain/core/utils/function_calling'
8+
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
9+
import { OpenAIToolsAgentOutputParser } from 'langchain/agents/openai/output_parser'
10+
import { getBaseClasses } from '../../../src/utils'
11+
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
12+
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
13+
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
14+
import { Moderation, checkInputs, streamResponse } from '../../moderation/Moderation'
15+
import { formatResponse } from '../../outputparsers/OutputParserHelpers'
16+
17+
/**
 * Flowise node: an agent that uses MistralAI function calling to pick which
 * tool to invoke and with which arguments. Structurally mirrors the OpenAI
 * Function Agent node, but takes a MistralAI chat model as its model input.
 */
class MistralAIFunctionAgent_Agents implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    // Conversation/session identifier, injected by the host when the flow runs.
    sessionId?: string
    // Ribbon shown on the node card in the UI (set to 'NEW' in the constructor).
    badge?: string

    constructor(fields?: { sessionId?: string }) {
        this.label = 'MistralAI Function Agent'
        this.name = 'mistralAIFunctionAgent'
        this.version = 1.0
        this.type = 'AgentExecutor'
        this.category = 'Agents'
        this.icon = 'MistralAI.svg'
        this.badge = 'NEW'
        this.description = `An agent that uses MistralAI Function Calling to pick the tool and args to call`
        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
        this.inputs = [
            {
                label: 'Tools',
                name: 'tools',
                type: 'Tool',
                list: true
            },
            {
                label: 'Memory',
                name: 'memory',
                type: 'BaseChatMemory'
            },
            {
                label: 'MistralAI Chat Model',
                name: 'model',
                type: 'BaseChatModel'
            },
            {
                label: 'System Message',
                name: 'systemMessage',
                type: 'string',
                rows: 4,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Input Moderation',
                description: 'Detect text that could generate harmful output and prevent it from being sent to the language model',
                name: 'inputModeration',
                type: 'Moderation',
                optional: true,
                list: true
            }
        ]
        this.sessionId = fields?.sessionId
    }

    /**
     * Builds and returns the agent executor for this node.
     * No awaited work happens here; the executor is constructed synchronously.
     */
    async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
        return prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)
    }

    /**
     * Executes the agent for one user message: moderates the input, runs the
     * executor (streaming over socket.io when a client socket is attached),
     * persists the exchange to memory, and shapes the final response.
     *
     * Returns the bare output string, or a `{ text, sourceDocuments?, usedTools? }`
     * object when documents or tool calls were produced.
     */
    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
        const memory = nodeData.inputs?.memory as FlowiseMemory
        const moderations = nodeData.inputs?.inputModeration as Moderation[]

        if (moderations && moderations.length > 0) {
            try {
                // Use the output of the moderation chain as the agent's input.
                input = await checkInputs(moderations, input)
            } catch (e) {
                // Brief pause before streaming the rejection message.
                // NOTE(review): presumably a workaround so the client socket is
                // ready to receive the streamed error — confirm with streamResponse.
                await new Promise((resolve) => setTimeout(resolve, 500))
                streamResponse(options.socketIO && options.socketIOClientId, e.message, options.socketIO, options.socketIOClientId)
                return formatResponse(e.message)
            }
        }

        const executor = prepareAgent(nodeData, { sessionId: this.sessionId, chatId: options.chatId, input }, options.chatHistory)

        const loggerHandler = new ConsoleCallbackHandler(options.logger)
        const callbacks = await additionalCallbacks(nodeData, options)

        let res: ChainValues = {}
        let sourceDocuments: ICommonObject[] = []
        let usedTools: IUsedTool[] = []

        if (options.socketIO && options.socketIOClientId) {
            // Streaming path: CustomChainHandler pushes events to the client socket.
            const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
            res = await executor.invoke({ input }, { callbacks: [loggerHandler, handler, ...callbacks] })
            if (res.sourceDocuments) {
                // Emit retrieved documents to the client, then keep them for the response.
                options.socketIO.to(options.socketIOClientId).emit('sourceDocuments', flatten(res.sourceDocuments))
                sourceDocuments = res.sourceDocuments
            }
            if (res.usedTools) {
                // Emit which tools the agent called during this turn.
                options.socketIO.to(options.socketIOClientId).emit('usedTools', res.usedTools)
                usedTools = res.usedTools
            }
        } else {
            // Non-streaming path: same invocation, no socket handler or emits.
            res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
            if (res.sourceDocuments) {
                sourceDocuments = res.sourceDocuments
            }
            if (res.usedTools) {
                usedTools = res.usedTools
            }
        }

        // Persist the exchange so the next turn's prompt includes it.
        await memory.addChatMessages(
            [
                {
                    text: input,
                    type: 'userMessage'
                },
                {
                    text: res?.output,
                    type: 'apiMessage'
                }
            ],
            this.sessionId
        )

        let finalRes = res?.output

        // When documents or tool calls were produced, return a structured object
        // instead of the bare output string.
        if (sourceDocuments.length || usedTools.length) {
            finalRes = { text: res?.output }
            if (sourceDocuments.length) {
                finalRes.sourceDocuments = flatten(sourceDocuments)
            }
            if (usedTools.length) {
                finalRes.usedTools = usedTools
            }
            return finalRes
        }

        return finalRes
    }
}
156+
157+
const prepareAgent = (
158+
nodeData: INodeData,
159+
flowObj: { sessionId?: string; chatId?: string; input?: string },
160+
chatHistory: IMessage[] = []
161+
) => {
162+
const model = nodeData.inputs?.model as ChatOpenAI
163+
const memory = nodeData.inputs?.memory as FlowiseMemory
164+
const systemMessage = nodeData.inputs?.systemMessage as string
165+
let tools = nodeData.inputs?.tools
166+
tools = flatten(tools)
167+
const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
168+
const inputKey = memory.inputKey ? memory.inputKey : 'input'
169+
170+
const prompt = ChatPromptTemplate.fromMessages([
171+
['system', systemMessage ? systemMessage : `You are a helpful AI assistant.`],
172+
new MessagesPlaceholder(memoryKey),
173+
['human', `{${inputKey}}`],
174+
new MessagesPlaceholder('agent_scratchpad')
175+
])
176+
177+
const llmWithTools = model.bind({
178+
tools: tools.map(convertToOpenAITool)
179+
})
180+
181+
const runnableAgent = RunnableSequence.from([
182+
{
183+
[inputKey]: (i: { input: string; steps: AgentStep[] }) => i.input,
184+
agent_scratchpad: (i: { input: string; steps: AgentStep[] }) => formatAgentSteps(i.steps),
185+
[memoryKey]: async (_: { input: string; steps: AgentStep[] }) => {
186+
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, chatHistory)) as BaseMessage[]
187+
return messages ?? []
188+
}
189+
},
190+
prompt,
191+
llmWithTools,
192+
new OpenAIToolsAgentOutputParser()
193+
])
194+
195+
const executor = AgentExecutor.fromAgentAndTools({
196+
agent: runnableAgent,
197+
tools,
198+
sessionId: flowObj?.sessionId,
199+
chatId: flowObj?.chatId,
200+
input: flowObj?.input,
201+
verbose: process.env.DEBUG === 'true' ? true : false
202+
})
203+
204+
return executor
205+
}
206+
207+
// CommonJS export of the node class under the `nodeClass` key used by the host loader.
module.exports = { nodeClass: MistralAIFunctionAgent_Agents }

packages/components/nodes/agents/OpenAIFunctionAgent/OpenAIFunctionAgent.ts

+23-2
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ import { ChatOpenAI, formatToOpenAIFunction } from '@langchain/openai'
77
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
88
import { OpenAIFunctionsAgentOutputParser } from 'langchain/agents/openai/output_parser'
99
import { getBaseClasses } from '../../../src/utils'
10-
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
10+
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
1111
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
1212
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
1313
import { Moderation, checkInputs } from '../../moderation/Moderation'
@@ -97,6 +97,7 @@ class OpenAIFunctionAgent_Agents implements INode {
9797

9898
let res: ChainValues = {}
9999
let sourceDocuments: ICommonObject[] = []
100+
let usedTools: IUsedTool[] = []
100101

101102
if (options.socketIO && options.socketIOClientId) {
102103
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
@@ -105,11 +106,18 @@ class OpenAIFunctionAgent_Agents implements INode {
105106
options.socketIO.to(options.socketIOClientId).emit('sourceDocuments', flatten(res.sourceDocuments))
106107
sourceDocuments = res.sourceDocuments
107108
}
109+
if (res.usedTools) {
110+
options.socketIO.to(options.socketIOClientId).emit('usedTools', res.usedTools)
111+
usedTools = res.usedTools
112+
}
108113
} else {
109114
res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
110115
if (res.sourceDocuments) {
111116
sourceDocuments = res.sourceDocuments
112117
}
118+
if (res.usedTools) {
119+
usedTools = res.usedTools
120+
}
113121
}
114122

115123
await memory.addChatMessages(
@@ -126,7 +134,20 @@ class OpenAIFunctionAgent_Agents implements INode {
126134
this.sessionId
127135
)
128136

129-
return sourceDocuments.length ? { text: res?.output, sourceDocuments: flatten(sourceDocuments) } : res?.output
137+
let finalRes = res?.output
138+
139+
if (sourceDocuments.length || usedTools.length) {
140+
finalRes = { text: res?.output }
141+
if (sourceDocuments.length) {
142+
finalRes.sourceDocuments = flatten(sourceDocuments)
143+
}
144+
if (usedTools.length) {
145+
finalRes.usedTools = usedTools
146+
}
147+
return finalRes
148+
}
149+
150+
return finalRes
130151
}
131152
}
132153

0 commit comments

Comments (0)