-import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
-import { Tool } from 'langchain/tools'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
+import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
+import { AIChatMessage, HumanChatMessage } from 'langchain/schema'

class OpenAIFunctionAgent_Agents implements INode {
    label: string
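The added imports bring in the chat-memory types used further down in this diff. A minimal sketch of how they compose, assuming the same pre-0.1 langchain JS entry points this file already imports from (the sample turns are invented for illustration):

import { ChatMessageHistory } from 'langchain/memory'
import { AIChatMessage, HumanChatMessage } from 'langchain/schema'

// Rebuild an in-memory history from earlier turns (illustrative values only)
const pastTurns = new ChatMessageHistory([
    new HumanChatMessage('Hi there'),
    new AIChatMessage('Hello! How can I help you today?')
])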
@@ -30,30 +31,67 @@ class OpenAIFunctionAgent_Agents implements INode {
                type: 'Tool',
                list: true
            },
+            {
+                label: 'Memory',
+                name: 'memory',
+                type: 'BaseChatMemory'
+            },
            {
                label: 'OpenAI Chat Model',
                name: 'model',
                description:
                    'Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target="_blank" href="https://platform.openai.com/docs/guides/gpt/function-calling">docs</a> for more info',
                type: 'BaseChatModel'
+            },
+            {
+                label: 'System Message',
+                name: 'systemMessage',
+                type: 'string',
+                rows: 4,
+                optional: true,
+                additionalParams: true
            }
        ]
    }

    async init(nodeData: INodeData): Promise<any> {
        const model = nodeData.inputs?.model as BaseLanguageModel
-        let tools = nodeData.inputs?.tools as Tool[]
+        const memory = nodeData.inputs?.memory as BaseChatMemory
+        const systemMessage = nodeData.inputs?.systemMessage as string
+
+        let tools = nodeData.inputs?.tools
        tools = flatten(tools)

        const executor = await initializeAgentExecutorWithOptions(tools, model, {
            agentType: 'openai-functions',
-            verbose: process.env.DEBUG === 'true' ? true : false
+            verbose: process.env.DEBUG === 'true' ? true : false,
+            agentArgs: {
+                prefix: systemMessage ?? `You are a helpful AI assistant.`
+            }
        })
+        if (memory) executor.memory = memory
+
        return executor
    }

    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
        const executor = nodeData.instance as AgentExecutor
+        const memory = nodeData.inputs?.memory as BaseChatMemory
+
+        if (options && options.chatHistory) {
+            const chatHistory = []
+            const histories: IMessage[] = options.chatHistory
+
+            for (const message of histories) {
+                if (message.type === 'apiMessage') {
+                    chatHistory.push(new AIChatMessage(message.message))
+                } else if (message.type === 'userMessage') {
+                    chatHistory.push(new HumanChatMessage(message.message))
+                }
+            }
+            memory.chatHistory = new ChatMessageHistory(chatHistory)
+            executor.memory = memory
+        }

        if (options.socketIO && options.socketIOClientId) {
            const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
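For reference, the chatHistory loop added to run() reads only the message and type fields of each entry. A hypothetical options.chatHistory payload with that shape (values invented for illustration, assuming IMessage carries just these two fields):

// Illustrative only: two prior turns as the loop above expects them
const chatHistory: IMessage[] = [
    { message: 'What can you do?', type: 'userMessage' },
    { message: 'I can call tools to answer your questions.', type: 'apiMessage' }
]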