forked from FlowiseAI/Flowise
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathAnthropicAgent_LlamaIndex.ts
141 lines (125 loc) · 4.48 KB
/
AnthropicAgent_LlamaIndex.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
import { flatten } from 'lodash'
import { MessageContentTextDetail, ChatMessage, AnthropicAgent, Anthropic } from 'llamaindex'
import { getBaseClasses } from '../../../../src/utils'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../../src/Interface'
/**
 * Flowise agent node backed by LlamaIndex's AnthropicAgent.
 * Uses Anthropic Claude function calling to decide which connected
 * tools to invoke and with what arguments.
 */
class AnthropicAgent_LlamaIndex_Agents implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    tags: string[]
    inputs: INodeParams[]
    sessionId?: string

    constructor(fields?: { sessionId?: string }) {
        // Node identity / catalog metadata shown in the Flowise UI
        this.label = 'Anthropic Agent'
        this.name = 'anthropicAgentLlamaIndex'
        this.version = 1.0
        this.type = 'AnthropicAgent'
        this.icon = 'Anthropic.svg'
        this.category = 'Agents'
        this.tags = ['LlamaIndex']
        this.description = `Agent that uses Anthropic Claude Function Calling to pick the tools and args to call using LlamaIndex`
        this.baseClasses = [this.type, ...getBaseClasses(AnthropicAgent)]
        // Anchors (inputs) this node accepts on the canvas
        this.inputs = [
            {
                label: 'Tools',
                name: 'tools',
                type: 'Tool_LlamaIndex',
                list: true
            },
            {
                label: 'Memory',
                name: 'memory',
                type: 'BaseChatMemory'
            },
            {
                label: 'Anthropic Claude Model',
                name: 'model',
                type: 'BaseChatModel_LlamaIndex'
            },
            {
                label: 'System Message',
                name: 'systemMessage',
                type: 'string',
                rows: 4,
                optional: true,
                additionalParams: true
            }
        ]
        this.sessionId = fields?.sessionId
    }

    // No upfront initialization needed; the agent is built per-run.
    async init(): Promise<any> {
        return null
    }

    /**
     * Executes one agent turn: rebuilds chat history from memory,
     * runs the Anthropic agent, persists the exchange, and returns
     * the answer (plus any tools that were used).
     *
     * @param nodeData connected inputs (tools, memory, model, systemMessage)
     * @param input    the user's message for this turn
     * @param options  run options; `prependMessages` are injected before history
     * @returns plain text, or `{ text, usedTools }` when tools were invoked
     */
    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
        const memory = nodeData.inputs?.memory as FlowiseMemory
        const model = nodeData.inputs?.model as Anthropic
        const systemMessage = nodeData.inputs?.systemMessage as string
        const prependMessages = options?.prependMessages
        const connectedTools = flatten(nodeData.inputs?.tools)
        const isVerbose = process.env.DEBUG === 'true'

        // Rebuild the conversation: optional system prompt first, then
        // persisted history mapped onto LlamaIndex roles.
        const chatHistory: ChatMessage[] = []
        if (systemMessage) {
            chatHistory.push({ content: systemMessage, role: 'system' })
        }

        const roleByType: Record<string, 'assistant' | 'user'> = {
            apiMessage: 'assistant',
            userMessage: 'user'
        }
        const pastMessages = (await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[]
        for (const pastMessage of pastMessages) {
            const role = roleByType[pastMessage.type]
            if (role) {
                chatHistory.push({ content: pastMessage.message, role })
            }
        }

        const agent = new AnthropicAgent({
            tools: connectedTools,
            llm: model,
            chatHistory: chatHistory,
            verbose: isVerbose
        })

        const response = await agent.chat({ message: input, chatHistory, verbose: isVerbose })

        // Record every tool invocation the agent made during this turn.
        const usedTools: IUsedTool[] = response.sources.map((sourceTool) => ({
            tool: sourceTool.tool?.metadata.name ?? '',
            toolInput: sourceTool.input,
            toolOutput: sourceTool.output as any
        }))

        // Anthropic may return content as structured blocks or a bare string;
        // take the first text block when it is an array.
        const content = response.response.message.content
        const text =
            Array.isArray(content) && content.length > 0 ? (content[0] as MessageContentTextDetail).text : (content as string)

        // Persist this exchange so the next turn sees it as history.
        await memory.addChatMessages(
            [
                {
                    text: input,
                    type: 'userMessage'
                },
                {
                    text: text,
                    type: 'apiMessage'
                }
            ],
            this.sessionId
        )

        return usedTools.length ? { text: text, usedTools } : text
    }
}
module.exports = { nodeClass: AnthropicAgent_LlamaIndex_Agents }