Commit 25fa262

Merge remote-tracking branch 'flowise/main'
# Conflicts:
#	package.json
#	packages/components/package.json

2 parents: 0ae78ee + 1c6694b

72 files changed (+2,761 / −995 lines)
.github/workflows/autoSyncMergedPullRequest.yml (+33)

```diff
@@ -0,0 +1,33 @@
+name: autoSyncMergedPullRequest
+on:
+    pull_request_target:
+        types:
+            - closed
+        branches: [ "main" ]
+jobs:
+    autoSyncMergedPullRequest:
+        if: github.event.pull_request.merged == true
+        runs-on: ubuntu-latest
+        permissions:
+            contents: write
+        steps:
+            - uses: actions/checkout@v3
+            - name: Show PR info
+              env:
+                  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+              run: |
+                  echo The PR #${{ github.event.pull_request.number }} was merged on main branch!
+            - name: Repository Dispatch
+              uses: peter-evans/repository-dispatch@v2
+              with:
+                  token: ${{ secrets.AUTOSYNC_TOKEN }}
+                  repository: ${{ secrets.AUTOSYNC_CH_URL }}
+                  event-type: ${{ secrets.AUTOSYNC_PR_EVENT_TYPE }}
+                  client-payload: >-
+                      {
+                        "ref": "${{ github.ref }}",
+                        "prNumber": "${{ github.event.pull_request.number }}",
+                        "prTitle": "${{ github.event.pull_request.title }}",
+                        "prDescription": "${{ github.event.pull_request.description }}",
+                        "sha": "${{ github.sha }}"
+                      }
```
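The Repository Dispatch step fires GitHub's `repository_dispatch` event on the receiving repository, carrying the PR metadata as `client-payload`. For orientation, a rough TypeScript equivalent using `@octokit/rest` is sketched below; the owner/repo, event type, and payload values are placeholders standing in for the `AUTOSYNC_*` secrets, not values taken from this commit.

```ts
import { Octokit } from '@octokit/rest'

// Sketch of what peter-evans/repository-dispatch@v2 does under the hood:
// POST /repos/{owner}/{repo}/dispatches with an event type and a payload.
async function dispatchPrMerged(): Promise<void> {
    const octokit = new Octokit({ auth: process.env.AUTOSYNC_TOKEN })
    await octokit.rest.repos.createDispatchEvent({
        owner: 'example-org', // placeholder; AUTOSYNC_CH_URL holds the real owner/repo
        repo: 'cloud-hosted-flowise', // placeholder
        event_type: 'pr-merged', // placeholder for AUTOSYNC_PR_EVENT_TYPE
        client_payload: {
            ref: 'refs/heads/main',
            prNumber: '1234',
            prTitle: 'Example PR title',
            sha: 'abc1234'
        }
    })
}
```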
.github/workflows/autoSyncSingleCommit.yml (+36)

```diff
@@ -0,0 +1,36 @@
+name: autoSyncSingleCommit
+on:
+    push:
+        branches:
+            - main
+jobs:
+    doNotAutoSyncSingleCommit:
+        if: github.event.commits[1] != null
+        runs-on: ubuntu-latest
+        steps:
+            - uses: actions/checkout@v3
+            - name: IGNORE autoSyncSingleCommit
+              run: |
+                  echo This single commit came from a merge commit. We will ignore it. This case is handled in the autoSyncMergedPullRequest workflow for merge commits coming from merged pull requests only! Beware, regular merge commits are not handled by any workflow for the moment.
+    autoSyncSingleCommit:
+        if: github.event.commits[1] == null
+        runs-on: ubuntu-latest
+        steps:
+            - uses: actions/checkout@v3
+            - name: autoSyncSingleCommit
+              env:
+                  GITHUB_CONTEXT: ${{ toJSON(github) }}
+              run: |
+                  echo Autosync a single commit with id: ${{ github.sha }} from the open-source main branch towards the cloud-hosted version.
+            - name: Repository Dispatch
+              uses: peter-evans/repository-dispatch@v2
+              with:
+                  token: ${{ secrets.AUTOSYNC_TOKEN }}
+                  repository: ${{ secrets.AUTOSYNC_CH_URL }}
+                  event-type: ${{ secrets.AUTOSYNC_SC_EVENT_TYPE }}
+                  client-payload: >-
+                      {
+                        "ref": "${{ github.ref }}",
+                        "sha": "${{ github.sha }}",
+                        "commitMessage": "${{ github.event.commits[0].message }}"
+                      }
```
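Both jobs hinge on the same heuristic, visible in their `if:` expressions: a push whose payload contains a second commit (`github.event.commits[1] != null`) is assumed to be a merge and is ignored, while a single-commit push is dispatched. Restated as a plain TypeScript predicate (a hypothetical helper, not part of the commit):

```ts
// Restates the workflows' `github.event.commits[1] != null` / `== null` checks:
// a push payload carrying more than one commit is treated as a merge push.
interface PushEventPayload {
    commits: { id: string; message: string }[]
}

const isMergePush = (event: PushEventPayload): boolean => event.commits.length > 1
```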

CONTRIBUTING-ZH.md (+1)

```diff
@@ -138,6 +138,7 @@ Flowise supports different environment variables to configure your instance.
 | DATABASE_NAME | Database name (when DATABASE_TYPE is not sqlite) | String | |
 | SECRETKEY_PATH | Location where the encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` |
 | FLOWISE_SECRETKEY_OVERWRITE | Encryption key to use instead of the key stored in SECRETKEY_PATH | String |
+| DISABLE_FLOWISE_TELEMETRY | Turn off telemetry | String |
 
 You can also specify env variables when using `npx`. For example:
```

CONTRIBUTING.md (+1)

```diff
@@ -141,6 +141,7 @@ Flowise supports different environment variables to configure your instance.
 | DATABASE_SSL | Database connection over SSL (when DATABASE_TYPE is postgres) | Boolean | false |
 | SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` |
 | FLOWISE_SECRETKEY_OVERWRITE | Encryption key to be used instead of the key stored in SECRETKEY_PATH | String |
+| DISABLE_FLOWISE_TELEMETRY | Turn off telemetry | Boolean |
 
 You can also specify the env variables when using `npx`. For example:
```

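The diff only documents the `DISABLE_FLOWISE_TELEMETRY` flag and threads it through Docker; how the server consumes it is not shown here. Presumably it gates telemetry roughly like the sketch below (names hypothetical; the real wiring lives in `packages/server`):

```ts
// Hypothetical gate for the DISABLE_FLOWISE_TELEMETRY flag; Flowise's actual
// telemetry code is in packages/server and is not part of this diff.
const telemetryDisabled = process.env.DISABLE_FLOWISE_TELEMETRY === 'true'

function sendTelemetry(event: string, properties: Record<string, unknown>): void {
    if (telemetryDisabled) return
    // ...forward the event to the telemetry backend
}
```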
artillery-load-test.yml (+1 −1)

```diff
@@ -33,4 +33,4 @@ scenarios:
 # Seconds
 # Total Users = 2 + 3 + 3 = 8
 # Each making 1 HTTP call
-# Over a duration of 3 seconds
+# Over a durations of 3 seconds
```

docker/.env.example (+3 −1)

```diff
@@ -25,4 +25,6 @@ LOG_PATH=/root/.flowise/logs
 # LANGCHAIN_TRACING_V2=true
 # LANGCHAIN_ENDPOINT=https://api.smith.langchain.com
 # LANGCHAIN_API_KEY=your_api_key
-# LANGCHAIN_PROJECT=your_project
+# LANGCHAIN_PROJECT=your_project
+
+# DISABLE_FLOWISE_TELEMETRY=true
```

docker/docker-compose.yml (+1)

```diff
@@ -22,6 +22,7 @@ services:
             - FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE}
             - LOG_LEVEL=${LOG_LEVEL}
             - LOG_PATH=${LOG_PATH}
+            - DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY}
         ports:
             - '${PORT}:${PORT}'
         volumes:
```

package.json (+1 −1)

```diff
@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "1.4.9",
+    "version": "1.4.11",
     "private": true,
     "homepage": "https://langflux.space",
     "workspaces": [
```

packages/components/credentials/PineconeApi.credential.ts (−5)

```diff
@@ -16,11 +16,6 @@ class PineconeApi implements INodeCredential {
                 label: 'Pinecone Api Key',
                 name: 'pineconeApiKey',
                 type: 'password'
-            },
-            {
-                label: 'Pinecone Environment',
-                name: 'pineconeEnv',
-                type: 'string'
             }
         ]
     }
```

packages/components/nodes/chains/ConversationChain/ConversationChain.ts (+58 −28)

```diff
@@ -1,13 +1,12 @@
 import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { ConversationChain } from 'langchain/chains'
-import { getBaseClasses } from '../../../src/utils'
+import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
 import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts'
 import { BaseChatModel } from 'langchain/chat_models/base'
 import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
-import { flatten } from 'lodash'
-import { Document } from 'langchain/document'
 import { RunnableSequence } from 'langchain/schema/runnable'
 import { StringOutputParser } from 'langchain/schema/output_parser'
+import { ConsoleCallbackHandler as LCConsoleCallbackHandler } from '@langchain/core/tracers/console'
 
 let systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`
 const inputKey = 'input'
@@ -27,7 +26,7 @@ class ConversationChain_Chains implements INode {
     constructor(fields?: { sessionId?: string }) {
         this.label = 'Conversation Chain'
         this.name = 'conversationChain'
-        this.version = 1.0
+        this.version = 2.0
         this.type = 'ConversationChain'
         this.icon = 'conv.svg'
         this.category = 'Chains'
@@ -44,6 +43,14 @@
                 name: 'memory',
                 type: 'BaseMemory'
             },
+            {
+                label: 'Chat Prompt Template',
+                name: 'chatPromptTemplate',
+                type: 'ChatPromptTemplate',
+                description: 'Override existing prompt with Chat Prompt Template. Human Message must include {input} variable',
+                optional: true
+            },
+            /* Deprecated
             {
                 label: 'Document',
                 name: 'document',
@@ -52,15 +59,17 @@
                     'Include whole document into the context window, if you get maximum context length error, please use model with higher context window like Claude 100k, or gpt4 32k',
                 optional: true,
                 list: true
-            },
+            },*/
             {
                 label: 'System Message',
                 name: 'systemMessagePrompt',
                 type: 'string',
                 rows: 4,
+                description: 'If Chat Prompt Template is provided, this will be ignored',
                 additionalParams: true,
                 optional: true,
-                placeholder: 'You are a helpful assistant that write codes'
+                default: systemMessage,
+                placeholder: systemMessage
             }
         ]
         this.sessionId = fields?.sessionId
@@ -76,15 +85,21 @@
         const chain = prepareChain(nodeData, this.sessionId, options.chatHistory)
 
         const loggerHandler = new ConsoleCallbackHandler(options.logger)
-        const callbacks = await additionalCallbacks(nodeData, options)
+        const additionalCallback = await additionalCallbacks(nodeData, options)
 
         let res = ''
+        let callbacks = [loggerHandler, ...additionalCallback]
+
+        if (process.env.DEBUG === 'true') {
+            callbacks.push(new LCConsoleCallbackHandler())
+        }
 
         if (options.socketIO && options.socketIOClientId) {
             const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
-            res = await chain.invoke({ input }, { callbacks: [loggerHandler, handler, ...callbacks] })
+            callbacks.push(handler)
+            res = await chain.invoke({ input }, { callbacks })
         } else {
-            res = await chain.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
+            res = await chain.invoke({ input }, { callbacks })
         }
 
         await memory.addChatMessages(
@@ -108,28 +123,27 @@
 const prepareChatPrompt = (nodeData: INodeData) => {
     const memory = nodeData.inputs?.memory as FlowiseMemory
     const prompt = nodeData.inputs?.systemMessagePrompt as string
-    const docs = nodeData.inputs?.document as Document[]
-
-    const flattenDocs = docs && docs.length ? flatten(docs) : []
-    const finalDocs = []
-    for (let i = 0; i < flattenDocs.length; i += 1) {
-        if (flattenDocs[i] && flattenDocs[i].pageContent) {
-            finalDocs.push(new Document(flattenDocs[i]))
+    const chatPromptTemplate = nodeData.inputs?.chatPromptTemplate as ChatPromptTemplate
+
+    if (chatPromptTemplate && chatPromptTemplate.promptMessages.length) {
+        const sysPrompt = chatPromptTemplate.promptMessages[0]
+        const humanPrompt = chatPromptTemplate.promptMessages[chatPromptTemplate.promptMessages.length - 1]
+        const chatPrompt = ChatPromptTemplate.fromMessages([
+            sysPrompt,
+            new MessagesPlaceholder(memory.memoryKey ?? 'chat_history'),
+            humanPrompt
+        ])
+
+        if ((chatPromptTemplate as any).promptValues) {
+            // @ts-ignore
+            chatPrompt.promptValues = (chatPromptTemplate as any).promptValues
         }
-    }
 
-    let finalText = ''
-    for (let i = 0; i < finalDocs.length; i += 1) {
-        finalText += finalDocs[i].pageContent
+        return chatPrompt
     }
 
-    const replaceChar: string[] = ['{', '}']
-    for (const char of replaceChar) finalText = finalText.replaceAll(char, '')
-
-    if (finalText) systemMessage = `${systemMessage}\nThe AI has the following context:\n${finalText}`
-
     const chatPrompt = ChatPromptTemplate.fromMessages([
-        SystemMessagePromptTemplate.fromTemplate(prompt ? `${prompt}\n${systemMessage}` : systemMessage),
+        SystemMessagePromptTemplate.fromTemplate(prompt ? prompt : systemMessage),
         new MessagesPlaceholder(memory.memoryKey ?? 'chat_history'),
         HumanMessagePromptTemplate.fromTemplate(`{${inputKey}}`)
     ])
@@ -142,15 +156,31 @@ const prepareChain = (nodeData: INodeData, sessionId?: string, chatHistory: IMes
     const memory = nodeData.inputs?.memory as FlowiseMemory
     const memoryKey = memory.memoryKey ?? 'chat_history'
 
+    const chatPrompt = prepareChatPrompt(nodeData)
+    let promptVariables = {}
+    const promptValuesRaw = (chatPrompt as any).promptValues
+    if (promptValuesRaw) {
+        const promptValues = handleEscapeCharacters(promptValuesRaw, true)
+        for (const val in promptValues) {
+            promptVariables = {
+                ...promptVariables,
+                [val]: () => {
+                    return promptValues[val]
+                }
+            }
+        }
+    }
+
     const conversationChain = RunnableSequence.from([
         {
             [inputKey]: (input: { input: string }) => input.input,
             [memoryKey]: async () => {
                 const history = await memory.getChatMessages(sessionId, true, chatHistory)
                 return history
-            }
+            },
+            ...promptVariables
         },
-        prepareChatPrompt(nodeData),
+        chatPrompt,
         model,
         new StringOutputParser()
     ])
```

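The new `chatPromptTemplate` input lets a connected Chat Prompt Template replace the default system/human prompt: `prepareChatPrompt` keeps the template's first and last messages and re-inserts the memory placeholder between them. A minimal template satisfying the node's stated contract (the human message must contain `{input}`) might look like this; the message contents are illustrative only:

```ts
import { ChatPromptTemplate } from 'langchain/prompts'

// Illustrative override for the node's new Chat Prompt Template input.
// prepareChatPrompt keeps the system and human messages and inserts a
// MessagesPlaceholder for chat history between them.
const override = ChatPromptTemplate.fromMessages([
    ['system', 'You are a concise assistant that answers in bullet points.'],
    ['human', '{input}']
])
```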
packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts (+9 −2)

```diff
@@ -13,6 +13,7 @@ import { applyPatch } from 'fast-json-patch'
 import { convertBaseMessagetoIMessage, getBaseClasses } from '../../../src/utils'
 import { ConsoleCallbackHandler, additionalCallbacks } from '../../../src/handler'
 import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, MemoryMethods } from '../../../src/Interface'
+import { ConsoleCallbackHandler as LCConsoleCallbackHandler } from '@langchain/core/tracers/console'
 
 type RetrievalChainInput = {
     chat_history: string
@@ -176,11 +177,17 @@
         const history = ((await memory.getChatMessages(this.sessionId, false, options.chatHistory)) as IMessage[]) ?? []
 
         const loggerHandler = new ConsoleCallbackHandler(options.logger)
-        const callbacks = await additionalCallbacks(nodeData, options)
+        const additionalCallback = await additionalCallbacks(nodeData, options)
+
+        let callbacks = [loggerHandler, ...additionalCallback]
+
+        if (process.env.DEBUG === 'true') {
+            callbacks.push(new LCConsoleCallbackHandler())
+        }
 
         const stream = answerChain.streamLog(
             { question: input, chat_history: history },
-            { callbacks: [loggerHandler, ...callbacks] },
+            { callbacks },
             {
                 includeNames: [sourceRunnableName]
             }
```

packages/components/nodes/chains/LLMChain/LLMChain.ts (+9 −2)

```diff
@@ -82,7 +82,7 @@ class LLMChain_Chains implements INode {
         const model = nodeData.inputs?.model as BaseLanguageModel
         const prompt = nodeData.inputs?.prompt
         const output = nodeData.outputs?.output as string
-        const promptValues = prompt.promptValues as ICommonObject
+        let promptValues: ICommonObject | undefined = nodeData.inputs?.prompt.promptValues as ICommonObject
        const llmOutputParser = nodeData.inputs?.outputParser as BaseOutputParser
         this.outputParser = llmOutputParser
         if (llmOutputParser) {
@@ -107,17 +107,24 @@
         verbose: process.env.DEBUG === 'true'
     })
     const inputVariables = chain.prompt.inputVariables as string[] // ["product"]
+    promptValues = injectOutputParser(this.outputParser, chain, promptValues)
     const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData)
     // eslint-disable-next-line no-console
     console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m')
     // eslint-disable-next-line no-console
     console.log(res)
+
+    let finalRes = res
+    if (this.outputParser && typeof res === 'object' && Object.prototype.hasOwnProperty.call(res, 'json')) {
+        finalRes = (res as ICommonObject).json
+    }
+
     /**
      * Apply string transformation to convert special chars:
      * FROM: hello i am ben\n\n\thow are you?
      * TO: hello i am benFLOWISE_NEWLINEFLOWISE_NEWLINEFLOWISE_TABhow are you?
      */
-    return handleEscapeCharacters(res, false)
+    return handleEscapeCharacters(finalRes, false)
```

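Per the comment block above, `handleEscapeCharacters(..., false)` swaps control characters for `FLOWISE_*` sentinel tokens before the result leaves the node, and the `true` direction (used in ConversationChain's `prepareChain`) reverses it. A plausible sketch of the outbound direction, assuming the real helper (in `packages/components/src/utils`) also handles objects:

```ts
// Sketch of the outbound transformation described in the comment above.
// Assumption: the real handleEscapeCharacters also walks objects and
// supports the reverse (unescape) direction via its boolean flag.
const escapeChars = (text: string): string =>
    text.replaceAll('\n', 'FLOWISE_NEWLINE').replaceAll('\t', 'FLOWISE_TAB')

// escapeChars('hello i am ben\n\n\thow are you?')
// => 'hello i am benFLOWISE_NEWLINEFLOWISE_NEWLINEFLOWISE_TABhow are you?'
```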