Skip to content

Commit 340e85d

Browse files
authored
Merge pull request #1611 from FlowiseAI/feature/LlamaIndex
Feature/llama index
2 parents 37828de + 21c47d8 commit 340e85d

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

44 files changed

+4048
-75
lines changed
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,33 @@
11
name: autoSyncMergedPullRequest
22
on:
3-
pull_request_target:
4-
types:
5-
- closed
6-
branches: [ "main" ]
3+
pull_request_target:
4+
types:
5+
- closed
6+
branches: ['main']
77
jobs:
8-
autoSyncMergedPullRequest:
9-
if: github.event.pull_request.merged == true
10-
runs-on: ubuntu-latest
11-
permissions:
12-
contents: write
13-
steps:
14-
- uses: actions/checkout@v3
15-
- name: Show PR info
16-
env:
17-
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
18-
run: |
19-
echo The PR #${{ github.event.pull_request.number }} was merged on main branch!
20-
- name: Repository Dispatch
21-
uses: peter-evans/repository-dispatch@v2
22-
with:
23-
token: ${{ secrets.AUTOSYNC_TOKEN }}
24-
repository: ${{ secrets.AUTOSYNC_CH_URL }}
25-
event-type: ${{ secrets.AUTOSYNC_PR_EVENT_TYPE }}
26-
client-payload: >-
27-
{
28-
"ref": "${{ github.ref }}",
29-
"prNumber": "${{ github.event.pull_request.number }}",
30-
"prTitle": "${{ github.event.pull_request.title }}",
31-
"prDescription": "${{ github.event.pull_request.description }}",
32-
"sha": "${{ github.sha }}"
33-
}
8+
autoSyncMergedPullRequest:
9+
if: github.event.pull_request.merged == true
10+
runs-on: ubuntu-latest
11+
permissions:
12+
contents: write
13+
steps:
14+
- uses: actions/checkout@v3
15+
- name: Show PR info
16+
env:
17+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
18+
run: |
19+
echo The PR #${{ github.event.pull_request.number }} was merged on main branch!
20+
- name: Repository Dispatch
21+
uses: peter-evans/repository-dispatch@v2
22+
with:
23+
token: ${{ secrets.AUTOSYNC_TOKEN }}
24+
repository: ${{ secrets.AUTOSYNC_CH_URL }}
25+
event-type: ${{ secrets.AUTOSYNC_PR_EVENT_TYPE }}
26+
client-payload: >-
27+
{
28+
"ref": "${{ github.ref }}",
29+
"prNumber": "${{ github.event.pull_request.number }}",
30+
"prTitle": "${{ github.event.pull_request.title }}",
31+
"prDescription": "${{ github.event.pull_request.body }}",
32+
"sha": "${{ github.sha }}"
33+
}
+33-33
Original file line numberDiff line numberDiff line change
@@ -1,36 +1,36 @@
11
name: autoSyncSingleCommit
22
on:
3-
push:
4-
branches:
5-
- main
3+
push:
4+
branches:
5+
- main
66
jobs:
7-
doNotAutoSyncSingleCommit:
8-
if: github.event.commits[1] != null
9-
runs-on: ubuntu-latest
10-
steps:
11-
- uses: actions/checkout@v3
12-
- name: IGNORE autoSyncSingleCommit
13-
run: |
14-
echo This single commit has came from a merged commit. We will ignore it. This case is handled in autoSyncMergedPullRequest workflow for merge commits comming from merged pull requests only! Beware, the regular merge commits are not handled by any workflow for the moment.
15-
autoSyncSingleCommit:
16-
if: github.event.commits[1] == null
17-
runs-on: ubuntu-latest
18-
steps:
19-
- uses: actions/checkout@v3
20-
- name: autoSyncSingleCommit
21-
env:
22-
GITHUB_CONTEXT: ${{ toJSON(github) }}
23-
run: |
24-
echo Autosync a single commit with id: ${{ github.sha }} from openSource main branch towards cloud hosted version.
25-
- name: Repository Dispatch
26-
uses: peter-evans/repository-dispatch@v2
27-
with:
28-
token: ${{ secrets.AUTOSYNC_TOKEN }}
29-
repository: ${{ secrets.AUTOSYNC_CH_URL }}
30-
event-type: ${{ secrets.AUTOSYNC_SC_EVENT_TYPE }}
31-
client-payload: >-
32-
{
33-
"ref": "${{ github.ref }}",
34-
"sha": "${{ github.sha }}",
35-
"commitMessage": "${{ github.event.commits[0].message }}"
36-
}
7+
doNotAutoSyncSingleCommit:
8+
if: github.event.commits[1] != null
9+
runs-on: ubuntu-latest
10+
steps:
11+
- uses: actions/checkout@v3
12+
- name: IGNORE autoSyncSingleCommit
13+
run: |
14+
echo This single commit has come from a merged commit. We will ignore it. This case is handled in autoSyncMergedPullRequest workflow for merge commits coming from merged pull requests only! Beware, the regular merge commits are not handled by any workflow for the moment.
15+
autoSyncSingleCommit:
16+
if: github.event.commits[1] == null
17+
runs-on: ubuntu-latest
18+
steps:
19+
- uses: actions/checkout@v3
20+
- name: autoSyncSingleCommit
21+
env:
22+
GITHUB_CONTEXT: ${{ toJSON(github) }}
23+
run: |
24+
echo Autosync a single commit with id: ${{ github.sha }} from openSource main branch towards cloud hosted version.
25+
- name: Repository Dispatch
26+
uses: peter-evans/repository-dispatch@v2
27+
with:
28+
token: ${{ secrets.AUTOSYNC_TOKEN }}
29+
repository: ${{ secrets.AUTOSYNC_CH_URL }}
30+
event-type: ${{ secrets.AUTOSYNC_SC_EVENT_TYPE }}
31+
client-payload: >-
32+
{
33+
"ref": "${{ github.ref }}",
34+
"sha": "${{ github.sha }}",
35+
"commitMessage": "${{ github.event.commits[0].message }}"
36+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,135 @@
1+
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
2+
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
3+
import { OpenAI, ALL_AVAILABLE_OPENAI_MODELS } from 'llamaindex'
4+
5+
interface AzureOpenAIConfig {
6+
apiKey?: string
7+
endpoint?: string
8+
apiVersion?: string
9+
deploymentName?: string
10+
}
11+
12+
class AzureChatOpenAI_LlamaIndex_ChatModels implements INode {
13+
label: string
14+
name: string
15+
version: number
16+
type: string
17+
icon: string
18+
category: string
19+
description: string
20+
baseClasses: string[]
21+
tags: string[]
22+
credential: INodeParams
23+
inputs: INodeParams[]
24+
25+
constructor() {
26+
this.label = 'AzureChatOpenAI'
27+
this.name = 'azureChatOpenAI_LlamaIndex'
28+
this.version = 1.0
29+
this.type = 'AzureChatOpenAI'
30+
this.icon = 'Azure.svg'
31+
this.category = 'Chat Models'
32+
this.description = 'Wrapper around Azure OpenAI Chat LLM specific for LlamaIndex'
33+
this.baseClasses = [this.type, 'BaseChatModel_LlamaIndex', ...getBaseClasses(OpenAI)]
34+
this.tags = ['LlamaIndex']
35+
this.credential = {
36+
label: 'Connect Credential',
37+
name: 'credential',
38+
type: 'credential',
39+
credentialNames: ['azureOpenAIApi']
40+
}
41+
this.inputs = [
42+
{
43+
label: 'Model Name',
44+
name: 'modelName',
45+
type: 'options',
46+
options: [
47+
{
48+
label: 'gpt-4',
49+
name: 'gpt-4'
50+
},
51+
{
52+
label: 'gpt-4-32k',
53+
name: 'gpt-4-32k'
54+
},
55+
{
56+
label: 'gpt-3.5-turbo',
57+
name: 'gpt-3.5-turbo'
58+
},
59+
{
60+
label: 'gpt-3.5-turbo-16k',
61+
name: 'gpt-3.5-turbo-16k'
62+
}
63+
],
64+
default: 'gpt-3.5-turbo-16k',
65+
optional: true
66+
},
67+
{
68+
label: 'Temperature',
69+
name: 'temperature',
70+
type: 'number',
71+
step: 0.1,
72+
default: 0.9,
73+
optional: true
74+
},
75+
{
76+
label: 'Max Tokens',
77+
name: 'maxTokens',
78+
type: 'number',
79+
step: 1,
80+
optional: true,
81+
additionalParams: true
82+
},
83+
{
84+
label: 'Top Probability',
85+
name: 'topP',
86+
type: 'number',
87+
step: 0.1,
88+
optional: true,
89+
additionalParams: true
90+
},
91+
{
92+
label: 'Timeout',
93+
name: 'timeout',
94+
type: 'number',
95+
step: 1,
96+
optional: true,
97+
additionalParams: true
98+
}
99+
]
100+
}
101+
102+
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
103+
const modelName = nodeData.inputs?.modelName as keyof typeof ALL_AVAILABLE_OPENAI_MODELS
104+
const temperature = nodeData.inputs?.temperature as string
105+
const maxTokens = nodeData.inputs?.maxTokens as string
106+
const topP = nodeData.inputs?.topP as string
107+
const timeout = nodeData.inputs?.timeout as string
108+
109+
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
110+
const azureOpenAIApiKey = getCredentialParam('azureOpenAIApiKey', credentialData, nodeData)
111+
const azureOpenAIApiInstanceName = getCredentialParam('azureOpenAIApiInstanceName', credentialData, nodeData)
112+
const azureOpenAIApiDeploymentName = getCredentialParam('azureOpenAIApiDeploymentName', credentialData, nodeData)
113+
const azureOpenAIApiVersion = getCredentialParam('azureOpenAIApiVersion', credentialData, nodeData)
114+
115+
const obj: Partial<OpenAI> & { azure?: AzureOpenAIConfig } = {
116+
temperature: parseFloat(temperature),
117+
model: modelName,
118+
azure: {
119+
apiKey: azureOpenAIApiKey,
120+
endpoint: `https://${azureOpenAIApiInstanceName}.openai.azure.com`,
121+
apiVersion: azureOpenAIApiVersion,
122+
deploymentName: azureOpenAIApiDeploymentName
123+
}
124+
}
125+
126+
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
127+
if (topP) obj.topP = parseFloat(topP)
128+
if (timeout) obj.timeout = parseInt(timeout, 10)
129+
130+
const model = new OpenAI(obj)
131+
return model
132+
}
133+
}
134+
135+
module.exports = { nodeClass: AzureChatOpenAI_LlamaIndex_ChatModels }
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,104 @@
1+
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
2+
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
3+
import { Anthropic } from 'llamaindex'
4+
5+
class ChatAnthropic_LlamaIndex_ChatModels implements INode {
6+
label: string
7+
name: string
8+
version: number
9+
type: string
10+
icon: string
11+
category: string
12+
description: string
13+
tags: string[]
14+
baseClasses: string[]
15+
credential: INodeParams
16+
inputs: INodeParams[]
17+
18+
constructor() {
19+
this.label = 'ChatAnthropic'
20+
this.name = 'chatAnthropic_LlamaIndex'
21+
this.version = 1.0
22+
this.type = 'ChatAnthropic'
23+
this.icon = 'Anthropic.svg'
24+
this.category = 'Chat Models'
25+
this.description = 'Wrapper around ChatAnthropic LLM specific for LlamaIndex'
26+
this.baseClasses = [this.type, 'BaseChatModel_LlamaIndex', ...getBaseClasses(Anthropic)]
27+
this.tags = ['LlamaIndex']
28+
this.credential = {
29+
label: 'Connect Credential',
30+
name: 'credential',
31+
type: 'credential',
32+
credentialNames: ['anthropicApi']
33+
}
34+
this.inputs = [
35+
{
36+
label: 'Model Name',
37+
name: 'modelName',
38+
type: 'options',
39+
options: [
40+
{
41+
label: 'claude-2',
42+
name: 'claude-2',
43+
description: 'Claude 2 latest major version, automatically get updates to the model as they are released'
44+
},
45+
{
46+
label: 'claude-instant-1',
47+
name: 'claude-instant-1',
48+
description: 'Claude Instant latest major version, automatically get updates to the model as they are released'
49+
}
50+
],
51+
default: 'claude-2',
52+
optional: true
53+
},
54+
{
55+
label: 'Temperature',
56+
name: 'temperature',
57+
type: 'number',
58+
step: 0.1,
59+
default: 0.9,
60+
optional: true
61+
},
62+
{
63+
label: 'Max Tokens',
64+
name: 'maxTokensToSample',
65+
type: 'number',
66+
step: 1,
67+
optional: true,
68+
additionalParams: true
69+
},
70+
{
71+
label: 'Top P',
72+
name: 'topP',
73+
type: 'number',
74+
step: 0.1,
75+
optional: true,
76+
additionalParams: true
77+
}
78+
]
79+
}
80+
81+
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
82+
const temperature = nodeData.inputs?.temperature as string
83+
const modelName = nodeData.inputs?.modelName as 'claude-2' | 'claude-instant-1' | undefined
84+
const maxTokensToSample = nodeData.inputs?.maxTokensToSample as string
85+
const topP = nodeData.inputs?.topP as string
86+
87+
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
88+
const anthropicApiKey = getCredentialParam('anthropicApiKey', credentialData, nodeData)
89+
90+
const obj: Partial<Anthropic> = {
91+
temperature: parseFloat(temperature),
92+
model: modelName,
93+
apiKey: anthropicApiKey
94+
}
95+
96+
if (maxTokensToSample) obj.maxTokens = parseInt(maxTokensToSample, 10)
97+
if (topP) obj.topP = parseFloat(topP)
98+
99+
const model = new Anthropic(obj)
100+
return model
101+
}
102+
}
103+
104+
module.exports = { nodeClass: ChatAnthropic_LlamaIndex_ChatModels }

0 commit comments

Comments
 (0)