@@ -1,10 +1,11 @@
-import { AzureOpenAIInput, ChatOpenAI as LangchainChatOpenAI, OpenAIChatInput } from '@langchain/openai'
+import { AzureOpenAIInput, ChatOpenAI as LangchainChatOpenAI, OpenAIChatInput, ClientOptions, LegacyOpenAIInput } from '@langchain/openai'
 import { BaseCache } from '@langchain/core/caches'
 import { BaseLLMParams } from '@langchain/core/language_models/llms'
 import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
 import { ChatOpenAI } from '../ChatOpenAI/FlowiseChatOpenAI'
 import { getModels, MODEL_TYPE } from '../../../src/modelLoader'
+import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'
 
 const serverCredentialsExists =
     !!process.env.AZURE_OPENAI_API_KEY &&
@@ -27,7 +28,7 @@ class AzureChatOpenAI_ChatModels implements INode {
     constructor() {
         this.label = 'Azure ChatOpenAI'
         this.name = 'azureChatOpenAI'
-        this.version = 6.0
+        this.version = 7.0
         this.type = 'AzureChatOpenAI'
         this.icon = 'Azure.svg'
         this.category = 'Chat Models'
@@ -116,6 +117,13 @@ class AzureChatOpenAI_ChatModels implements INode {
                 optional: true,
                 additionalParams: true
             },
+            {
+                label: 'BaseOptions',
+                name: 'baseOptions',
+                type: 'json',
+                optional: true,
+                additionalParams: true
+            },
             {
                 label: 'Allow Image Uploads',
                 name: 'allowImageUploads',
@@ -169,6 +177,7 @@ class AzureChatOpenAI_ChatModels implements INode {
         const cache = nodeData.inputs?.cache as BaseCache
         const topP = nodeData.inputs?.topP as string
         const basePath = nodeData.inputs?.basepath as string
+        const baseOptions = nodeData.inputs?.baseOptions
 
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const azureOpenAIApiKey = getCredentialParam('azureOpenAIApiKey', credentialData, nodeData)
@@ -179,7 +188,10 @@ class AzureChatOpenAI_ChatModels implements INode {
         const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
         const imageResolution = nodeData.inputs?.imageResolution as string
 
-        const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIChatInput> = {
+        const obj: Partial<AzureOpenAIInput> &
+            BaseLLMParams &
+            Partial<OpenAIChatInput> &
+            BaseChatModelParams & { configuration?: ClientOptions & LegacyOpenAIInput } = {
             temperature: parseFloat(temperature),
             modelName,
             azureOpenAIApiKey,
@@ -196,6 +208,16 @@ class AzureChatOpenAI_ChatModels implements INode {
         if (cache) obj.cache = cache
         if (topP) obj.topP = parseFloat(topP)
         if (basePath) obj.azureOpenAIBasePath = basePath
+        if (baseOptions) {
+            try {
+                const parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
+                obj.configuration = {
+                    defaultHeaders: parsedBaseOptions
+                }
+            } catch (exception) {
+                console.error('Error parsing base options', exception)
+            }
+        }
 
         const multiModalOption: IMultiModalOption = {
             image: {
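For reference, the new baseOptions handling accepts either a JSON string or an already-parsed object and forwards it as defaultHeaders on the OpenAI client configuration. A minimal standalone sketch of that flow; the helper name and header value are hypothetical, chosen only for illustration:

// Sketch only (not part of the diff above): how a BaseOptions value becomes
// the client configuration's defaultHeaders.
type BaseOptionsInput = string | Record<string, string> | undefined

function toClientConfiguration(baseOptions: BaseOptionsInput): { defaultHeaders?: Record<string, string> } {
    if (!baseOptions) return {}
    try {
        // Accept either an already-parsed object or a JSON string, as the diff does
        const parsed = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
        return { defaultHeaders: parsed }
    } catch (exception) {
        console.error('Error parsing base options', exception)
        return {}
    }
}

// Example: a value pasted into the node's BaseOptions (json) field
const configuration = toClientConfiguration('{ "x-custom-header": "my-value" }')
// configuration mirrors what the diff assigns to obj.configuration before the model is constructed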