// GooglePaLM.ts — forked from FlowiseAI/Flowise
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { GooglePaLM, GooglePaLMTextInput } from 'langchain/llms/googlepalm'
/**
 * Flowise LLM node wrapping Google MakerSuite PaLM text models
 * (langchain's `GooglePaLM` LLM class).
 *
 * The constructor declares the node's UI metadata and inputs; `init`
 * converts the user-entered input strings into a configured
 * `GooglePaLM` instance.
 */
class GooglePaLM_LLMs implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'GooglePaLM'
        this.name = 'GooglePaLM'
        this.version = 1.0
        this.type = 'GooglePaLM'
        this.icon = 'Google_PaLM_Logo.svg'
        this.category = 'LLMs'
        this.description = 'Wrapper around Google MakerSuite PaLM large language models'
        this.baseClasses = [this.type, ...getBaseClasses(GooglePaLM)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['googleMakerSuite']
        }
        this.inputs = [
            {
                label: 'Model Name',
                name: 'modelName',
                type: 'options',
                options: [
                    {
                        label: 'models/text-bison-001',
                        name: 'models/text-bison-001'
                    }
                ],
                default: 'models/text-bison-001',
                optional: true
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                step: 0.1,
                default: 0.7,
                optional: true,
                description:
                    'Controls the randomness of the output.\n' +
                    'Values can range from [0.0,1.0], inclusive. A value closer to 1.0 ' +
                    'will produce responses that are more varied and creative, while ' +
                    'a value closer to 0.0 will typically result in more straightforward ' +
                    'responses from the model.'
            },
            {
                label: 'Max Output Tokens',
                name: 'maxOutputTokens',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true,
                description: 'Maximum number of tokens to generate in the completion.'
            },
            {
                label: 'Top Probability',
                name: 'topP',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true,
                description:
                    'Top-p changes how the model selects tokens for output.\n' +
                    'Tokens are selected from most probable to least until ' +
                    'the sum of their probabilities equals the top-p value.\n' +
                    'For example, if tokens A, B, and C have a probability of .3, .2, and .1 ' +
                    'and the top-p value is .5, then the model will select either A or B ' +
                    'as the next token (using temperature).'
            },
            {
                label: 'Top-k',
                name: 'topK',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true,
                description:
                    'Top-k changes how the model selects tokens for output.\n' +
                    'A top-k of 1 means the selected token is the most probable among ' +
                    'all tokens in the model vocabulary (also called greedy decoding), ' +
                    'while a top-k of 3 means that the next token is selected from ' +
                    'among the 3 most probable tokens (using temperature).'
            },
            {
                label: 'Stop Sequences',
                name: 'stopSequencesObj',
                type: 'json',
                optional: true,
                additionalParams: true
                //default: { list:[] },
                //description:
                //    'The "list" field should contain a list of character strings (up to 5) that will stop output generation.\n' +
                //    ' * If specified, the API will stop at the first appearance of a stop sequence.\n' +
                //    'Note: The stop sequence will not be included as part of the response.'
            }
            /*
            {
                label: 'Safety Settings',
                name: 'safetySettings',
                type: 'json',
                optional: true,
                additionalParams: true
            }
            */
        ]
    }

    /**
     * Builds a configured `GooglePaLM` LLM from this node's inputs.
     *
     * @param nodeData - node inputs and attached credential reference
     * @param _ - unused (per the INode `init` signature)
     * @param options - passed through to credential resolution
     * @returns a ready-to-use `GooglePaLM` instance
     * @throws Error if the Stop Sequences input is not valid JSON
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const modelName = nodeData.inputs?.modelName as string
        const temperature = nodeData.inputs?.temperature as string
        const maxOutputTokens = nodeData.inputs?.maxOutputTokens as string
        const topP = nodeData.inputs?.topP as string
        const topK = nodeData.inputs?.topK as string
        const stopSequencesObj = nodeData.inputs?.stopSequencesObj

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const googleMakerSuiteKey = getCredentialParam('googleMakerSuiteKey', credentialData, nodeData)

        const obj: Partial<GooglePaLMTextInput> = {
            modelName: modelName,
            apiKey: googleMakerSuiteKey
        }
        // Guard every optional numeric input: parsing an absent value would
        // produce NaN (the original set temperature unconditionally).
        if (temperature) obj.temperature = parseFloat(temperature)
        if (maxOutputTokens) obj.maxOutputTokens = parseInt(maxOutputTokens, 10)
        if (topP) obj.topP = parseFloat(topP)
        // topK is an integer input (step: 1) — parse with parseInt, matching
        // maxOutputTokens, rather than parseFloat.
        if (topK) obj.topK = parseInt(topK, 10)

        if (stopSequencesObj) {
            try {
                // The input may arrive already-parsed (object) or as a raw JSON string.
                const parsedStopSequences = typeof stopSequencesObj === 'object' ? stopSequencesObj : JSON.parse(stopSequencesObj)
                // The expected shape is { list: string[] }; fall back to no stops.
                obj.stopSequences = parsedStopSequences.list || []
            } catch (exception) {
                throw new Error("Invalid JSON in the GooglePaLM's stopSequences: " + exception)
            }
        }

        const model = new GooglePaLM(obj)
        return model
    }
}
module.exports = { nodeClass: GooglePaLM_LLMs }