import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
import { BaseCache } from 'langchain/schema'
import hash from 'object-hash'

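// Node definition: registers an in-memory LLM cache whose entries are scoped per chatflow
// and discarded when the app restarts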
class InMemoryCache implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams

    constructor() {
        this.label = 'InMemory Cache'
        this.name = 'inMemoryCache'
        this.version = 1.0
        this.type = 'InMemoryCache'
        this.description = 'Cache LLM response in memory, will be cleared once app restarted'
        this.icon = 'inmemorycache.png'
        this.category = 'Cache'
        this.baseClasses = [this.type, ...getBaseClasses(InMemoryCacheExtended)]
        this.inputs = []
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
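        // Reuse the map already stored for this chatflow in the cache pool, or start with an empty one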
        const memoryMap = options.cachePool.getLLMCache(options.chatflowid) ?? new Map()
        const inMemCache = new InMemoryCacheExtended(memoryMap)

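        // Override lookup to read the latest map from the cache pool, falling back to the
        // local map if the pool has no entry for this chatflow yet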
        inMemCache.lookup = async (prompt: string, llmKey: string): Promise<any | null> => {
            const memory = options.cachePool.getLLMCache(options.chatflowid) ?? inMemCache.cache
            return Promise.resolve(memory.get(getCacheKey(prompt, llmKey)) ?? null)
        }

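        // Override update to write into the local map and persist it back to the cache pool
        // under this chatflow's id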
        inMemCache.update = async (prompt: string, llmKey: string, value: any): Promise<void> => {
            inMemCache.cache.set(getCacheKey(prompt, llmKey), value)
            options.cachePool.addLLMCache(options.chatflowid, inMemCache.cache)
        }
        return inMemCache
    }
}

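// Build a deterministic cache key by hashing the prompt and LLM key together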
const getCacheKey = (...strings: string[]): string => hash(strings.join('_'))

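// LangChain BaseCache implementation backed by an injected Map, so the same map instance
// can be shared with (and restored from) the cache pool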
class InMemoryCacheExtended extends BaseCache {
    cache: Map<string, any>

    constructor(map: Map<string, any>) {
        super()
        this.cache = map
    }

    lookup(prompt: string, llmKey: string): Promise<any | null> {
        return Promise.resolve(this.cache.get(getCacheKey(prompt, llmKey)) ?? null)
    }

    async update(prompt: string, llmKey: string, value: any): Promise<void> {
        this.cache.set(getCacheKey(prompt, llmKey), value)
    }
}

module.exports = { nodeClass: InMemoryCache }