diff --git a/package.json b/package.json index 86bc78ddbeb..3060fded669 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,9 @@ "start": "run-script-os", "start:windows": "cd packages/server/bin && run start", "start:default": "cd packages/server/bin && ./run start", + "start-worker": "run-script-os", + "start-worker:windows": "cd packages/server/bin && run worker", + "start-worker:default": "cd packages/server/bin && ./run worker", "clean": "pnpm --filter \"./packages/**\" clean", "nuke": "pnpm --filter \"./packages/**\" nuke && rimraf node_modules .turbo", "format": "prettier --write \"**/*.{ts,tsx,md}\"", diff --git a/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts index f9541b369e0..7008eb4e41f 100644 --- a/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts +++ b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts @@ -21,7 +21,7 @@ import { } from '../../../src/Interface' import { AgentExecutor } from '../../../src/agents' import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils' -import { checkInputs, Moderation } from '../../moderation/Moderation' +import { checkInputs, Moderation, streamResponse } from '../../moderation/Moderation' import { formatResponse } from '../../outputparsers/OutputParserHelpers' const DEFAULT_PREFIX = `Assistant is a large language model trained by OpenAI. 
@@ -124,10 +124,9 @@ class ConversationalAgent_Agents implements INode { input = await checkInputs(moderations, input) } catch (e) { await new Promise((resolve) => setTimeout(resolve, 500)) - // if (options.shouldStreamResponse) { - // streamResponse(options.sseStreamer, options.chatId, e.message) - // } - //streamResponse(options.socketIO && options.socketIOClientId, e.message, options.socketIO, options.socketIOClientId) + if (options.shouldStreamResponse) { + streamResponse(sseStreamer, chatId, e.message) + } return formatResponse(e.message) } } diff --git a/packages/components/nodes/cache/InMemoryCache/InMemoryCache.ts b/packages/components/nodes/cache/InMemoryCache/InMemoryCache.ts index bddcfb70c96..0e613834f55 100644 --- a/packages/components/nodes/cache/InMemoryCache/InMemoryCache.ts +++ b/packages/components/nodes/cache/InMemoryCache/InMemoryCache.ts @@ -27,17 +27,17 @@ class InMemoryCache implements INode { } async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - const memoryMap = options.cachePool.getLLMCache(options.chatflowid) ?? new Map() + const memoryMap = (await options.cachePool.getLLMCache(options.chatflowid)) ?? new Map() const inMemCache = new InMemoryCacheExtended(memoryMap) inMemCache.lookup = async (prompt: string, llmKey: string): Promise => { - const memory = options.cachePool.getLLMCache(options.chatflowid) ?? inMemCache.cache + const memory = (await options.cachePool.getLLMCache(options.chatflowid)) ?? inMemCache.cache return Promise.resolve(memory.get(getCacheKey(prompt, llmKey)) ?? 
null) } inMemCache.update = async (prompt: string, llmKey: string, value: any): Promise => { inMemCache.cache.set(getCacheKey(prompt, llmKey), value) - options.cachePool.addLLMCache(options.chatflowid, inMemCache.cache) + await options.cachePool.addLLMCache(options.chatflowid, inMemCache.cache) } return inMemCache } diff --git a/packages/components/nodes/cache/InMemoryCache/InMemoryEmbeddingCache.ts b/packages/components/nodes/cache/InMemoryCache/InMemoryEmbeddingCache.ts index de426a72a34..d50bc722922 100644 --- a/packages/components/nodes/cache/InMemoryCache/InMemoryEmbeddingCache.ts +++ b/packages/components/nodes/cache/InMemoryCache/InMemoryEmbeddingCache.ts @@ -43,11 +43,11 @@ class InMemoryEmbeddingCache implements INode { async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { const namespace = nodeData.inputs?.namespace as string const underlyingEmbeddings = nodeData.inputs?.embeddings as Embeddings - const memoryMap = options.cachePool.getEmbeddingCache(options.chatflowid) ?? {} + const memoryMap = (await options.cachePool.getEmbeddingCache(options.chatflowid)) ?? {} const inMemCache = new InMemoryEmbeddingCacheExtended(memoryMap) inMemCache.mget = async (keys: string[]) => { - const memory = options.cachePool.getEmbeddingCache(options.chatflowid) ?? inMemCache.store + const memory = (await options.cachePool.getEmbeddingCache(options.chatflowid)) ?? 
inMemCache.store return keys.map((key) => memory[key]) } @@ -55,14 +55,14 @@ class InMemoryEmbeddingCache implements INode { for (const [key, value] of keyValuePairs) { inMemCache.store[key] = value } - options.cachePool.addEmbeddingCache(options.chatflowid, inMemCache.store) + await options.cachePool.addEmbeddingCache(options.chatflowid, inMemCache.store) } inMemCache.mdelete = async (keys: string[]): Promise => { for (const key of keys) { delete inMemCache.store[key] } - options.cachePool.addEmbeddingCache(options.chatflowid, inMemCache.store) + await options.cachePool.addEmbeddingCache(options.chatflowid, inMemCache.store) } return CacheBackedEmbeddings.fromBytesStore(underlyingEmbeddings, inMemCache, { diff --git a/packages/components/nodes/cache/RedisCache/RedisCache.ts b/packages/components/nodes/cache/RedisCache/RedisCache.ts index c43a9562474..6646575f8c9 100644 --- a/packages/components/nodes/cache/RedisCache/RedisCache.ts +++ b/packages/components/nodes/cache/RedisCache/RedisCache.ts @@ -1,47 +1,10 @@ -import { Redis, RedisOptions } from 'ioredis' -import { isEqual } from 'lodash' +import { Redis } from 'ioredis' import hash from 'object-hash' import { RedisCache as LangchainRedisCache } from '@langchain/community/caches/ioredis' import { StoredGeneration, mapStoredMessageToChatMessage } from '@langchain/core/messages' import { Generation, ChatGeneration } from '@langchain/core/outputs' import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src' -let redisClientSingleton: Redis -let redisClientOption: RedisOptions -let redisClientUrl: string - -const getRedisClientbyOption = (option: RedisOptions) => { - if (!redisClientSingleton) { - // if client doesn't exists - redisClientSingleton = new Redis(option) - redisClientOption = option - return redisClientSingleton - } else if (redisClientSingleton && !isEqual(option, redisClientOption)) { - // if client exists but option changed - 
redisClientSingleton.quit() - redisClientSingleton = new Redis(option) - redisClientOption = option - return redisClientSingleton - } - return redisClientSingleton -} - -const getRedisClientbyUrl = (url: string) => { - if (!redisClientSingleton) { - // if client doesn't exists - redisClientSingleton = new Redis(url) - redisClientUrl = url - return redisClientSingleton - } else if (redisClientSingleton && url !== redisClientUrl) { - // if client exists but option changed - redisClientSingleton.quit() - redisClientSingleton = new Redis(url) - redisClientUrl = url - return redisClientSingleton - } - return redisClientSingleton -} - class RedisCache implements INode { label: string name: string @@ -85,33 +48,19 @@ class RedisCache implements INode { async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { const ttl = nodeData.inputs?.ttl as string - const credentialData = await getCredentialData(nodeData.credential ?? '', options) - const redisUrl = getCredentialParam('redisUrl', credentialData, nodeData) - - let client: Redis - if (!redisUrl || redisUrl === '') { - const username = getCredentialParam('redisCacheUser', credentialData, nodeData) - const password = getCredentialParam('redisCachePwd', credentialData, nodeData) - const portStr = getCredentialParam('redisCachePort', credentialData, nodeData) - const host = getCredentialParam('redisCacheHost', credentialData, nodeData) - const sslEnabled = getCredentialParam('redisCacheSslEnabled', credentialData, nodeData) - - const tlsOptions = sslEnabled === true ? { tls: { rejectUnauthorized: false } } : {} - - client = getRedisClientbyOption({ - port: portStr ? 
parseInt(portStr) : 6379, - host, - username, - password, - ...tlsOptions - }) - } else { - client = getRedisClientbyUrl(redisUrl) - } - + let client = await getRedisClient(nodeData, options) const redisClient = new LangchainRedisCache(client) redisClient.lookup = async (prompt: string, llmKey: string) => { + try { + const pingResp = await client.ping() + if (pingResp !== 'PONG') { + client = await getRedisClient(nodeData, options) + } + } catch (error) { + client = await getRedisClient(nodeData, options) + } + let idx = 0 let key = getCacheKey(prompt, llmKey, String(idx)) let value = await client.get(key) @@ -125,10 +74,21 @@ class RedisCache implements INode { value = await client.get(key) } + client.quit() + return generations.length > 0 ? generations : null } redisClient.update = async (prompt: string, llmKey: string, value: Generation[]) => { + try { + const pingResp = await client.ping() + if (pingResp !== 'PONG') { + client = await getRedisClient(nodeData, options) + } + } catch (error) { + client = await getRedisClient(nodeData, options) + } + for (let i = 0; i < value.length; i += 1) { const key = getCacheKey(prompt, llmKey, String(i)) if (ttl) { @@ -137,12 +97,43 @@ class RedisCache implements INode { await client.set(key, JSON.stringify(serializeGeneration(value[i]))) } } + + client.quit() } + client.quit() + return redisClient } } +const getRedisClient = async (nodeData: INodeData, options: ICommonObject) => { + let client: Redis + + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + const redisUrl = getCredentialParam('redisUrl', credentialData, nodeData) + + if (!redisUrl || redisUrl === '') { + const username = getCredentialParam('redisCacheUser', credentialData, nodeData) + const password = getCredentialParam('redisCachePwd', credentialData, nodeData) + const portStr = getCredentialParam('redisCachePort', credentialData, nodeData) + const host = getCredentialParam('redisCacheHost', credentialData, nodeData) + const sslEnabled = getCredentialParam('redisCacheSslEnabled', credentialData, nodeData) + + const tlsOptions = sslEnabled === true ? { tls: { rejectUnauthorized: false } } : {} + + client = new Redis({ + port: portStr ? parseInt(portStr) : 6379, + host, + username, + password, + ...tlsOptions + }) + } else { + client = new Redis(redisUrl) + } + return client +} const getCacheKey = (...strings: string[]): string => hash(strings.join('_')) const deserializeStoredGeneration = (storedGeneration: StoredGeneration) => { if (storedGeneration.message !== undefined) { diff --git a/packages/components/nodes/cache/RedisCache/RedisEmbeddingsCache.ts b/packages/components/nodes/cache/RedisCache/RedisEmbeddingsCache.ts index 807d10b0016..1e4ed86c89d 100644 --- a/packages/components/nodes/cache/RedisCache/RedisEmbeddingsCache.ts +++ b/packages/components/nodes/cache/RedisCache/RedisEmbeddingsCache.ts @@ -1,45 +1,11 @@ -import { Redis, RedisOptions } from 'ioredis' -import { isEqual } from 'lodash' +import { Redis } from 'ioredis' import { RedisByteStore } from '@langchain/community/storage/ioredis' -import { Embeddings } from '@langchain/core/embeddings' -import { CacheBackedEmbeddings } from 'langchain/embeddings/cache_backed' +import { Embeddings, EmbeddingsInterface } from '@langchain/core/embeddings' +import { CacheBackedEmbeddingsFields } from 'langchain/embeddings/cache_backed' import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src' - -let 
redisClientSingleton: Redis -let redisClientOption: RedisOptions -let redisClientUrl: string - -const getRedisClientbyOption = (option: RedisOptions) => { - if (!redisClientSingleton) { - // if client doesn't exists - redisClientSingleton = new Redis(option) - redisClientOption = option - return redisClientSingleton - } else if (redisClientSingleton && !isEqual(option, redisClientOption)) { - // if client exists but option changed - redisClientSingleton.quit() - redisClientSingleton = new Redis(option) - redisClientOption = option - return redisClientSingleton - } - return redisClientSingleton -} - -const getRedisClientbyUrl = (url: string) => { - if (!redisClientSingleton) { - // if client doesn't exists - redisClientSingleton = new Redis(url) - redisClientUrl = url - return redisClientSingleton - } else if (redisClientSingleton && url !== redisClientUrl) { - // if client exists but option changed - redisClientSingleton.quit() - redisClientSingleton = new Redis(url) - redisClientUrl = url - return redisClientSingleton - } - return redisClientSingleton -} +import { BaseStore } from '@langchain/core/stores' +import { insecureHash } from '@langchain/core/utils/hash' +import { Document } from '@langchain/core/documents' class RedisEmbeddingsCache implements INode { label: string @@ -112,7 +78,7 @@ class RedisEmbeddingsCache implements INode { const tlsOptions = sslEnabled === true ? { tls: { rejectUnauthorized: false } } : {} - client = getRedisClientbyOption({ + client = new Redis({ port: portStr ? 
parseInt(portStr) : 6379, host, username, @@ -120,7 +86,7 @@ class RedisEmbeddingsCache implements INode { ...tlsOptions }) } else { - client = getRedisClientbyUrl(redisUrl) + client = new Redis(redisUrl) } ttl ??= '3600' @@ -130,10 +96,143 @@ class RedisEmbeddingsCache implements INode { ttl: ttlNumber }) - return CacheBackedEmbeddings.fromBytesStore(underlyingEmbeddings, redisStore, { - namespace: namespace + const store = CacheBackedEmbeddings.fromBytesStore(underlyingEmbeddings, redisStore, { + namespace: namespace, + redisClient: client + }) + + return store + } +} + +class CacheBackedEmbeddings extends Embeddings { + protected underlyingEmbeddings: EmbeddingsInterface + + protected documentEmbeddingStore: BaseStore + + protected redisClient?: Redis + + constructor(fields: CacheBackedEmbeddingsFields & { redisClient?: Redis }) { + super(fields) + this.underlyingEmbeddings = fields.underlyingEmbeddings + this.documentEmbeddingStore = fields.documentEmbeddingStore + this.redisClient = fields.redisClient + } + + async embedQuery(document: string): Promise { + const res = this.underlyingEmbeddings.embedQuery(document) + this.redisClient?.quit() + return res + } + + async embedDocuments(documents: string[]): Promise { + const vectors = await this.documentEmbeddingStore.mget(documents) + const missingIndicies = [] + const missingDocuments = [] + for (let i = 0; i < vectors.length; i += 1) { + if (vectors[i] === undefined) { + missingIndicies.push(i) + missingDocuments.push(documents[i]) + } + } + if (missingDocuments.length) { + const missingVectors = await this.underlyingEmbeddings.embedDocuments(missingDocuments) + const keyValuePairs: [string, number[]][] = missingDocuments.map((document, i) => [document, missingVectors[i]]) + await this.documentEmbeddingStore.mset(keyValuePairs) + for (let i = 0; i < missingIndicies.length; i += 1) { + vectors[missingIndicies[i]] = missingVectors[i] + } + } + this.redisClient?.quit() + return vectors as number[][] + } + + static 
fromBytesStore( + underlyingEmbeddings: EmbeddingsInterface, + documentEmbeddingStore: BaseStore, + options?: { + namespace?: string + redisClient?: Redis + } + ) { + const encoder = new TextEncoder() + const decoder = new TextDecoder() + const encoderBackedStore = new EncoderBackedStore({ + store: documentEmbeddingStore, + keyEncoder: (key) => (options?.namespace ?? '') + insecureHash(key), + valueSerializer: (value) => encoder.encode(JSON.stringify(value)), + valueDeserializer: (serializedValue) => JSON.parse(decoder.decode(serializedValue)) + }) + return new this({ + underlyingEmbeddings, + documentEmbeddingStore: encoderBackedStore, + redisClient: options?.redisClient + }) + } +} + +class EncoderBackedStore extends BaseStore { + lc_namespace = ['langchain', 'storage'] + + store: BaseStore + + keyEncoder: (key: K) => string + + valueSerializer: (value: V) => SerializedType + + valueDeserializer: (value: SerializedType) => V + + constructor(fields: { + store: BaseStore + keyEncoder: (key: K) => string + valueSerializer: (value: V) => SerializedType + valueDeserializer: (value: SerializedType) => V + }) { + super(fields) + this.store = fields.store + this.keyEncoder = fields.keyEncoder + this.valueSerializer = fields.valueSerializer + this.valueDeserializer = fields.valueDeserializer + } + + async mget(keys: K[]): Promise<(V | undefined)[]> { + const encodedKeys = keys.map(this.keyEncoder) + const values = await this.store.mget(encodedKeys) + return values.map((value) => { + if (value === undefined) { + return undefined + } + return this.valueDeserializer(value) }) } + + async mset(keyValuePairs: [K, V][]): Promise { + const encodedPairs: [string, SerializedType][] = keyValuePairs.map(([key, value]) => [ + this.keyEncoder(key), + this.valueSerializer(value) + ]) + return this.store.mset(encodedPairs) + } + + async mdelete(keys: K[]): Promise { + const encodedKeys = keys.map(this.keyEncoder) + return this.store.mdelete(encodedKeys) + } + + async *yieldKeys(prefix?: 
string | undefined): AsyncGenerator { + yield* this.store.yieldKeys(prefix) + } +} + +export function createDocumentStoreFromByteStore(store: BaseStore) { + const encoder = new TextEncoder() + const decoder = new TextDecoder() + return new EncoderBackedStore({ + store, + keyEncoder: (key: string) => key, + valueSerializer: (doc: Document) => encoder.encode(JSON.stringify({ pageContent: doc.pageContent, metadata: doc.metadata })), + valueDeserializer: (bytes: Uint8Array) => new Document(JSON.parse(decoder.decode(bytes))) + }) } module.exports = { nodeClass: RedisEmbeddingsCache } diff --git a/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts b/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts index dbdd4698053..14b8c606d41 100644 --- a/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts +++ b/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts @@ -180,7 +180,6 @@ class SqlDatabaseChain_Chains implements INode { if (shouldStreamResponse) { streamResponse(sseStreamer, chatId, e.message) } - // streamResponse(options.socketIO && options.socketIOClientId, e.message, options.socketIO, options.socketIOClientId) return formatResponse(e.message) } } diff --git a/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts b/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts index 595803c52a2..1601ed103ee 100644 --- a/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts +++ b/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts @@ -1,5 +1,4 @@ -import { Redis, RedisConfigNodejs } from '@upstash/redis' -import { isEqual } from 'lodash' +import { Redis } from '@upstash/redis' import { BufferMemory, BufferMemoryInput } from 'langchain/memory' import { UpstashRedisChatMessageHistory } from '@langchain/community/stores/message/upstash_redis' import { 
mapStoredMessageToChatMessage, AIMessage, HumanMessage, StoredMessage, BaseMessage } from '@langchain/core/messages' @@ -13,24 +12,6 @@ import { } from '../../../src/utils' import { ICommonObject } from '../../../src/Interface' -let redisClientSingleton: Redis -let redisClientOption: RedisConfigNodejs - -const getRedisClientbyOption = (option: RedisConfigNodejs) => { - if (!redisClientSingleton) { - // if client doesn't exists - redisClientSingleton = new Redis(option) - redisClientOption = option - return redisClientSingleton - } else if (redisClientSingleton && !isEqual(option, redisClientOption)) { - // if client exists but option changed - redisClientSingleton = new Redis(option) - redisClientOption = option - return redisClientSingleton - } - return redisClientSingleton -} - class UpstashRedisBackedChatMemory_Memory implements INode { label: string name: string @@ -109,7 +90,7 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) const upstashRestToken = getCredentialParam('upstashRestToken', credentialData, nodeData) - const client = getRedisClientbyOption({ + const client = new Redis({ url: baseURL, token: upstashRestToken }) diff --git a/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts b/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts index d752928e7ab..9cd9969e74f 100644 --- a/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts +++ b/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts @@ -138,7 +138,14 @@ class Elasticsearch_VectorStores implements INode { }) // end of workaround - const elasticSearchClientArgs = prepareClientArgs(endPoint, cloudId, credentialData, nodeData, similarityMeasure, indexName) + const { elasticClient, elasticSearchClientArgs } = prepareClientArgs( + endPoint, + cloudId, + credentialData, + nodeData, + similarityMeasure, + indexName + ) const vectorStore = new ElasticVectorSearch(embeddings, elasticSearchClientArgs) try { @@ -155,9 +162,11 @@ class Elasticsearch_VectorStores implements INode { vectorStoreName: indexName } }) + await elasticClient.close() return res } else { await vectorStore.addDocuments(finalDocs) + await elasticClient.close() return { numAdded: finalDocs.length, addedDocs: finalDocs } } } catch (e) { @@ -174,7 +183,14 @@ class Elasticsearch_VectorStores implements INode { const endPoint = getCredentialParam('endpoint', credentialData, nodeData) const cloudId = getCredentialParam('cloudId', credentialData, nodeData) - const elasticSearchClientArgs = prepareClientArgs(endPoint, cloudId, credentialData, nodeData, similarityMeasure, indexName) + const { elasticClient, elasticSearchClientArgs } = prepareClientArgs( + endPoint, + cloudId, + credentialData, + nodeData, + similarityMeasure, + indexName + ) const vectorStore = new ElasticVectorSearch(embeddings, elasticSearchClientArgs) try { @@ -186,8 +202,10 @@ class Elasticsearch_VectorStores implements 
INode { await vectorStore.delete({ ids: keys }) await recordManager.deleteKeys(keys) + await elasticClient.close() } else { await vectorStore.delete({ ids }) + await elasticClient.close() } } catch (e) { throw new Error(e) @@ -206,8 +224,22 @@ class Elasticsearch_VectorStores implements INode { const k = topK ? parseFloat(topK) : 4 const output = nodeData.outputs?.output as string - const elasticSearchClientArgs = prepareClientArgs(endPoint, cloudId, credentialData, nodeData, similarityMeasure, indexName) + const { elasticClient, elasticSearchClientArgs } = prepareClientArgs( + endPoint, + cloudId, + credentialData, + nodeData, + similarityMeasure, + indexName + ) const vectorStore = await ElasticVectorSearch.fromExistingIndex(embeddings, elasticSearchClientArgs) + const originalSimilaritySearchVectorWithScore = vectorStore.similaritySearchVectorWithScore + + vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => { + const results = await originalSimilaritySearchVectorWithScore.call(vectorStore, query, k, filter) + await elasticClient.close() + return results + } if (output === 'retriever') { return vectorStore.asRetriever(k) @@ -289,12 +321,17 @@ const prepareClientArgs = ( similarity: 'l2_norm' } } + + const elasticClient = new Client(elasticSearchClientOptions) const elasticSearchClientArgs: ElasticClientArgs = { - client: new Client(elasticSearchClientOptions), + client: elasticClient, indexName: indexName, vectorSearchOptions: vectorSearchOptions } - return elasticSearchClientArgs + return { + elasticClient, + elasticSearchClientArgs + } } module.exports = { nodeClass: Elasticsearch_VectorStores } diff --git a/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts b/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts index 19ff9286ffb..c9ff257fb18 100644 --- a/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts +++ b/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts @@ -1,5 +1,5 @@ 
-import { flatten, isEqual } from 'lodash' -import { Pinecone, PineconeConfiguration } from '@pinecone-database/pinecone' +import { flatten } from 'lodash' +import { Pinecone } from '@pinecone-database/pinecone' import { PineconeStoreParams, PineconeStore } from '@langchain/pinecone' import { Embeddings } from '@langchain/core/embeddings' import { Document } from '@langchain/core/documents' @@ -9,23 +9,6 @@ import { FLOWISE_CHATID, getBaseClasses, getCredentialData, getCredentialParam } import { addMMRInputParams, howToUseFileUpload, resolveVectorStoreOrRetriever } from '../VectorStoreUtils' import { index } from '../../../src/indexing' -let pineconeClientSingleton: Pinecone -let pineconeClientOption: PineconeConfiguration - -const getPineconeClient = (option: PineconeConfiguration) => { - if (!pineconeClientSingleton) { - // if client doesn't exists - pineconeClientSingleton = new Pinecone(option) - pineconeClientOption = option - return pineconeClientSingleton - } else if (pineconeClientSingleton && !isEqual(option, pineconeClientOption)) { - // if client exists but option changed - pineconeClientSingleton = new Pinecone(option) - return pineconeClientSingleton - } - return pineconeClientSingleton -} - class Pinecone_VectorStores implements INode { label: string name: string @@ -155,7 +138,7 @@ class Pinecone_VectorStores implements INode { const credentialData = await getCredentialData(nodeData.credential ?? '', options) const pineconeApiKey = getCredentialParam('pineconeApiKey', credentialData, nodeData) - const client = getPineconeClient({ apiKey: pineconeApiKey }) + const client = new Pinecone({ apiKey: pineconeApiKey }) const pineconeIndex = client.Index(_index) @@ -211,7 +194,7 @@ class Pinecone_VectorStores implements INode { const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) const pineconeApiKey = getCredentialParam('pineconeApiKey', credentialData, nodeData) - const client = getPineconeClient({ apiKey: pineconeApiKey }) + const client = new Pinecone({ apiKey: pineconeApiKey }) const pineconeIndex = client.Index(_index) @@ -253,7 +236,7 @@ class Pinecone_VectorStores implements INode { const credentialData = await getCredentialData(nodeData.credential ?? '', options) const pineconeApiKey = getCredentialParam('pineconeApiKey', credentialData, nodeData) - const client = getPineconeClient({ apiKey: pineconeApiKey }) + const client = new Pinecone({ apiKey: pineconeApiKey }) const pineconeIndex = client.Index(index) diff --git a/packages/components/nodes/vectorstores/Postgres/Postgres.ts b/packages/components/nodes/vectorstores/Postgres/Postgres.ts index 1cdacb85fc7..1e7621c3f27 100644 --- a/packages/components/nodes/vectorstores/Postgres/Postgres.ts +++ b/packages/components/nodes/vectorstores/Postgres/Postgres.ts @@ -7,7 +7,7 @@ import { howToUseFileUpload } from '../VectorStoreUtils' import { VectorStore } from '@langchain/core/vectorstores' import { VectorStoreDriver } from './driver/Base' import { TypeORMDriver } from './driver/TypeORM' -import { PGVectorDriver } from './driver/PGVector' +// import { PGVectorDriver } from './driver/PGVector' import { getContentColumnName, getDatabase, getHost, getPort, getTableName } from './utils' const serverCredentialsExists = !!process.env.POSTGRES_VECTORSTORE_USER && !!process.env.POSTGRES_VECTORSTORE_PASSWORD @@ -91,7 +91,7 @@ class Postgres_VectorStores implements INode { additionalParams: true, optional: true }, - { + /*{ label: 'Driver', name: 'driver', type: 'options', @@ -109,7 +109,7 @@ class Postgres_VectorStores implements INode { ], optional: true, additionalParams: true - }, + },*/ { label: 'Distance Strategy', name: 'distanceStrategy', @@ -300,14 +300,15 @@ class Postgres_VectorStores implements INode { } static getDriverFromConfig(nodeData: INodeData, options: 
ICommonObject): VectorStoreDriver { - switch (nodeData.inputs?.driver) { + /*switch (nodeData.inputs?.driver) { case 'typeorm': return new TypeORMDriver(nodeData, options) case 'pgvector': return new PGVectorDriver(nodeData, options) default: return new TypeORMDriver(nodeData, options) - } + }*/ + return new TypeORMDriver(nodeData, options) } } diff --git a/packages/components/nodes/vectorstores/Postgres/driver/PGVector.ts b/packages/components/nodes/vectorstores/Postgres/driver/PGVector.ts index 727d7f31379..608858a1923 100644 --- a/packages/components/nodes/vectorstores/Postgres/driver/PGVector.ts +++ b/packages/components/nodes/vectorstores/Postgres/driver/PGVector.ts @@ -1,3 +1,7 @@ +/* +* Temporary disabled due to increasing open connections without releasing them +* Use TypeORM instead + import { VectorStoreDriver } from './Base' import { FLOWISE_CHATID } from '../../../../src' import { DistanceStrategy, PGVectorStore, PGVectorStoreArgs } from '@langchain/community/vectorstores/pgvector' @@ -120,3 +124,4 @@ export class PGVectorDriver extends VectorStoreDriver { return instance } } +*/ diff --git a/packages/components/nodes/vectorstores/Postgres/driver/TypeORM.ts b/packages/components/nodes/vectorstores/Postgres/driver/TypeORM.ts index 65593499462..64115f68150 100644 --- a/packages/components/nodes/vectorstores/Postgres/driver/TypeORM.ts +++ b/packages/components/nodes/vectorstores/Postgres/driver/TypeORM.ts @@ -51,7 +51,9 @@ export class TypeORMDriver extends VectorStoreDriver { } async instanciate(metadataFilters?: any) { - return this.adaptInstance(await TypeORMVectorStore.fromDataSource(this.getEmbeddings(), await this.getArgs()), metadataFilters) + // @ts-ignore + const instance = new TypeORMVectorStore(this.getEmbeddings(), await this.getArgs()) + return this.adaptInstance(instance, metadataFilters) } async fromDocuments(documents: Document[]) { @@ -77,7 +79,8 @@ export class TypeORMDriver extends VectorStoreDriver { [ERROR]: uncaughtException: Illegal 
invocation TypeError: Illegal invocation at Socket.ref (node:net:1524:18) at Connection.ref (.../node_modules/pg/lib/connection.js:183:17) at Client.ref (.../node_modules/pg/lib/client.js:591:21) at BoundPool._pulseQueue (/node_modules/pg-pool/index.js:148:28) at .../node_modules/pg-pool/index.js:184:37 at process.processTicksAndRejections (node:internal/process/task_queues:77:11) */ instance.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => { - return await TypeORMDriver.similaritySearchVectorWithScore( + await instance.appDataSource.initialize() + const res = await TypeORMDriver.similaritySearchVectorWithScore( query, k, tableName, @@ -85,6 +88,8 @@ export class TypeORMDriver extends VectorStoreDriver { filter ?? metadataFilters, this.computedOperatorString ) + await instance.appDataSource.destroy() + return res } instance.delete = async (params: { ids: string[] }): Promise => { @@ -92,9 +97,12 @@ export class TypeORMDriver extends VectorStoreDriver { if (ids?.length) { try { + await instance.appDataSource.initialize() instance.appDataSource.getRepository(instance.documentEntity).delete(ids) } catch (e) { console.error('Failed to delete') + } finally { + await instance.appDataSource.destroy() } } } @@ -102,7 +110,10 @@ export class TypeORMDriver extends VectorStoreDriver { const baseAddVectorsFn = instance.addVectors.bind(instance) instance.addVectors = async (vectors, documents) => { - return baseAddVectorsFn(vectors, this.sanitizeDocuments(documents)) + await instance.appDataSource.initialize() + const res = baseAddVectorsFn(vectors, this.sanitizeDocuments(documents)) + await instance.appDataSource.destroy() + return res } return instance diff --git a/packages/components/nodes/vectorstores/Redis/Redis.ts b/packages/components/nodes/vectorstores/Redis/Redis.ts index db8df1ea9b0..3c9fd773c64 100644 --- a/packages/components/nodes/vectorstores/Redis/Redis.ts +++ b/packages/components/nodes/vectorstores/Redis/Redis.ts @@ -1,32 
+1,11 @@ -import { flatten, isEqual } from 'lodash' -import { createClient, SearchOptions, RedisClientOptions } from 'redis' +import { flatten } from 'lodash' +import { createClient, SearchOptions } from 'redis' import { Embeddings } from '@langchain/core/embeddings' import { RedisVectorStore, RedisVectorStoreConfig } from '@langchain/community/vectorstores/redis' import { Document } from '@langchain/core/documents' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' -import { escapeAllStrings, escapeSpecialChars, unEscapeSpecialChars } from './utils' - -let redisClientSingleton: ReturnType -let redisClientOption: RedisClientOptions - -const getRedisClient = async (option: RedisClientOptions) => { - if (!redisClientSingleton) { - // if client doesn't exists - redisClientSingleton = createClient(option) - await redisClientSingleton.connect() - redisClientOption = option - return redisClientSingleton - } else if (redisClientSingleton && !isEqual(option, redisClientOption)) { - // if client exists but option changed - redisClientSingleton.quit() - redisClientSingleton = createClient(option) - await redisClientSingleton.connect() - redisClientOption = option - return redisClientSingleton - } - return redisClientSingleton -} +import { escapeSpecialChars, unEscapeSpecialChars } from './utils' class Redis_VectorStores implements INode { label: string @@ -163,13 +142,13 @@ class Redis_VectorStores implements INode { for (let i = 0; i < flattenDocs.length; i += 1) { if (flattenDocs[i] && flattenDocs[i].pageContent) { const document = new Document(flattenDocs[i]) - escapeAllStrings(document.metadata) finalDocs.push(document) } } try { - const redisClient = await getRedisClient({ url: redisUrl }) + const redisClient = createClient({ url: redisUrl }) + await redisClient.connect() const storeConfig: 
RedisVectorStoreConfig = { redisClient: redisClient, @@ -203,6 +182,8 @@ class Redis_VectorStores implements INode { ) } + await redisClient.quit() + return { numAdded: finalDocs.length, addedDocs: finalDocs } } catch (e) { throw new Error(e) @@ -231,7 +212,7 @@ class Redis_VectorStores implements INode { redisUrl = 'redis://' + username + ':' + password + '@' + host + ':' + portStr } - const redisClient = await getRedisClient({ url: redisUrl }) + const redisClient = createClient({ url: redisUrl }) const storeConfig: RedisVectorStoreConfig = { redisClient: redisClient, @@ -246,7 +227,19 @@ class Redis_VectorStores implements INode { // Avoid Illegal invocation error vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => { - return await similaritySearchVectorWithScore(query, k, indexName, metadataKey, vectorKey, contentKey, redisClient, filter) + await redisClient.connect() + const results = await similaritySearchVectorWithScore( + query, + k, + indexName, + metadataKey, + vectorKey, + contentKey, + redisClient, + filter + ) + await redisClient.quit() + return results } if (output === 'retriever') { diff --git a/packages/components/package.json b/packages/components/package.json index e567bc00421..bbe432df92c 100644 --- a/packages/components/package.json +++ b/packages/components/package.json @@ -120,7 +120,6 @@ "redis": "^4.6.7", "replicate": "^0.31.1", "sanitize-filename": "^1.6.3", - "socket.io": "^4.6.1", "srt-parser-2": "^1.2.3", "typeorm": "^0.3.6", "weaviate-ts-client": "^1.1.0", diff --git a/packages/components/src/Interface.ts b/packages/components/src/Interface.ts index 1b7f4a2e617..1afd8b9ddb3 100644 --- a/packages/components/src/Interface.ts +++ b/packages/components/src/Interface.ts @@ -404,12 +404,9 @@ export interface IStateWithMessages extends ICommonObject { } export interface IServerSideEventStreamer { - streamEvent(chatId: string, data: string): void streamStartEvent(chatId: string, data: any): void - 
streamTokenEvent(chatId: string, data: string): void streamCustomEvent(chatId: string, eventType: string, data: any): void - streamSourceDocumentsEvent(chatId: string, data: any): void streamUsedToolsEvent(chatId: string, data: any): void streamFileAnnotationsEvent(chatId: string, data: any): void diff --git a/packages/server/package.json b/packages/server/package.json index 14b6acc6736..0ee30dc37ad 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -26,6 +26,8 @@ "nuke": "rimraf dist node_modules .turbo", "start:windows": "cd bin && run start", "start:default": "cd bin && ./run start", + "start-worker:windows": "cd bin && run worker", + "start-worker:default": "cd bin && ./run worker", "dev": "tsc-watch --noClear -p ./tsconfig.json --onSuccess \"pnpm start\"", "oclif-dev": "run-script-os", "oclif-dev:windows": "cd bin && dev start", @@ -54,8 +56,9 @@ }, "license": "SEE LICENSE IN LICENSE.md", "dependencies": { - "@oclif/core": "^1.13.10", + "@oclif/core": "4.0.7", "@opentelemetry/api": "^1.3.0", + "@opentelemetry/auto-instrumentations-node": "^0.52.0", "@opentelemetry/core": "1.27.0", "@opentelemetry/exporter-metrics-otlp-grpc": "0.54.0", "@opentelemetry/exporter-metrics-otlp-http": "0.54.0", @@ -65,14 +68,14 @@ "@opentelemetry/exporter-trace-otlp-proto": "0.54.0", "@opentelemetry/resources": "1.27.0", "@opentelemetry/sdk-metrics": "1.27.0", + "@opentelemetry/sdk-node": "^0.54.0", "@opentelemetry/sdk-trace-base": "1.27.0", "@opentelemetry/semantic-conventions": "1.27.0", - "@opentelemetry/auto-instrumentations-node": "^0.52.0", - "@opentelemetry/sdk-node": "^0.54.0", "@types/lodash": "^4.14.202", "@types/uuid": "^9.0.7", "async-mutex": "^0.4.0", "axios": "1.6.2", + "bullmq": "^5.13.2", "content-disposition": "0.5.4", "cors": "^2.8.5", "crypto-js": "^4.1.1", @@ -82,6 +85,7 @@ "express-rate-limit": "^6.9.0", "flowise-components": "workspace:^", "flowise-ui": "workspace:^", + "global-agent": "^3.0.0", "http-errors": "^2.0.0", 
"http-status-codes": "^2.3.0", "langchainhub": "^0.0.11", @@ -94,14 +98,13 @@ "pg": "^8.11.1", "posthog-node": "^3.5.0", "prom-client": "^15.1.3", + "rate-limit-redis": "^4.2.0", "reflect-metadata": "^0.1.13", "sanitize-html": "^2.11.0", - "socket.io": "^4.6.1", "sqlite3": "^5.1.6", "typeorm": "^0.3.6", "uuid": "^9.0.1", - "winston": "^3.9.0", - "global-agent": "^3.0.0" + "winston": "^3.9.0" }, "devDependencies": { "@types/content-disposition": "0.5.8", diff --git a/packages/server/src/AbortControllerPool.ts b/packages/server/src/AbortControllerPool.ts new file mode 100644 index 00000000000..9d8af011912 --- /dev/null +++ b/packages/server/src/AbortControllerPool.ts @@ -0,0 +1,45 @@ +/** + * This pool is to keep track of abort controllers mapped to chatflowid_chatid + */ +export class AbortControllerPool { + abortControllers: Record = {} + + /** + * Add to the pool + * @param {string} id + * @param {AbortController} abortController + */ + add(id: string, abortController: AbortController) { + this.abortControllers[id] = abortController + } + + /** + * Remove from the pool + * @param {string} id + */ + remove(id: string) { + if (Object.prototype.hasOwnProperty.call(this.abortControllers, id)) { + delete this.abortControllers[id] + } + } + + /** + * Get the abort controller + * @param {string} id + */ + get(id: string) { + return this.abortControllers[id] + } + + /** + * Abort + * @param {string} id + */ + abort(id: string) { + const abortController = this.abortControllers[id] + if (abortController) { + abortController.abort() + this.remove(id) + } + } +} diff --git a/packages/server/src/CachePool.ts b/packages/server/src/CachePool.ts index b59789d2701..69dd7ffc4ac 100644 --- a/packages/server/src/CachePool.ts +++ b/packages/server/src/CachePool.ts @@ -1,19 +1,47 @@ -import { IActiveCache } from './Interface' +import { IActiveCache, MODE } from './Interface' +import Redis from 'ioredis' /** * This pool is to keep track of in-memory cache used for LLM and Embeddings */ 
export class CachePool { + private redisClient: Redis | null = null activeLLMCache: IActiveCache = {} activeEmbeddingCache: IActiveCache = {} + constructor() { + if (process.env.MODE === MODE.QUEUE) { + this.redisClient = new Redis({ + host: process.env.REDIS_HOST || 'localhost', + port: parseInt(process.env.REDIS_PORT || '6379'), + username: process.env.REDIS_USERNAME || undefined, + password: process.env.REDIS_PASSWORD || undefined, + tls: + process.env.REDIS_TLS === 'true' + ? { + cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined, + key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined, + ca: process.env.REDIS_CA ? Buffer.from(process.env.REDIS_CA, 'base64') : undefined + } + : undefined + }) + } + } + /** * Add to the llm cache pool * @param {string} chatflowid * @param {Map} value */ - addLLMCache(chatflowid: string, value: Map) { - this.activeLLMCache[chatflowid] = value + async addLLMCache(chatflowid: string, value: Map) { + if (process.env.MODE === MODE.QUEUE) { + if (this.redisClient) { + const serializedValue = JSON.stringify(Array.from(value.entries())) + await this.redisClient.set(`llmCache:${chatflowid}`, serializedValue) + } + } else { + this.activeLLMCache[chatflowid] = value + } } /** @@ -21,24 +49,60 @@ export class CachePool { * @param {string} chatflowid * @param {Map} value */ - addEmbeddingCache(chatflowid: string, value: Map) { - this.activeEmbeddingCache[chatflowid] = value + async addEmbeddingCache(chatflowid: string, value: Map) { + if (process.env.MODE === MODE.QUEUE) { + if (this.redisClient) { + const serializedValue = JSON.stringify(Array.from(value.entries())) + await this.redisClient.set(`embeddingCache:${chatflowid}`, serializedValue) + } + } else { + this.activeEmbeddingCache[chatflowid] = value + } } /** * Get item from llm cache pool * @param {string} chatflowid */ - getLLMCache(chatflowid: string): Map | undefined { - return this.activeLLMCache[chatflowid] + 
async getLLMCache(chatflowid: string): Promise<Map<any, any> | undefined> { + if (process.env.MODE === MODE.QUEUE) { + if (this.redisClient) { + const serializedValue = await this.redisClient.get(`llmCache:${chatflowid}`) + if (serializedValue) { + return new Map(JSON.parse(serializedValue)) + } + } + } else { + return this.activeLLMCache[chatflowid] + } + return undefined } /** * Get item from embedding cache pool * @param {string} chatflowid */ - getEmbeddingCache(chatflowid: string): Map | undefined { - return this.activeEmbeddingCache[chatflowid] + async getEmbeddingCache(chatflowid: string): Promise<Map<any, any> | undefined> { + if (process.env.MODE === MODE.QUEUE) { + if (this.redisClient) { + const serializedValue = await this.redisClient.get(`embeddingCache:${chatflowid}`) + if (serializedValue) { + return new Map(JSON.parse(serializedValue)) + } + } + } else { + return this.activeEmbeddingCache[chatflowid] + } + return undefined + } + + /** + * Close Redis connection if applicable + */ + async close() { + if (this.redisClient) { + await this.redisClient.quit() + } } } diff --git a/packages/server/src/ChatflowPool.ts b/packages/server/src/ChatflowPool.ts deleted file mode 100644 index 5f1e3a844c8..00000000000 --- a/packages/server/src/ChatflowPool.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { ICommonObject } from 'flowise-components' -import { IActiveChatflows, INodeData, IReactFlowNode } from './Interface' -import logger from './utils/logger' - -/** - * This pool is to keep track of active chatflow pools - * so we can prevent building langchain flow all over again - */ -export class ChatflowPool { - activeChatflows: IActiveChatflows = {} - - /** - * Add to the pool - * @param {string} chatflowid - * @param {INodeData} endingNodeData - * @param {IReactFlowNode[]} startingNodes - * @param {ICommonObject} overrideConfig - */ - add( - chatflowid: string, - endingNodeData: INodeData | undefined, - startingNodes: IReactFlowNode[], - overrideConfig?: ICommonObject, - chatId?: string - ) { - 
this.activeChatflows[chatflowid] = { - startingNodes, - endingNodeData, - inSync: true - } - if (overrideConfig) this.activeChatflows[chatflowid].overrideConfig = overrideConfig - if (chatId) this.activeChatflows[chatflowid].chatId = chatId - - logger.info(`[server]: Chatflow ${chatflowid} added into ChatflowPool`) - } - - /** - * Update to the pool - * @param {string} chatflowid - * @param {boolean} inSync - */ - updateInSync(chatflowid: string, inSync: boolean) { - if (Object.prototype.hasOwnProperty.call(this.activeChatflows, chatflowid)) { - this.activeChatflows[chatflowid].inSync = inSync - logger.info(`[server]: Chatflow ${chatflowid} updated inSync=${inSync} in ChatflowPool`) - } - } - - /** - * Remove from the pool - * @param {string} chatflowid - */ - async remove(chatflowid: string) { - if (Object.prototype.hasOwnProperty.call(this.activeChatflows, chatflowid)) { - delete this.activeChatflows[chatflowid] - logger.info(`[server]: Chatflow ${chatflowid} removed from ChatflowPool`) - } - } -} diff --git a/packages/server/src/Interface.DocumentStore.ts b/packages/server/src/Interface.DocumentStore.ts index 44bb10dc449..f135329a0bd 100644 --- a/packages/server/src/Interface.DocumentStore.ts +++ b/packages/server/src/Interface.DocumentStore.ts @@ -1,5 +1,8 @@ import { ICommonObject } from 'flowise-components' import { DocumentStore } from './database/entities/DocumentStore' +import { DataSource } from 'typeorm' +import { IComponentNodes } from './Interface' +import { Telemetry } from './utils/telemetry' export enum DocumentStoreStatus { EMPTY_SYNC = 'EMPTY', @@ -110,6 +113,31 @@ export interface IDocumentStoreWhereUsed { name: string } +export interface IUpsertQueueAppServer { + appDataSource: DataSource + componentNodes: IComponentNodes + telemetry: Telemetry +} + +export interface IExecuteDocStoreUpsert extends IUpsertQueueAppServer { + storeId: string + totalItems: IDocumentStoreUpsertData[] + files: Express.Multer.File[] + isRefreshAPI: boolean +} + +export 
interface IExecuteProcessLoader extends IUpsertQueueAppServer { + data: IDocumentStoreLoaderForPreview + docLoaderId: string + isProcessWithoutUpsert: boolean +} + +export interface IExecuteVectorStoreInsert extends IUpsertQueueAppServer { + data: ICommonObject + isStrictSave: boolean + isVectorStoreInsert: boolean +} + const getFileName = (fileBase64: string) => { let fileNames = [] if (fileBase64.startsWith('FILE-STORAGE::')) { diff --git a/packages/server/src/Interface.ts b/packages/server/src/Interface.ts index 72cee13a868..fbf4ea1299e 100644 --- a/packages/server/src/Interface.ts +++ b/packages/server/src/Interface.ts @@ -1,4 +1,15 @@ -import { IAction, ICommonObject, IFileUpload, INode, INodeData as INodeDataFromComponent, INodeParams } from 'flowise-components' +import { + IAction, + ICommonObject, + IFileUpload, + INode, + INodeData as INodeDataFromComponent, + INodeParams, + IServerSideEventStreamer +} from 'flowise-components' +import { DataSource } from 'typeorm' +import { CachePool } from './CachePool' +import { Telemetry } from './utils/telemetry' export type MessageType = 'apiMessage' | 'userMessage' @@ -6,6 +17,11 @@ export type ChatflowType = 'CHATFLOW' | 'MULTIAGENT' | 'ASSISTANT' export type AssistantType = 'CUSTOM' | 'OPENAI' | 'AZURE' +export enum MODE { + QUEUE = 'queue', + MAIN = 'main' +} + export enum ChatType { INTERNAL = 'INTERNAL', EXTERNAL = 'EXTERNAL' @@ -28,6 +44,7 @@ export interface IChatFlow { isPublic?: boolean apikeyid?: string analytic?: string + speechToText?: string chatbotConfig?: string followUpPrompts?: string apiConfig?: string @@ -226,6 +243,7 @@ export interface IncomingInput { leadEmail?: string history?: IMessage[] action?: IAction + streaming?: boolean } export interface IActiveChatflows { @@ -290,6 +308,34 @@ export interface ICustomTemplate { usecases?: string } +export interface IFlowConfig { + chatflowid: string + chatId: string + sessionId: string + chatHistory: IMessage[] + apiMessageId: string + overrideConfig?: 
ICommonObject +} + +export interface IPredictionQueueAppServer { + appDataSource: DataSource + componentNodes: IComponentNodes + sseStreamer: IServerSideEventStreamer + telemetry: Telemetry + cachePool: CachePool +} + +export interface IExecuteFlowParams extends IPredictionQueueAppServer { + incomingInput: IncomingInput + chatflow: IChatFlow + chatId: string + baseURL: string + isInternal: boolean + signal?: AbortController + files?: Express.Multer.File[] + isUpsert?: boolean +} + export interface INodeOverrides { [key: string]: { label: string diff --git a/packages/server/src/commands/base.ts b/packages/server/src/commands/base.ts new file mode 100644 index 00000000000..ecf1dcd74b4 --- /dev/null +++ b/packages/server/src/commands/base.ts @@ -0,0 +1,191 @@ +import { Command, Flags } from '@oclif/core' +import path from 'path' +import dotenv from 'dotenv' +import logger from '../utils/logger' + +dotenv.config({ path: path.join(__dirname, '..', '..', '.env'), override: true }) + +enum EXIT_CODE { + SUCCESS = 0, + FAILED = 1 +} + +export abstract class BaseCommand extends Command { + static flags = { + FLOWISE_USERNAME: Flags.string(), + FLOWISE_PASSWORD: Flags.string(), + FLOWISE_FILE_SIZE_LIMIT: Flags.string(), + PORT: Flags.string(), + CORS_ORIGINS: Flags.string(), + IFRAME_ORIGINS: Flags.string(), + DEBUG: Flags.string(), + BLOB_STORAGE_PATH: Flags.string(), + APIKEY_STORAGE_TYPE: Flags.string(), + APIKEY_PATH: Flags.string(), + SECRETKEY_PATH: Flags.string(), + FLOWISE_SECRETKEY_OVERWRITE: Flags.string(), + LOG_PATH: Flags.string(), + LOG_LEVEL: Flags.string(), + TOOL_FUNCTION_BUILTIN_DEP: Flags.string(), + TOOL_FUNCTION_EXTERNAL_DEP: Flags.string(), + NUMBER_OF_PROXIES: Flags.string(), + DISABLE_CHATFLOW_REUSE: Flags.string(), + DATABASE_TYPE: Flags.string(), + DATABASE_PATH: Flags.string(), + DATABASE_PORT: Flags.string(), + DATABASE_HOST: Flags.string(), + DATABASE_NAME: Flags.string(), + DATABASE_USER: Flags.string(), + DATABASE_PASSWORD: Flags.string(), + 
DATABASE_SSL: Flags.string(), + DATABASE_SSL_KEY_BASE64: Flags.string(), + LANGCHAIN_TRACING_V2: Flags.string(), + LANGCHAIN_ENDPOINT: Flags.string(), + LANGCHAIN_API_KEY: Flags.string(), + LANGCHAIN_PROJECT: Flags.string(), + DISABLE_FLOWISE_TELEMETRY: Flags.string(), + MODEL_LIST_CONFIG_JSON: Flags.string(), + STORAGE_TYPE: Flags.string(), + S3_STORAGE_BUCKET_NAME: Flags.string(), + S3_STORAGE_ACCESS_KEY_ID: Flags.string(), + S3_STORAGE_SECRET_ACCESS_KEY: Flags.string(), + S3_STORAGE_REGION: Flags.string(), + S3_ENDPOINT_URL: Flags.string(), + S3_FORCE_PATH_STYLE: Flags.string(), + SHOW_COMMUNITY_NODES: Flags.string(), + MODE: Flags.string(), + WORKER_CONCURRENCY: Flags.string(), + QUEUE_NAME: Flags.string(), + QUEUE_REDIS_EVENT_STREAM_MAX_LEN: Flags.string(), + REDIS_HOST: Flags.string(), + REDIS_PORT: Flags.string(), + REDIS_USERNAME: Flags.string(), + REDIS_PASSWORD: Flags.string(), + REDIS_TLS: Flags.string(), + REDIS_CERT: Flags.string(), + REDIS_KEY: Flags.string(), + REDIS_CA: Flags.string() + } + + protected async stopProcess() { + // Overridden method by child class + } + + protected onTerminate() { + return async () => { + try { + // Shut down the app after timeout if it ever stuck removing pools + setTimeout(async () => { + logger.info('Flowise was forced to shut down after 30 secs') + await this.failExit() + }, 30000) + + await this.stopProcess() + } catch (error) { + logger.error('There was an error shutting down Flowise...', error) + } + } + } + + protected async gracefullyExit() { + process.exit(EXIT_CODE.SUCCESS) + } + + protected async failExit() { + process.exit(EXIT_CODE.FAILED) + } + + async init(): Promise { + await super.init() + + process.on('SIGTERM', this.onTerminate()) + process.on('SIGINT', this.onTerminate()) + + // Prevent throw new Error from crashing the app + // TODO: Get rid of this and send proper error message to ui + process.on('uncaughtException', (err) => { + logger.error('uncaughtException: ', err) + }) + + 
process.on('unhandledRejection', (err) => { + logger.error('unhandledRejection: ', err) + }) + + const { flags } = await this.parse(BaseCommand) + if (flags.PORT) process.env.PORT = flags.PORT + if (flags.CORS_ORIGINS) process.env.CORS_ORIGINS = flags.CORS_ORIGINS + if (flags.IFRAME_ORIGINS) process.env.IFRAME_ORIGINS = flags.IFRAME_ORIGINS + if (flags.DEBUG) process.env.DEBUG = flags.DEBUG + if (flags.NUMBER_OF_PROXIES) process.env.NUMBER_OF_PROXIES = flags.NUMBER_OF_PROXIES + if (flags.DISABLE_CHATFLOW_REUSE) process.env.DISABLE_CHATFLOW_REUSE = flags.DISABLE_CHATFLOW_REUSE + if (flags.SHOW_COMMUNITY_NODES) process.env.SHOW_COMMUNITY_NODES = flags.SHOW_COMMUNITY_NODES + + // Authorization + if (flags.FLOWISE_USERNAME) process.env.FLOWISE_USERNAME = flags.FLOWISE_USERNAME + if (flags.FLOWISE_PASSWORD) process.env.FLOWISE_PASSWORD = flags.FLOWISE_PASSWORD + if (flags.APIKEY_STORAGE_TYPE) process.env.APIKEY_STORAGE_TYPE = flags.APIKEY_STORAGE_TYPE + if (flags.APIKEY_PATH) process.env.APIKEY_PATH = flags.APIKEY_PATH + + // API Configuration + if (flags.FLOWISE_FILE_SIZE_LIMIT) process.env.FLOWISE_FILE_SIZE_LIMIT = flags.FLOWISE_FILE_SIZE_LIMIT + + // Credentials + if (flags.SECRETKEY_PATH) process.env.SECRETKEY_PATH = flags.SECRETKEY_PATH + if (flags.FLOWISE_SECRETKEY_OVERWRITE) process.env.FLOWISE_SECRETKEY_OVERWRITE = flags.FLOWISE_SECRETKEY_OVERWRITE + + // Logs + if (flags.LOG_PATH) process.env.LOG_PATH = flags.LOG_PATH + if (flags.LOG_LEVEL) process.env.LOG_LEVEL = flags.LOG_LEVEL + + // Tool functions + if (flags.TOOL_FUNCTION_BUILTIN_DEP) process.env.TOOL_FUNCTION_BUILTIN_DEP = flags.TOOL_FUNCTION_BUILTIN_DEP + if (flags.TOOL_FUNCTION_EXTERNAL_DEP) process.env.TOOL_FUNCTION_EXTERNAL_DEP = flags.TOOL_FUNCTION_EXTERNAL_DEP + + // Database config + if (flags.DATABASE_TYPE) process.env.DATABASE_TYPE = flags.DATABASE_TYPE + if (flags.DATABASE_PATH) process.env.DATABASE_PATH = flags.DATABASE_PATH + if (flags.DATABASE_PORT) process.env.DATABASE_PORT = 
flags.DATABASE_PORT + if (flags.DATABASE_HOST) process.env.DATABASE_HOST = flags.DATABASE_HOST + if (flags.DATABASE_NAME) process.env.DATABASE_NAME = flags.DATABASE_NAME + if (flags.DATABASE_USER) process.env.DATABASE_USER = flags.DATABASE_USER + if (flags.DATABASE_PASSWORD) process.env.DATABASE_PASSWORD = flags.DATABASE_PASSWORD + if (flags.DATABASE_SSL) process.env.DATABASE_SSL = flags.DATABASE_SSL + if (flags.DATABASE_SSL_KEY_BASE64) process.env.DATABASE_SSL_KEY_BASE64 = flags.DATABASE_SSL_KEY_BASE64 + + // Langsmith tracing + if (flags.LANGCHAIN_TRACING_V2) process.env.LANGCHAIN_TRACING_V2 = flags.LANGCHAIN_TRACING_V2 + if (flags.LANGCHAIN_ENDPOINT) process.env.LANGCHAIN_ENDPOINT = flags.LANGCHAIN_ENDPOINT + if (flags.LANGCHAIN_API_KEY) process.env.LANGCHAIN_API_KEY = flags.LANGCHAIN_API_KEY + if (flags.LANGCHAIN_PROJECT) process.env.LANGCHAIN_PROJECT = flags.LANGCHAIN_PROJECT + + // Telemetry + if (flags.DISABLE_FLOWISE_TELEMETRY) process.env.DISABLE_FLOWISE_TELEMETRY = flags.DISABLE_FLOWISE_TELEMETRY + + // Model list config + if (flags.MODEL_LIST_CONFIG_JSON) process.env.MODEL_LIST_CONFIG_JSON = flags.MODEL_LIST_CONFIG_JSON + + // Storage + if (flags.STORAGE_TYPE) process.env.STORAGE_TYPE = flags.STORAGE_TYPE + if (flags.BLOB_STORAGE_PATH) process.env.BLOB_STORAGE_PATH = flags.BLOB_STORAGE_PATH + if (flags.S3_STORAGE_BUCKET_NAME) process.env.S3_STORAGE_BUCKET_NAME = flags.S3_STORAGE_BUCKET_NAME + if (flags.S3_STORAGE_ACCESS_KEY_ID) process.env.S3_STORAGE_ACCESS_KEY_ID = flags.S3_STORAGE_ACCESS_KEY_ID + if (flags.S3_STORAGE_SECRET_ACCESS_KEY) process.env.S3_STORAGE_SECRET_ACCESS_KEY = flags.S3_STORAGE_SECRET_ACCESS_KEY + if (flags.S3_STORAGE_REGION) process.env.S3_STORAGE_REGION = flags.S3_STORAGE_REGION + if (flags.S3_ENDPOINT_URL) process.env.S3_ENDPOINT_URL = flags.S3_ENDPOINT_URL + if (flags.S3_FORCE_PATH_STYLE) process.env.S3_FORCE_PATH_STYLE = flags.S3_FORCE_PATH_STYLE + + // Queue + if (flags.MODE) process.env.MODE = flags.MODE + if (flags.REDIS_HOST) 
process.env.REDIS_HOST = flags.REDIS_HOST + if (flags.REDIS_PORT) process.env.REDIS_PORT = flags.REDIS_PORT + if (flags.REDIS_USERNAME) process.env.REDIS_USERNAME = flags.REDIS_USERNAME + if (flags.REDIS_PASSWORD) process.env.REDIS_PASSWORD = flags.REDIS_PASSWORD + if (flags.REDIS_TLS) process.env.REDIS_TLS = flags.REDIS_TLS + if (flags.REDIS_CERT) process.env.REDIS_CERT = flags.REDIS_CERT + if (flags.REDIS_KEY) process.env.REDIS_KEY = flags.REDIS_KEY + if (flags.REDIS_CA) process.env.REDIS_CA = flags.REDIS_CA + if (flags.WORKER_CONCURRENCY) process.env.WORKER_CONCURRENCY = flags.WORKER_CONCURRENCY + if (flags.QUEUE_NAME) process.env.QUEUE_NAME = flags.QUEUE_NAME + if (flags.QUEUE_REDIS_EVENT_STREAM_MAX_LEN) process.env.QUEUE_REDIS_EVENT_STREAM_MAX_LEN = flags.QUEUE_REDIS_EVENT_STREAM_MAX_LEN + } +} diff --git a/packages/server/src/commands/start.ts b/packages/server/src/commands/start.ts index 95e45f81448..42e4be3077e 100644 --- a/packages/server/src/commands/start.ts +++ b/packages/server/src/commands/start.ts @@ -1,171 +1,33 @@ -import { Command, Flags } from '@oclif/core' -import path from 'path' import * as Server from '../index' import * as DataSource from '../DataSource' -import dotenv from 'dotenv' import logger from '../utils/logger' +import { BaseCommand } from './base' -dotenv.config({ path: path.join(__dirname, '..', '..', '.env'), override: true }) - -enum EXIT_CODE { - SUCCESS = 0, - FAILED = 1 -} -let processExitCode = EXIT_CODE.SUCCESS +export default class Start extends BaseCommand { + async run(): Promise<void> { + logger.info('Starting Flowise...') + await DataSource.init() + await Server.start() + } -export default class Start extends Command { - static args = [] - static flags = { - FLOWISE_USERNAME: Flags.string(), - FLOWISE_PASSWORD: Flags.string(), - FLOWISE_FILE_SIZE_LIMIT: Flags.string(), - PORT: Flags.string(), - CORS_ORIGINS: Flags.string(), - IFRAME_ORIGINS: Flags.string(), - DEBUG: Flags.string(), - BLOB_STORAGE_PATH: Flags.string(),
APIKEY_STORAGE_TYPE: Flags.string(), - APIKEY_PATH: Flags.string(), - SECRETKEY_PATH: Flags.string(), - FLOWISE_SECRETKEY_OVERWRITE: Flags.string(), - LOG_PATH: Flags.string(), - LOG_LEVEL: Flags.string(), - TOOL_FUNCTION_BUILTIN_DEP: Flags.string(), - TOOL_FUNCTION_EXTERNAL_DEP: Flags.string(), - NUMBER_OF_PROXIES: Flags.string(), - DISABLE_CHATFLOW_REUSE: Flags.string(), - DATABASE_TYPE: Flags.string(), - DATABASE_PATH: Flags.string(), - DATABASE_PORT: Flags.string(), - DATABASE_HOST: Flags.string(), - DATABASE_NAME: Flags.string(), - DATABASE_USER: Flags.string(), - DATABASE_PASSWORD: Flags.string(), - DATABASE_SSL: Flags.string(), - DATABASE_SSL_KEY_BASE64: Flags.string(), - LANGCHAIN_TRACING_V2: Flags.string(), - LANGCHAIN_ENDPOINT: Flags.string(), - LANGCHAIN_API_KEY: Flags.string(), - LANGCHAIN_PROJECT: Flags.string(), - DISABLE_FLOWISE_TELEMETRY: Flags.string(), - MODEL_LIST_CONFIG_JSON: Flags.string(), - STORAGE_TYPE: Flags.string(), - S3_STORAGE_BUCKET_NAME: Flags.string(), - S3_STORAGE_ACCESS_KEY_ID: Flags.string(), - S3_STORAGE_SECRET_ACCESS_KEY: Flags.string(), - S3_STORAGE_REGION: Flags.string(), - S3_ENDPOINT_URL: Flags.string(), - S3_FORCE_PATH_STYLE: Flags.string(), - SHOW_COMMUNITY_NODES: Flags.string() + async catch(error: Error) { + if (error.stack) logger.error(error.stack) + await new Promise((resolve) => { + setTimeout(resolve, 1000) + }) + await this.failExit() } async stopProcess() { - logger.info('Shutting down Flowise...') try { - // Shut down the app after timeout if it ever stuck removing pools - setTimeout(() => { - logger.info('Flowise was forced to shut down after 30 secs') - process.exit(processExitCode) - }, 30000) - - // Removing pools + logger.info(`Shutting down Flowise...`) const serverApp = Server.getInstance() if (serverApp) await serverApp.stopApp() } catch (error) { logger.error('There was an error shutting down Flowise...', error) + await this.failExit() } - process.exit(processExitCode) - } - - async run(): Promise { - 
process.on('SIGTERM', this.stopProcess) - process.on('SIGINT', this.stopProcess) - - // Prevent throw new Error from crashing the app - // TODO: Get rid of this and send proper error message to ui - process.on('uncaughtException', (err) => { - logger.error('uncaughtException: ', err) - }) - - process.on('unhandledRejection', (err) => { - logger.error('unhandledRejection: ', err) - }) - - const { flags } = await this.parse(Start) - - if (flags.PORT) process.env.PORT = flags.PORT - if (flags.CORS_ORIGINS) process.env.CORS_ORIGINS = flags.CORS_ORIGINS - if (flags.IFRAME_ORIGINS) process.env.IFRAME_ORIGINS = flags.IFRAME_ORIGINS - if (flags.DEBUG) process.env.DEBUG = flags.DEBUG - if (flags.NUMBER_OF_PROXIES) process.env.NUMBER_OF_PROXIES = flags.NUMBER_OF_PROXIES - if (flags.DISABLE_CHATFLOW_REUSE) process.env.DISABLE_CHATFLOW_REUSE = flags.DISABLE_CHATFLOW_REUSE - if (flags.SHOW_COMMUNITY_NODES) process.env.SHOW_COMMUNITY_NODES = flags.SHOW_COMMUNITY_NODES - - // Authorization - if (flags.FLOWISE_USERNAME) process.env.FLOWISE_USERNAME = flags.FLOWISE_USERNAME - if (flags.FLOWISE_PASSWORD) process.env.FLOWISE_PASSWORD = flags.FLOWISE_PASSWORD - if (flags.APIKEY_STORAGE_TYPE) process.env.APIKEY_STORAGE_TYPE = flags.APIKEY_STORAGE_TYPE - if (flags.APIKEY_PATH) process.env.APIKEY_PATH = flags.APIKEY_PATH - - // API Configuration - if (flags.FLOWISE_FILE_SIZE_LIMIT) process.env.FLOWISE_FILE_SIZE_LIMIT = flags.FLOWISE_FILE_SIZE_LIMIT - - // Credentials - if (flags.SECRETKEY_PATH) process.env.SECRETKEY_PATH = flags.SECRETKEY_PATH - if (flags.FLOWISE_SECRETKEY_OVERWRITE) process.env.FLOWISE_SECRETKEY_OVERWRITE = flags.FLOWISE_SECRETKEY_OVERWRITE - - // Logs - if (flags.LOG_PATH) process.env.LOG_PATH = flags.LOG_PATH - if (flags.LOG_LEVEL) process.env.LOG_LEVEL = flags.LOG_LEVEL - - // Tool functions - if (flags.TOOL_FUNCTION_BUILTIN_DEP) process.env.TOOL_FUNCTION_BUILTIN_DEP = flags.TOOL_FUNCTION_BUILTIN_DEP - if (flags.TOOL_FUNCTION_EXTERNAL_DEP) 
process.env.TOOL_FUNCTION_EXTERNAL_DEP = flags.TOOL_FUNCTION_EXTERNAL_DEP - - // Database config - if (flags.DATABASE_TYPE) process.env.DATABASE_TYPE = flags.DATABASE_TYPE - if (flags.DATABASE_PATH) process.env.DATABASE_PATH = flags.DATABASE_PATH - if (flags.DATABASE_PORT) process.env.DATABASE_PORT = flags.DATABASE_PORT - if (flags.DATABASE_HOST) process.env.DATABASE_HOST = flags.DATABASE_HOST - if (flags.DATABASE_NAME) process.env.DATABASE_NAME = flags.DATABASE_NAME - if (flags.DATABASE_USER) process.env.DATABASE_USER = flags.DATABASE_USER - if (flags.DATABASE_PASSWORD) process.env.DATABASE_PASSWORD = flags.DATABASE_PASSWORD - if (flags.DATABASE_SSL) process.env.DATABASE_SSL = flags.DATABASE_SSL - if (flags.DATABASE_SSL_KEY_BASE64) process.env.DATABASE_SSL_KEY_BASE64 = flags.DATABASE_SSL_KEY_BASE64 - - // Langsmith tracing - if (flags.LANGCHAIN_TRACING_V2) process.env.LANGCHAIN_TRACING_V2 = flags.LANGCHAIN_TRACING_V2 - if (flags.LANGCHAIN_ENDPOINT) process.env.LANGCHAIN_ENDPOINT = flags.LANGCHAIN_ENDPOINT - if (flags.LANGCHAIN_API_KEY) process.env.LANGCHAIN_API_KEY = flags.LANGCHAIN_API_KEY - if (flags.LANGCHAIN_PROJECT) process.env.LANGCHAIN_PROJECT = flags.LANGCHAIN_PROJECT - - // Telemetry - if (flags.DISABLE_FLOWISE_TELEMETRY) process.env.DISABLE_FLOWISE_TELEMETRY = flags.DISABLE_FLOWISE_TELEMETRY - - // Model list config - if (flags.MODEL_LIST_CONFIG_JSON) process.env.MODEL_LIST_CONFIG_JSON = flags.MODEL_LIST_CONFIG_JSON - - // Storage - if (flags.STORAGE_TYPE) process.env.STORAGE_TYPE = flags.STORAGE_TYPE - if (flags.BLOB_STORAGE_PATH) process.env.BLOB_STORAGE_PATH = flags.BLOB_STORAGE_PATH - if (flags.S3_STORAGE_BUCKET_NAME) process.env.S3_STORAGE_BUCKET_NAME = flags.S3_STORAGE_BUCKET_NAME - if (flags.S3_STORAGE_ACCESS_KEY_ID) process.env.S3_STORAGE_ACCESS_KEY_ID = flags.S3_STORAGE_ACCESS_KEY_ID - if (flags.S3_STORAGE_SECRET_ACCESS_KEY) process.env.S3_STORAGE_SECRET_ACCESS_KEY = flags.S3_STORAGE_SECRET_ACCESS_KEY - if (flags.S3_STORAGE_REGION) 
process.env.S3_STORAGE_REGION = flags.S3_STORAGE_REGION - if (flags.S3_ENDPOINT_URL) process.env.S3_ENDPOINT_URL = flags.S3_ENDPOINT_URL - if (flags.S3_FORCE_PATH_STYLE) process.env.S3_FORCE_PATH_STYLE = flags.S3_FORCE_PATH_STYLE - await (async () => { - try { - logger.info('Starting Flowise...') - await DataSource.init() - await Server.start() - } catch (error) { - logger.error('There was an error starting Flowise...', error) - processExitCode = EXIT_CODE.FAILED - // @ts-ignore - process.emit('SIGINT') - } - })() + await this.gracefullyExit() } } diff --git a/packages/server/src/commands/worker.ts b/packages/server/src/commands/worker.ts new file mode 100644 index 00000000000..963ff1e1c25 --- /dev/null +++ b/packages/server/src/commands/worker.ts @@ -0,0 +1,101 @@ +import logger from '../utils/logger' +import { QueueManager } from '../queue/QueueManager' +import { BaseCommand } from './base' +import { getDataSource } from '../DataSource' +import { Telemetry } from '../utils/telemetry' +import { NodesPool } from '../NodesPool' +import { CachePool } from '../CachePool' +import { QueueEvents, QueueEventsListener } from 'bullmq' +import { AbortControllerPool } from '../AbortControllerPool' + +interface CustomListener extends QueueEventsListener { + abort: (args: { id: string }, id: string) => void +} + +export default class Worker extends BaseCommand { + predictionWorkerId: string + upsertionWorkerId: string + + async run(): Promise { + logger.info('Starting Flowise Worker...') + + const { appDataSource, telemetry, componentNodes, cachePool, abortControllerPool } = await this.prepareData() + + const queueManager = QueueManager.getInstance() + queueManager.setupAllQueues({ + componentNodes, + telemetry, + cachePool, + appDataSource, + abortControllerPool + }) + + /** Prediction */ + const predictionQueue = queueManager.getQueue('prediction') + const predictionQueueName = predictionQueue.getQueueName() + + // pass in concurrency + const predictionWorker = 
predictionQueue.createWorker() + this.predictionWorkerId = predictionWorker.id + logger.info(`Prediction Worker ${this.predictionWorkerId} created`) + + const queueEvents = new QueueEvents(predictionQueueName) + + queueEvents.on('abort', async ({ id }: { id: string }) => { + abortControllerPool.abort(id) + }) + + /** Upsertion */ + const upsertionQueue = queueManager.getQueue('upsert') + const upsertionWorker = upsertionQueue.createWorker() + this.upsertionWorkerId = upsertionWorker.id + logger.info(`Upsertion Worker ${this.upsertionWorkerId} created`) + + // Keep the process running + process.stdin.resume() + } + + async prepareData() { + // Init database + const appDataSource = getDataSource() + await appDataSource.initialize() + await appDataSource.runMigrations({ transaction: 'each' }) + + // Initialize chatflow pool + const abortControllerPool = new AbortControllerPool() + + // Init telemetry + const telemetry = new Telemetry() + + // Initialize nodes pool + const nodesPool = new NodesPool() + await nodesPool.initialize() + + // Initialize cache pool + const cachePool = new CachePool() + + return { appDataSource, telemetry, componentNodes: nodesPool.componentNodes, cachePool, abortControllerPool } + } + + async catch(error: Error) { + if (error.stack) logger.error(error.stack) + await new Promise((resolve) => { + setTimeout(resolve, 1000) + }) + await this.failExit() + } + + async stopProcess() { + try { + logger.info(`Shutting down Flowise Prediction Worker ${this.predictionWorkerId}...`) + logger.info(`Shutting down Flowise Upsertion Worker ${this.upsertionWorkerId}...`) + //const serverApp = Server.getInstance() + //if (serverApp) await serverApp.stopApp() + } catch (error) { + logger.error('There was an error shutting down Flowise Worker...', error) + await this.failExit() + } + + await this.gracefullyExit() + } +} diff --git a/packages/server/src/controllers/chatflows/index.ts b/packages/server/src/controllers/chatflows/index.ts index 
0edaab40fb7..930272cc5e2 100644 --- a/packages/server/src/controllers/chatflows/index.ts +++ b/packages/server/src/controllers/chatflows/index.ts @@ -2,7 +2,7 @@ import { NextFunction, Request, Response } from 'express' import { StatusCodes } from 'http-status-codes' import apiKeyService from '../../services/apikey' import { ChatFlow } from '../../database/entities/ChatFlow' -import { updateRateLimiter } from '../../utils/rateLimit' +import { RateLimiterManager } from '../../utils/rateLimit' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { ChatflowType } from '../../Interface' import chatflowsService from '../../services/chatflows' @@ -130,7 +130,8 @@ const updateChatflow = async (req: Request, res: Response, next: NextFunction) = Object.assign(updateChatFlow, body) updateChatFlow.id = chatflow.id - updateRateLimiter(updateChatFlow) + const rateLimiterManager = RateLimiterManager.getInstance() + await rateLimiterManager.updateRateLimiter(updateChatFlow) const apiResponse = await chatflowsService.updateChatflow(chatflow, updateChatFlow) return res.json(apiResponse) diff --git a/packages/server/src/controllers/documentstore/index.ts b/packages/server/src/controllers/documentstore/index.ts index 2499cb301df..84caf9dcc67 100644 --- a/packages/server/src/controllers/documentstore/index.ts +++ b/packages/server/src/controllers/documentstore/index.ts @@ -4,15 +4,7 @@ import documentStoreService from '../../services/documentstore' import { DocumentStore } from '../../database/entities/DocumentStore' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { DocumentStoreDTO } from '../../Interface' -import { getRateLimiter } from '../../utils/rateLimit' - -const getRateLimiterMiddleware = async (req: Request, res: Response, next: NextFunction) => { - try { - return getRateLimiter(req, res, next) - } catch (error) { - next(error) - } -} +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' const 
createDocumentStore = async (req: Request, res: Response, next: NextFunction) => { try { @@ -90,8 +82,14 @@ const getDocumentStoreFileChunks = async (req: Request, res: Response, next: Nex `Error: documentStoreController.getDocumentStoreFileChunks - fileId not provided!` ) } + const appDataSource = getRunningExpressApp().AppDataSource const page = req.params.pageNo ? parseInt(req.params.pageNo) : 1 - const apiResponse = await documentStoreService.getDocumentStoreFileChunks(req.params.storeId, req.params.fileId, page) + const apiResponse = await documentStoreService.getDocumentStoreFileChunks( + appDataSource, + req.params.storeId, + req.params.fileId, + page + ) return res.json(apiResponse) } catch (error) { next(error) @@ -171,6 +169,7 @@ const editDocumentStoreFileChunk = async (req: Request, res: Response, next: Nex const saveProcessingLoader = async (req: Request, res: Response, next: NextFunction) => { try { + const appServer = getRunningExpressApp() if (typeof req.body === 'undefined') { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, @@ -178,7 +177,7 @@ const saveProcessingLoader = async (req: Request, res: Response, next: NextFunct ) } const body = req.body - const apiResponse = await documentStoreService.saveProcessingLoader(body) + const apiResponse = await documentStoreService.saveProcessingLoader(appServer.AppDataSource, body) return res.json(apiResponse) } catch (error) { next(error) @@ -201,7 +200,7 @@ const processLoader = async (req: Request, res: Response, next: NextFunction) => } const docLoaderId = req.params.loaderId const body = req.body - const apiResponse = await documentStoreService.processLoader(body, docLoaderId) + const apiResponse = await documentStoreService.processLoaderMiddleware(body, docLoaderId) return res.json(apiResponse) } catch (error) { next(error) @@ -264,7 +263,9 @@ const previewFileChunks = async (req: Request, res: Response, next: NextFunction } const body = req.body body.preview = true - const apiResponse 
= await documentStoreService.previewChunks(body) + const appDataSource = getRunningExpressApp().AppDataSource + const componentNodes = getRunningExpressApp().nodesPool.componentNodes + const apiResponse = await documentStoreService.previewChunks(appDataSource, componentNodes, body) return res.json(apiResponse) } catch (error) { next(error) @@ -286,7 +287,7 @@ const insertIntoVectorStore = async (req: Request, res: Response, next: NextFunc throw new Error('Error: documentStoreController.insertIntoVectorStore - body not provided!') } const body = req.body - const apiResponse = await documentStoreService.insertIntoVectorStore(body) + const apiResponse = await documentStoreService.insertIntoVectorStoreMiddleware(body) return res.json(DocumentStoreDTO.fromEntity(apiResponse)) } catch (error) { next(error) @@ -327,7 +328,9 @@ const saveVectorStoreConfig = async (req: Request, res: Response, next: NextFunc throw new Error('Error: documentStoreController.saveVectorStoreConfig - body not provided!') } const body = req.body - const apiResponse = await documentStoreService.saveVectorStoreConfig(body) + const appDataSource = getRunningExpressApp().AppDataSource + const componentNodes = getRunningExpressApp().nodesPool.componentNodes + const apiResponse = await documentStoreService.saveVectorStoreConfig(appDataSource, componentNodes, body) return res.json(apiResponse) } catch (error) { next(error) @@ -449,7 +452,6 @@ export default { queryVectorStore, deleteVectorStoreFromStore, updateVectorStoreConfigOnly, - getRateLimiterMiddleware, upsertDocStoreMiddleware, refreshDocStoreMiddleware, saveProcessingLoader, diff --git a/packages/server/src/controllers/internal-predictions/index.ts b/packages/server/src/controllers/internal-predictions/index.ts index b251eecb610..3d2967bdb4b 100644 --- a/packages/server/src/controllers/internal-predictions/index.ts +++ b/packages/server/src/controllers/internal-predictions/index.ts @@ -2,6 +2,7 @@ import { Request, Response, NextFunction } from 
'express' import { utilBuildChatflow } from '../../utils/buildChatflow' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { getErrorMessage } from '../../errors/utils' +import { MODE } from '../../Interface' // Send input message and get prediction result (Internal) const createInternalPrediction = async (req: Request, res: Response, next: NextFunction) => { @@ -11,7 +12,7 @@ const createInternalPrediction = async (req: Request, res: Response, next: NextF return } else { const apiResponse = await utilBuildChatflow(req, true) - return res.json(apiResponse) + if (apiResponse) return res.json(apiResponse) } } catch (error) { next(error) @@ -22,6 +23,7 @@ const createInternalPrediction = async (req: Request, res: Response, next: NextF const createAndStreamInternalPrediction = async (req: Request, res: Response, next: NextFunction) => { const chatId = req.body.chatId const sseStreamer = getRunningExpressApp().sseStreamer + const redisSubscriber = getRunningExpressApp().redisSubscriber try { sseStreamer.addClient(chatId, res) res.setHeader('Content-Type', 'text/event-stream') @@ -30,6 +32,11 @@ const createAndStreamInternalPrediction = async (req: Request, res: Response, ne res.setHeader('X-Accel-Buffering', 'no') //nginx config: https://serverfault.com/a/801629 res.flushHeaders() + if (process.env.MODE === MODE.QUEUE) { + console.log(`Subscribing to chatId ${chatId}:`, redisSubscriber) + redisSubscriber.subscribe(chatId) + } + const apiResponse = await utilBuildChatflow(req, true) sseStreamer.streamMetadataEvent(apiResponse.chatId, apiResponse) } catch (error) { diff --git a/packages/server/src/controllers/predictions/index.ts b/packages/server/src/controllers/predictions/index.ts index cda48d17bbb..263d2ed46d7 100644 --- a/packages/server/src/controllers/predictions/index.ts +++ b/packages/server/src/controllers/predictions/index.ts @@ -1,5 +1,5 @@ import { Request, Response, NextFunction } from 'express' -import { getRateLimiter } from 
'../../utils/rateLimit' +import { RateLimiterManager } from '../../utils/rateLimit' import chatflowsService from '../../services/chatflows' import logger from '../../utils/logger' import predictionsServices from '../../services/predictions' @@ -8,6 +8,7 @@ import { StatusCodes } from 'http-status-codes' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { v4 as uuidv4 } from 'uuid' import { getErrorMessage } from '../../errors/utils' +import { MODE } from '../../Interface' // Send input message and get prediction result (External) const createPrediction = async (req: Request, res: Response, next: NextFunction) => { @@ -55,6 +56,8 @@ const createPrediction = async (req: Request, res: Response, next: NextFunction) const isStreamingRequested = req.body.streaming === 'true' || req.body.streaming === true if (streamable?.isStreaming && isStreamingRequested) { const sseStreamer = getRunningExpressApp().sseStreamer + const redisSubscriber = getRunningExpressApp().redisSubscriber + let chatId = req.body.chatId if (!req.body.chatId) { chatId = req.body.chatId ?? req.body.overrideConfig?.sessionId ?? 
uuidv4() @@ -68,8 +71,13 @@ const createPrediction = async (req: Request, res: Response, next: NextFunction) res.setHeader('X-Accel-Buffering', 'no') //nginx config: https://serverfault.com/a/801629 res.flushHeaders() + if (process.env.MODE === MODE.QUEUE) { + console.log(`Subscribing to chatId ${chatId}:`, redisSubscriber) + redisSubscriber.subscribe(chatId) + } + const apiResponse = await predictionsServices.buildChatflow(req) - sseStreamer.streamMetadataEvent(apiResponse.chatId, apiResponse) + if (apiResponse) sseStreamer.streamMetadataEvent(apiResponse.chatId, apiResponse) } catch (error) { if (chatId) { sseStreamer.streamErrorEvent(chatId, getErrorMessage(error)) @@ -80,7 +88,7 @@ const createPrediction = async (req: Request, res: Response, next: NextFunction) } } else { const apiResponse = await predictionsServices.buildChatflow(req) - return res.json(apiResponse) + if (apiResponse) return res.json(apiResponse) } } else { const isStreamingRequested = req.body.streaming === 'true' || req.body.streaming === true @@ -96,7 +104,7 @@ const createPrediction = async (req: Request, res: Response, next: NextFunction) const getRateLimiterMiddleware = async (req: Request, res: Response, next: NextFunction) => { try { - return getRateLimiter(req, res, next) + return RateLimiterManager.getInstance().getRateLimiter()(req, res, next) } catch (error) { next(error) } diff --git a/packages/server/src/controllers/vectors/index.ts b/packages/server/src/controllers/vectors/index.ts index 5d10bb68b1a..cd2e22eaf0d 100644 --- a/packages/server/src/controllers/vectors/index.ts +++ b/packages/server/src/controllers/vectors/index.ts @@ -1,10 +1,10 @@ import { Request, Response, NextFunction } from 'express' import vectorsService from '../../services/vectors' -import { getRateLimiter } from '../../utils/rateLimit' +import { RateLimiterManager } from '../../utils/rateLimit' const getRateLimiterMiddleware = async (req: Request, res: Response, next: NextFunction) => { try { - return 
getRateLimiter(req, res, next) + return RateLimiterManager.getInstance().getRateLimiter()(req, res, next) } catch (error) { next(error) } diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 3b6742f6f59..3bc6e0948f2 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -4,17 +4,16 @@ import path from 'path' import cors from 'cors' import http from 'http' import basicAuth from 'express-basic-auth' -import { Server } from 'socket.io' import { DataSource } from 'typeorm' -import { IChatFlow } from './Interface' +import { MODE } from './Interface' import { getNodeModulesPackagePath, getEncryptionKey } from './utils' import logger, { expressRequestLogger } from './utils/logger' import { getDataSource } from './DataSource' import { NodesPool } from './NodesPool' import { ChatFlow } from './database/entities/ChatFlow' -import { ChatflowPool } from './ChatflowPool' import { CachePool } from './CachePool' -import { initializeRateLimiter } from './utils/rateLimit' +import { AbortControllerPool } from './AbortControllerPool' +import { RateLimiterManager } from './utils/rateLimit' import { getAPIKeys } from './utils/apiKey' import { sanitizeMiddleware, getCorsOptions, getAllowedIframeOrigins } from './utils/XSS' import { Telemetry } from './utils/telemetry' @@ -25,25 +24,22 @@ import { validateAPIKey } from './utils/validateKey' import { IMetricsProvider } from './Interface.Metrics' import { Prometheus } from './metrics/Prometheus' import { OpenTelemetry } from './metrics/OpenTelemetry' +import { QueueManager } from './queue/QueueManager' +import { RedisEventSubscriber } from './queue/RedisEventSubscriber' import 'global-agent/bootstrap' -declare global { - namespace Express { - interface Request { - io?: Server - } - } -} - export class App { app: express.Application nodesPool: NodesPool - chatflowPool: ChatflowPool + abortControllerPool: AbortControllerPool cachePool: CachePool telemetry: Telemetry + rateLimiterManager: 
RateLimiterManager AppDataSource: DataSource = getDataSource() sseStreamer: SSEStreamer metricsProvider: IMetricsProvider + queueManager: QueueManager + redisSubscriber: RedisEventSubscriber constructor() { this.app = express() @@ -62,8 +58,8 @@ export class App { this.nodesPool = new NodesPool() await this.nodesPool.initialize() - // Initialize chatflow pool - this.chatflowPool = new ChatflowPool() + // Initialize abort controllers pool + this.abortControllerPool = new AbortControllerPool() // Initialize API keys await getAPIKeys() @@ -72,21 +68,39 @@ export class App { await getEncryptionKey() // Initialize Rate Limit - const AllChatFlow: IChatFlow[] = await getAllChatFlow() - await initializeRateLimiter(AllChatFlow) + this.rateLimiterManager = RateLimiterManager.getInstance() + await this.rateLimiterManager.initializeRateLimiters(await getDataSource().getRepository(ChatFlow).find()) // Initialize cache pool this.cachePool = new CachePool() // Initialize telemetry this.telemetry = new Telemetry() + + // Initialize SSE Streamer + this.sseStreamer = new SSEStreamer() + + // Init Queues + if (process.env.MODE === MODE.QUEUE) { + this.queueManager = QueueManager.getInstance() + this.queueManager.setupAllQueues({ + componentNodes: this.nodesPool.componentNodes, + telemetry: this.telemetry, + cachePool: this.cachePool, + appDataSource: this.AppDataSource, + abortControllerPool: this.abortControllerPool + }) + this.redisSubscriber = new RedisEventSubscriber(this.sseStreamer) + await this.redisSubscriber.connect() + } + logger.info('📦 [server]: Data Source has been initialized!') } catch (error) { logger.error('❌ [server]: Error during Data Source initialization:', error) } } - async config(socketIO?: Server) { + async config() { // Limit is needed to allow sending/receiving base64 encoded string const flowise_file_size_limit = process.env.FLOWISE_FILE_SIZE_LIMIT || '50mb' this.app.use(express.json({ limit: flowise_file_size_limit })) @@ -118,12 +132,6 @@ export class 
App { // Add the sanitizeMiddleware to guard against XSS this.app.use(sanitizeMiddleware) - // Make io accessible to our router on req.io - this.app.use((req, res, next) => { - req.io = socketIO - next() - }) - const whitelistURLs = [ '/api/v1/verify/apikey/', '/api/v1/chatflows/apikey/', @@ -232,7 +240,6 @@ export class App { } this.app.use('/api/v1', flowiseApiV1Router) - this.sseStreamer = new SSEStreamer(this.app) // ---------------------------------------- // Configure number of proxies in Host Environment @@ -267,6 +274,9 @@ export class App { try { const removePromises: any[] = [] removePromises.push(this.telemetry.flush()) + if (this.queueManager) { + removePromises.push(this.redisSubscriber.disconnect()) + } await Promise.all(removePromises) } catch (e) { logger.error(`❌[server]: Flowise Server shut down error: ${e}`) @@ -276,10 +286,6 @@ export class App { let serverApp: App | undefined -export async function getAllChatFlow(): Promise { - return await getDataSource().getRepository(ChatFlow).find() -} - export async function start(): Promise { serverApp = new App() @@ -287,12 +293,8 @@ export async function start(): Promise { const port = parseInt(process.env.PORT || '', 10) || 3000 const server = http.createServer(serverApp.app) - const io = new Server(server, { - cors: getCorsOptions() - }) - await serverApp.initDatabase() - await serverApp.config(io) + await serverApp.config() server.listen(port, host, () => { logger.info(`⚡️ [server]: Flowise Server is listening at ${host ? 'http://' + host : ''}:${port}`) diff --git a/packages/server/src/queue/BaseQueue.ts b/packages/server/src/queue/BaseQueue.ts new file mode 100644 index 00000000000..6d50c9cfedd --- /dev/null +++ b/packages/server/src/queue/BaseQueue.ts @@ -0,0 +1,70 @@ +import { Queue, Worker, Job, QueueEvents, RedisOptions } from 'bullmq' +import { v4 as uuidv4 } from 'uuid' + +const QUEUE_REDIS_EVENT_STREAM_MAX_LEN = process.env.QUEUE_REDIS_EVENT_STREAM_MAX_LEN + ? 
parseInt(process.env.QUEUE_REDIS_EVENT_STREAM_MAX_LEN) + : 10000 +const WORKER_CONCURRENCY = process.env.WORKER_CONCURRENCY ? parseInt(process.env.WORKER_CONCURRENCY) : 300 + +export abstract class BaseQueue { + protected queue: Queue + protected queueEvents: QueueEvents + protected connection: RedisOptions + + constructor(queueName: string, connection: RedisOptions) { + this.connection = connection + this.queue = new Queue(queueName, { + connection: this.connection, + streams: { events: { maxLen: QUEUE_REDIS_EVENT_STREAM_MAX_LEN } } + }) + this.queueEvents = new QueueEvents(queueName, { connection: this.connection }) + } + + abstract processJob(data: any): Promise + + abstract getQueueName(): string + + public async addJob(jobData: any): Promise { + const jobId = jobData.id || uuidv4() + return await this.queue.add(jobId, jobData, { removeOnFail: true }) + } + + public createWorker(concurrency: number = WORKER_CONCURRENCY): Worker { + return new Worker( + this.queue.name, + async (job: Job) => { + console.log(`Processing job ${job.id} in ${this.queue.name}`) + const result = await this.processJob(job.data) + console.log(`Completed job ${job.id} in ${this.queue.name}`) + return result + }, + { + connection: this.connection, + concurrency + } + ) + } + + public async getJobs(): Promise { + return await this.queue.getJobs() + } + + public async getJobCounts(): Promise<{ [index: string]: number }> { + return await this.queue.getJobCounts() + } + + public async getJobByName(jobName: string): Promise { + const jobs = await this.queue.getJobs() + const job = jobs.find((job) => job.name === jobName) + if (!job) throw new Error(`Job name ${jobName} not found`) + return job + } + + public getQueueEvents(): QueueEvents { + return this.queueEvents + } + + public async clearQueue(): Promise { + await this.queue.obliterate({ force: true }) + } +} diff --git a/packages/server/src/queue/PredictionQueue.ts b/packages/server/src/queue/PredictionQueue.ts new file mode 100644 index 
00000000000..3bf366ac65d --- /dev/null +++ b/packages/server/src/queue/PredictionQueue.ts @@ -0,0 +1,62 @@ +import dotenv from 'dotenv' +import { DataSource } from 'typeorm' +import { executeFlow } from '../utils/buildChatflow' +import { IComponentNodes, IExecuteFlowParams } from '../Interface' +import { Telemetry } from '../utils/telemetry' +import { CachePool } from '../CachePool' +import { RedisEventPublisher } from './RedisEventPublisher' +import { AbortControllerPool } from '../AbortControllerPool' +import { BaseQueue } from './BaseQueue' +import { RedisOptions } from 'bullmq' + +dotenv.config() + +interface PredictionQueueOptions { + appDataSource: DataSource + telemetry: Telemetry + cachePool: CachePool + componentNodes: IComponentNodes + abortControllerPool: AbortControllerPool +} + +export class PredictionQueue extends BaseQueue { + private componentNodes: IComponentNodes + private telemetry: Telemetry + private cachePool: CachePool + private appDataSource: DataSource + private abortControllerPool: AbortControllerPool + private redisPublisher: RedisEventPublisher + private queueName: string + + constructor(name: string, connection: RedisOptions, options: PredictionQueueOptions) { + super(name, connection) + this.queueName = name + this.componentNodes = options.componentNodes || {} + this.telemetry = options.telemetry + this.cachePool = options.cachePool + this.appDataSource = options.appDataSource + this.abortControllerPool = options.abortControllerPool + this.redisPublisher = new RedisEventPublisher() + this.redisPublisher.connect() + } + + public getQueueName() { + return this.queueName + } + + async processJob(data: IExecuteFlowParams) { + if (this.appDataSource) data.appDataSource = this.appDataSource + if (this.telemetry) data.telemetry = this.telemetry + if (this.cachePool) data.cachePool = this.cachePool + if (this.componentNodes) data.componentNodes = this.componentNodes + if (this.redisPublisher) data.sseStreamer = this.redisPublisher + + if 
(this.abortControllerPool) { + const signal = new AbortController() + this.abortControllerPool.add(`${data.chatflow.id}_${data.chatId}`, signal) + data.signal = signal + } + + return await executeFlow(data) + } +} diff --git a/packages/server/src/queue/QueueManager.ts b/packages/server/src/queue/QueueManager.ts new file mode 100644 index 00000000000..6e72692639a --- /dev/null +++ b/packages/server/src/queue/QueueManager.ts @@ -0,0 +1,103 @@ +import dotenv from 'dotenv' +import { BaseQueue } from './BaseQueue' +import { PredictionQueue } from './PredictionQueue' +import { UpsertQueue } from './UpsertQueue' +import { IComponentNodes } from '../Interface' +import { Telemetry } from '../utils/telemetry' +import { CachePool } from '../CachePool' +import { DataSource } from 'typeorm' +import { AbortControllerPool } from '../AbortControllerPool' +import { RedisOptions } from 'bullmq' + +dotenv.config() + +const QUEUE_NAME = process.env.QUEUE_NAME || 'flowise-queue' + +type QUEUE_TYPE = 'prediction' | 'upsert' + +export class QueueManager { + private static instance: QueueManager + private queues: Map = new Map() + private connection: RedisOptions + + private constructor() { + this.connection = { + host: process.env.REDIS_HOST || 'localhost', + port: parseInt(process.env.REDIS_PORT || '6379'), + username: process.env.REDIS_USERNAME || undefined, + password: process.env.REDIS_PASSWORD || undefined, + tls: + process.env.REDIS_TLS === 'true' + ? { + cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined, + key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined, + ca: process.env.REDIS_CA ? 
Buffer.from(process.env.REDIS_CA, 'base64') : undefined + } + : undefined + } + } + + public static getInstance(): QueueManager { + if (!QueueManager.instance) { + QueueManager.instance = new QueueManager() + } + return QueueManager.instance + } + + public registerQueue(name: string, queue: BaseQueue) { + this.queues.set(name, queue) + } + + public getConnection() { + return this.connection + } + + public getQueue(name: QUEUE_TYPE): BaseQueue { + const queue = this.queues.get(name) + if (!queue) throw new Error(`Queue ${name} not found`) + return queue + } + + public async getAllJobCounts(): Promise<{ [queueName: string]: { [status: string]: number } }> { + const counts: { [queueName: string]: { [status: string]: number } } = {} + + for (const [name, queue] of this.queues) { + counts[name] = await queue.getJobCounts() + } + + return counts + } + + public setupAllQueues({ + componentNodes, + telemetry, + cachePool, + appDataSource, + abortControllerPool + }: { + componentNodes: IComponentNodes + telemetry: Telemetry + cachePool: CachePool + appDataSource: DataSource + abortControllerPool: AbortControllerPool + }) { + const predictionQueueName = `${QUEUE_NAME}-prediction` + const predictionQueue = new PredictionQueue(predictionQueueName, this.connection, { + componentNodes, + telemetry, + cachePool, + appDataSource, + abortControllerPool + }) + this.registerQueue('prediction', predictionQueue) + + const upsertionQueueName = `${QUEUE_NAME}-upsertion` + const upsertionQueue = new UpsertQueue(upsertionQueueName, this.connection, { + componentNodes, + telemetry, + cachePool, + appDataSource + }) + this.registerQueue('upsert', upsertionQueue) + } +} diff --git a/packages/server/src/queue/RedisEventPublisher.ts b/packages/server/src/queue/RedisEventPublisher.ts new file mode 100644 index 00000000000..86b97d303f5 --- /dev/null +++ b/packages/server/src/queue/RedisEventPublisher.ts @@ -0,0 +1,247 @@ +import { IServerSideEventStreamer } from 'flowise-components' +import { 
createClient } from 'redis' + +export class RedisEventPublisher implements IServerSideEventStreamer { + private redisPublisher: ReturnType + + constructor() { + this.redisPublisher = createClient() + } + + async connect() { + await this.redisPublisher.connect() + console.log('Redis publisher connected.') + } + + streamCustomEvent(chatId: string, eventType: string, data: any) { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType, + data + }) + ) + } catch (error) { + console.error('Error streaming custom event:', error) + } + } + + streamStartEvent(chatId: string, data: string) { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'start', + data + }) + ) + } catch (error) { + console.error('Error streaming start event:', error) + } + } + + streamTokenEvent(chatId: string, data: string) { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'token', + data + }) + ) + } catch (error) { + console.error('Error streaming token event:', error) + } + } + + streamSourceDocumentsEvent(chatId: string, data: any) { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'sourceDocuments', + data + }) + ) + } catch (error) { + console.error('Error streaming sourceDocuments event:', error) + } + } + + streamArtifactsEvent(chatId: string, data: any) { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'artifacts', + data + }) + ) + } catch (error) { + console.error('Error streaming artifacts event:', error) + } + } + + streamUsedToolsEvent(chatId: string, data: any) { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'usedTools', + data + }) + ) + } catch (error) { + console.error('Error streaming usedTools event:', error) + } + } + + streamFileAnnotationsEvent(chatId: string, data: any) { + try { + this.redisPublisher.publish( + chatId, + 
JSON.stringify({ + chatId, + eventType: 'fileAnnotations', + data + }) + ) + } catch (error) { + console.error('Error streaming fileAnnotations event:', error) + } + } + + streamToolEvent(chatId: string, data: any): void { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'tool', + data + }) + ) + } catch (error) { + console.error('Error streaming tool event:', error) + } + } + + streamAgentReasoningEvent(chatId: string, data: any): void { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'agentReasoning', + data + }) + ) + } catch (error) { + console.error('Error streaming agentReasoning event:', error) + } + } + + streamNextAgentEvent(chatId: string, data: any): void { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'nextAgent', + data + }) + ) + } catch (error) { + console.error('Error streaming nextAgent event:', error) + } + } + + streamActionEvent(chatId: string, data: any): void { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'action', + data + }) + ) + } catch (error) { + console.error('Error streaming action event:', error) + } + } + + streamAbortEvent(chatId: string): void { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'abort', + data: '[DONE]' + }) + ) + } catch (error) { + console.error('Error streaming abort event:', error) + } + } + + streamEndEvent(_: string) { + // placeholder for future use + } + + streamErrorEvent(chatId: string, msg: string) { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + eventType: 'error', + data: msg + }) + ) + } catch (error) { + console.error('Error streaming error event:', error) + } + } + + streamMetadataEvent(chatId: string, apiResponse: any) { + try { + const metadataJson: any = {} + if (apiResponse.chatId) { + metadataJson['chatId'] = apiResponse.chatId + } + if 
(apiResponse.chatMessageId) { + metadataJson['chatMessageId'] = apiResponse.chatMessageId + } + if (apiResponse.question) { + metadataJson['question'] = apiResponse.question + } + if (apiResponse.sessionId) { + metadataJson['sessionId'] = apiResponse.sessionId + } + if (apiResponse.memoryType) { + metadataJson['memoryType'] = apiResponse.memoryType + } + if (Object.keys(metadataJson).length > 0) { + this.streamCustomEvent(chatId, 'metadata', metadataJson) + } + } catch (error) { + console.error('Error streaming metadata event:', error) + } + } + + async disconnect() { + if (this.redisPublisher) { + await this.redisPublisher.quit() + console.log('Redis publisher disconnected.') + } + } +} diff --git a/packages/server/src/queue/RedisEventSubscriber.ts b/packages/server/src/queue/RedisEventSubscriber.ts new file mode 100644 index 00000000000..1c3b0c24b9d --- /dev/null +++ b/packages/server/src/queue/RedisEventSubscriber.ts @@ -0,0 +1,97 @@ +import { createClient } from 'redis' +import { SSEStreamer } from '../utils/SSEStreamer' + +export class RedisEventSubscriber { + private redisSubscriber: ReturnType + private sseStreamer: SSEStreamer + private subscribedChannels: Set = new Set() + + constructor(sseStreamer: SSEStreamer) { + this.redisSubscriber = createClient() + this.sseStreamer = sseStreamer + } + + async connect() { + await this.redisSubscriber.connect() + console.log('Redis subscriber connected.') + } + + subscribe(channel: string) { + // Subscribe to the Redis channel for job events + if (!this.redisSubscriber) { + throw new Error('Redis subscriber not connected.') + } + + // Check if already subscribed + if (this.subscribedChannels.has(channel)) { + console.log(`Already subscribed to Redis channel: ${channel}`) + return // Prevent duplicate subscription + } + + this.redisSubscriber.subscribe(channel, (message) => { + this.handleEvent(message) + }) + + // Mark the channel as subscribed + this.subscribedChannels.add(channel) + console.log(`Subscribed to Redis 
channel: ${channel}`) + } + + private handleEvent(message: string) { + // Parse the message from Redis + const event = JSON.parse(message) + const { eventType, chatId, data } = event + + // Stream the event to the client + switch (eventType) { + case 'start': + this.sseStreamer.streamStartEvent(chatId, data) + break + case 'token': + this.sseStreamer.streamTokenEvent(chatId, data) + break + case 'sourceDocuments': + this.sseStreamer.streamSourceDocumentsEvent(chatId, data) + break + case 'artifacts': + this.sseStreamer.streamArtifactsEvent(chatId, data) + break + case 'usedTools': + this.sseStreamer.streamUsedToolsEvent(chatId, data) + break + case 'fileAnnotations': + this.sseStreamer.streamFileAnnotationsEvent(chatId, data) + break + case 'tool': + this.sseStreamer.streamToolEvent(chatId, data) + break + case 'agentReasoning': + this.sseStreamer.streamAgentReasoningEvent(chatId, data) + break + case 'nextAgent': + this.sseStreamer.streamNextAgentEvent(chatId, data) + break + case 'action': + this.sseStreamer.streamActionEvent(chatId, data) + break + case 'abort': + this.sseStreamer.streamAbortEvent(chatId) + break + case 'error': + this.sseStreamer.streamErrorEvent(chatId, data) + break + case 'metadata': + this.sseStreamer.streamMetadataEvent(chatId, data) + break + default: + console.log('Unknown event type:', eventType) + } + } + + async disconnect() { + if (this.redisSubscriber) { + await this.redisSubscriber.quit() + console.log('Redis subscriber disconnected.') + } + } +} diff --git a/packages/server/src/queue/UpsertQueue.ts b/packages/server/src/queue/UpsertQueue.ts new file mode 100644 index 00000000000..4f3d4d42b13 --- /dev/null +++ b/packages/server/src/queue/UpsertQueue.ts @@ -0,0 +1,64 @@ +import dotenv from 'dotenv' +import { DataSource } from 'typeorm' +import { IComponentNodes, IExecuteDocStoreUpsert, IExecuteFlowParams, IExecuteProcessLoader, IExecuteVectorStoreInsert } from '../Interface' +import { Telemetry } from '../utils/telemetry' +import { 
CachePool } from '../CachePool' +import { BaseQueue } from './BaseQueue' +import { executeUpsert } from '../utils/upsertVector' +import { executeDocStoreUpsert, insertIntoVectorStore, processLoader } from '../services/documentstore' +import { RedisOptions } from 'bullmq' + +dotenv.config() + +interface UpsertQueueOptions { + appDataSource: DataSource + telemetry: Telemetry + cachePool: CachePool + componentNodes: IComponentNodes +} + +export class UpsertQueue extends BaseQueue { + private componentNodes: IComponentNodes + private telemetry: Telemetry + private cachePool: CachePool + private appDataSource: DataSource + private queueName: string + + constructor(name: string, connection: RedisOptions, options: UpsertQueueOptions) { + super(name, connection) + this.queueName = name + this.componentNodes = options.componentNodes || {} + this.telemetry = options.telemetry + this.cachePool = options.cachePool + this.appDataSource = options.appDataSource + } + + public getQueueName() { + return this.queueName + } + + async processJob(data: IExecuteFlowParams | IExecuteDocStoreUpsert | IExecuteProcessLoader | IExecuteVectorStoreInsert) { + if (this.appDataSource) data.appDataSource = this.appDataSource + if (this.telemetry) data.telemetry = this.telemetry + if (this.cachePool) (data as any).cachePool = this.cachePool + if (this.componentNodes) data.componentNodes = this.componentNodes + + // document-store/loader/process/:loaderId + if (Object.prototype.hasOwnProperty.call(data, 'isProcessWithoutUpsert')) { + return await processLoader(data as IExecuteProcessLoader) + } + + // document-store/vectorstore/insert/:loaderId + if (Object.prototype.hasOwnProperty.call(data, 'isVectorStoreInsert')) { + return await insertIntoVectorStore(data as IExecuteVectorStoreInsert) + } + + // document-store/upsert/:storeId + if (Object.prototype.hasOwnProperty.call(data, 'storeId')) { + return await executeDocStoreUpsert(data as IExecuteDocStoreUpsert) + } + + // upsert-vector/:chatflowid + 
return await executeUpsert(data as IExecuteFlowParams) + } +} diff --git a/packages/server/src/routes/predictions/index.ts b/packages/server/src/routes/predictions/index.ts index ca192c89fb2..993eea78d93 100644 --- a/packages/server/src/routes/predictions/index.ts +++ b/packages/server/src/routes/predictions/index.ts @@ -1,13 +1,9 @@ import express from 'express' -import multer from 'multer' import predictionsController from '../../controllers/predictions' -import { getUploadPath } from '../../utils' const router = express.Router() -const upload = multer({ dest: getUploadPath() }) - // CREATE -router.post(['/', '/:id'], upload.array('files'), predictionsController.getRateLimiterMiddleware, predictionsController.createPrediction) +router.post(['/', '/:id'], predictionsController.getRateLimiterMiddleware, predictionsController.createPrediction) export default router diff --git a/packages/server/src/services/chat-messages/index.ts b/packages/server/src/services/chat-messages/index.ts index 621b37d1a72..d524e6a350a 100644 --- a/packages/server/src/services/chat-messages/index.ts +++ b/packages/server/src/services/chat-messages/index.ts @@ -1,6 +1,6 @@ import { DeleteResult, FindOptionsWhere } from 'typeorm' import { StatusCodes } from 'http-status-codes' -import { ChatMessageRatingType, ChatType, IChatMessage } from '../../Interface' +import { ChatMessageRatingType, ChatType, IChatMessage, MODE } from '../../Interface' import { utilGetChatMessage } from '../../utils/getChatMessage' import { utilAddChatMessage } from '../../utils/addChatMesage' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' @@ -10,6 +10,7 @@ import logger from '../../utils/logger' import { ChatMessage } from '../../database/entities/ChatMessage' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' +import { QueueEventsProducer } from 'bullmq' // Add chatmessages for chatflowid const createChatMessage = 
async (chatMessage: Partial) => { @@ -160,16 +161,20 @@ const removeChatMessagesByMessageIds = async ( const abortChatMessage = async (chatId: string, chatflowid: string) => { try { const appServer = getRunningExpressApp() + const id = `${chatflowid}_${chatId}` - const endingNodeData = appServer.chatflowPool.activeChatflows[`${chatflowid}_${chatId}`]?.endingNodeData as any - - if (endingNodeData && endingNodeData.signal) { - try { - endingNodeData.signal.abort() - await appServer.chatflowPool.remove(`${chatflowid}_${chatId}`) - } catch (e) { - logger.error(`[server]: Error aborting chat message for ${chatflowid}, chatId ${chatId}: ${e}`) - } + if (process.env.MODE === MODE.QUEUE) { + const predictionQueue = appServer.queueManager.getQueue('prediction') + const connection = appServer.queueManager.getConnection() + const queueEventsProducer = new QueueEventsProducer(predictionQueue.getQueueName(), { + connection + }) + await queueEventsProducer.publishEvent({ + eventName: 'abort', + id + }) + } else { + appServer.abortControllerPool.abort(id) } } catch (error) { throw new InternalFlowiseError( diff --git a/packages/server/src/services/chatflows/index.ts b/packages/server/src/services/chatflows/index.ts index ba6243ee5bb..edabc3b2989 100644 --- a/packages/server/src/services/chatflows/index.ts +++ b/packages/server/src/services/chatflows/index.ts @@ -267,12 +267,6 @@ const updateChatflow = async (chatflow: ChatFlow, updateChatFlow: ChatFlow): Pro await _checkAndUpdateDocumentStoreUsage(newDbChatflow) const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(newDbChatflow) - // chatFlowPool is initialized only when a flow is opened - // if the user attempts to rename/update category without opening any flow, chatFlowPool will be undefined - if (appServer.chatflowPool) { - // Update chatflowpool inSync to false, to build flow from scratch again because data has been changed - appServer.chatflowPool.updateInSync(chatflow.id, false) - } return 
dbResponse } catch (error) { throw new InternalFlowiseError( diff --git a/packages/server/src/services/documentstore/index.ts b/packages/server/src/services/documentstore/index.ts index 3ac7d10a8f5..7fb67120846 100644 --- a/packages/server/src/services/documentstore/index.ts +++ b/packages/server/src/services/documentstore/index.ts @@ -17,6 +17,7 @@ import { addLoaderSource, ChatType, DocumentStoreStatus, + IComponentNodes, IDocumentStoreFileChunkPagedResponse, IDocumentStoreLoader, IDocumentStoreLoaderFile, @@ -24,7 +25,11 @@ import { IDocumentStoreRefreshData, IDocumentStoreUpsertData, IDocumentStoreWhereUsed, - INodeData + IExecuteDocStoreUpsert, + IExecuteProcessLoader, + IExecuteVectorStoreInsert, + INodeData, + MODE } from '../../Interface' import { DocumentStoreFileChunk } from '../../database/entities/DocumentStoreFileChunk' import { v4 as uuidv4 } from 'uuid' @@ -36,11 +41,11 @@ import { StatusCodes } from 'http-status-codes' import { getErrorMessage } from '../../errors/utils' import { ChatFlow } from '../../database/entities/ChatFlow' import { Document } from '@langchain/core/documents' -import { App } from '../../index' import { UpsertHistory } from '../../database/entities/UpsertHistory' import { cloneDeep, omit } from 'lodash' -import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../../Interface.Metrics' import { DOCUMENTSTORE_TOOL_DESCRIPTION_PROMPT_GENERATOR } from '../../utils/prompt' +import { DataSource } from 'typeorm' +import { Telemetry } from '../../utils/telemetry' const DOCUMENT_STORE_BASE_FOLDER = 'docustore' @@ -182,10 +187,9 @@ const getUsedChatflowNames = async (entity: DocumentStore) => { } // Get chunks for a specific loader or store -const getDocumentStoreFileChunks = async (storeId: string, docId: string, pageNo: number = 1) => { +const getDocumentStoreFileChunks = async (appDataSource: DataSource, storeId: string, docId: string, pageNo: number = 1) => { try { - const appServer = getRunningExpressApp() - const entity = 
await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ + const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ id: storeId }) if (!entity) { @@ -227,10 +231,10 @@ const getDocumentStoreFileChunks = async (storeId: string, docId: string, pageNo if (docId === 'all') { whereCondition = { storeId: storeId } } - const count = await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).count({ + const count = await appDataSource.getRepository(DocumentStoreFileChunk).count({ where: whereCondition }) - const chunksWithCount = await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).find({ + const chunksWithCount = await appDataSource.getRepository(DocumentStoreFileChunk).find({ skip, take, where: whereCondition, @@ -323,7 +327,7 @@ const deleteDocumentStoreFileChunk = async (storeId: string, docId: string, chun found.totalChars -= tbdChunk.pageContent.length entity.loaders = JSON.stringify(loaders) await appServer.AppDataSource.getRepository(DocumentStore).save(entity) - return getDocumentStoreFileChunks(storeId, docId) + return getDocumentStoreFileChunks(appServer.AppDataSource, storeId, docId) } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -335,6 +339,8 @@ const deleteDocumentStoreFileChunk = async (storeId: string, docId: string, chun const deleteVectorStoreFromStore = async (storeId: string) => { try { const appServer = getRunningExpressApp() + const componentNodes = appServer.nodesPool.componentNodes + const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ id: storeId }) @@ -367,7 +373,7 @@ const deleteVectorStoreFromStore = async (storeId: string) => { // Get Record Manager Instance const recordManagerConfig = JSON.parse(entity.recordManagerConfig) const recordManagerObj = await _createRecordManagerObject( - appServer, + componentNodes, { recordManagerName: recordManagerConfig.name, recordManagerConfig: recordManagerConfig.config }, options 
) @@ -375,7 +381,7 @@ const deleteVectorStoreFromStore = async (storeId: string) => { // Get Embeddings Instance const embeddingConfig = JSON.parse(entity.embeddingConfig) const embeddingObj = await _createEmbeddingsObject( - appServer, + componentNodes, { embeddingName: embeddingConfig.name, embeddingConfig: embeddingConfig.config }, options ) @@ -383,7 +389,7 @@ const deleteVectorStoreFromStore = async (storeId: string) => { // Get Vector Store Node Data const vectorStoreConfig = JSON.parse(entity.vectorStoreConfig) const vStoreNodeData = _createVectorStoreNodeData( - appServer, + componentNodes, { vectorStoreName: vectorStoreConfig.name, vectorStoreConfig: vectorStoreConfig.config }, embeddingObj, recordManagerObj @@ -391,7 +397,7 @@ const deleteVectorStoreFromStore = async (storeId: string) => { // Get Vector Store Instance const vectorStoreObj = await _createVectorStoreObject( - appServer, + componentNodes, { vectorStoreName: vectorStoreConfig.name, vectorStoreConfig: vectorStoreConfig.config }, vStoreNodeData ) @@ -437,7 +443,7 @@ const editDocumentStoreFileChunk = async (storeId: string, docId: string, chunkI await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).save(editChunk) entity.loaders = JSON.stringify(loaders) await appServer.AppDataSource.getRepository(DocumentStore).save(entity) - return getDocumentStoreFileChunks(storeId, docId) + return getDocumentStoreFileChunks(appServer.AppDataSource, storeId, docId) } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -446,7 +452,6 @@ const editDocumentStoreFileChunk = async (storeId: string, docId: string, chunkI } } -// Update documentStore const updateDocumentStore = async (documentStore: DocumentStore, updatedDocumentStore: DocumentStore) => { try { const appServer = getRunningExpressApp() @@ -481,12 +486,11 @@ const _saveFileToStorage = async (fileBase64: string, entity: DocumentStore) => } } -const _splitIntoChunks = async (data: 
IDocumentStoreLoaderForPreview) => { +const _splitIntoChunks = async (appDataSource: DataSource, componentNodes: IComponentNodes, data: IDocumentStoreLoaderForPreview) => { try { - const appServer = getRunningExpressApp() let splitterInstance = null if (data.splitterId && data.splitterConfig && Object.keys(data.splitterConfig).length > 0) { - const nodeInstanceFilePath = appServer.nodesPool.componentNodes[data.splitterId].filePath as string + const nodeInstanceFilePath = componentNodes[data.splitterId].filePath as string const nodeModule = await import(nodeInstanceFilePath) const newNodeInstance = new nodeModule.nodeClass() let nodeData = { @@ -496,7 +500,7 @@ const _splitIntoChunks = async (data: IDocumentStoreLoaderForPreview) => { splitterInstance = await newNodeInstance.init(nodeData) } if (!data.loaderId) return [] - const nodeInstanceFilePath = appServer.nodesPool.componentNodes[data.loaderId].filePath as string + const nodeInstanceFilePath = componentNodes[data.loaderId].filePath as string const nodeModule = await import(nodeInstanceFilePath) // doc loader configs const nodeData = { @@ -506,7 +510,7 @@ const _splitIntoChunks = async (data: IDocumentStoreLoaderForPreview) => { } const options: ICommonObject = { chatflowid: uuidv4(), - appDataSource: appServer.AppDataSource, + appDataSource, databaseEntities, logger } @@ -521,7 +525,7 @@ const _splitIntoChunks = async (data: IDocumentStoreLoaderForPreview) => { } } -const _normalizeFilePaths = async (data: IDocumentStoreLoaderForPreview, entity: DocumentStore | null) => { +const _normalizeFilePaths = async (appDataSource: DataSource, data: IDocumentStoreLoaderForPreview, entity: DocumentStore | null) => { const keys = Object.getOwnPropertyNames(data.loaderConfig) let rehydrated = false for (let i = 0; i < keys.length; i++) { @@ -535,8 +539,7 @@ const _normalizeFilePaths = async (data: IDocumentStoreLoaderForPreview, entity: let documentStoreEntity: DocumentStore | null = entity if 
(input.startsWith('FILE-STORAGE::')) { if (!documentStoreEntity) { - const appServer = getRunningExpressApp() - documentStoreEntity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ + documentStoreEntity = await appDataSource.getRepository(DocumentStore).findOneBy({ id: data.storeId }) if (!documentStoreEntity) { @@ -570,7 +573,7 @@ const _normalizeFilePaths = async (data: IDocumentStoreLoaderForPreview, entity: data.rehydrated = rehydrated } -const previewChunks = async (data: IDocumentStoreLoaderForPreview) => { +const previewChunks = async (appDataSource: DataSource, componentNodes: IComponentNodes, data: IDocumentStoreLoaderForPreview) => { try { if (data.preview) { if ( @@ -582,9 +585,9 @@ const previewChunks = async (data: IDocumentStoreLoaderForPreview) => { } } if (!data.rehydrated) { - await _normalizeFilePaths(data, null) + await _normalizeFilePaths(appDataSource, data, null) } - let docs = await _splitIntoChunks(data) + let docs = await _splitIntoChunks(appDataSource, componentNodes, data) const totalChunks = docs.length // if -1, return all chunks if (data.previewChunkCount === -1) data.previewChunkCount = totalChunks @@ -602,10 +605,9 @@ const previewChunks = async (data: IDocumentStoreLoaderForPreview) => { } } -const saveProcessingLoader = async (data: IDocumentStoreLoaderForPreview): Promise => { +const saveProcessingLoader = async (appDataSource: DataSource, data: IDocumentStoreLoaderForPreview): Promise => { try { - const appServer = getRunningExpressApp() - const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ + const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ id: data.storeId }) if (!entity) { @@ -667,7 +669,7 @@ const saveProcessingLoader = async (data: IDocumentStoreLoaderForPreview): Promi existingLoaders.push(loader) entity.loaders = JSON.stringify(existingLoaders) } - await appServer.AppDataSource.getRepository(DocumentStore).save(entity) + await 
appDataSource.getRepository(DocumentStore).save(entity) const newLoaders = JSON.parse(entity.loaders) const newLoader = newLoaders.find((ldr: IDocumentStoreLoader) => ldr.id === newDocLoaderId) if (!newLoader) { @@ -683,21 +685,53 @@ const saveProcessingLoader = async (data: IDocumentStoreLoaderForPreview): Promi } } -const processLoader = async (data: IDocumentStoreLoaderForPreview, docLoaderId: string) => { +export const processLoader = async ({ appDataSource, componentNodes, data, docLoaderId }: IExecuteProcessLoader) => { + const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ + id: data.storeId + }) + if (!entity) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: documentStoreServices.processLoader - Document store ${data.storeId} not found` + ) + } + await _saveChunksToStorage(appDataSource, componentNodes, data, entity, docLoaderId) + return getDocumentStoreFileChunks(appDataSource, data.storeId as string, docLoaderId) +} + +const processLoaderMiddleware = async (data: IDocumentStoreLoaderForPreview, docLoaderId: string) => { try { const appServer = getRunningExpressApp() - const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ - id: data.storeId - }) - if (!entity) { - throw new InternalFlowiseError( - StatusCodes.NOT_FOUND, - `Error: documentStoreServices.processLoader - Document store ${data.storeId} not found` + const appDataSource = appServer.AppDataSource + const componentNodes = appServer.nodesPool.componentNodes + const telemetry = appServer.telemetry + + const executeData: IExecuteProcessLoader = { + appDataSource, + componentNodes, + data, + docLoaderId, + isProcessWithoutUpsert: true, + telemetry + } + + if (process.env.MODE === MODE.QUEUE) { + const upsertQueue = appServer.queueManager.getQueue('upsert') + const job = await upsertQueue.addJob( + omit(executeData, ['componentNodes', 'appDataSource', 'sseStreamer', 'telemetry', 'cachePool']) ) + logger.debug(`[server]: Job 
added to queue: ${job.id}`) + + const queueEvents = upsertQueue.getQueueEvents() + const result = await job.waitUntilFinished(queueEvents) + + if (!result) { + throw new Error('Job execution failed') + } + return result } - // this method will run async, will have to be moved to a worker thread - await _saveChunksToStorage(data, entity, docLoaderId) - return getDocumentStoreFileChunks(data.storeId as string, docLoaderId) + + return await processLoader(executeData) } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -706,16 +740,21 @@ const processLoader = async (data: IDocumentStoreLoaderForPreview, docLoaderId: } } -const _saveChunksToStorage = async (data: IDocumentStoreLoaderForPreview, entity: DocumentStore, newLoaderId: string) => { +const _saveChunksToStorage = async ( + appDataSource: DataSource, + componentNodes: IComponentNodes, + data: IDocumentStoreLoaderForPreview, + entity: DocumentStore, + newLoaderId: string +) => { const re = new RegExp('^data.*;base64', 'i') try { - const appServer = getRunningExpressApp() //step 1: restore the full paths, if any - await _normalizeFilePaths(data, entity) + await _normalizeFilePaths(appDataSource, data, entity) //step 2: split the file into chunks - const response = await previewChunks(data) + const response = await previewChunks(appDataSource, componentNodes, data) //step 3: remove all files associated with the loader const existingLoaders = JSON.parse(entity.loaders) @@ -783,7 +822,7 @@ const _saveChunksToStorage = async (data: IDocumentStoreLoaderForPreview, entity } //step 7: remove all previous chunks - await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).delete({ docId: newLoaderId }) + await appDataSource.getRepository(DocumentStoreFileChunk).delete({ docId: newLoaderId }) if (response.chunks) { //step 8: now save the new chunks const totalChars = response.chunks.reduce((acc, chunk) => { @@ -801,8 +840,8 @@ const _saveChunksToStorage = async (data: 
IDocumentStoreLoaderForPreview, entity pageContent: chunk.pageContent, metadata: JSON.stringify(chunk.metadata) } - const dChunk = appServer.AppDataSource.getRepository(DocumentStoreFileChunk).create(docChunk) - await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).save(dChunk) + const dChunk = appDataSource.getRepository(DocumentStoreFileChunk).create(docChunk) + await appDataSource.getRepository(DocumentStoreFileChunk).save(dChunk) }) // update the loader with the new metrics loader.totalChunks = response.totalChunks @@ -815,7 +854,7 @@ const _saveChunksToStorage = async (data: IDocumentStoreLoaderForPreview, entity entity.loaders = JSON.stringify(existingLoaders) //step 9: update the entity in the database - await appServer.AppDataSource.getRepository(DocumentStore).save(entity) + await appDataSource.getRepository(DocumentStore).save(entity) return } catch (error) { @@ -914,10 +953,9 @@ const updateVectorStoreConfigOnly = async (data: ICommonObject) => { ) } } -const saveVectorStoreConfig = async (data: ICommonObject, isStrictSave = true) => { +const saveVectorStoreConfig = async (appDataSource: DataSource, data: ICommonObject, isStrictSave = true) => { try { - const appServer = getRunningExpressApp() - const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ + const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ id: data.storeId }) if (!entity) { @@ -968,7 +1006,7 @@ const saveVectorStoreConfig = async (data: ICommonObject, isStrictSave = true) = // this also means that the store is not yet sync'ed to vector store entity.status = DocumentStoreStatus.SYNC } - await appServer.AppDataSource.getRepository(DocumentStore).save(entity) + await appDataSource.getRepository(DocumentStore).save(entity) return entity } catch (error) { throw new InternalFlowiseError( @@ -978,15 +1016,19 @@ const saveVectorStoreConfig = async (data: ICommonObject, isStrictSave = true) = } } -const insertIntoVectorStore = async 
(data: ICommonObject, isStrictSave = true) => { +export const insertIntoVectorStore = async ({ + appDataSource, + componentNodes, + telemetry, + data, + isStrictSave +}: IExecuteVectorStoreInsert) => { try { - const appServer = getRunningExpressApp() - const entity = await saveVectorStoreConfig(data, isStrictSave) + const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave) entity.status = DocumentStoreStatus.UPSERTING - await appServer.AppDataSource.getRepository(DocumentStore).save(entity) + await appDataSource.getRepository(DocumentStore).save(entity) - // TODO: to be moved into a worker thread... - const indexResult = await _insertIntoVectorStoreWorkerThread(data, isStrictSave) + const indexResult = await _insertIntoVectorStoreWorkerThread(appDataSource, componentNodes, telemetry, data, isStrictSave) return indexResult } catch (error) { throw new InternalFlowiseError( @@ -996,16 +1038,62 @@ const insertIntoVectorStore = async (data: ICommonObject, isStrictSave = true) = } } -const _insertIntoVectorStoreWorkerThread = async (data: ICommonObject, isStrictSave = true) => { +const insertIntoVectorStoreMiddleware = async (data: ICommonObject, isStrictSave = true) => { try { const appServer = getRunningExpressApp() - const entity = await saveVectorStoreConfig(data, isStrictSave) + const appDataSource = appServer.AppDataSource + const componentNodes = appServer.nodesPool.componentNodes + const telemetry = appServer.telemetry + + const executeData: IExecuteVectorStoreInsert = { + appDataSource, + componentNodes, + telemetry, + data, + isStrictSave, + isVectorStoreInsert: true + } + + if (process.env.MODE === MODE.QUEUE) { + const upsertQueue = appServer.queueManager.getQueue('upsert') + const job = await upsertQueue.addJob( + omit(executeData, ['componentNodes', 'appDataSource', 'sseStreamer', 'telemetry', 'cachePool']) + ) + logger.debug(`[server]: Job added to queue: ${job.id}`) + + const queueEvents = upsertQueue.getQueueEvents() + const result = 
await job.waitUntilFinished(queueEvents) + + if (!result) { + throw new Error('Job execution failed') + } + return result + } else { + return await insertIntoVectorStore(executeData) + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: documentStoreServices.insertIntoVectorStoreMiddleware - ${getErrorMessage(error)}` + ) + } +} + +const _insertIntoVectorStoreWorkerThread = async ( + appDataSource: DataSource, + componentNodes: IComponentNodes, + telemetry: Telemetry, + data: ICommonObject, + isStrictSave = true +) => { + try { + const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave) let upsertHistory: Record = {} const chatflowid = data.storeId // fake chatflowid because this is not tied to any chatflow const options: ICommonObject = { chatflowid, - appDataSource: appServer.AppDataSource, + appDataSource, databaseEntities, logger } @@ -1014,14 +1102,14 @@ const _insertIntoVectorStoreWorkerThread = async (data: ICommonObject, isStrictS // Get Record Manager Instance if (data.recordManagerName && data.recordManagerConfig) { - recordManagerObj = await _createRecordManagerObject(appServer, data, options, upsertHistory) + recordManagerObj = await _createRecordManagerObject(componentNodes, data, options, upsertHistory) } // Get Embeddings Instance - const embeddingObj = await _createEmbeddingsObject(appServer, data, options, upsertHistory) + const embeddingObj = await _createEmbeddingsObject(componentNodes, data, options, upsertHistory) // Get Vector Store Node Data - const vStoreNodeData = _createVectorStoreNodeData(appServer, data, embeddingObj, recordManagerObj) + const vStoreNodeData = _createVectorStoreNodeData(componentNodes, data, embeddingObj, recordManagerObj) // Prepare docs for upserting const filterOptions: ICommonObject = { @@ -1030,7 +1118,7 @@ const _insertIntoVectorStoreWorkerThread = async (data: ICommonObject, isStrictS if (data.docId) { filterOptions['docId'] = data.docId } - 
const chunks = await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).find({ + const chunks = await appDataSource.getRepository(DocumentStoreFileChunk).find({ where: filterOptions }) const docs: Document[] = chunks.map((chunk: DocumentStoreFileChunk) => { @@ -1042,7 +1130,7 @@ const _insertIntoVectorStoreWorkerThread = async (data: ICommonObject, isStrictS vStoreNodeData.inputs.document = docs // Get Vector Store Instance - const vectorStoreObj = await _createVectorStoreObject(appServer, data, vStoreNodeData, upsertHistory) + const vectorStoreObj = await _createVectorStoreObject(componentNodes, data, vStoreNodeData, upsertHistory) const indexResult = await vectorStoreObj.vectorStoreMethods.upsert(vStoreNodeData, options) // Save to DB @@ -1053,20 +1141,20 @@ const _insertIntoVectorStoreWorkerThread = async (data: ICommonObject, isStrictS result.chatflowid = chatflowid const newUpsertHistory = new UpsertHistory() Object.assign(newUpsertHistory, result) - const upsertHistoryItem = appServer.AppDataSource.getRepository(UpsertHistory).create(newUpsertHistory) - await appServer.AppDataSource.getRepository(UpsertHistory).save(upsertHistoryItem) + const upsertHistoryItem = appDataSource.getRepository(UpsertHistory).create(newUpsertHistory) + await appDataSource.getRepository(UpsertHistory).save(upsertHistoryItem) } - await appServer.telemetry.sendTelemetry('vector_upserted', { + await telemetry.sendTelemetry('vector_upserted', { version: await getAppVersion(), chatlowId: chatflowid, type: ChatType.INTERNAL, flowGraph: omit(indexResult['result'], ['totalKeys', 'addedDocs']) }) - appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) + // TODO: appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) entity.status = DocumentStoreStatus.UPSERTED - await 
appServer.AppDataSource.getRepository(DocumentStore).save(entity) + await appDataSource.getRepository(DocumentStore).save(entity) return indexResult ?? { result: 'Successfully Upserted' } } catch (error) { @@ -1120,6 +1208,8 @@ const getRecordManagerProviders = async () => { const queryVectorStore = async (data: ICommonObject) => { try { const appServer = getRunningExpressApp() + const componentNodes = appServer.nodesPool.componentNodes + const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ id: data.storeId }) @@ -1144,7 +1234,7 @@ const queryVectorStore = async (data: ICommonObject) => { const embeddingConfig = JSON.parse(entity.embeddingConfig) data.embeddingName = embeddingConfig.name data.embeddingConfig = embeddingConfig.config - let embeddingObj = await _createEmbeddingsObject(appServer, data, options) + let embeddingObj = await _createEmbeddingsObject(componentNodes, data, options) const vsConfig = JSON.parse(entity.vectorStoreConfig) data.vectorStoreName = vsConfig.name @@ -1153,10 +1243,10 @@ const queryVectorStore = async (data: ICommonObject) => { data.vectorStoreConfig = { ...vsConfig.config, ...data.inputs } } - const vStoreNodeData = _createVectorStoreNodeData(appServer, data, embeddingObj, undefined) + const vStoreNodeData = _createVectorStoreNodeData(componentNodes, data, embeddingObj, undefined) // Get Vector Store Instance - const vectorStoreObj = await _createVectorStoreObject(appServer, data, vStoreNodeData) + const vectorStoreObj = await _createVectorStoreObject(componentNodes, data, vStoreNodeData) const retriever = await vectorStoreObj.init(vStoreNodeData, '', options) if (!retriever) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Failed to create retriever`) @@ -1205,13 +1295,13 @@ const queryVectorStore = async (data: ICommonObject) => { } const _createEmbeddingsObject = async ( - appServer: App, + componentNodes: IComponentNodes, data: ICommonObject, options: ICommonObject, 
upsertHistory?: Record ): Promise => { // prepare embedding node data - const embeddingComponent = appServer.nodesPool.componentNodes[data.embeddingName] + const embeddingComponent = componentNodes[data.embeddingName] const embeddingNodeData: any = { inputs: { ...data.embeddingConfig }, outputs: { output: 'document' }, @@ -1240,13 +1330,13 @@ const _createEmbeddingsObject = async ( } const _createRecordManagerObject = async ( - appServer: App, + componentNodes: IComponentNodes, data: ICommonObject, options: ICommonObject, upsertHistory?: Record ) => { // prepare record manager node data - const recordManagerComponent = appServer.nodesPool.componentNodes[data.recordManagerName] + const recordManagerComponent = componentNodes[data.recordManagerName] const rmNodeData: any = { inputs: { ...data.recordManagerConfig }, id: `${recordManagerComponent.name}_0`, @@ -1273,8 +1363,8 @@ const _createRecordManagerObject = async ( return recordManagerObj } -const _createVectorStoreNodeData = (appServer: App, data: ICommonObject, embeddingObj: any, recordManagerObj?: any) => { - const vectorStoreComponent = appServer.nodesPool.componentNodes[data.vectorStoreName] +const _createVectorStoreNodeData = (componentNodes: IComponentNodes, data: ICommonObject, embeddingObj: any, recordManagerObj?: any) => { + const vectorStoreComponent = componentNodes[data.vectorStoreName] const vStoreNodeData: any = { id: `${vectorStoreComponent.name}_0`, inputs: { ...data.vectorStoreConfig }, @@ -1303,25 +1393,27 @@ const _createVectorStoreNodeData = (appServer: App, data: ICommonObject, embeddi } const _createVectorStoreObject = async ( - appServer: App, + componentNodes: IComponentNodes, data: ICommonObject, vStoreNodeData: INodeData, upsertHistory?: Record ) => { - const vStoreNodeInstanceFilePath = appServer.nodesPool.componentNodes[data.vectorStoreName].filePath as string + const vStoreNodeInstanceFilePath = componentNodes[data.vectorStoreName].filePath as string const vStoreNodeModule = await 
import(vStoreNodeInstanceFilePath) const vStoreNodeInstance = new vStoreNodeModule.nodeClass() if (upsertHistory) upsertHistory['flowData'] = saveUpsertFlowData(vStoreNodeData, upsertHistory) return vStoreNodeInstance } -const upsertDocStoreMiddleware = async ( +const upsertDocStore = async ( + appDataSource: DataSource, + componentNodes: IComponentNodes, + telemetry: Telemetry, storeId: string, data: IDocumentStoreUpsertData, files: Express.Multer.File[] = [], isRefreshExisting = false ) => { - const appServer = getRunningExpressApp() const docId = data.docId const newLoader = typeof data.loader === 'string' ? JSON.parse(data.loader) : data.loader const newSplitter = typeof data.splitter === 'string' ? JSON.parse(data.splitter) : data.splitter @@ -1330,7 +1422,7 @@ const upsertDocStoreMiddleware = async ( const newRecordManager = typeof data.recordManager === 'string' ? JSON.parse(data.recordManager) : data.recordManager const getComponentLabelFromName = (nodeName: string) => { - const component = Object.values(appServer.nodesPool.componentNodes).find((node) => node.name === nodeName) + const component = Object.values(componentNodes).find((node) => node.name === nodeName) return component?.label || '' } @@ -1353,7 +1445,7 @@ const upsertDocStoreMiddleware = async ( // Step 1: Get existing loader if (docId) { - const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ id: storeId }) + const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ id: storeId }) if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) } @@ -1508,8 +1600,15 @@ const upsertDocStoreMiddleware = async ( } try { - const newLoader = await saveProcessingLoader(processData) - const result = await processLoader(processData, newLoader.id || '') + const newLoader = await saveProcessingLoader(appDataSource, processData) + const result = await processLoader({ + appDataSource, + componentNodes, + data: 
processData, + docLoaderId: newLoader.id || '', + isProcessWithoutUpsert: false, + telemetry + }) const newDocId = result.docId const insertData = { @@ -1523,10 +1622,76 @@ const upsertDocStoreMiddleware = async ( recordManagerConfig } - const res = await insertIntoVectorStore(insertData, false) + const res = await insertIntoVectorStore({ + appDataSource, + componentNodes, + telemetry, + data: insertData, + isStrictSave: false, + isVectorStoreInsert: true + }) res.docId = newDocId return res + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: documentStoreServices.upsertDocStore - ${getErrorMessage(error)}` + ) + } +} + +export const executeDocStoreUpsert = async ({ + appDataSource, + componentNodes, + telemetry, + storeId, + totalItems, + files, + isRefreshAPI +}: IExecuteDocStoreUpsert) => { + const results = [] + for (const item of totalItems) { + const res = await upsertDocStore(appDataSource, componentNodes, telemetry, storeId, item, files, isRefreshAPI) + results.push(res) + } + return isRefreshAPI ? 
results : results[0] +} + +const upsertDocStoreMiddleware = async (storeId: string, data: IDocumentStoreUpsertData, files: Express.Multer.File[] = []) => { + const appServer = getRunningExpressApp() + const componentNodes = appServer.nodesPool.componentNodes + const appDataSource = appServer.AppDataSource + const telemetry = appServer.telemetry + + try { + const executeData: IExecuteDocStoreUpsert = { + appDataSource, + componentNodes, + telemetry, + storeId, + totalItems: [data], + files, + isRefreshAPI: false + } + + if (process.env.MODE === MODE.QUEUE) { + const upsertQueue = appServer.queueManager.getQueue('upsert') + const job = await upsertQueue.addJob( + omit(executeData, ['componentNodes', 'appDataSource', 'sseStreamer', 'telemetry', 'cachePool']) + ) + logger.debug(`[server]: Job added to queue: ${job.id}`) + + const queueEvents = upsertQueue.getQueueEvents() + const result = await job.waitUntilFinished(queueEvents) + + if (!result) { + throw new Error('Job execution failed') + } + return result + } else { + return await executeDocStoreUpsert(executeData) + } } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -1537,9 +1702,11 @@ const upsertDocStoreMiddleware = async ( const refreshDocStoreMiddleware = async (storeId: string, data?: IDocumentStoreRefreshData) => { const appServer = getRunningExpressApp() + const componentNodes = appServer.nodesPool.componentNodes + const appDataSource = appServer.AppDataSource + const telemetry = appServer.telemetry try { - const results = [] let totalItems: IDocumentStoreUpsertData[] = [] if (!data || !data.items || data.items.length === 0) { @@ -1558,12 +1725,33 @@ const refreshDocStoreMiddleware = async (storeId: string, data?: IDocumentStoreR totalItems = data.items } - for (const item of totalItems) { - const res = await upsertDocStoreMiddleware(storeId, item, [], true) - results.push(res) + const executeData: IExecuteDocStoreUpsert = { + appDataSource, + componentNodes, + 
telemetry, + storeId, + totalItems, + files: [], + isRefreshAPI: true } - return results + if (process.env.MODE === MODE.QUEUE) { + const upsertQueue = appServer.queueManager.getQueue('upsert') + const job = await upsertQueue.addJob( + omit(executeData, ['componentNodes', 'appDataSource', 'sseStreamer', 'telemetry', 'cachePool']) + ) + logger.debug(`[server]: Job added to queue: ${job.id}`) + + const queueEvents = upsertQueue.getQueueEvents() + const result = await job.waitUntilFinished(queueEvents) + + if (!result) { + throw new Error('Job execution failed') + } + return result + } else { + return await executeDocStoreUpsert(executeData) + } } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -1642,11 +1830,11 @@ export default { updateDocumentStore, previewChunks, saveProcessingLoader, - processLoader, + processLoaderMiddleware, deleteDocumentStoreFileChunk, editDocumentStoreFileChunk, getDocumentLoaders, - insertIntoVectorStore, + insertIntoVectorStoreMiddleware, getEmbeddingProviders, getVectorStoreProviders, getRecordManagerProviders, diff --git a/packages/server/src/services/openai-realtime/index.ts b/packages/server/src/services/openai-realtime/index.ts index 34fc230da80..c1d9c6251fc 100644 --- a/packages/server/src/services/openai-realtime/index.ts +++ b/packages/server/src/services/openai-realtime/index.ts @@ -95,7 +95,6 @@ const buildAndInitTool = async (chatflowid: string, _chatId?: string, _apiMessag const flowDataObj: ICommonObject = { chatflowid, chatId } const reactFlowNodeData: INodeData = await resolveVariables( - appServer.AppDataSource, nodeToExecute.data, reactFlowNodes, '', diff --git a/packages/server/src/utils/SSEStreamer.ts b/packages/server/src/utils/SSEStreamer.ts index ffa7693a5d5..5fa5ca4d15f 100644 --- a/packages/server/src/utils/SSEStreamer.ts +++ b/packages/server/src/utils/SSEStreamer.ts @@ -1,4 +1,3 @@ -import express from 'express' import { Response } from 'express' import { 
IServerSideEventStreamer } from 'flowise-components' @@ -13,11 +12,6 @@ type Client = { export class SSEStreamer implements IServerSideEventStreamer { clients: { [id: string]: Client } = {} - app: express.Application - - constructor(app: express.Application) { - this.app = app - } addExternalClient(chatId: string, res: Response) { this.clients[chatId] = { clientType: 'EXTERNAL', response: res, started: false } @@ -40,18 +34,6 @@ export class SSEStreamer implements IServerSideEventStreamer { } } - // Send SSE message to a specific client - streamEvent(chatId: string, data: string) { - const client = this.clients[chatId] - if (client) { - const clientResponse = { - event: 'start', - data: data - } - client.response.write('message:\ndata:' + JSON.stringify(clientResponse) + '\n\n') - } - } - streamCustomEvent(chatId: string, eventType: string, data: any) { const client = this.clients[chatId] if (client) { diff --git a/packages/server/src/utils/addChatMesage.ts b/packages/server/src/utils/addChatMesage.ts index 887031bfcda..88984d031c9 100644 --- a/packages/server/src/utils/addChatMesage.ts +++ b/packages/server/src/utils/addChatMesage.ts @@ -1,3 +1,4 @@ +import { DataSource } from 'typeorm' import { ChatMessage } from '../database/entities/ChatMessage' import { IChatMessage } from '../Interface' import { getRunningExpressApp } from '../utils/getRunningExpressApp' @@ -6,14 +7,14 @@ import { getRunningExpressApp } from '../utils/getRunningExpressApp' * Method that add chat messages. * @param {Partial} chatMessage */ -export const utilAddChatMessage = async (chatMessage: Partial): Promise => { - const appServer = getRunningExpressApp() +export const utilAddChatMessage = async (chatMessage: Partial, appDataSource?: DataSource): Promise => { + const dataSource = appDataSource ?? 
getRunningExpressApp().AppDataSource const newChatMessage = new ChatMessage() Object.assign(newChatMessage, chatMessage) if (!newChatMessage.createdDate) { newChatMessage.createdDate = new Date() } - const chatmessage = await appServer.AppDataSource.getRepository(ChatMessage).create(newChatMessage) - const dbResponse = await appServer.AppDataSource.getRepository(ChatMessage).save(chatmessage) + const chatmessage = await dataSource.getRepository(ChatMessage).create(newChatMessage) + const dbResponse = await dataSource.getRepository(ChatMessage).save(chatmessage) return dbResponse } diff --git a/packages/server/src/utils/buildAgentGraph.ts b/packages/server/src/utils/buildAgentGraph.ts index f3bec805a39..61a55aebdfa 100644 --- a/packages/server/src/utils/buildAgentGraph.ts +++ b/packages/server/src/utils/buildAgentGraph.ts @@ -19,144 +19,77 @@ import { StatusCodes } from 'http-status-codes' import { v4 as uuidv4 } from 'uuid' import { StructuredTool } from '@langchain/core/tools' import { BaseMessage, HumanMessage, AIMessage, AIMessageChunk, ToolMessage } from '@langchain/core/messages' -import { - IChatFlow, - IComponentNodes, - IDepthQueue, - IReactFlowNode, - IReactFlowObject, - IReactFlowEdge, - IMessage, - IncomingInput -} from '../Interface' -import { - buildFlow, - getStartingNodes, - getEndingNodes, - constructGraphs, - databaseEntities, - getSessionChatHistory, - getMemorySessionId, - clearSessionMemory, - getAPIOverrideConfig -} from '../utils' -import { getRunningExpressApp } from './getRunningExpressApp' +import { IChatFlow, IComponentNodes, IDepthQueue, IReactFlowNode, IReactFlowEdge, IMessage, IncomingInput, IFlowConfig } from '../Interface' +import { databaseEntities, clearSessionMemory, getAPIOverrideConfig } from '../utils' import { replaceInputsWithConfig, resolveVariables } from '.' 
import { InternalFlowiseError } from '../errors/internalFlowiseError' import { getErrorMessage } from '../errors/utils' import logger from './logger' import { Variable } from '../database/entities/Variable' +import { DataSource } from 'typeorm' +import { CachePool } from '../CachePool' /** * Build Agent Graph - * @param {IChatFlow} chatflow - * @param {string} chatId - * @param {string} sessionId - * @param {ICommonObject} incomingInput - * @param {boolean} isInternal - * @param {string} baseURL */ -export const buildAgentGraph = async ( - chatflow: IChatFlow, - chatId: string, - apiMessageId: string, - sessionId: string, - incomingInput: IncomingInput, - isInternal: boolean, - baseURL?: string, - sseStreamer?: IServerSideEventStreamer, - shouldStreamResponse?: boolean, - uploadedFilesContent?: string -): Promise => { +export const buildAgentGraph = async ({ + agentflow, + flowConfig, + incomingInput, + nodes, + edges, + initializedNodes, + endingNodeIds, + startingNodeIds, + depthQueue, + chatHistory, + uploadedFilesContent, + appDataSource, + componentNodes, + sseStreamer, + shouldStreamResponse, + cachePool, + baseURL, + signal +}: { + agentflow: IChatFlow + flowConfig: IFlowConfig + incomingInput: IncomingInput + nodes: IReactFlowNode[] + edges: IReactFlowEdge[] + initializedNodes: IReactFlowNode[] + endingNodeIds: string[] + startingNodeIds: string[] + depthQueue: IDepthQueue + chatHistory: IMessage[] + uploadedFilesContent: string + appDataSource: DataSource + componentNodes: IComponentNodes + sseStreamer: IServerSideEventStreamer + shouldStreamResponse: boolean + cachePool: CachePool + baseURL: string + signal?: AbortController +}): Promise => { try { - const appServer = getRunningExpressApp() - const chatflowid = chatflow.id - - /*** Get chatflows and prepare data ***/ - const flowData = chatflow.flowData - const parsedFlowData: IReactFlowObject = JSON.parse(flowData) - const nodes = parsedFlowData.nodes - const edges = parsedFlowData.edges - - /*** Get 
Ending Node with Directed Graph ***/ - const { graph, nodeDependencies } = constructGraphs(nodes, edges) - const directedGraph = graph - - const endingNodes = getEndingNodes(nodeDependencies, directedGraph, nodes) - - /*** Get Starting Nodes with Reversed Graph ***/ - const constructedObj = constructGraphs(nodes, edges, { isReversed: true }) - const nonDirectedGraph = constructedObj.graph - let startingNodeIds: string[] = [] - let depthQueue: IDepthQueue = {} - const endingNodeIds = endingNodes.map((n) => n.id) - for (const endingNodeId of endingNodeIds) { - const resx = getStartingNodes(nonDirectedGraph, endingNodeId) - startingNodeIds.push(...resx.startingNodeIds) - depthQueue = Object.assign(depthQueue, resx.depthQueue) - } - startingNodeIds = [...new Set(startingNodeIds)] - - /*** Get Memory Node for Chat History ***/ - let chatHistory: IMessage[] = [] - const agentMemoryList = ['agentMemory', 'sqliteAgentMemory', 'postgresAgentMemory', 'mySQLAgentMemory'] - const memoryNode = nodes.find((node) => agentMemoryList.includes(node.data.name)) - if (memoryNode) { - chatHistory = await getSessionChatHistory( - chatflowid, - getMemorySessionId(memoryNode, incomingInput, chatId, isInternal), - memoryNode, - appServer.nodesPool.componentNodes, - appServer.AppDataSource, - databaseEntities, - logger, - incomingInput.history - ) - } - - /*** Get API Config ***/ - const availableVariables = await appServer.AppDataSource.getRepository(Variable).find() - const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) - - // Initialize nodes like ChatModels, Tools, etc. 
- const reactFlowNodes: IReactFlowNode[] = await buildFlow({ - startingNodeIds, - reactFlowNodes: nodes, - reactFlowEdges: edges, - apiMessageId, - graph, - depthQueue, - componentNodes: appServer.nodesPool.componentNodes, - question: incomingInput.question, - uploadedFilesContent, - chatHistory, - chatId, - sessionId, - chatflowid, - appDataSource: appServer.AppDataSource, - overrideConfig: incomingInput?.overrideConfig, - apiOverrideStatus, - nodeOverrides, - availableVariables, - variableOverrides, - cachePool: appServer.cachePool, - isUpsert: false, - uploads: incomingInput.uploads, - baseURL - }) + const chatflowid = flowConfig.chatflowid + const chatId = flowConfig.chatId + const sessionId = flowConfig.sessionId + const analytic = agentflow.analytic + const uploads = incomingInput.uploads const options = { chatId, sessionId, chatflowid, logger, - analytic: chatflow.analytic, - appDataSource: appServer.AppDataSource, - databaseEntities: databaseEntities, - cachePool: appServer.cachePool, - uploads: incomingInput.uploads, + analytic, + appDataSource, + databaseEntities, + cachePool, + uploads, baseURL, - signal: new AbortController() + signal: signal ?? 
new AbortController() } let streamResults @@ -171,9 +104,9 @@ export const buildAgentGraph = async ( let totalUsedTools: IUsedTool[] = [] let totalArtifacts: ICommonObject[] = [] - const workerNodes = reactFlowNodes.filter((node) => node.data.name === 'worker') - const supervisorNodes = reactFlowNodes.filter((node) => node.data.name === 'supervisor') - const seqAgentNodes = reactFlowNodes.filter((node) => node.data.category === 'Sequential Agents') + const workerNodes = initializedNodes.filter((node) => node.data.name === 'worker') + const supervisorNodes = initializedNodes.filter((node) => node.data.name === 'supervisor') + const seqAgentNodes = initializedNodes.filter((node) => node.data.category === 'Sequential Agents') const mapNameToLabel: Record = {} @@ -189,11 +122,12 @@ export const buildAgentGraph = async ( try { if (!seqAgentNodes.length) { streamResults = await compileMultiAgentsGraph({ - chatflow, + agentflow, + appDataSource, mapNameToLabel, - reactFlowNodes, + reactFlowNodes: initializedNodes, workerNodeIds: endingNodeIds, - componentNodes: appServer.nodesPool.componentNodes, + componentNodes, options, startingNodeIds, question: incomingInput.question, @@ -208,10 +142,11 @@ export const buildAgentGraph = async ( isSequential = true streamResults = await compileSeqAgentsGraph({ depthQueue, - chatflow, - reactFlowNodes, + agentflow, + appDataSource, + reactFlowNodes: initializedNodes, reactFlowEdges: edges, - componentNodes: appServer.nodesPool.componentNodes, + componentNodes, options, question: incomingInput.question, prependHistoryMessages: incomingInput.history, @@ -275,7 +210,7 @@ export const buildAgentGraph = async ( ) inputEdges.forEach((edge) => { - const parentNode = reactFlowNodes.find((nd) => nd.id === edge.source) + const parentNode = initializedNodes.find((nd) => nd.id === edge.source) if (parentNode) { if (parentNode.data.name.includes('seqCondition')) { const newMessages = messages.slice(0, -1) @@ -366,7 +301,7 @@ export const 
buildAgentGraph = async ( // If last message is an AI Message with tool calls, that means the last node was interrupted if (lastMessageRaw.tool_calls && lastMessageRaw.tool_calls.length > 0) { // The last node that got interrupted - const node = reactFlowNodes.find((node) => node.id === lastMessageRaw.additional_kwargs.nodeId) + const node = initializedNodes.find((node) => node.id === lastMessageRaw.additional_kwargs.nodeId) // Find the next tool node that is connected to the interrupted node, to get the approve/reject button text const tooNodeId = edges.find( @@ -374,7 +309,7 @@ export const buildAgentGraph = async ( edge.target.includes('seqToolNode') && edge.source === (lastMessageRaw.additional_kwargs && lastMessageRaw.additional_kwargs.nodeId) )?.target - const connectedToolNode = reactFlowNodes.find((node) => node.id === tooNodeId) + const connectedToolNode = initializedNodes.find((node) => node.id === tooNodeId) // Map raw tool calls to used tools, to be shown on interrupted message const mappedToolCalls = lastMessageRaw.tool_calls.map((toolCall) => { @@ -449,7 +384,7 @@ export const buildAgentGraph = async ( } } catch (e) { // clear agent memory because checkpoints were saved during runtime - await clearSessionMemory(nodes, appServer.nodesPool.componentNodes, chatId, appServer.AppDataSource, sessionId) + await clearSessionMemory(nodes, componentNodes, chatId, appDataSource, sessionId) if (getErrorMessage(e).includes('Aborted')) { if (shouldStreamResponse && sseStreamer) { sseStreamer.streamAbortEvent(chatId) @@ -466,7 +401,8 @@ export const buildAgentGraph = async ( } type MultiAgentsGraphParams = { - chatflow: IChatFlow + agentflow: IChatFlow + appDataSource: DataSource mapNameToLabel: Record reactFlowNodes: IReactFlowNode[] workerNodeIds: string[] @@ -484,13 +420,13 @@ type MultiAgentsGraphParams = { const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { const { - chatflow, + agentflow, + appDataSource, mapNameToLabel, reactFlowNodes, 
workerNodeIds, componentNodes, options, - startingNodeIds, prependHistoryMessages = [], chatHistory = [], overrideConfig = {}, @@ -501,7 +437,6 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { let question = params.question - const appServer = getRunningExpressApp() const channels: ITeamState = { messages: { value: (x: BaseMessage[], y: BaseMessage[]) => x.concat(y), @@ -522,8 +457,8 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { const workerNodes = reactFlowNodes.filter((node) => workerNodeIds.includes(node.data.id)) /*** Get API Config ***/ - const availableVariables = await appServer.AppDataSource.getRepository(Variable).find() - const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) + const availableVariables = await appDataSource.getRepository(Variable).find() + const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(agentflow) let supervisorWorkers: { [key: string]: IMultiAgentNode[] } = {} @@ -537,7 +472,6 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { if (overrideConfig && apiOverrideStatus) flowNodeData = replaceInputsWithConfig(flowNodeData, overrideConfig, nodeOverrides, variableOverrides) flowNodeData = await resolveVariables( - appServer.AppDataSource, flowNodeData, reactFlowNodes, question, @@ -579,7 +513,6 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { if (overrideConfig && apiOverrideStatus) flowNodeData = replaceInputsWithConfig(flowNodeData, overrideConfig, nodeOverrides, variableOverrides) flowNodeData = await resolveVariables( - appServer.AppDataSource, flowNodeData, reactFlowNodes, question, @@ -626,15 +559,7 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { //@ts-ignore workflowGraph.addEdge(START, supervisorResult.name) - - // Add agentflow to pool ;(workflowGraph as any).signal = options.signal - 
appServer.chatflowPool.add( - `${chatflow.id}_${options.chatId}`, - workflowGraph as any, - reactFlowNodes.filter((node) => startingNodeIds.includes(node.id)), - overrideConfig - ) // Get memory let memory = supervisorResult?.checkpointMemory @@ -685,7 +610,8 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { type SeqAgentsGraphParams = { depthQueue: IDepthQueue - chatflow: IChatFlow + agentflow: IChatFlow + appDataSource: DataSource reactFlowNodes: IReactFlowNode[] reactFlowEdges: IReactFlowEdge[] componentNodes: IComponentNodes @@ -702,7 +628,8 @@ type SeqAgentsGraphParams = { const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { const { depthQueue, - chatflow, + agentflow, + appDataSource, reactFlowNodes, reactFlowEdges, componentNodes, @@ -717,8 +644,6 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { let question = params.question - const appServer = getRunningExpressApp() - let channels: ISeqAgentsState = { messages: { value: (x: BaseMessage[], y: BaseMessage[]) => x.concat(y), @@ -761,8 +686,8 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { let interruptToolNodeNames = [] /*** Get API Config ***/ - const availableVariables = await appServer.AppDataSource.getRepository(Variable).find() - const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) + const availableVariables = await appDataSource.getRepository(Variable).find() + const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(agentflow) const initiateNode = async (node: IReactFlowNode) => { const nodeInstanceFilePath = componentNodes[node.data.name].filePath as string @@ -773,7 +698,6 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { if (overrideConfig && apiOverrideStatus) flowNodeData = replaceInputsWithConfig(flowNodeData, overrideConfig, nodeOverrides, variableOverrides) flowNodeData = await resolveVariables( - 
appServer.AppDataSource, flowNodeData, reactFlowNodes, question, @@ -1059,14 +983,8 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { routeMessage ) } - /*** Add agentflow to pool ***/ + ;(seqGraph as any).signal = options.signal - appServer.chatflowPool.add( - `${chatflow.id}_${options.chatId}`, - seqGraph as any, - reactFlowNodes.filter((node) => startAgentNodes.map((nd) => nd.id).includes(node.id)), - overrideConfig - ) /*** Get memory ***/ const startNode = reactFlowNodes.find((node: IReactFlowNode) => node.data.name === 'seqStart') diff --git a/packages/server/src/utils/buildChatflow.ts b/packages/server/src/utils/buildChatflow.ts index 09cc847439a..9cd33033066 100644 --- a/packages/server/src/utils/buildChatflow.ts +++ b/packages/server/src/utils/buildChatflow.ts @@ -1,15 +1,14 @@ import { Request } from 'express' -import * as path from 'path' +import { DataSource } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' +import { omit } from 'lodash' import { IFileUpload, convertSpeechToText, ICommonObject, addSingleFileToStorage, - addArrayFilesToStorage, - mapMimeTypeToInputField, - mapExtToInputField, generateFollowUpPrompts, - IServerSideEventStreamer + IAction } from 'flowise-components' import { StatusCodes } from 'http-status-codes' import { @@ -21,11 +20,19 @@ import { IDepthQueue, ChatType, IChatMessage, - IChatFlow, - IReactFlowEdge + IExecuteFlowParams, + IFlowConfig, + IComponentNodes, + IVariable, + INodeOverrides, + IVariableOverride, + MODE } from '../Interface' import { InternalFlowiseError } from '../errors/internalFlowiseError' +import { databaseEntities } from '.' 
import { ChatFlow } from '../database/entities/ChatFlow' +import { ChatMessage } from '../database/entities/ChatMessage' +import { Variable } from '../database/entities/Variable' import { getRunningExpressApp } from '../utils/getRunningExpressApp' import { isFlowValidForStream, @@ -37,453 +44,515 @@ import { findMemoryNode, replaceInputsWithConfig, getStartingNodes, - isStartNodeDependOnInput, getMemorySessionId, - isSameOverrideConfig, getEndingNodes, constructGraphs, - isSameChatId, getAPIOverrideConfig } from '../utils' import { validateChatflowAPIKey } from './validateKey' -import { databaseEntities } from '.' -import { v4 as uuidv4 } from 'uuid' -import { omit } from 'lodash' -import * as fs from 'fs' import logger from './logger' import { utilAddChatMessage } from './addChatMesage' import { buildAgentGraph } from './buildAgentGraph' import { getErrorMessage } from '../errors/utils' -import { ChatMessage } from '../database/entities/ChatMessage' -import { IAction } from 'flowise-components' -import { FLOWISE_METRIC_COUNTERS, FLOWISE_COUNTER_STATUS } from '../Interface.Metrics' -import { Variable } from '../database/entities/Variable' +import { FLOWISE_METRIC_COUNTERS, FLOWISE_COUNTER_STATUS, IMetricsProvider } from '../Interface.Metrics' -/** - * Build Chatflow - * @param {Request} req - * @param {boolean} isInternal +/* + * Initialize the ending node to be executed */ -export const utilBuildChatflow = async (req: Request, isInternal: boolean = false): Promise => { - const appServer = getRunningExpressApp() - try { - const chatflowid = req.params.id - - const httpProtocol = req.get('x-forwarded-proto') || req.protocol - const baseURL = `${httpProtocol}://${req.get('host')}` +const initEndingNode = async ({ + endingNodeIds, + componentNodes, + reactFlowNodes, + incomingInput, + flowConfig, + uploadedFilesContent, + availableVariables, + apiOverrideStatus, + nodeOverrides, + variableOverrides +}: { + endingNodeIds: string[] + componentNodes: IComponentNodes + 
reactFlowNodes: IReactFlowNode[] + incomingInput: IncomingInput + flowConfig: IFlowConfig + uploadedFilesContent: string + availableVariables: IVariable[] + apiOverrideStatus: boolean + nodeOverrides: INodeOverrides + variableOverrides: IVariableOverride[] +}): Promise<{ endingNodeData: INodeData; endingNodeInstance: any }> => { + const question = incomingInput.question + const chatHistory = flowConfig.chatHistory + const sessionId = flowConfig.sessionId + + const nodeToExecute = + endingNodeIds.length === 1 + ? reactFlowNodes.find((node: IReactFlowNode) => endingNodeIds[0] === node.id) + : reactFlowNodes[reactFlowNodes.length - 1] + + if (!nodeToExecute) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Node not found`) + } - let incomingInput: IncomingInput = req.body - let nodeToExecuteData: INodeData - const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ - id: chatflowid - }) - if (!chatflow) { - throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowid} not found`) - } + if (incomingInput.overrideConfig && apiOverrideStatus) { + nodeToExecute.data = replaceInputsWithConfig(nodeToExecute.data, incomingInput.overrideConfig, nodeOverrides, variableOverrides) + } - const chatId = incomingInput.chatId ?? incomingInput.overrideConfig?.sessionId ?? 
uuidv4() - const userMessageDateTime = new Date() - if (!isInternal) { - const isKeyValidated = await validateChatflowAPIKey(req, chatflow) - if (!isKeyValidated) { - throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) - } - } + const reactFlowNodeData: INodeData = await resolveVariables( + nodeToExecute.data, + reactFlowNodes, + question, + chatHistory, + flowConfig, + uploadedFilesContent, + availableVariables, + variableOverrides + ) - let fileUploads: IFileUpload[] = [] - let uploadedFilesContent = '' - if (incomingInput.uploads) { - fileUploads = incomingInput.uploads - for (let i = 0; i < fileUploads.length; i += 1) { - const upload = fileUploads[i] - - // if upload in an image, a rag file, or audio - if ((upload.type === 'file' || upload.type === 'file:rag' || upload.type === 'audio') && upload.data) { - const filename = upload.name - const splitDataURI = upload.data.split(',') - const bf = Buffer.from(splitDataURI.pop() || '', 'base64') - const mime = splitDataURI[0].split(':')[1].split(';')[0] - await addSingleFileToStorage(mime, bf, filename, chatflowid, chatId) - upload.type = 'stored-file' - // Omit upload.data since we don't store the content in database - fileUploads[i] = omit(upload, ['data']) - } + logger.debug(`[server]: Running ${reactFlowNodeData.label} (${reactFlowNodeData.id})`) - if (upload.type === 'url' && upload.data) { - const filename = upload.name - const urlData = upload.data - fileUploads[i] = { data: urlData, name: filename, type: 'url', mime: upload.mime ?? 
'image/png' } - } + const nodeInstanceFilePath = componentNodes[reactFlowNodeData.name].filePath as string + const nodeModule = await import(nodeInstanceFilePath) + const nodeInstance = new nodeModule.nodeClass({ sessionId }) - // Run Speech to Text conversion - if (upload.mime === 'audio/webm' || upload.mime === 'audio/mp4' || upload.mime === 'audio/ogg') { - logger.debug(`Attempting a speech to text conversion...`) - let speechToTextConfig: ICommonObject = {} - if (chatflow.speechToText) { - const speechToTextProviders = JSON.parse(chatflow.speechToText) - for (const provider in speechToTextProviders) { - const providerObj = speechToTextProviders[provider] - if (providerObj.status) { - speechToTextConfig = providerObj - speechToTextConfig['name'] = provider - break - } - } - } - if (speechToTextConfig) { - const options: ICommonObject = { - chatId, - chatflowid, - appDataSource: appServer.AppDataSource, - databaseEntities: databaseEntities - } - const speechToTextResult = await convertSpeechToText(upload, speechToTextConfig, options) - logger.debug(`Speech to text result: ${speechToTextResult}`) - if (speechToTextResult) { - incomingInput.question = speechToTextResult - } - } - } + return { endingNodeData: reactFlowNodeData, endingNodeInstance: nodeInstance } +} - if (upload.type === 'file:full' && upload.data) { - upload.type = 'stored-file:full' - // Omit upload.data since we don't store the content in database - uploadedFilesContent += `${upload.data}\n\n` - fileUploads[i] = omit(upload, ['data']) - } - } +/* + * Get chat history from memory node + * This is used to fill in the {{chat_history}} variable if it is used in the Format Prompt Value + */ +const getChatHistory = async ({ + endingNodes, + nodes, + chatflowid, + appDataSource, + componentNodes, + incomingInput, + chatId, + isInternal, + isAgentFlow +}: { + endingNodes: IReactFlowNode[] + nodes: IReactFlowNode[] + chatflowid: string + appDataSource: DataSource + componentNodes: IComponentNodes + 
incomingInput: IncomingInput + chatId: string + isInternal: boolean + isAgentFlow: boolean +}) => { + const prependMessages = incomingInput.history ?? [] + let chatHistory: IMessage[] = [] + + if (isAgentFlow) { + const agentMemoryList = ['agentMemory', 'sqliteAgentMemory', 'postgresAgentMemory', 'mySQLAgentMemory'] + const memoryNode = nodes.find((node) => agentMemoryList.includes(node.data.name)) + if (memoryNode) { + chatHistory = await getSessionChatHistory( + chatflowid, + getMemorySessionId(memoryNode, incomingInput, chatId, isInternal), + memoryNode, + componentNodes, + appDataSource, + databaseEntities, + logger, + prependMessages + ) } + return chatHistory + } - let isStreamValid = false + /* In case there are multiple ending nodes, get the memory from the last available ending node + * By right, in each flow, there should only be one memory node + */ + for (const endingNode of endingNodes) { + const endingNodeData = endingNode.data + if (!endingNodeData.inputs?.memory) continue - const files = (req.files as Express.Multer.File[]) || [] + const memoryNodeId = endingNodeData.inputs?.memory.split('.')[0].replace('{{', '') + const memoryNode = nodes.find((node) => node.data.id === memoryNodeId) - if (files.length) { - const overrideConfig: ICommonObject = { ...req.body } - const fileNames: string[] = [] - for (const file of files) { - const fileBuffer = fs.readFileSync(file.path) - // Address file name with special characters: https://github.com/expressjs/multer/issues/1104 - file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') - const storagePath = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, chatflowid) + if (!memoryNode) continue - const fileInputFieldFromMimeType = mapMimeTypeToInputField(file.mimetype) + chatHistory = await getSessionChatHistory( + chatflowid, + getMemorySessionId(memoryNode, incomingInput, chatId, isInternal), + memoryNode, + componentNodes, + appDataSource, + 
databaseEntities, + logger, + prependMessages + ) + } - const fileExtension = path.extname(file.originalname) + return chatHistory +} - const fileInputFieldFromExt = mapExtToInputField(fileExtension) +/* + * Function to traverse the flow graph and execute the nodes + */ +export const executeFlow = async ({ + componentNodes, + incomingInput, + chatflow, + chatId, + appDataSource, + telemetry, + cachePool, + sseStreamer, + baseURL, + isInternal, + signal +}: IExecuteFlowParams) => { + const question = incomingInput.question + const overrideConfig = incomingInput.overrideConfig ?? {} + const uploads = incomingInput.uploads + const prependMessages = incomingInput.history ?? [] + const streaming = incomingInput.streaming + const userMessageDateTime = new Date() + const chatflowid = chatflow.id + + /* Process file uploads from the chat + * - Images + * - Files + * - Audio + */ + let fileUploads: IFileUpload[] = [] + let uploadedFilesContent = '' + if (incomingInput.uploads) { + fileUploads = incomingInput.uploads + for (let i = 0; i < fileUploads.length; i += 1) { + const upload = fileUploads[i] + + // if upload in an image, a rag file, or audio + if ((upload.type === 'file' || upload.type === 'file:rag' || upload.type === 'audio') && upload.data) { + const filename = upload.name + const splitDataURI = upload.data.split(',') + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const mime = splitDataURI[0].split(':')[1].split(';')[0] + await addSingleFileToStorage(mime, bf, filename, chatflowid, chatId) + upload.type = 'stored-file' + // Omit upload.data since we don't store the content in database + fileUploads[i] = omit(upload, ['data']) + } - let fileInputField = 'txtFile' + if (upload.type === 'url' && upload.data) { + const filename = upload.name + const urlData = upload.data + fileUploads[i] = { data: urlData, name: filename, type: 'url', mime: upload.mime ?? 
'image/png' } + } - if (fileInputFieldFromExt !== 'txtFile') { - fileInputField = fileInputFieldFromExt - } else if (fileInputFieldFromMimeType !== 'txtFile') { - fileInputField = fileInputFieldFromExt + // Run Speech to Text conversion + if (upload.mime === 'audio/webm' || upload.mime === 'audio/mp4' || upload.mime === 'audio/ogg') { + logger.debug(`Attempting a speech to text conversion...`) + let speechToTextConfig: ICommonObject = {} + if (chatflow.speechToText) { + const speechToTextProviders = JSON.parse(chatflow.speechToText) + for (const provider in speechToTextProviders) { + const providerObj = speechToTextProviders[provider] + if (providerObj.status) { + speechToTextConfig = providerObj + speechToTextConfig['name'] = provider + break + } + } } - - if (overrideConfig[fileInputField]) { - const existingFileInputField = overrideConfig[fileInputField].replace('FILE-STORAGE::', '') - const existingFileInputFieldArray = JSON.parse(existingFileInputField) - - const newFileInputField = storagePath.replace('FILE-STORAGE::', '') - const newFileInputFieldArray = JSON.parse(newFileInputField) - - const updatedFieldArray = existingFileInputFieldArray.concat(newFileInputFieldArray) - - overrideConfig[fileInputField] = `FILE-STORAGE::${JSON.stringify(updatedFieldArray)}` - } else { - overrideConfig[fileInputField] = storagePath + if (speechToTextConfig) { + const options: ICommonObject = { + chatId, + chatflowid, + appDataSource, + databaseEntities: databaseEntities + } + const speechToTextResult = await convertSpeechToText(upload, speechToTextConfig, options) + logger.debug(`Speech to text result: ${speechToTextResult}`) + if (speechToTextResult) { + incomingInput.question = speechToTextResult + } } - - fs.unlinkSync(file.path) - } - if (overrideConfig.vars && typeof overrideConfig.vars === 'string') { - overrideConfig.vars = JSON.parse(overrideConfig.vars) - } - incomingInput = { - question: req.body.question ?? 
'hello', - overrideConfig - } - if (req.body.chatId) { - incomingInput.chatId = req.body.chatId } - } - - /*** Get chatflows and prepare data ***/ - const flowData = chatflow.flowData - const parsedFlowData: IReactFlowObject = JSON.parse(flowData) - const nodes = parsedFlowData.nodes - const edges = parsedFlowData.edges - - const apiMessageId = uuidv4() - - /*** Get session ID ***/ - const memoryNode = findMemoryNode(nodes, edges) - const memoryType = memoryNode?.data?.label - let sessionId = getMemorySessionId(memoryNode, incomingInput, chatId, isInternal) - - /*** Get Ending Node with Directed Graph ***/ - const { graph, nodeDependencies } = constructGraphs(nodes, edges) - const directedGraph = graph - const endingNodes = getEndingNodes(nodeDependencies, directedGraph, nodes) - /*** If the graph is an agent graph, build the agent response ***/ - if (endingNodes.filter((node) => node.data.category === 'Multi Agents' || node.data.category === 'Sequential Agents').length) { - return await utilBuildAgentResponse( - chatflow, - isInternal, - chatId, - apiMessageId, - memoryType ?? 
'', - sessionId, - userMessageDateTime, - fileUploads, - incomingInput, - nodes, - edges, - baseURL, - appServer.sseStreamer, - true, - uploadedFilesContent - ) - } - // Get prepend messages - const prependMessages = incomingInput.history - - const flowVariables = {} as Record - - /* Reuse the flow without having to rebuild (to avoid duplicated upsert, recomputation, reinitialization of memory) when all these conditions met: - * - Reuse of flows is not disabled - * - Node Data already exists in pool - * - Still in sync (i.e the flow has not been modified since) - * - Existing overrideConfig and new overrideConfig are the same - * - Existing chatId and new chatId is the same - * - Flow doesn't start with/contain nodes that depend on incomingInput.question - ***/ - const isFlowReusable = () => { - return ( - process.env.DISABLE_CHATFLOW_REUSE !== 'true' && - Object.prototype.hasOwnProperty.call(appServer.chatflowPool.activeChatflows, chatflowid) && - appServer.chatflowPool.activeChatflows[chatflowid].inSync && - appServer.chatflowPool.activeChatflows[chatflowid].endingNodeData && - isSameChatId(appServer.chatflowPool.activeChatflows[chatflowid].chatId, chatId) && - isSameOverrideConfig( - isInternal, - appServer.chatflowPool.activeChatflows[chatflowid].overrideConfig, - incomingInput.overrideConfig - ) && - !isStartNodeDependOnInput(appServer.chatflowPool.activeChatflows[chatflowid].startingNodes, nodes) - ) + if (upload.type === 'file:full' && upload.data) { + upload.type = 'stored-file:full' + // Omit upload.data since we don't store the content in database + uploadedFilesContent += `${upload.data}\n\n` + fileUploads[i] = omit(upload, ['data']) + } } + } - if (isFlowReusable()) { - nodeToExecuteData = appServer.chatflowPool.activeChatflows[chatflowid].endingNodeData as INodeData - isStreamValid = isFlowValidForStream(nodes, nodeToExecuteData) - logger.debug( - `[server]: Reuse existing chatflow ${chatflowid} with ending node ${nodeToExecuteData.label} 
(${nodeToExecuteData.id})` - ) - } else { - const isCustomFunctionEndingNode = endingNodes.some((node) => node.data?.outputs?.output === 'EndingNode') - - for (const endingNode of endingNodes) { - const endingNodeData = endingNode.data - - const isEndingNode = endingNodeData?.outputs?.output === 'EndingNode' - - // Once custom function ending node exists, no need to do follow-up checks. - if (isEndingNode) continue + /*** Get chatflows and prepare data ***/ + const flowData = chatflow.flowData + const parsedFlowData: IReactFlowObject = JSON.parse(flowData) + const nodes = parsedFlowData.nodes + const edges = parsedFlowData.edges + + const apiMessageId = uuidv4() + + /*** Get session ID ***/ + const memoryNode = findMemoryNode(nodes, edges) + const memoryType = memoryNode?.data.label || '' + let sessionId = getMemorySessionId(memoryNode, incomingInput, chatId, isInternal) + + /*** Get Ending Node with Directed Graph ***/ + const { graph, nodeDependencies } = constructGraphs(nodes, edges) + const directedGraph = graph + const endingNodes = getEndingNodes(nodeDependencies, directedGraph, nodes) + + /*** Get Starting Nodes with Reversed Graph ***/ + const constructedObj = constructGraphs(nodes, edges, { isReversed: true }) + const nonDirectedGraph = constructedObj.graph + let startingNodeIds: string[] = [] + let depthQueue: IDepthQueue = {} + const endingNodeIds = endingNodes.map((n) => n.id) + for (const endingNodeId of endingNodeIds) { + const resx = getStartingNodes(nonDirectedGraph, endingNodeId) + startingNodeIds.push(...resx.startingNodeIds) + depthQueue = Object.assign(depthQueue, resx.depthQueue) + } + startingNodeIds = [...new Set(startingNodeIds)] + + const isAgentFlow = + endingNodes.filter((node) => node.data.category === 'Multi Agents' || node.data.category === 'Sequential Agents').length > 0 + + /*** Get Chat History ***/ + const chatHistory = await getChatHistory({ + endingNodes, + nodes, + chatflowid, + appDataSource, + componentNodes, + incomingInput, 
+ chatId, + isInternal, + isAgentFlow + }) + + /*** Get API Config ***/ + const availableVariables = await appDataSource.getRepository(Variable).find() + const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) + + const flowConfig: IFlowConfig = { + chatflowid, + chatId, + sessionId, + chatHistory, + apiMessageId, + ...incomingInput.overrideConfig + } - if ( - endingNodeData.outputs && - Object.keys(endingNodeData.outputs).length && - !Object.values(endingNodeData.outputs ?? {}).includes(endingNodeData.name) - ) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Output of ${endingNodeData.label} (${endingNodeData.id}) must be ${endingNodeData.label}, can't be an Output Prediction` - ) - } + logger.debug(`[server]: Start building flow ${chatflowid}`) + + /*** BFS to traverse from Starting Nodes to Ending Node ***/ + const reactFlowNodes = await buildFlow({ + startingNodeIds, + reactFlowNodes: nodes, + reactFlowEdges: edges, + apiMessageId, + graph, + depthQueue, + componentNodes, + question, + uploadedFilesContent, + chatHistory, + chatId, + sessionId, + chatflowid, + appDataSource, + overrideConfig, + apiOverrideStatus, + nodeOverrides, + availableVariables, + variableOverrides, + cachePool, + isUpsert: false, + uploads, + baseURL + }) + + if (isAgentFlow) { + const agentflow = chatflow + const streamResults = await buildAgentGraph({ + agentflow, + flowConfig, + incomingInput, + nodes, + edges, + initializedNodes: reactFlowNodes, + endingNodeIds, + startingNodeIds, + depthQueue, + chatHistory, + uploadedFilesContent, + appDataSource, + componentNodes, + sseStreamer, + shouldStreamResponse: true, // agentflow is always streamed + cachePool, + baseURL, + signal + }) - isStreamValid = isFlowValidForStream(nodes, endingNodeData) + if (streamResults) { + const { finalResult, finalAction, sourceDocuments, artifacts, usedTools, agentReasoning } = streamResults + const userMessage: Omit = { + role: 
'userMessage', + content: incomingInput.question, + chatflowid: agentflow.id, + chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + chatId, + memoryType, + sessionId, + createdDate: userMessageDateTime, + fileUploads: incomingInput.uploads ? JSON.stringify(fileUploads) : undefined, + leadEmail: incomingInput.leadEmail } + await utilAddChatMessage(userMessage, appDataSource) - // Once custom function ending node exists, flow is always unavailable to stream - isStreamValid = isCustomFunctionEndingNode ? false : isStreamValid - - let chatHistory: IMessage[] = [] - - // When {{chat_history}} is used in Format Prompt Value, fetch the chat conversations from memory node - for (const endingNode of endingNodes) { - const endingNodeData = endingNode.data - - if (!endingNodeData.inputs?.memory) continue - - const memoryNodeId = endingNodeData.inputs?.memory.split('.')[0].replace('{{', '') - const memoryNode = nodes.find((node) => node.data.id === memoryNodeId) - - if (!memoryNode) continue - - chatHistory = await getSessionChatHistory( - chatflowid, - getMemorySessionId(memoryNode, incomingInput, chatId, isInternal), - memoryNode, - appServer.nodesPool.componentNodes, - appServer.AppDataSource, - databaseEntities, - logger, - prependMessages - ) + const apiMessage: Omit = { + id: apiMessageId, + role: 'apiMessage', + content: finalResult, + chatflowid: agentflow.id, + chatType: isInternal ? 
ChatType.INTERNAL : ChatType.EXTERNAL, + chatId, + memoryType, + sessionId } - /*** Get Starting Nodes with Reversed Graph ***/ - const constructedObj = constructGraphs(nodes, edges, { isReversed: true }) - const nonDirectedGraph = constructedObj.graph - let startingNodeIds: string[] = [] - let depthQueue: IDepthQueue = {} - const endingNodeIds = endingNodes.map((n) => n.id) - for (const endingNodeId of endingNodeIds) { - const resx = getStartingNodes(nonDirectedGraph, endingNodeId) - startingNodeIds.push(...resx.startingNodeIds) - depthQueue = Object.assign(depthQueue, resx.depthQueue) - } - startingNodeIds = [...new Set(startingNodeIds)] - - const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.id)) - - /*** Get API Config ***/ - const availableVariables = await appServer.AppDataSource.getRepository(Variable).find() - const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) - - logger.debug(`[server]: Start building chatflow ${chatflowid}`) - - /*** BFS to traverse from Starting Nodes to Ending Node ***/ - const reactFlowNodes = await buildFlow({ - startingNodeIds, - reactFlowNodes: nodes, - reactFlowEdges: edges, - apiMessageId, - graph, - depthQueue, - componentNodes: appServer.nodesPool.componentNodes, - question: incomingInput.question, - uploadedFilesContent, - chatHistory, - chatId, - sessionId: sessionId ?? 
'', - chatflowid, - appDataSource: appServer.AppDataSource, - overrideConfig: incomingInput?.overrideConfig, - apiOverrideStatus, - nodeOverrides, - availableVariables, - variableOverrides, - cachePool: appServer.cachePool, - isUpsert: false, - uploads: incomingInput.uploads, - baseURL - }) + if (sourceDocuments?.length) apiMessage.sourceDocuments = JSON.stringify(sourceDocuments) + if (artifacts?.length) apiMessage.artifacts = JSON.stringify(artifacts) + if (usedTools?.length) apiMessage.usedTools = JSON.stringify(usedTools) + if (agentReasoning?.length) apiMessage.agentReasoning = JSON.stringify(agentReasoning) + if (finalAction && Object.keys(finalAction).length) apiMessage.action = JSON.stringify(finalAction) - // Show output of setVariable nodes in the response - for (const node of reactFlowNodes) { - if ( - node.data.name === 'setVariable' && - (node.data.inputs?.showOutput === true || node.data.inputs?.showOutput === 'true') - ) { - const outputResult = node.data.instance - const variableKey = node.data.inputs?.variableName - flowVariables[variableKey] = outputResult + if (agentflow.followUpPrompts) { + const followUpPromptsConfig = JSON.parse(agentflow.followUpPrompts) + const generatedFollowUpPrompts = await generateFollowUpPrompts(followUpPromptsConfig, apiMessage.content, { + chatId, + chatflowid: agentflow.id, + appDataSource, + databaseEntities + }) + if (generatedFollowUpPrompts?.questions) { + apiMessage.followUpPrompts = JSON.stringify(generatedFollowUpPrompts.questions) } } + const chatMessage = await utilAddChatMessage(apiMessage, appDataSource) - const nodeToExecute = - endingNodeIds.length === 1 - ? 
reactFlowNodes.find((node: IReactFlowNode) => endingNodeIds[0] === node.id) - : reactFlowNodes[reactFlowNodes.length - 1] - if (!nodeToExecute) { - throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Node not found`) - } + await telemetry.sendTelemetry('agentflow_prediction_sent', { + version: await getAppVersion(), + agentflowId: agentflow.id, + chatId, + type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges) + }) - // Only override the config if its status is true - if (incomingInput.overrideConfig && apiOverrideStatus) { - nodeToExecute.data = replaceInputsWithConfig( - nodeToExecute.data, - incomingInput.overrideConfig, - nodeOverrides, - variableOverrides - ) - } + // Find the previous chat message with the same action id and remove the action + if (incomingInput.action && Object.keys(incomingInput.action).length) { + let query = await appDataSource + .getRepository(ChatMessage) + .createQueryBuilder('chat_message') + .where('chat_message.chatId = :chatId', { chatId }) + .orWhere('chat_message.sessionId = :sessionId', { sessionId }) + .orderBy('chat_message.createdDate', 'DESC') + .getMany() - const flowData: ICommonObject = { - chatflowid, - chatId, - sessionId, - apiMessageId, - chatHistory, - ...incomingInput.overrideConfig + for (const result of query) { + if (result.action) { + try { + const action: IAction = JSON.parse(result.action) + if (action.id === incomingInput.action.id) { + const newChatMessage = new ChatMessage() + Object.assign(newChatMessage, result) + newChatMessage.action = null + const cm = await appDataSource.getRepository(ChatMessage).create(newChatMessage) + await appDataSource.getRepository(ChatMessage).save(cm) + break + } + } catch (e) { + // error converting action to JSON + } + } + } } - const reactFlowNodeData: INodeData = await resolveVariables( - appServer.AppDataSource, - nodeToExecute.data, - reactFlowNodes, - incomingInput.question, - chatHistory, - flowData, - 
uploadedFilesContent, - availableVariables, - variableOverrides - ) - nodeToExecuteData = reactFlowNodeData + // Prepare response + let result: ICommonObject = {} + result.text = finalResult + result.question = incomingInput.question + result.chatId = chatId + result.chatMessageId = chatMessage?.id + if (sessionId) result.sessionId = sessionId + if (memoryType) result.memoryType = memoryType + if (agentReasoning?.length) result.agentReasoning = agentReasoning + if (finalAction && Object.keys(finalAction).length) result.action = finalAction + result.followUpPrompts = JSON.stringify(apiMessage.followUpPrompts) - appServer.chatflowPool.add(chatflowid, nodeToExecuteData, startingNodes, incomingInput?.overrideConfig, chatId) + return result } + return undefined + } else { + const isStreamValid = await checkIfStreamValid(endingNodes, nodes, streaming) + + /*** Find the last node to execute ***/ + const { endingNodeData, endingNodeInstance } = await initEndingNode({ + endingNodeIds, + componentNodes, + reactFlowNodes, + incomingInput, + flowConfig, + uploadedFilesContent, + availableVariables, + apiOverrideStatus, + nodeOverrides, + variableOverrides + }) - logger.debug(`[server]: Running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`) - - const nodeInstanceFilePath = appServer.nodesPool.componentNodes[nodeToExecuteData.name].filePath as string - const nodeModule = await import(nodeInstanceFilePath) - const nodeInstance = new nodeModule.nodeClass({ sessionId }) - - isStreamValid = (req.body.streaming === 'true' || req.body.streaming === true) && isStreamValid + /*** If user uploaded files from chat, prepend the content of the files ***/ const finalQuestion = uploadedFilesContent ? 
`${uploadedFilesContent}\n\n${incomingInput.question}` : incomingInput.question + /*** Prepare run params ***/ const runParams = { chatId, chatflowid, apiMessageId, logger, - appDataSource: appServer.AppDataSource, + appDataSource, databaseEntities, analytic: chatflow.analytic, - uploads: incomingInput.uploads, - prependMessages + uploads, + prependMessages, + ...(isStreamValid && { sseStreamer, shouldStreamResponse: isStreamValid }) } - let result = await nodeInstance.run(nodeToExecuteData, finalQuestion, { - ...runParams, - ...(isStreamValid && { sseStreamer: appServer.sseStreamer, shouldStreamResponse: true }) - }) + /*** Run the ending node ***/ + let result = await endingNodeInstance.run(endingNodeData, finalQuestion, runParams) result = typeof result === 'string' ? { text: result } : result - // Retrieve threadId from assistant if exists + /*** Retrieve threadId from OpenAI Assistant if exists ***/ if (typeof result === 'object' && result.assistant) { sessionId = result.assistant.threadId } const userMessage: Omit = { role: 'userMessage', - content: incomingInput.question, + content: question, chatflowid, chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, chatId, memoryType, sessionId, createdDate: userMessageDateTime, - fileUploads: incomingInput.uploads ? JSON.stringify(fileUploads) : undefined, + fileUploads: uploads ? 
JSON.stringify(fileUploads) : undefined, leadEmail: incomingInput.leadEmail } - await utilAddChatMessage(userMessage) + await utilAddChatMessage(userMessage, appDataSource) let resultText = '' if (result.text) resultText = result.text @@ -509,7 +578,7 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals const followUpPrompts = await generateFollowUpPrompts(followUpPromptsConfig, apiMessage.content, { chatId, chatflowid, - appDataSource: appServer.AppDataSource, + appDataSource, databaseEntities }) if (followUpPrompts?.questions) { @@ -517,10 +586,11 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals } } - const chatMessage = await utilAddChatMessage(apiMessage) + const chatMessage = await utilAddChatMessage(apiMessage, appDataSource) - logger.debug(`[server]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`) - await appServer.telemetry.sendTelemetry('prediction_sent', { + logger.debug(`[server]: Finished running ${endingNodeData.label} (${endingNodeData.id})`) + + await telemetry.sendTelemetry('prediction_sent', { version: await getAppVersion(), chatflowId: chatflowid, chatId, @@ -528,14 +598,8 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals flowGraph: getTelemetryFlowObj(nodes, edges) }) - appServer.metricsProvider?.incrementCounter( - isInternal ? 
FLOWISE_METRIC_COUNTERS.CHATFLOW_PREDICTION_INTERNAL : FLOWISE_METRIC_COUNTERS.CHATFLOW_PREDICTION_EXTERNAL, - { status: FLOWISE_COUNTER_STATUS.SUCCESS } - ) - // Prepare response - // return the question in the response - // this is used when input text is empty but question is in audio format - result.question = incomingInput.question + /*** Prepare response ***/ + result.question = incomingInput.question // return the question in the response, this is used when input text is empty but question is in audio format result.chatId = chatId result.chatMessageId = chatMessage?.id result.followUpPrompts = JSON.stringify(apiMessage.followUpPrompts) @@ -543,160 +607,187 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals if (sessionId) result.sessionId = sessionId if (memoryType) result.memoryType = memoryType - if (Object.keys(flowVariables).length) result.flowVariables = flowVariables return result - } catch (e) { - appServer.metricsProvider?.incrementCounter( - isInternal ? 
FLOWISE_METRIC_COUNTERS.CHATFLOW_PREDICTION_INTERNAL : FLOWISE_METRIC_COUNTERS.CHATFLOW_PREDICTION_EXTERNAL, - { status: FLOWISE_COUNTER_STATUS.FAILURE } - ) - logger.error('[server]: Error:', e) - if (e instanceof InternalFlowiseError && e.statusCode === StatusCodes.UNAUTHORIZED) { - throw e - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, getErrorMessage(e)) - } } } -const utilBuildAgentResponse = async ( - agentflow: IChatFlow, - isInternal: boolean, - chatId: string, - apiMessageId: string, - memoryType: string, - sessionId: string, - userMessageDateTime: Date, - fileUploads: IFileUpload[], - incomingInput: IncomingInput, +/** + * Function to check if the flow is valid for streaming + * @param {IReactFlowNode[]} endingNodes + * @param {IReactFlowNode[]} nodes + * @param {boolean | string} streaming + * @returns {boolean} + */ +const checkIfStreamValid = async ( + endingNodes: IReactFlowNode[], nodes: IReactFlowNode[], - edges: IReactFlowEdge[], - baseURL?: string, - sseStreamer?: IServerSideEventStreamer, - shouldStreamResponse?: boolean, - uploadedFilesContent?: string -) => { + streaming: boolean | string | undefined +): Promise => { + // Once custom function ending node exists, flow is always unavailable to stream + const isCustomFunctionEndingNode = endingNodes.some((node) => node.data?.outputs?.output === 'EndingNode') + if (isCustomFunctionEndingNode) return false + + let isStreamValid = false + for (const endingNode of endingNodes) { + const endingNodeData = endingNode.data + + const isEndingNode = endingNodeData?.outputs?.output === 'EndingNode' + + // Once custom function ending node exists, no need to do follow-up checks. + if (isEndingNode) continue + + if ( + endingNodeData.outputs && + Object.keys(endingNodeData.outputs).length && + !Object.values(endingNodeData.outputs ?? 
{}).includes(endingNodeData.name) + ) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Output of ${endingNodeData.label} (${endingNodeData.id}) must be ${endingNodeData.label}, can't be an Output Prediction` + ) + } + + isStreamValid = isFlowValidForStream(nodes, endingNodeData) + } + + isStreamValid = (streaming === 'true' || streaming === true) && isStreamValid + + return isStreamValid +} + +/** + * Build/Data Preperation for execute function + * @param {Request} req + * @param {boolean} isInternal + */ +export const utilBuildChatflow = async (req: Request, isInternal: boolean = false): Promise => { const appServer = getRunningExpressApp() + const chatflowid = req.params.id + + // Check if chatflow exists + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowid + }) + if (!chatflow) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowid} not found`) + } + + const isAgentFlow = chatflow.type === 'MULTIAGENT' + const httpProtocol = req.get('x-forwarded-proto') || req.protocol + const baseURL = `${httpProtocol}://${req.get('host')}` + const incomingInput: IncomingInput = req.body + const chatId = incomingInput.chatId ?? incomingInput.overrideConfig?.sessionId ?? uuidv4() + try { - const streamResults = await buildAgentGraph( - agentflow, - chatId, - apiMessageId, - sessionId, - incomingInput, - isInternal, - baseURL, - sseStreamer, - shouldStreamResponse, - uploadedFilesContent - ) - if (streamResults) { - const { finalResult, finalAction, sourceDocuments, artifacts, usedTools, agentReasoning } = streamResults - const userMessage: Omit = { - role: 'userMessage', - content: incomingInput.question, - chatflowid: agentflow.id, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - chatId, - memoryType, - sessionId, - createdDate: userMessageDateTime, - fileUploads: incomingInput.uploads ? 
JSON.stringify(fileUploads) : undefined, - leadEmail: incomingInput.leadEmail + // Validate API Key if its external API request + if (!isInternal) { + const isKeyValidated = await validateChatflowAPIKey(req, chatflow) + if (!isKeyValidated) { + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) } - await utilAddChatMessage(userMessage) + } - const apiMessage: Omit = { - id: apiMessageId, - role: 'apiMessage', - content: finalResult, - chatflowid: agentflow.id, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - chatId, - memoryType, - sessionId - } - if (sourceDocuments?.length) apiMessage.sourceDocuments = JSON.stringify(sourceDocuments) - if (artifacts?.length) apiMessage.artifacts = JSON.stringify(artifacts) - if (usedTools?.length) apiMessage.usedTools = JSON.stringify(usedTools) - if (agentReasoning?.length) apiMessage.agentReasoning = JSON.stringify(agentReasoning) - if (finalAction && Object.keys(finalAction).length) apiMessage.action = JSON.stringify(finalAction) - if (agentflow.followUpPrompts) { - const followUpPromptsConfig = JSON.parse(agentflow.followUpPrompts) - const generatedFollowUpPrompts = await generateFollowUpPrompts(followUpPromptsConfig, apiMessage.content, { - chatId, - chatflowid: agentflow.id, - appDataSource: appServer.AppDataSource, - databaseEntities - }) - if (generatedFollowUpPrompts?.questions) { - apiMessage.followUpPrompts = JSON.stringify(generatedFollowUpPrompts.questions) - } - } - const chatMessage = await utilAddChatMessage(apiMessage) + const executeData: IExecuteFlowParams = { + incomingInput: req.body, + chatflow, + chatId, + baseURL, + isInternal, + appDataSource: appServer.AppDataSource, + sseStreamer: appServer.sseStreamer, + telemetry: appServer.telemetry, + cachePool: appServer.cachePool, + componentNodes: appServer.nodesPool.componentNodes + } - await appServer.telemetry.sendTelemetry('agentflow_prediction_sent', { - version: await getAppVersion(), - agentflowId: agentflow.id, - 
chatId, - type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges) - }) - appServer.metricsProvider?.incrementCounter( - isInternal ? FLOWISE_METRIC_COUNTERS.AGENTFLOW_PREDICTION_INTERNAL : FLOWISE_METRIC_COUNTERS.AGENTFLOW_PREDICTION_EXTERNAL, - { status: FLOWISE_COUNTER_STATUS.SUCCESS } + if (process.env.MODE === MODE.QUEUE) { + const predictionQueue = appServer.queueManager.getQueue('prediction') + const job = await predictionQueue.addJob( + omit(executeData, ['componentNodes', 'appDataSource', 'sseStreamer', 'telemetry', 'cachePool']) ) + logger.debug(`[server]: Job added to queue: ${job.id}`) - // Find the previous chat message with the same action id and remove the action - if (incomingInput.action && Object.keys(incomingInput.action).length) { - let query = await appServer.AppDataSource.getRepository(ChatMessage) - .createQueryBuilder('chat_message') - .where('chat_message.chatId = :chatId', { chatId }) - .orWhere('chat_message.sessionId = :sessionId', { sessionId }) - .orderBy('chat_message.createdDate', 'DESC') - .getMany() + const queueEvents = predictionQueue.getQueueEvents() + const result = await job.waitUntilFinished(queueEvents) - for (const result of query) { - if (result.action) { - try { - const action: IAction = JSON.parse(result.action) - if (action.id === incomingInput.action.id) { - const newChatMessage = new ChatMessage() - Object.assign(newChatMessage, result) - newChatMessage.action = null - const cm = await appServer.AppDataSource.getRepository(ChatMessage).create(newChatMessage) - await appServer.AppDataSource.getRepository(ChatMessage).save(cm) - break - } - } catch (e) { - // error converting action to JSON - } - } - } + if (!result) { + throw new Error('Job execution failed') } - // Prepare response - let result: ICommonObject = {} - result.text = finalResult - result.question = incomingInput.question - result.chatId = chatId - result.chatMessageId = chatMessage?.id - if (sessionId) 
result.sessionId = sessionId - if (memoryType) result.memoryType = memoryType - if (agentReasoning?.length) result.agentReasoning = agentReasoning - if (finalAction && Object.keys(finalAction).length) result.action = finalAction - result.followUpPrompts = JSON.stringify(apiMessage.followUpPrompts) + incrementSuccessMetricCounter(appServer.metricsProvider, isInternal, isAgentFlow) + return result + // Set up a one-time listener for the specific job completion + /*queueEvents.once('completed', ({ jobId, returnvalue }) => { + if (jobId === job.id) { + // Respond with the job result + console.log('Job completed:', returnvalue) + console.log('typeof Job completed:', typeof returnvalue) + const result = typeof returnvalue === 'string' ? JSON.parse(returnvalue) : returnvalue + if (isStreamValid) { + appServer.sseStreamer.streamMetadataEvent(result.chatId, result) + } + return returnvalue + } + })*/ + } else { + const signal = new AbortController() + appServer.abortControllerPool.add(`${chatflow.id}_${chatId}`, signal) + executeData.signal = signal + const result = await executeFlow(executeData) + appServer.abortControllerPool.remove(`${chatflow.id}_${chatId}`) + incrementSuccessMetricCounter(appServer.metricsProvider, isInternal, isAgentFlow) return result } - return undefined } catch (e) { logger.error('[server]: Error:', e) - appServer.metricsProvider?.incrementCounter( + appServer.abortControllerPool.remove(`${chatflow.id}_${chatId}`) + incrementFailedMetricCounter(appServer.metricsProvider, isInternal, isAgentFlow) + if (e instanceof InternalFlowiseError && e.statusCode === StatusCodes.UNAUTHORIZED) { + throw e + } else { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, getErrorMessage(e)) + } + } +} + +/** + * Increment success metric counter + * @param {IMetricsProvider} metricsProvider + * @param {boolean} isInternal + * @param {boolean} isAgentFlow + */ +const incrementSuccessMetricCounter = (metricsProvider: IMetricsProvider, isInternal: 
boolean, isAgentFlow: boolean) => { + if (isAgentFlow) { + metricsProvider?.incrementCounter( isInternal ? FLOWISE_METRIC_COUNTERS.AGENTFLOW_PREDICTION_INTERNAL : FLOWISE_METRIC_COUNTERS.AGENTFLOW_PREDICTION_EXTERNAL, + { status: FLOWISE_COUNTER_STATUS.SUCCESS } + ) + } else { + metricsProvider?.incrementCounter( + isInternal ? FLOWISE_METRIC_COUNTERS.CHATFLOW_PREDICTION_INTERNAL : FLOWISE_METRIC_COUNTERS.CHATFLOW_PREDICTION_EXTERNAL, + { status: FLOWISE_COUNTER_STATUS.SUCCESS } + ) + } +} + +/** + * Increment failed metric counter + * @param {IMetricsProvider} metricsProvider + * @param {boolean} isInternal + * @param {boolean} isAgentFlow + */ +const incrementFailedMetricCounter = (metricsProvider: IMetricsProvider, isInternal: boolean, isAgentFlow: boolean) => { + if (isAgentFlow) { + metricsProvider?.incrementCounter( + isInternal ? FLOWISE_METRIC_COUNTERS.AGENTFLOW_PREDICTION_INTERNAL : FLOWISE_METRIC_COUNTERS.AGENTFLOW_PREDICTION_EXTERNAL, + { status: FLOWISE_COUNTER_STATUS.FAILURE } + ) + } else { + metricsProvider?.incrementCounter( + isInternal ? 
FLOWISE_METRIC_COUNTERS.CHATFLOW_PREDICTION_INTERNAL : FLOWISE_METRIC_COUNTERS.CHATFLOW_PREDICTION_EXTERNAL, { status: FLOWISE_COUNTER_STATUS.FAILURE } ) - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, getErrorMessage(e)) } } diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index 27e55b0403c..d60febe6e59 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -534,7 +534,6 @@ export const buildFlow = async ({ if (isUpsert) upsertHistory['flowData'] = saveUpsertFlowData(flowNodeData, upsertHistory) const reactFlowNodeData: INodeData = await resolveVariables( - appDataSource, flowNodeData, flowNodes, question, @@ -736,10 +735,9 @@ export const clearSessionMemory = async ( } const getGlobalVariable = async ( - appDataSource: DataSource, overrideConfig?: ICommonObject, availableVariables: IVariable[] = [], - variableOverrides?: ICommonObject[] + variableOverrides: ICommonObject[] = [] ) => { // override variables defined in overrideConfig // nodeData.inputs.vars is an Object, check each property and override the variable @@ -800,13 +798,12 @@ const getGlobalVariable = async ( * @returns {string} */ export const getVariableValue = async ( - appDataSource: DataSource, paramValue: string | object, reactFlowNodes: IReactFlowNode[], question: string, chatHistory: IMessage[], isAcceptVariable = false, - flowData?: ICommonObject, + flowConfig?: ICommonObject, uploadedFilesContent?: string, availableVariables: IVariable[] = [], variableOverrides: ICommonObject[] = [] @@ -851,7 +848,7 @@ export const getVariableValue = async ( } if (variableFullPath.startsWith('$vars.')) { - const vars = await getGlobalVariable(appDataSource, flowData, availableVariables, variableOverrides) + const vars = await getGlobalVariable(flowConfig, availableVariables, variableOverrides) const variableValue = get(vars, variableFullPath.replace('$vars.', '')) if (variableValue) { 
variableDict[`{{${variableFullPath}}}`] = variableValue @@ -859,8 +856,8 @@ export const getVariableValue = async ( } } - if (variableFullPath.startsWith('$flow.') && flowData) { - const variableValue = get(flowData, variableFullPath.replace('$flow.', '')) + if (variableFullPath.startsWith('$flow.') && flowConfig) { + const variableValue = get(flowConfig, variableFullPath.replace('$flow.', '')) if (variableValue) { variableDict[`{{${variableFullPath}}}`] = variableValue returnVal = returnVal.split(`{{${variableFullPath}}}`).join(variableValue) @@ -954,12 +951,11 @@ export const getVariableValue = async ( * @returns {INodeData} */ export const resolveVariables = async ( - appDataSource: DataSource, reactFlowNodeData: INodeData, reactFlowNodes: IReactFlowNode[], question: string, chatHistory: IMessage[], - flowData?: ICommonObject, + flowConfig?: ICommonObject, uploadedFilesContent?: string, availableVariables: IVariable[] = [], variableOverrides: ICommonObject[] = [] @@ -974,13 +970,12 @@ export const resolveVariables = async ( const resolvedInstances = [] for (const param of paramValue) { const resolvedInstance = await getVariableValue( - appDataSource, param, reactFlowNodes, question, chatHistory, undefined, - flowData, + flowConfig, uploadedFilesContent, availableVariables, variableOverrides @@ -991,13 +986,12 @@ export const resolveVariables = async ( } else { const isAcceptVariable = reactFlowNodeData.inputParams.find((param) => param.name === key)?.acceptVariable ?? 
false const resolvedInstance = await getVariableValue( - appDataSource, paramValue, reactFlowNodes, question, chatHistory, isAcceptVariable, - flowData, + flowConfig, uploadedFilesContent, availableVariables, variableOverrides diff --git a/packages/server/src/utils/rateLimit.ts b/packages/server/src/utils/rateLimit.ts index 4cad150279e..f551e18bb72 100644 --- a/packages/server/src/utils/rateLimit.ts +++ b/packages/server/src/utils/rateLimit.ts @@ -1,55 +1,105 @@ import { NextFunction, Request, Response } from 'express' import { rateLimit, RateLimitRequestHandler } from 'express-rate-limit' -import { IChatFlow } from '../Interface' +import { IChatFlow, MODE } from '../Interface' import { Mutex } from 'async-mutex' +import { RedisStore } from 'rate-limit-redis' +import Redis from 'ioredis' -let rateLimiters: Record = {} -const rateLimiterMutex = new Mutex() - -async function addRateLimiter(id: string, duration: number, limit: number, message: string) { - const release = await rateLimiterMutex.acquire() - try { - rateLimiters[id] = rateLimit({ - windowMs: duration * 1000, - max: limit, - handler: (_, res) => { - res.status(429).send(message) +export class RateLimiterManager { + private rateLimiters: Record = {} + private rateLimiterMutex: Mutex = new Mutex() + private redisClient: Redis + private static instance: RateLimiterManager + + constructor() { + if (process.env.MODE === MODE.QUEUE) { + this.redisClient = new Redis({ + host: process.env.REDIS_HOST || 'localhost', + port: parseInt(process.env.REDIS_PORT || '6379'), + username: process.env.REDIS_USERNAME || undefined, + password: process.env.REDIS_PASSWORD || undefined, + tls: + process.env.REDIS_TLS === 'true' + ? { + cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined, + key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined, + ca: process.env.REDIS_CA ? 
Buffer.from(process.env.REDIS_CA, 'base64') : undefined + } + : undefined + }) + } + } + + public static getInstance(): RateLimiterManager { + if (!RateLimiterManager.instance) { + RateLimiterManager.instance = new RateLimiterManager() + } + return RateLimiterManager.instance + } + + public async addRateLimiter(id: string, duration: number, limit: number, message: string): Promise { + const release = await this.rateLimiterMutex.acquire() + try { + if (process.env.MODE === MODE.QUEUE) { + this.rateLimiters[id] = rateLimit({ + windowMs: duration * 1000, + max: limit, + standardHeaders: true, + legacyHeaders: false, + store: new RedisStore({ + // @ts-expect-error - Known issue: the `call` function is not present in @types/ioredis + sendCommand: (...args: string[]) => this.redisClient.call(...args) + }) + }) + } else { + this.rateLimiters[id] = rateLimit({ + windowMs: duration * 1000, + max: limit, + handler: (_, res) => { + res.status(429).send(message) + } + }) } - }) - } finally { - release() + } finally { + release() + } } -} -function removeRateLimit(id: string) { - if (rateLimiters[id]) { - delete rateLimiters[id] + public removeRateLimiter(id: string): void { + if (this.rateLimiters[id]) { + delete this.rateLimiters[id] + } } -} -export function getRateLimiter(req: Request, res: Response, next: NextFunction) { - const id = req.params.id - if (!rateLimiters[id]) return next() - const idRateLimiter = rateLimiters[id] - return idRateLimiter(req, res, next) -} + public getRateLimiter(): (req: Request, res: Response, next: NextFunction) => void { + return (req: Request, res: Response, next: NextFunction) => { + const id = req.params.id + if (!this.rateLimiters[id]) return next() + const idRateLimiter = this.rateLimiters[id] + return idRateLimiter(req, res, next) + } + } -export async function updateRateLimiter(chatFlow: IChatFlow) { - if (!chatFlow.apiConfig) return - const apiConfig = JSON.parse(chatFlow.apiConfig) + public async updateRateLimiter(chatFlow: 
IChatFlow): Promise { + if (!chatFlow.apiConfig) return + const apiConfig = JSON.parse(chatFlow.apiConfig) - const rateLimit: { limitDuration: number; limitMax: number; limitMsg: string; status?: boolean } = apiConfig.rateLimit - if (!rateLimit) return + const rateLimit: { limitDuration: number; limitMax: number; limitMsg: string; status?: boolean } = apiConfig.rateLimit + if (!rateLimit) return - const { limitDuration, limitMax, limitMsg, status } = rateLimit - if (status === false) removeRateLimit(chatFlow.id) - else if (limitMax && limitDuration && limitMsg) await addRateLimiter(chatFlow.id, limitDuration, limitMax, limitMsg) -} + const { limitDuration, limitMax, limitMsg, status } = rateLimit + if (status === false) { + this.removeRateLimiter(chatFlow.id) + } else if (limitMax && limitDuration && limitMsg) { + await this.addRateLimiter(chatFlow.id, limitDuration, limitMax, limitMsg) + } + } -export async function initializeRateLimiter(chatFlowPool: IChatFlow[]) { - await Promise.all( - chatFlowPool.map(async (chatFlow) => { - await updateRateLimiter(chatFlow) - }) - ) + public async initializeRateLimiters(chatflows: IChatFlow[]): Promise { + await Promise.all( + chatflows.map(async (chatFlow) => { + await this.updateRateLimiter(chatFlow) + }) + ) + } } diff --git a/packages/server/src/utils/upsertVector.ts b/packages/server/src/utils/upsertVector.ts index de1e564130e..7e866ae78b8 100644 --- a/packages/server/src/utils/upsertVector.ts +++ b/packages/server/src/utils/upsertVector.ts @@ -16,7 +16,7 @@ import { getAPIOverrideConfig } from '../utils' import { validateChatflowAPIKey } from './validateKey' -import { IncomingInput, INodeDirectedGraph, IReactFlowObject, ChatType } from '../Interface' +import { IncomingInput, INodeDirectedGraph, IReactFlowObject, ChatType, IExecuteFlowParams, MODE } from '../Interface' import { ChatFlow } from '../database/entities/ChatFlow' import { getRunningExpressApp } from '../utils/getRunningExpressApp' import { UpsertHistory } 
from '../database/entities/UpsertHistory' @@ -26,17 +26,196 @@ import { getErrorMessage } from '../errors/utils' import { v4 as uuidv4 } from 'uuid' import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../Interface.Metrics' import { Variable } from '../database/entities/Variable' + +export const executeUpsert = async ({ + componentNodes, + incomingInput, + chatflow, + chatId, + appDataSource, + telemetry, + cachePool, + isInternal, + files +}: IExecuteFlowParams) => { + const question = incomingInput.question + const overrideConfig = incomingInput.overrideConfig ?? {} + let stopNodeId = incomingInput?.stopNodeId ?? '' + const chatHistory: IMessage[] = [] + const isUpsert = true + const chatflowid = chatflow.id + const apiMessageId = uuidv4() + + if (files?.length) { + const overrideConfig: ICommonObject = { ...incomingInput } + for (const file of files) { + const fileNames: string[] = [] + const fileBuffer = fs.readFileSync(file.path) + // Address file name with special characters: https://github.com/expressjs/multer/issues/1104 + file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') + const storagePath = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, chatflowid) + + const fileInputFieldFromMimeType = mapMimeTypeToInputField(file.mimetype) + + const fileExtension = path.extname(file.originalname) + + const fileInputFieldFromExt = mapExtToInputField(fileExtension) + + let fileInputField = 'txtFile' + + if (fileInputFieldFromExt !== 'txtFile') { + fileInputField = fileInputFieldFromExt + } else if (fileInputFieldFromMimeType !== 'txtFile') { + fileInputField = fileInputFieldFromExt + } + + if (overrideConfig[fileInputField]) { + const existingFileInputField = overrideConfig[fileInputField].replace('FILE-STORAGE::', '') + const existingFileInputFieldArray = JSON.parse(existingFileInputField) + + const newFileInputField = storagePath.replace('FILE-STORAGE::', '') + const newFileInputFieldArray = 
JSON.parse(newFileInputField) + + const updatedFieldArray = existingFileInputFieldArray.concat(newFileInputFieldArray) + + overrideConfig[fileInputField] = `FILE-STORAGE::${JSON.stringify(updatedFieldArray)}` + } else { + overrideConfig[fileInputField] = storagePath + } + + fs.unlinkSync(file.path) + } + if (overrideConfig.vars && typeof overrideConfig.vars === 'string') { + overrideConfig.vars = JSON.parse(overrideConfig.vars) + } + incomingInput = { + ...incomingInput, + question: '', + overrideConfig, + stopNodeId, + chatId + } + } + + /*** Get chatflows and prepare data ***/ + const flowData = chatflow.flowData + const parsedFlowData: IReactFlowObject = JSON.parse(flowData) + const nodes = parsedFlowData.nodes + const edges = parsedFlowData.edges + + /*** Get session ID ***/ + const memoryNode = findMemoryNode(nodes, edges) + let sessionId = getMemorySessionId(memoryNode, incomingInput, chatId, isInternal) + + /*** Find the 1 final vector store will be upserted ***/ + const vsNodes = nodes.filter((node) => node.data.category === 'Vector Stores') + const vsNodesWithFileUpload = vsNodes.filter((node) => node.data.inputs?.fileUpload) + if (vsNodesWithFileUpload.length > 1) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, 'Multiple vector store nodes with fileUpload enabled') + } else if (vsNodesWithFileUpload.length === 1 && !stopNodeId) { + stopNodeId = vsNodesWithFileUpload[0].data.id + } + + /*** Check if multiple vector store nodes exist, and if stopNodeId is specified ***/ + if (vsNodes.length > 1 && !stopNodeId) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + 'There are multiple vector nodes, please provide stopNodeId in body request' + ) + } else if (vsNodes.length === 1 && !stopNodeId) { + stopNodeId = vsNodes[0].data.id + } else if (!vsNodes.length && !stopNodeId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, 'No vector node found') + } + + /*** Get Starting Nodes with Reversed Graph ***/ + const 
{ graph } = constructGraphs(nodes, edges, { isReversed: true }) + const nodeIds = getAllConnectedNodes(graph, stopNodeId) + const filteredGraph: INodeDirectedGraph = {} + for (const key of nodeIds) { + if (Object.prototype.hasOwnProperty.call(graph, key)) { + filteredGraph[key] = graph[key] + } + } + const { startingNodeIds, depthQueue } = getStartingNodes(filteredGraph, stopNodeId) + + /*** Get API Config ***/ + const availableVariables = await appDataSource.getRepository(Variable).find() + const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) + + /* For "files" input, add a new node override with the actual input name such as pdfFile, txtFile, etc. + * https://github.com/FlowiseAI/Flowise/pull/3569 + */ + for (const nodeLabel in nodeOverrides) { + const params = nodeOverrides[nodeLabel] + const enabledFileParam = params.find((param) => param.enabled && param.name === 'files') + if (enabledFileParam) { + const fileInputFieldFromExt = mapExtToInputField(enabledFileParam.type) + nodeOverrides[nodeLabel].push({ + ...enabledFileParam, + name: fileInputFieldFromExt + }) + } + } + + const upsertedResult = await buildFlow({ + startingNodeIds, + reactFlowNodes: nodes, + reactFlowEdges: edges, + apiMessageId, + graph: filteredGraph, + depthQueue, + componentNodes, + question, + chatHistory, + chatId, + sessionId, + chatflowid, + appDataSource, + overrideConfig, + apiOverrideStatus, + nodeOverrides, + availableVariables, + variableOverrides, + cachePool, + isUpsert, + stopNodeId + }) + + // Save to DB + if (upsertedResult['flowData'] && upsertedResult['result']) { + const result = cloneDeep(upsertedResult) + result['flowData'] = JSON.stringify(result['flowData']) + result['result'] = JSON.stringify(omit(result['result'], ['totalKeys', 'addedDocs'])) + result.chatflowid = chatflowid + const newUpsertHistory = new UpsertHistory() + Object.assign(newUpsertHistory, result) + const upsertHistory = 
appDataSource.getRepository(UpsertHistory).create(newUpsertHistory) + await appDataSource.getRepository(UpsertHistory).save(upsertHistory) + } + + await telemetry.sendTelemetry('vector_upserted', { + version: await getAppVersion(), + chatlowId: chatflowid, + type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges), + stopNodeId + }) + + return upsertedResult['result'] ?? { result: 'Successfully Upserted' } +} + /** * Upsert documents * @param {Request} req * @param {boolean} isInternal */ export const upsertVector = async (req: Request, isInternal: boolean = false) => { + const appServer = getRunningExpressApp() try { - const appServer = getRunningExpressApp() const chatflowid = req.params.id - let incomingInput: IncomingInput = req.body + // Check if chatflow exists const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ id: chatflowid }) @@ -44,6 +223,12 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) => throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowid} not found`) } + const httpProtocol = req.get('x-forwarded-proto') || req.protocol + const baseURL = `${httpProtocol}://${req.get('host')}` + const incomingInput: IncomingInput = req.body + const chatId = incomingInput.chatId ?? incomingInput.overrideConfig?.sessionId ?? 
uuidv4() + const files = (req.files as Express.Multer.File[]) || [] + if (!isInternal) { const isKeyValidated = await validateChatflowAPIKey(req, chatflow) if (!isKeyValidated) { @@ -51,181 +236,52 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) => } } - const files = (req.files as Express.Multer.File[]) || [] - - if (files.length) { - const overrideConfig: ICommonObject = { ...req.body } - for (const file of files) { - const fileNames: string[] = [] - const fileBuffer = fs.readFileSync(file.path) - // Address file name with special characters: https://github.com/expressjs/multer/issues/1104 - file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') - const storagePath = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, chatflowid) - - const fileInputFieldFromMimeType = mapMimeTypeToInputField(file.mimetype) - - const fileExtension = path.extname(file.originalname) - - const fileInputFieldFromExt = mapExtToInputField(fileExtension) - - let fileInputField = 'txtFile' - - if (fileInputFieldFromExt !== 'txtFile') { - fileInputField = fileInputFieldFromExt - } else if (fileInputFieldFromMimeType !== 'txtFile') { - fileInputField = fileInputFieldFromExt - } - - if (overrideConfig[fileInputField]) { - const existingFileInputField = overrideConfig[fileInputField].replace('FILE-STORAGE::', '') - const existingFileInputFieldArray = JSON.parse(existingFileInputField) - - const newFileInputField = storagePath.replace('FILE-STORAGE::', '') - const newFileInputFieldArray = JSON.parse(newFileInputField) - - const updatedFieldArray = existingFileInputFieldArray.concat(newFileInputFieldArray) - - overrideConfig[fileInputField] = `FILE-STORAGE::${JSON.stringify(updatedFieldArray)}` - } else { - overrideConfig[fileInputField] = storagePath - } - - fs.unlinkSync(file.path) - } - if (overrideConfig.vars && typeof overrideConfig.vars === 'string') { - overrideConfig.vars = 
JSON.parse(overrideConfig.vars) - } - incomingInput = { - question: req.body.question ?? 'hello', - overrideConfig, - stopNodeId: req.body.stopNodeId - } - if (req.body.chatId) { - incomingInput.chatId = req.body.chatId - } + const executeData: IExecuteFlowParams = { + componentNodes: appServer.nodesPool.componentNodes, + incomingInput, + chatflow, + chatId, + appDataSource: appServer.AppDataSource, + telemetry: appServer.telemetry, + cachePool: appServer.cachePool, + sseStreamer: appServer.sseStreamer, + baseURL, + isInternal, + files, + isUpsert: true } - /*** Get chatflows and prepare data ***/ - const flowData = chatflow.flowData - const parsedFlowData: IReactFlowObject = JSON.parse(flowData) - const nodes = parsedFlowData.nodes - const edges = parsedFlowData.edges - - const apiMessageId = req.body.apiMessageId ?? uuidv4() - - let stopNodeId = incomingInput?.stopNodeId ?? '' - let chatHistory: IMessage[] = [] - let chatId = incomingInput.chatId ?? '' - let isUpsert = true - - // Get session ID - const memoryNode = findMemoryNode(nodes, edges) - let sessionId = getMemorySessionId(memoryNode, incomingInput, chatId, isInternal) - - const vsNodes = nodes.filter((node) => node.data.category === 'Vector Stores') - - // Get StopNodeId for vector store which has fielUpload - const vsNodesWithFileUpload = vsNodes.filter((node) => node.data.inputs?.fileUpload) - if (vsNodesWithFileUpload.length > 1) { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, 'Multiple vector store nodes with fileUpload enabled') - } else if (vsNodesWithFileUpload.length === 1 && !stopNodeId) { - stopNodeId = vsNodesWithFileUpload[0].data.id - } + if (process.env.MODE === MODE.QUEUE) { + const upsertQueue = appServer.queueManager.getQueue('upsert') - // Check if multiple vector store nodes exist, and if stopNodeId is specified - if (vsNodes.length > 1 && !stopNodeId) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - 'There are multiple vector nodes, 
please provide stopNodeId in body request' + const job = await upsertQueue.addJob( + omit(executeData, ['componentNodes', 'appDataSource', 'sseStreamer', 'telemetry', 'cachePool']) ) - } else if (vsNodes.length === 1 && !stopNodeId) { - stopNodeId = vsNodes[0].data.id - } else if (!vsNodes.length && !stopNodeId) { - throw new InternalFlowiseError(StatusCodes.NOT_FOUND, 'No vector node found') - } - - const { graph } = constructGraphs(nodes, edges, { isReversed: true }) - - const nodeIds = getAllConnectedNodes(graph, stopNodeId) + logger.debug(`[server]: Job added to queue: ${job.id}`) - const filteredGraph: INodeDirectedGraph = {} - for (const key of nodeIds) { - if (Object.prototype.hasOwnProperty.call(graph, key)) { - filteredGraph[key] = graph[key] - } - } + const queueEvents = upsertQueue.getQueueEvents() + const result = await job.waitUntilFinished(queueEvents) - const { startingNodeIds, depthQueue } = getStartingNodes(filteredGraph, stopNodeId) - - /*** Get API Config ***/ - const availableVariables = await appServer.AppDataSource.getRepository(Variable).find() - const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) - - // For "files" input, add a new node override with the actual input name such as pdfFile, txtFile, etc. 
- for (const nodeLabel in nodeOverrides) { - const params = nodeOverrides[nodeLabel] - const enabledFileParam = params.find((param) => param.enabled && param.name === 'files') - if (enabledFileParam) { - const fileInputFieldFromExt = mapExtToInputField(enabledFileParam.type) - nodeOverrides[nodeLabel].push({ - ...enabledFileParam, - name: fileInputFieldFromExt - }) + if (!result) { + throw new Error('Job execution failed') } - } - - const upsertedResult = await buildFlow({ - startingNodeIds, - reactFlowNodes: nodes, - reactFlowEdges: edges, - apiMessageId, - graph: filteredGraph, - depthQueue, - componentNodes: appServer.nodesPool.componentNodes, - question: incomingInput.question, - chatHistory, - chatId, - sessionId: sessionId ?? '', - chatflowid, - appDataSource: appServer.AppDataSource, - overrideConfig: incomingInput?.overrideConfig, - apiOverrideStatus, - nodeOverrides, - availableVariables, - variableOverrides, - cachePool: appServer.cachePool, - isUpsert, - stopNodeId - }) - const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.data.id)) - - await appServer.chatflowPool.add(chatflowid, undefined, startingNodes, incomingInput?.overrideConfig, chatId) + appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { + status: FLOWISE_COUNTER_STATUS.SUCCESS + }) + return result + } else { + const result = await executeUpsert(executeData) - // Save to DB - if (upsertedResult['flowData'] && upsertedResult['result']) { - const result = cloneDeep(upsertedResult) - result['flowData'] = JSON.stringify(result['flowData']) - result['result'] = JSON.stringify(omit(result['result'], ['totalKeys', 'addedDocs'])) - result.chatflowid = chatflowid - const newUpsertHistory = new UpsertHistory() - Object.assign(newUpsertHistory, result) - const upsertHistory = appServer.AppDataSource.getRepository(UpsertHistory).create(newUpsertHistory) - await appServer.AppDataSource.getRepository(UpsertHistory).save(upsertHistory) + 
appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { + status: FLOWISE_COUNTER_STATUS.SUCCESS + }) + return result } - - await appServer.telemetry.sendTelemetry('vector_upserted', { - version: await getAppVersion(), - chatlowId: chatflowid, - type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges), - stopNodeId - }) - appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) - - return upsertedResult['result'] ?? { result: 'Successfully Upserted' } } catch (e) { logger.error('[server]: Error:', e) + appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.FAILURE }) + if (e instanceof InternalFlowiseError && e.statusCode === StatusCodes.UNAUTHORIZED) { throw e } else { diff --git a/packages/ui/package.json b/packages/ui/package.json index 25a0e0102f3..763aa64afb8 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -56,7 +56,6 @@ "rehype-raw": "^7.0.0", "remark-gfm": "^3.0.1", "remark-math": "^5.1.1", - "socket.io-client": "^4.6.1", "uuid": "^9.0.1", "yup": "^0.32.9" }, diff --git a/packages/ui/vite.config.js b/packages/ui/vite.config.js index b397e41ea9a..34aeec77440 100644 --- a/packages/ui/vite.config.js +++ b/packages/ui/vite.config.js @@ -14,10 +14,6 @@ export default defineConfig(async ({ mode }) => { '^/api(/|$).*': { target: `http://${serverHost}:${serverPort}`, changeOrigin: true - }, - '/socket.io': { - target: `http://${serverHost}:${serverPort}`, - changeOrigin: true } } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c74613e3c20..ab93b475c3f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -403,9 +403,6 @@ importers: sanitize-filename: specifier: ^1.6.3 version: 1.6.3 - socket.io: - specifier: ^4.6.1 - version: 4.7.4(bufferutil@4.0.8)(utf-8-validate@6.0.4) srt-parser-2: specifier: ^1.2.3 version: 1.2.3 @@ 
-474,8 +471,8 @@ importers: packages/server: dependencies: '@oclif/core': - specifier: ^1.13.10 - version: 1.26.2 + specifier: 4.0.7 + version: 4.0.7 '@opentelemetry/api': specifier: ^1.3.0 version: 1.9.0 @@ -530,6 +527,9 @@ importers: axios: specifier: 1.6.2 version: 1.6.2(debug@4.3.4) + bullmq: + specifier: ^5.13.2 + version: 5.34.3 content-disposition: specifier: 0.5.4 version: 0.5.4 @@ -596,21 +596,21 @@ importers: prom-client: specifier: ^15.1.3 version: 15.1.3 + rate-limit-redis: + specifier: ^4.2.0 + version: 4.2.0(express-rate-limit@6.11.2(express@4.18.3)) reflect-metadata: specifier: ^0.1.13 version: 0.1.14 sanitize-html: specifier: ^2.11.0 version: 2.12.1 - socket.io: - specifier: ^4.6.1 - version: 4.7.4(bufferutil@4.0.8)(utf-8-validate@6.0.4) sqlite3: specifier: ^5.1.6 version: 5.1.7 typeorm: specifier: ^0.3.6 - version: 0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@20.12.12)(typescript@5.5.2)) + version: 0.3.20(ioredis@5.4.1)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@20.12.12)(typescript@5.5.2)) uuid: specifier: ^9.0.1 version: 9.0.1 @@ -813,9 +813,6 @@ importers: remark-math: specifier: ^5.1.1 version: 5.1.1 - socket.io-client: - specifier: ^4.6.1 - version: 4.7.4(bufferutil@4.0.8)(utf-8-validate@6.0.4) uuid: specifier: ^9.0.1 version: 9.0.1 @@ -3927,6 +3924,36 @@ packages: '@mongodb-js/saslprep@1.1.5': resolution: { integrity: sha512-XLNOMH66KhJzUJNwT/qlMnS4WsNDWD5ASdyaSH3EtK+F4r/CFGa3jT4GNi4mfOitGvWXtdLgQJkQjxSVrio+jA== } + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + resolution: { integrity: sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw== } + cpu: [arm64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + 
resolution: { integrity: sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw== } + cpu: [x64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + resolution: { integrity: sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg== } + cpu: [arm64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + resolution: { integrity: sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw== } + cpu: [arm] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + resolution: { integrity: sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg== } + cpu: [x64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + resolution: { integrity: sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ== } + cpu: [x64] + os: [win32] + '@mui/base@5.0.0-beta.27': resolution: { integrity: sha512-duL37qxihT1N0pW/gyXVezP7SttLkF+cLAs/y6g6ubEFmVadjbnZ45SeF12/vAiKzqwf5M0uFH1cczIPXFZygA== } engines: { node: '>=12.0.0' } @@ -4179,16 +4206,13 @@ packages: resolution: { integrity: sha512-NCcr1uQo1k5U+SYlnIrbAh3cxy+OQT1VtqiAbxdymSlptbzBb62AjH2xXgjNCoP073hoa1CfCAcwoZ8k96C4nA== } engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 } - '@oclif/core@1.26.2': - resolution: { integrity: sha512-6jYuZgXvHfOIc9GIaS4T3CIKGTjPmfAxuMcbCbMRKJJl4aq/4xeRlEz0E8/hz8HxvxZBGvN2GwAUHlrGWQVrVw== } - engines: { node: '>=14.0.0' } - '@oclif/core@2.15.0': resolution: { integrity: sha512-fNEMG5DzJHhYmI3MgpByTvltBOMyFcnRIUMxbiz2ai8rhaYgaTHMG3Q38HcosfIvtw9nCjxpcQtC8MN8QtVCcA== } engines: { node: '>=14.0.0' } - '@oclif/linewrap@1.0.0': - resolution: { integrity: sha512-Ups2dShK52xXa8w6iBWLgcjPJWjais6KPJQq3gQ/88AY6BXoTX+MIGFPrWQO1KLMiQfoTpcLnUwloN4brrVUHw== } + '@oclif/core@4.0.7': + resolution: { integrity: 
sha512-sU4Dx+RXCWAkrMw8tQFYAL6VfcHYKLPxVC9iKfgTXr4aDhcCssDwrbgpx0Di1dnNxvQlDGUhuCEInZuIY/nNfw== } + engines: { node: '>=18.0.0' } '@oclif/plugin-help@5.2.20': resolution: { integrity: sha512-u+GXX/KAGL9S10LxAwNUaWdzbEBARJ92ogmM7g3gDVud2HioCmvWQCDohNRVZ9GYV9oKwZ/M8xwd6a1d95rEKQ== } @@ -4202,11 +4226,6 @@ packages: resolution: { integrity: sha512-y7eSzT6R5bmTIJbiMMXgOlbBpcWXGlVhNeQJBLBCCy1+90Wbjyqf6uvY0i2WcO4sh/THTJ20qCW80j3XUlgDTA== } engines: { node: '>=12.0.0' } - '@oclif/screen@3.0.8': - resolution: { integrity: sha512-yx6KAqlt3TAHBduS2fMQtJDL2ufIHnDRArrJEOoTTuizxqmjLT+psGYOHpmMl3gvQpFJ11Hs76guUUktzAF9Bg== } - engines: { node: '>=12.0.0' } - deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. - '@octokit/auth-token@2.5.0': resolution: { integrity: sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g== } @@ -5401,9 +5420,6 @@ packages: resolution: { integrity: sha512-AK17WaC0hx1wR9juAOsQkJ6DjDxBGEf5TrKhpXtNFEn+cVto9Li3MVsdpAO97AF7bhFXSyC8tJA3F4ThhqwCdg== } engines: { node: '>=14.0.0' } - '@socket.io/component-emitter@3.1.0': - resolution: { integrity: sha512-+9jVqKhRSpsc591z5vX+X5Yyw+he/HCB4iQ/RYxw35CEPaY1gnsNE43nf9n9AaYjAQrTiI/mOwKUKdUs9vf7Xg== } - '@sqltools/formatter@1.2.5': resolution: { integrity: sha512-Uy0+khmZqUrUGm5dmMqVlnvufZRSK0FbYzVgp0UMstm+F5+W2/jnEEQyc9vo1ZR/E5ZI/B1WjjoTqBqwJL6Krw== } @@ -5696,9 +5712,6 @@ packages: '@types/content-disposition@0.5.8': resolution: { integrity: sha512-QVSSvno3dE0MgO76pJhmv4Qyi/j0Yk9pBp0Y7TJ2Tlj+KCgJWY6qX7nnxCOLkZ3VYRSIk1WTxCvwUSdx6CCLdg== } - '@types/cookie@0.4.1': - resolution: { integrity: sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q== } - '@types/cors@2.8.17': resolution: { integrity: sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA== } @@ -6556,6 +6569,10 @@ packages: ansicolors@0.3.2: resolution: { integrity: 
sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg== } + ansis@3.4.0: + resolution: { integrity: sha512-zVESKSQhWaPhGaWiKj1k+UqvpC7vPBBgG3hjQEeIx2YGzylWt8qA3ziAzRuUtm0OnaGsZKjIvfl8D/sJTt/I0w== } + engines: { node: '>=16' } + any-promise@1.3.0: resolution: { integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== } @@ -6996,10 +7013,6 @@ packages: base64-js@1.5.1: resolution: { integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== } - base64id@2.0.0: - resolution: { integrity: sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog== } - engines: { node: ^4.5.0 || >= 5.9 } - base@0.11.2: resolution: { integrity: sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== } engines: { node: '>=0.10.0' } @@ -7175,6 +7188,9 @@ packages: builtins@5.0.1: resolution: { integrity: sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ== } + bullmq@5.34.3: + resolution: { integrity: sha512-S8/V11w7p6jYAGvv+00skLza/4inTOupWPe0uCD8mZSUiYKzvmW4/YEB+KVjZI2CC2oD3KJ3t7/KkUd31MxMig== } + bundle-name@3.0.0: resolution: { integrity: sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw== } engines: { node: '>=12' } @@ -7726,10 +7742,6 @@ packages: cookie-signature@1.0.6: resolution: { integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== } - cookie@0.4.2: - resolution: { integrity: sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== } - engines: { node: '>= 0.6' } - cookie@0.5.0: resolution: { integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== } engines: { node: '>= 0.6' } @@ -7796,6 +7808,10 @@ packages: crlf-normalize@1.0.20: 
resolution: { integrity: sha512-h/rBerTd3YHQGfv7tNT25mfhWvRq2BBLCZZ80GFarFxf6HQGbpW6iqDL3N+HBLpjLfAdcBXfWAzVlLfHkRUQBQ== } + cron-parser@4.9.0: + resolution: { integrity: sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q== } + engines: { node: '>=12.0.0' } + cross-env@7.0.3: resolution: { integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw== } engines: { node: '>=10.14', npm: '>=6', yarn: '>=1' } @@ -8423,6 +8439,11 @@ packages: ee-first@1.1.1: resolution: { integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== } + ejs@3.1.10: + resolution: { integrity: sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA== } + engines: { node: '>=0.10.0' } + hasBin: true + ejs@3.1.9: resolution: { integrity: sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ== } engines: { node: '>=0.10.0' } @@ -8466,17 +8487,6 @@ packages: end-of-stream@1.4.4: resolution: { integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== } - engine.io-client@6.5.3: - resolution: { integrity: sha512-9Z0qLB0NIisTRt1DZ/8U2k12RJn8yls/nXMZLn+/N8hANT3TcYjKFKcwbw5zFQiN4NTde3TSY9zb79e1ij6j9Q== } - - engine.io-parser@5.2.2: - resolution: { integrity: sha512-RcyUFKA93/CXH20l4SoVvzZfrSDMOTUS3bWVpTt2FuFP+XYrL8i8oonHP7WInRyVHXh0n/ORtoeiE1os+8qkSw== } - engines: { node: '>=10.0.0' } - - engine.io@6.5.4: - resolution: { integrity: sha512-KdVSDKhVKyOi+r5uEabrDLZw2qXStVvCsEB/LN3mw4WFi6Gx50jTyuxYVCwAAC0U46FdnzP/ScKRBTXb/NiEOg== } - engines: { node: '>=10.2.0' } - enhanced-resolve@5.16.0: resolution: { integrity: sha512-O+QWCviPNSSLAD9Ucn8Awv+poAkqn3T1XY5/N7kR7rQO9yfSGWkYZDwpJ+iKF7B8rxaQKWngSqACpgzeapSyoA== } engines: { node: '>=10.13.0' } @@ -10073,6 +10083,10 @@ packages: resolution: { integrity: 
sha512-1DKMMzlIHM02eBBVOFQ1+AolGjs6+xEcM4PDL7NqOS6szq7H9jSaEkIUH6/a5Hl241LzW6JLSiAbNvTQjUupUA== } engines: { node: '>=12.22.0' } + ioredis@5.4.1: + resolution: { integrity: sha512-2YZsvl7jopIa1gaePkeMtd9rAcSjOOjPtpcLlOeusyO+XH2SK5ZcT+UCrElPP+WVIInh2TzeI4XW9ENaSLVVHA== } + engines: { node: '>=12.22.0' } + ip-address@9.0.5: resolution: { integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g== } engines: { node: '>= 12' } @@ -11027,6 +11041,10 @@ packages: resolution: { integrity: sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ== } engines: { node: '>=14' } + lilconfig@3.1.3: + resolution: { integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw== } + engines: { node: '>=14' } + lines-and-columns@1.2.4: resolution: { integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== } @@ -11257,6 +11275,10 @@ packages: react: optional: true + luxon@3.5.0: + resolution: { integrity: sha512-rh+Zjr6DNfUYR3bPwJEnuwDdqMbxZW7LOQfUN4B54+Cl+0o5zaU9RJ6bcidfDtC1cWCZXQ+nvX8bf6bAji37QQ== } + engines: { node: '>=12' } + lz-string@1.5.0: resolution: { integrity: sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ== } hasBin: true @@ -11799,6 +11821,13 @@ packages: ms@2.1.3: resolution: { integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== } + msgpackr-extract@3.0.3: + resolution: { integrity: sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA== } + hasBin: true + + msgpackr@1.11.2: + resolution: { integrity: sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g== } + multer@1.4.5-lts.1: resolution: { integrity: sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ== } engines: { 
node: '>= 6.0.0' } @@ -11891,6 +11920,9 @@ packages: resolution: { integrity: sha512-fZjdhDOeRcaS+rcpve7XuwHBmktS1nS1gzgghwKUQQ8nTy2FdSDr6ZT8k6YhvlJeHmmQMYiT/IH9hfco5zeW2Q== } engines: { node: '>=10' } + node-abort-controller@3.1.1: + resolution: { integrity: sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ== } + node-addon-api@6.1.0: resolution: { integrity: sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== } @@ -11932,6 +11964,10 @@ packages: resolution: { integrity: sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== } engines: { node: '>= 6.13.0' } + node-gyp-build-optional-packages@5.2.2: + resolution: { integrity: sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw== } + hasBin: true + node-gyp-build@4.8.1: resolution: { integrity: sha512-OSs33Z9yWr148JZcbZd5WiAXhh/n9z8TxQcdMhIOlpN9AhWpLfvVFO73+m77bBABQMaY9XSvIa+qk0jlI7Gcaw== } hasBin: true @@ -13481,6 +13517,12 @@ packages: resolution: { integrity: sha512-fhNEG0vGi7bESitNNqNBAfYPdl2efB+1paFlI8BQDCNkruERKuuhG8LkQClDIVqUJLkrmKuOSPQ3xZHqVnVo3Q== } engines: { node: '>=14.18.0' } + rate-limit-redis@4.2.0: + resolution: { integrity: sha512-wV450NQyKC24NmPosJb2131RoczLdfIJdKCReNwtVpm5998U8SgKrAZrIHaN/NfQgqOHaan8Uq++B4sa5REwjA== } + engines: { node: '>= 16' } + peerDependencies: + express-rate-limit: '>= 6' + raw-body@2.5.2: resolution: { integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== } engines: { node: '>= 0.8' } @@ -14385,21 +14427,6 @@ packages: resolution: { integrity: sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== } engines: { node: '>=0.10.0' } - socket.io-adapter@2.5.4: - resolution: { integrity: sha512-wDNHGXGewWAjQPt3pyeYBtpWSq9cLE5UW1ZUPL/2eGK9jtse/FpXib7epSTsz0Q0m+6sg6Y4KtcFTlah1bdOVg== } - - socket.io-client@4.7.4: - 
resolution: { integrity: sha512-wh+OkeF0rAVCrABWQBaEjLfb7DVPotMbu0cgWgyR0v6eA4EoVnAwcIeIbcdTE3GT/H3kbdLl7OoH2+asoDRIIg== } - engines: { node: '>=10.0.0' } - - socket.io-parser@4.2.4: - resolution: { integrity: sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew== } - engines: { node: '>=10.0.0' } - - socket.io@4.7.4: - resolution: { integrity: sha512-DcotgfP1Zg9iP/dH9zvAQcWrE0TtbMVwXmlV4T4mqsvY+gw+LqUGPfx2AoVyRk0FLME+GQhufDMyacFmw7ksqw== } - engines: { node: '>=10.2.0' } - sockjs@0.3.24: resolution: { integrity: sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== } @@ -16219,18 +16246,6 @@ packages: utf-8-validate: optional: true - ws@8.11.0: - resolution: { integrity: sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg== } - engines: { node: '>=10.0.0' } - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - ws@8.13.0: resolution: { integrity: sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== } engines: { node: '>=10.0.0' } @@ -16297,10 +16312,6 @@ packages: resolution: { integrity: sha512-f9s+fUkX04BxQf+7mMWAp5zk61pciie+fFLC9hX9UVvCeJQfNHRHXpeo5MPcR0EUf57PYLdt+ZO4f3Ipk2oZUw== } engines: { node: '>=0.1' } - xmlhttprequest-ssl@2.0.0: - resolution: { integrity: sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A== } - engines: { node: '>=0.4.0' } - xtend@4.0.2: resolution: { integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== } engines: { node: '>=0.4' } @@ -20720,7 +20731,7 @@ snapshots: '@mendable/firecrawl-js@0.0.28': dependencies: - axios: 1.7.2 + axios: 1.7.2(debug@4.3.7) dotenv: 16.4.5 uuid: 9.0.1 zod: 3.23.8 @@ -20752,6 +20763,24 @@ snapshots: dependencies: sparse-bitfield: 3.0.3 + 
'@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + optional: true + '@mui/base@5.0.0-beta.27(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: '@babel/runtime': 7.24.0 @@ -21083,19 +21112,17 @@ snapshots: - bluebird - supports-color - '@oclif/core@1.26.2': + '@oclif/core@2.15.0(@swc/core@1.4.6)(@types/node@20.12.12)(typescript@5.5.2)': dependencies: - '@oclif/linewrap': 1.0.0 - '@oclif/screen': 3.0.8 + '@types/cli-progress': 3.11.5 ansi-escapes: 4.3.2 ansi-styles: 4.3.0 cardinal: 2.1.1 chalk: 4.1.2 clean-stack: 3.0.1 cli-progress: 3.12.0 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.7(supports-color@8.1.1) ejs: 3.1.9 - fs-extra: 9.1.0 get-package-type: 0.1.0 globby: 11.1.0 hyperlinker: 1.0.0 @@ -21105,52 +21132,41 @@ snapshots: natural-orderby: 2.0.3 object-treeify: 1.1.33 password-prompt: 1.1.3 - semver: 7.6.0 + slice-ansi: 4.0.0 string-width: 4.2.3 strip-ansi: 6.0.1 supports-color: 8.1.1 supports-hyperlinks: 2.3.0 + ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@20.12.12)(typescript@5.5.2) tslib: 2.6.2 widest-line: 3.1.0 + wordwrap: 1.0.0 wrap-ansi: 7.0.0 + transitivePeerDependencies: + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - typescript - '@oclif/core@2.15.0(@swc/core@1.4.6)(@types/node@20.12.12)(typescript@5.5.2)': + '@oclif/core@4.0.7': dependencies: - '@types/cli-progress': 3.11.5 ansi-escapes: 4.3.2 - ansi-styles: 4.3.0 - cardinal: 2.1.1 - chalk: 4.1.2 + ansis: 3.4.0 clean-stack: 3.0.1 - cli-progress: 3.12.0 + cli-spinners: 2.9.2 debug: 4.3.7(supports-color@8.1.1) - ejs: 3.1.9 + ejs: 3.1.10 get-package-type: 0.1.0 globby: 11.1.0 - 
hyperlinker: 1.0.0 indent-string: 4.0.0 is-wsl: 2.2.0 - js-yaml: 3.14.1 - natural-orderby: 2.0.3 - object-treeify: 1.1.33 - password-prompt: 1.1.3 - slice-ansi: 4.0.0 + lilconfig: 3.1.3 + minimatch: 9.0.4 string-width: 4.2.3 - strip-ansi: 6.0.1 supports-color: 8.1.1 - supports-hyperlinks: 2.3.0 - ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@20.12.12)(typescript@5.5.2) - tslib: 2.6.2 widest-line: 3.1.0 wordwrap: 1.0.0 wrap-ansi: 7.0.0 - transitivePeerDependencies: - - '@swc/core' - - '@swc/wasm' - - '@types/node' - - typescript - - '@oclif/linewrap@1.0.0': {} '@oclif/plugin-help@5.2.20(@swc/core@1.4.6)(@types/node@20.12.12)(typescript@5.5.2)': dependencies: @@ -21187,8 +21203,6 @@ snapshots: - supports-color - typescript - '@oclif/screen@3.0.8': {} - '@octokit/auth-token@2.5.0': dependencies: '@octokit/types': 6.41.0 @@ -21588,7 +21602,7 @@ snapshots: '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 forwarded-parse: 2.1.2 - semver: 7.6.0 + semver: 7.6.3 transitivePeerDependencies: - supports-color @@ -21789,7 +21803,7 @@ snapshots: '@types/shimmer': 1.2.0 import-in-the-middle: 1.11.2 require-in-the-middle: 7.4.0 - semver: 7.6.0 + semver: 7.6.3 shimmer: 1.2.1 transitivePeerDependencies: - supports-color @@ -22005,7 +22019,7 @@ snapshots: '@puppeteer/browsers@1.4.6(typescript@5.5.2)': dependencies: debug: 4.3.4(supports-color@8.1.1) - extract-zip: 2.0.1(supports-color@8.1.1) + extract-zip: 2.0.1 progress: 2.0.3 proxy-agent: 6.3.0 tar-fs: 3.0.4 @@ -22874,8 +22888,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@socket.io/component-emitter@3.1.0': {} - '@sqltools/formatter@1.2.5': {} '@stripe/agent-toolkit@0.1.20(@langchain/core@0.3.18(openai@4.57.3(encoding@0.1.13)(zod@3.22.4)))(ai@3.2.22(openai@4.57.3(encoding@0.1.13)(zod@3.22.4))(react@18.2.0)(solid-js@1.7.1)(svelte@4.2.18)(vue@3.4.31(typescript@5.5.2))(zod@3.22.4))': @@ -23195,8 +23207,6 @@ snapshots: '@types/content-disposition@0.5.8': {} - 
'@types/cookie@0.4.1': {} - '@types/cors@2.8.17': dependencies: '@types/node': 20.11.26 @@ -24230,6 +24240,8 @@ snapshots: ansicolors@0.3.2: {} + ansis@3.4.0: {} + any-promise@1.3.0: {} anymatch@2.0.0: @@ -24556,7 +24568,7 @@ snapshots: transitivePeerDependencies: - debug - axios@1.7.2: + axios@1.7.2(debug@4.3.7): dependencies: follow-redirects: 1.15.6(debug@4.3.7) form-data: 4.0.0 @@ -24564,14 +24576,6 @@ snapshots: transitivePeerDependencies: - debug - axios@1.7.2(debug@4.3.4): - dependencies: - follow-redirects: 1.15.6(debug@4.3.4) - form-data: 4.0.0 - proxy-from-env: 1.1.0 - transitivePeerDependencies: - - debug - axios@1.7.4(debug@4.3.7): dependencies: follow-redirects: 1.15.6(debug@4.3.7) @@ -24897,8 +24901,6 @@ snapshots: base64-js@1.5.1: {} - base64id@2.0.0: {} - base@0.11.2: dependencies: cache-base: 1.0.1 @@ -25116,6 +25118,18 @@ snapshots: dependencies: semver: 7.6.3 + bullmq@5.34.3: + dependencies: + cron-parser: 4.9.0 + ioredis: 5.4.1 + msgpackr: 1.11.2 + node-abort-controller: 3.1.1 + semver: 7.6.3 + tslib: 2.6.2 + uuid: 9.0.1 + transitivePeerDependencies: + - supports-color + bundle-name@3.0.0: dependencies: run-applescript: 5.0.0 @@ -25527,8 +25541,8 @@ snapshots: cmake-js@7.3.0: dependencies: - axios: 1.7.2(debug@4.3.4) - debug: 4.3.4(supports-color@8.1.1) + axios: 1.7.2(debug@4.3.7) + debug: 4.3.7(supports-color@5.5.0) fs-extra: 11.2.0 lodash.isplainobject: 4.0.6 memory-stream: 1.0.0 @@ -25745,8 +25759,6 @@ snapshots: cookie-signature@1.0.6: {} - cookie@0.4.2: {} - cookie@0.5.0: {} cookie@0.7.1: {} @@ -25829,6 +25841,10 @@ snapshots: transitivePeerDependencies: - ts-toolbelt + cron-parser@4.9.0: + dependencies: + luxon: 3.5.0 + cross-env@7.0.3: dependencies: cross-spawn: 7.0.3 @@ -26519,6 +26535,10 @@ snapshots: ee-first@1.1.1: {} + ejs@3.1.10: + dependencies: + jake: 10.8.7 + ejs@3.1.9: dependencies: jake: 10.8.7 @@ -26549,37 +26569,6 @@ snapshots: dependencies: once: 1.4.0 - engine.io-client@6.5.3(bufferutil@4.0.8)(utf-8-validate@6.0.4): - 
dependencies: - '@socket.io/component-emitter': 3.1.0 - debug: 4.3.7(supports-color@5.5.0) - engine.io-parser: 5.2.2 - ws: 8.11.0(bufferutil@4.0.8)(utf-8-validate@6.0.4) - xmlhttprequest-ssl: 2.0.0 - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - - engine.io-parser@5.2.2: {} - - engine.io@6.5.4(bufferutil@4.0.8)(utf-8-validate@6.0.4): - dependencies: - '@types/cookie': 0.4.1 - '@types/cors': 2.8.17 - '@types/node': 20.12.12 - accepts: 1.3.8 - base64id: 2.0.0 - cookie: 0.4.2 - cors: 2.8.5 - debug: 4.3.4(supports-color@8.1.1) - engine.io-parser: 5.2.2 - ws: 8.11.0(bufferutil@4.0.8)(utf-8-validate@6.0.4) - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - enhanced-resolve@5.16.0: dependencies: graceful-fs: 4.2.11 @@ -27347,9 +27336,19 @@ snapshots: extract-files@9.0.0: {} + extract-zip@2.0.1: + dependencies: + debug: 4.3.7(supports-color@5.5.0) + get-stream: 5.2.0 + yauzl: 2.10.0 + optionalDependencies: + '@types/yauzl': 2.10.3 + transitivePeerDependencies: + - supports-color + extract-zip@2.0.1(supports-color@8.1.1): dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.7(supports-color@8.1.1) get-stream: 5.2.0 yauzl: 2.10.0 optionalDependencies: @@ -27683,10 +27682,6 @@ snapshots: optionalDependencies: debug: 4.3.4(supports-color@8.1.1) - follow-redirects@1.15.6(debug@4.3.4): - optionalDependencies: - debug: 4.3.4(supports-color@8.1.1) - follow-redirects@1.15.6(debug@4.3.7): optionalDependencies: debug: 4.3.7(supports-color@5.5.0) @@ -28870,6 +28865,20 @@ snapshots: transitivePeerDependencies: - supports-color + ioredis@5.4.1: + dependencies: + '@ioredis/commands': 1.2.0 + cluster-key-slot: 1.1.2 + debug: 4.3.7(supports-color@5.5.0) + denque: 2.1.0 + lodash.defaults: 4.2.0 + lodash.isarguments: 3.1.0 + redis-errors: 1.2.0 + redis-parser: 3.0.0 + standard-as-callback: 2.1.0 + transitivePeerDependencies: + - supports-color + ip-address@9.0.5: dependencies: jsbn: 1.1.0 @@ -30118,6 +30127,8 
@@ snapshots: lilconfig@3.1.1: {} + lilconfig@3.1.3: {} + lines-and-columns@1.2.4: {} linkifyjs@4.1.3: {} @@ -30410,6 +30421,8 @@ snapshots: openai: 4.57.3(encoding@0.1.13)(zod@3.22.4) react: 18.2.0 + luxon@3.5.0: {} + lz-string@1.5.0: {} magic-bytes.js@1.10.0: {} @@ -31190,6 +31203,22 @@ snapshots: ms@2.1.3: {} + msgpackr-extract@3.0.3: + dependencies: + node-gyp-build-optional-packages: 5.2.2 + optionalDependencies: + '@msgpackr-extract/msgpackr-extract-darwin-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-darwin-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-win32-x64': 3.0.3 + optional: true + + msgpackr@1.11.2: + optionalDependencies: + msgpackr-extract: 3.0.3 + multer@1.4.5-lts.1: dependencies: append-field: 1.0.0 @@ -31293,6 +31322,8 @@ snapshots: dependencies: semver: 7.6.3 + node-abort-controller@3.1.1: {} + node-addon-api@6.1.0: {} node-addon-api@7.1.0: {} @@ -31321,6 +31352,11 @@ snapshots: node-forge@1.3.1: {} + node-gyp-build-optional-packages@5.2.2: + dependencies: + detect-libc: 2.0.2 + optional: true + node-gyp-build@4.8.1: optional: true @@ -33062,6 +33098,10 @@ snapshots: ranges-sort@6.0.11: {} + rate-limit-redis@4.2.0(express-rate-limit@6.11.2(express@4.18.3)): + dependencies: + express-rate-limit: 6.11.2(express@4.18.3) + raw-body@2.5.2: dependencies: bytes: 3.1.2 @@ -34268,47 +34308,6 @@ snapshots: transitivePeerDependencies: - supports-color - socket.io-adapter@2.5.4(bufferutil@4.0.8)(utf-8-validate@6.0.4): - dependencies: - debug: 4.3.4(supports-color@8.1.1) - ws: 8.11.0(bufferutil@4.0.8)(utf-8-validate@6.0.4) - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - - socket.io-client@4.7.4(bufferutil@4.0.8)(utf-8-validate@6.0.4): - dependencies: - '@socket.io/component-emitter': 3.1.0 - debug: 4.3.4(supports-color@8.1.1) - 
engine.io-client: 6.5.3(bufferutil@4.0.8)(utf-8-validate@6.0.4) - socket.io-parser: 4.2.4 - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - - socket.io-parser@4.2.4: - dependencies: - '@socket.io/component-emitter': 3.1.0 - debug: 4.3.4(supports-color@8.1.1) - transitivePeerDependencies: - - supports-color - - socket.io@4.7.4(bufferutil@4.0.8)(utf-8-validate@6.0.4): - dependencies: - accepts: 1.3.8 - base64id: 2.0.0 - cors: 2.8.5 - debug: 4.3.4(supports-color@8.1.1) - engine.io: 6.5.4(bufferutil@4.0.8)(utf-8-validate@6.0.4) - socket.io-adapter: 2.5.4(bufferutil@4.0.8)(utf-8-validate@6.0.4) - socket.io-parser: 4.2.4 - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - sockjs@0.3.24: dependencies: faye-websocket: 0.11.4 @@ -35387,6 +35386,34 @@ snapshots: transitivePeerDependencies: - supports-color + typeorm@0.3.20(ioredis@5.4.1)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@20.12.12)(typescript@5.5.2)): + dependencies: + '@sqltools/formatter': 1.2.5 + app-root-path: 3.1.0 + buffer: 6.0.3 + chalk: 4.1.2 + cli-highlight: 2.1.11 + dayjs: 1.11.10 + debug: 4.3.4(supports-color@8.1.1) + dotenv: 16.4.5 + glob: 10.3.10 + mkdirp: 2.1.6 + reflect-metadata: 0.2.1 + sha.js: 2.4.11 + tslib: 2.6.2 + uuid: 9.0.1 + yargs: 17.7.2 + optionalDependencies: + ioredis: 5.4.1 + mongodb: 6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1) + mysql2: 3.11.4 + pg: 8.11.3 + redis: 4.6.13 + sqlite3: 5.1.7 + ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@20.12.12)(typescript@5.5.2) + transitivePeerDependencies: + - supports-color + typescript@5.5.2: {} ua-parser-js@0.7.37: {} @@ -36478,11 +36505,6 @@ snapshots: bufferutil: 4.0.8 utf-8-validate: 6.0.4 - ws@8.11.0(bufferutil@4.0.8)(utf-8-validate@6.0.4): - optionalDependencies: - bufferutil: 4.0.8 - utf-8-validate: 6.0.4 - 
ws@8.13.0(bufferutil@4.0.8)(utf-8-validate@6.0.4): optionalDependencies: bufferutil: 4.0.8 @@ -36520,8 +36542,6 @@ snapshots: xmldom-sre@0.1.31: {} - xmlhttprequest-ssl@2.0.0: {} - xtend@4.0.2: {} y18n@3.2.2: {}