diff --git a/api/package.json b/api/package.json index 34c5614c5ed..6fbe62fea9d 100644 --- a/api/package.json +++ b/api/package.json @@ -66,7 +66,7 @@ "multer": "^1.4.5-lts.1", "nodejs-gpt": "^1.37.4", "nodemailer": "^6.9.4", - "openai": "^4.28.4", + "openai": "^4.29.0", "openai-chat-tokens": "^0.2.8", "openid-client": "^5.4.2", "passport": "^0.6.0", diff --git a/api/server/middleware/abortRun.js b/api/server/middleware/abortRun.js index e17420baf99..2b8a95724c3 100644 --- a/api/server/middleware/abortRun.js +++ b/api/server/middleware/abortRun.js @@ -4,9 +4,10 @@ const { checkMessageGaps, recordUsage } = require('~/server/services/Threads'); const { getConvo } = require('~/models/Conversation'); const getLogStores = require('~/cache/getLogStores'); const { sendMessage } = require('~/server/utils'); -// const spendTokens = require('~/models/spendTokens'); const { logger } = require('~/config'); +const three_minutes = 1000 * 60 * 3; + async function abortRun(req, res) { res.setHeader('Content-Type', 'application/json'); const { abortKey } = req.body; @@ -40,7 +41,7 @@ async function abortRun(req, res) { const { openai } = await initializeClient({ req, res }); try { - await cache.set(cacheKey, 'cancelled'); + await cache.set(cacheKey, 'cancelled', three_minutes); const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id); logger.debug('[abortRun] Cancelled run:', cancelledRun); } catch (error) { diff --git a/api/server/routes/assistants/actions.js b/api/server/routes/assistants/actions.js index a68187e2689..33db6ce803a 100644 --- a/api/server/routes/assistants/actions.js +++ b/api/server/routes/assistants/actions.js @@ -2,9 +2,9 @@ const { v4 } = require('uuid'); const express = require('express'); const { actionDelimiter } = require('librechat-data-provider'); const { initializeClient } = require('~/server/services/Endpoints/assistants'); +const { encryptMetadata, domainParser } = require('~/server/services/ActionService'); const { updateAction, getActions, deleteAction } = require('~/models/Action'); const { updateAssistant, getAssistant } = require('~/models/Assistant'); -const { encryptMetadata } = require('~/server/services/ActionService'); const { logger } = require('~/config'); const router = express.Router(); @@ -44,7 +44,10 @@ router.post('/:assistant_id', async (req, res) => { let metadata = encryptMetadata(_metadata); - const { domain } = metadata; + let { domain } = metadata; + /* Azure doesn't support periods in function names */ + domain = domainParser(req, domain, true); + if (!domain) { return res.status(400).json({ message: 'No domain provided' }); } @@ -141,9 +144,10 @@ router.post('/:assistant_id', async (req, res) => { * @param {string} req.params.action_id - The ID of the action to delete. 
* @returns {Object} 200 - success response - application/json */ -router.delete('/:assistant_id/:action_id', async (req, res) => { +router.delete('/:assistant_id/:action_id/:model', async (req, res) => { try { - const { assistant_id, action_id } = req.params; + const { assistant_id, action_id, model } = req.params; + req.body.model = model; /** @type {{ openai: OpenAI }} */ const { openai } = await initializeClient({ req, res }); @@ -167,6 +171,8 @@ router.delete('/:assistant_id/:action_id', async (req, res) => { return true; }); + domain = domainParser(req, domain, true); + const updatedTools = tools.filter( (tool) => !(tool.function && tool.function.name.includes(domain)), ); diff --git a/api/server/routes/assistants/chat.js b/api/server/routes/assistants/chat.js index fd6c5f7bd57..a0cf570008d 100644 --- a/api/server/routes/assistants/chat.js +++ b/api/server/routes/assistants/chat.js @@ -4,8 +4,10 @@ const { Constants, RunStatus, CacheKeys, + ContentTypes, EModelEndpoint, ViolationTypes, + AssistantStreamEvents, } = require('librechat-data-provider'); const { initThread, @@ -18,8 +20,8 @@ const { const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService'); const { addTitle, initializeClient } = require('~/server/services/Endpoints/assistants'); const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils'); +const { createRun, StreamRunManager } = require('~/server/services/Runs'); const { getTransactions } = require('~/models/Transaction'); -const { createRun } = require('~/server/services/Runs'); const checkBalance = require('~/models/checkBalance'); const { getConvo } = require('~/models/Conversation'); const getLogStores = require('~/cache/getLogStores'); @@ -38,6 +40,8 @@ const { router.post('/abort', handleAbort()); +const ten_minutes = 1000 * 60 * 10; + /** * @route POST / * @desc Chat with an assistant @@ -147,7 +151,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res return sendResponse(res, messageData, defaultErrorMessage); } - await sleep(3000); + await sleep(2000); try { const status = await cache.get(cacheKey); @@ -187,6 +191,42 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res latestMessageId: responseMessageId, }); + const errorContentPart = { + text: { + value: + error?.message ?? 'There was an error processing your request. 
Please try again later.', + }, + type: ContentTypes.ERROR, + }; + + if (!Array.isArray(runMessages[runMessages.length - 1]?.content)) { + runMessages[runMessages.length - 1].content = [errorContentPart]; + } else { + const contentParts = runMessages[runMessages.length - 1].content; + for (let i = 0; i < contentParts.length; i++) { + const currentPart = contentParts[i]; + /** @type {CodeToolCall | RetrievalToolCall | FunctionToolCall | undefined} */ + const toolCall = currentPart?.[ContentTypes.TOOL_CALL]; + if ( + toolCall && + toolCall?.function && + !(toolCall?.function?.output || toolCall?.function?.output?.length) + ) { + contentParts[i] = { + ...currentPart, + [ContentTypes.TOOL_CALL]: { + ...toolCall, + function: { + ...toolCall.function, + output: 'error processing tool', + }, + }, + }; + } + } + runMessages[runMessages.length - 1].content.push(errorContentPart); + } + finalEvent = { title: 'New Chat', final: true, @@ -358,53 +398,107 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res body.instructions = instructions; } - /* NOTE: - * By default, a Run will use the model and tools configuration specified in Assistant object, - * but you can override most of these when creating the Run for added flexibility: - */ - const run = await createRun({ - openai, - thread_id, - body, - }); + const sendInitialResponse = () => { + sendMessage(res, { + sync: true, + conversationId, + // messages: previousMessages, + requestMessage, + responseMessage: { + user: req.user.id, + messageId: openai.responseMessage.messageId, + parentMessageId: userMessageId, + conversationId, + assistant_id, + thread_id, + model: assistant_id, + }, + }); + }; - run_id = run.id; - await cache.set(cacheKey, `${thread_id}:${run_id}`); + /** @type {RunResponse | typeof StreamRunManager | undefined} */ + let response; + + const processRun = async (retry = false) => { + if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) { + if (retry) { + response = await runAssistant({ + openai, + thread_id, + run_id, + in_progress: openai.in_progress, + }); + return; + } + + /* NOTE: + * By default, a Run will use the model and tools configuration specified in Assistant object, + * but you can override most of these when creating the Run for added flexibility: + */ + const run = await createRun({ + openai, + thread_id, + body, + }); - sendMessage(res, { - sync: true, - conversationId, - // messages: previousMessages, - requestMessage, - responseMessage: { - user: req.user.id, - messageId: openai.responseMessage.messageId, - parentMessageId: userMessageId, - conversationId, - assistant_id, - thread_id, - model: assistant_id, - }, - }); + run_id = run.id; + await cache.set(cacheKey, `${thread_id}:${run_id}`, ten_minutes); + sendInitialResponse(); - // todo: retry logic - let response = await runAssistant({ openai, thread_id, run_id }); - logger.debug('[/assistants/chat/] response', response); + // todo: retry logic + response = await runAssistant({ openai, thread_id, run_id }); + return; + } - if (response.run.status === RunStatus.IN_PROGRESS) { - response = await runAssistant({ + /** @type {{[AssistantStreamEvents.ThreadRunCreated]: (event: ThreadRunCreated) => Promise}} */ + const handlers = { + [AssistantStreamEvents.ThreadRunCreated]: async (event) => { + await cache.set(cacheKey, `${thread_id}:${event.data.id}`, ten_minutes); + run_id = event.data.id; + sendInitialResponse(); + }, + }; + + const streamRunManager = new StreamRunManager({ + req, + res, openai, thread_id, - run_id, - in_progress: 
openai.in_progress,
+          responseMessage: openai.responseMessage,
+          handlers,
+          // streamOptions: {
+
+          // },
        });
+
+        await streamRunManager.runAssistant({
+          thread_id,
+          body,
+        });
+
+        response = streamRunManager;
+      };
+
+      await processRun();
+      logger.debug('[/assistants/chat/] response', {
+        run: response.run,
+        steps: response.steps,
+      });
+
+      if (response.run.status === RunStatus.CANCELLED) {
+        logger.debug('[/assistants/chat/] Run cancelled, handled by `abortRun`');
+        return res.end();
+      }
+
+      if (response.run.status === RunStatus.IN_PROGRESS) {
+        processRun(true);
      }

      completedRun = response.run;
      /** @type {ResponseMessage} */
      const responseMessage = {
-        ...openai.responseMessage,
+        ...response.finalMessage,
        parentMessageId: userMessageId,
        conversationId,
        user: req.user.id,
@@ -413,9 +507,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
        model: assistant_id,
      };

-      // TODO: token count from usage returned in run
-      // TODO: parse responses, save to db, send to user
-
      sendMessage(res, {
        title: 'New Chat',
        final: true,
@@ -432,7 +523,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
      if (parentMessageId === Constants.NO_PARENT && !_thread_id) {
        addTitle(req, {
          text,
-          responseText: openai.responseText,
+          responseText: response.text,
          conversationId,
          client,
        });
@@ -447,7 +538,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
      if (!response.run.usage) {
        await sleep(3000);
-        completedRun = await openai.beta.threads.runs.retrieve(thread_id, run.id);
+        completedRun = await openai.beta.threads.runs.retrieve(thread_id, response.run.id);
        if (completedRun.usage) {
          await recordUsage({
            ...completedRun.usage,
diff --git a/api/server/services/ActionService.js b/api/server/services/ActionService.js
index 7b3466239f3..22770f15500 100644
--- a/api/server/services/ActionService.js
+++ b/api/server/services/ActionService.js
@@ -1,8 +1,35 @@
-const { AuthTypeEnum } = require('librechat-data-provider');
+const { AuthTypeEnum, EModelEndpoint, actionDomainSeparator } = require('librechat-data-provider');
 const { encryptV2, decryptV2 } = require('~/server/utils/crypto');
 const { getActions } = require('~/models/Action');
 const { logger } = require('~/config');

+/**
+ * Parses the domain for an action.
+ *
+ * Azure OpenAI Assistants API doesn't support periods in function
+ * names due to `[a-zA-Z0-9_-]*` Regex Validation.
+ *
+ * @param {Express.Request} req - Express Request object
+ * @param {string} domain - The domain for the action
+ * @param {boolean} inverse - If true, replaces periods with `actionDomainSeparator`
+ * @returns {string | undefined} The parsed domain
+ */
+function domainParser(req, domain, inverse = false) {
+  if (!domain) {
+    return;
+  }
+
+  if (!req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
+    return domain;
+  }
+
+  if (inverse) {
+    return domain.replace(/\./g, actionDomainSeparator);
+  }
+
+  return domain.replace(actionDomainSeparator, '.');
+}
+
 /**
 * Loads action sets based on the user and assistant ID.
* @@ -117,4 +144,5 @@ module.exports = { createActionTool, encryptMetadata, decryptMetadata, + domainParser, }; diff --git a/api/server/services/AppService.js b/api/server/services/AppService.js index 16f6f541b36..8d3008a742a 100644 --- a/api/server/services/AppService.js +++ b/api/server/services/AppService.js @@ -1,6 +1,7 @@ const { Constants, FileSources, + Capabilities, EModelEndpoint, defaultSocialLogins, validateAzureGroups, @@ -122,6 +123,13 @@ const AppService = async (app) => { ); } }); + + if (azureConfiguration.assistants) { + endpointLocals[EModelEndpoint.assistants] = { + // Note: may need to add retrieval models here in the future + capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter], + }; + } } if (config?.endpoints?.[EModelEndpoint.assistants]) { @@ -133,8 +141,11 @@ const AppService = async (app) => { ); } + const prevConfig = endpointLocals[EModelEndpoint.assistants] ?? {}; + /** @type {Partial} */ endpointLocals[EModelEndpoint.assistants] = { + ...prevConfig, retrievalModels: parsedConfig.retrievalModels, disableBuilder: parsedConfig.disableBuilder, pollIntervalMs: parsedConfig.pollIntervalMs, diff --git a/api/server/services/AssistantService.js b/api/server/services/AssistantService.js index 509aa378e80..d4f7e82099f 100644 --- a/api/server/services/AssistantService.js +++ b/api/server/services/AssistantService.js @@ -4,18 +4,17 @@ const { StepTypes, RunStatus, StepStatus, - FilePurpose, ContentTypes, ToolCallTypes, - imageExtRegex, imageGenTools, EModelEndpoint, defaultOrderQuery, } = require('librechat-data-provider'); const { retrieveAndProcessFile } = require('~/server/services/Files/process'); -const { RunManager, waitForRun } = require('~/server/services/Runs'); const { processRequiredActions } = require('~/server/services/ToolService'); const { createOnProgress, sendMessage, sleep } = require('~/server/utils'); +const { RunManager, waitForRun } = require('~/server/services/Runs'); +const { processMessages } = require('~/server/services/Threads'); const { TextStream } = require('~/app/clients'); const { logger } = require('~/config'); @@ -230,6 +229,7 @@ function createInProgressHandler(openai, thread_id, messages) { const { file_id } = output.image; const file = await retrieveAndProcessFile({ openai, + client: openai, file_id, basename: `${file_id}.png`, }); @@ -299,7 +299,7 @@ function createInProgressHandler(openai, thread_id, messages) { openai.index++; } - const result = await processMessages(openai, [message]); + const result = await processMessages({ openai, client: openai, messages: [message] }); openai.addContentData({ [ContentTypes.TEXT]: { value: result.text }, type: ContentTypes.TEXT, @@ -318,8 +318,8 @@ function createInProgressHandler(openai, thread_id, messages) { res: openai.res, index: messageIndex, messageId: openai.responseMessage.messageId, + conversationId: openai.responseMessage.conversationId, type: ContentTypes.TEXT, - stream: true, thread_id, }); @@ -416,7 +416,13 @@ async function runAssistant({ // const { messages: sortedMessages, text } = await processMessages(openai, messages); // return { run, steps, messages: sortedMessages, text }; const sortedMessages = messages.sort((a, b) => a.created_at - b.created_at); - return { run, steps, messages: sortedMessages }; + return { + run, + steps, + messages: sortedMessages, + finalMessage: openai.responseMessage, + text: openai.responseText, + }; } const { submit_tool_outputs } = run.required_action; @@ -447,98 +453,8 @@ async function runAssistant({ }); } -/** 
- * Sorts, processes, and flattens messages to a single string. - * - * @param {OpenAIClient} openai - The OpenAI client instance. - * @param {ThreadMessage[]} messages - An array of messages. - * @returns {Promise<{messages: ThreadMessage[], text: string}>} The sorted messages and the flattened text. - */ -async function processMessages(openai, messages = []) { - const sorted = messages.sort((a, b) => a.created_at - b.created_at); - - let text = ''; - for (const message of sorted) { - message.files = []; - for (const content of message.content) { - const processImageFile = - content.type === 'image_file' && !openai.processedFileIds.has(content.image_file?.file_id); - if (processImageFile) { - const { file_id } = content.image_file; - - const file = await retrieveAndProcessFile({ openai, file_id, basename: `${file_id}.png` }); - openai.processedFileIds.add(file_id); - message.files.push(file); - continue; - } - - text += (content.text?.value ?? '') + ' '; - logger.debug('[processMessages] Processing message:', { value: text }); - - // Process annotations if they exist - if (!content.text?.annotations?.length) { - continue; - } - - logger.debug('[processMessages] Processing annotations:', content.text.annotations); - for (const annotation of content.text.annotations) { - logger.debug('Current annotation:', annotation); - let file; - const processFilePath = - annotation.file_path && !openai.processedFileIds.has(annotation.file_path?.file_id); - - if (processFilePath) { - const basename = imageExtRegex.test(annotation.text) - ? path.basename(annotation.text) - : null; - file = await retrieveAndProcessFile({ - openai, - file_id: annotation.file_path.file_id, - basename, - }); - openai.processedFileIds.add(annotation.file_path.file_id); - } - - const processFileCitation = - annotation.file_citation && - !openai.processedFileIds.has(annotation.file_citation?.file_id); - - if (processFileCitation) { - file = await retrieveAndProcessFile({ - openai, - file_id: annotation.file_citation.file_id, - unknownType: true, - }); - openai.processedFileIds.add(annotation.file_citation.file_id); - } - - if (!file && (annotation.file_path || annotation.file_citation)) { - const { file_id } = annotation.file_citation || annotation.file_path || {}; - file = await retrieveAndProcessFile({ openai, file_id, unknownType: true }); - openai.processedFileIds.add(file_id); - } - - if (!file) { - continue; - } - - if (file.purpose && file.purpose === FilePurpose.Assistants) { - text = text.replace(annotation.text, file.filename); - } else if (file.filepath) { - text = text.replace(annotation.text, file.filepath); - } - - message.files.push(file); - } - } - } - - return { messages: sorted, text }; -} - module.exports = { getResponse, runAssistant, - processMessages, createOnTextProgress, }; diff --git a/api/server/services/Files/process.js b/api/server/services/Files/process.js index 8b6cfb7a746..69f8011dfc7 100644 --- a/api/server/services/Files/process.js +++ b/api/server/services/Files/process.js @@ -338,19 +338,26 @@ const processFileUpload = async ({ req, res, file, metadata }) => { * Retrieves and processes an OpenAI file based on its type. * * @param {Object} params - The params passed to the function. - * @param {OpenAIClient} params.openai - The params passed to the function. + * @param {OpenAIClient} params.openai - The OpenAI client instance. + * @param {RunClient} params.client - The LibreChat client instance: either refers to `openai` or `streamRunManager`. 
 * @param {string} params.file_id - The ID of the file to retrieve.
 * @param {string} params.basename - The basename of the file (if image); e.g., 'image.jpg'.
 * @param {boolean} [params.unknownType] - Whether the file type is unknown.
 * @returns {Promise<{file_id: string, filepath: string, source: string, bytes?: number, width?: number, height?: number} | null>}
 * - Returns null if `file_id` is not defined; else, the file metadata if successfully retrieved and processed.
 */
-async function retrieveAndProcessFile({ openai, file_id, basename: _basename, unknownType }) {
+async function retrieveAndProcessFile({
+  openai,
+  client,
+  file_id,
+  basename: _basename,
+  unknownType,
+}) {
   if (!file_id) {
     return null;
   }

-  if (openai.attachedFileIds?.has(file_id)) {
+  if (client.attachedFileIds?.has(file_id)) {
     return {
       file_id,
       // filepath: TODO: local source filepath?,
@@ -416,7 +423,7 @@ async function retrieveAndProcessFile({ openai, file_id, basename: _basename, un
   */
   const processAsImage = async (dataBuffer, fileExt) => {
     // Logic to process image files, convert to webp, etc.
-    const _file = await convertToWebP(openai.req, dataBuffer, 'high', `${file_id}${fileExt}`);
+    const _file = await convertToWebP(client.req, dataBuffer, 'high', `${file_id}${fileExt}`);
     const file = {
       ..._file,
       type: 'image/webp',
diff --git a/api/server/services/Runs/StreamRunManager.js b/api/server/services/Runs/StreamRunManager.js
new file mode 100644
index 00000000000..fe9e8da737c
--- /dev/null
+++ b/api/server/services/Runs/StreamRunManager.js
@@ -0,0 +1,618 @@
+const path = require('path');
+const {
+  StepTypes,
+  ContentTypes,
+  ToolCallTypes,
+  // StepStatus,
+  MessageContentTypes,
+  AssistantStreamEvents,
+} = require('librechat-data-provider');
+const { retrieveAndProcessFile } = require('~/server/services/Files/process');
+const { processRequiredActions } = require('~/server/services/ToolService');
+const { createOnProgress, sendMessage } = require('~/server/utils');
+const { processMessages } = require('~/server/services/Threads');
+const { logger } = require('~/config');
+
+/**
+ * Implements the StreamRunManager functionality for managing the streaming
+ * and processing of run steps, messages, and tool calls within a thread.
+ * @implements {StreamRunManager}
+ */
+class StreamRunManager {
+  constructor(fields) {
+    this.index = 0;
+    /** @type {Map<string, RunStep>} */
+    this.steps = new Map();
+
+    /** @type {Map<string, number>} */
+    this.mappedOrder = new Map();
+    /** @type {Map<number, StepToolCall | RunStep | MessageCreationStepDetails>} */
+    this.orderedRunSteps = new Map();
+    /** @type {Set<string>} */
+    this.processedFileIds = new Set();
+    /** @type {Map<string, Function>} */
+    this.progressCallbacks = new Map();
+    /** @type {Run | null} */
+    this.run = null;
+
+    /** @type {Express.Request} */
+    this.req = fields.req;
+    /** @type {Express.Response} */
+    this.res = fields.res;
+    /** @type {OpenAI} */
+    this.openai = fields.openai;
+    /** @type {string} */
+    this.apiKey = this.openai.apiKey;
+    /** @type {string} */
+    this.thread_id = fields.thread_id;
+    /** @type {RunCreateAndStreamParams} */
+    this.initialRunBody = fields.runBody;
+    /**
+     * @type {Object.<string, (event: AssistantStreamEvent) => Promise<void>>}
+     */
+    this.clientHandlers = fields.handlers ?? {};
+    /** @type {OpenAIRequestOptions} */
+    this.streamOptions = fields.streamOptions ?? {};
+    /** @type {Partial<ResponseMessage>} */
+    this.finalMessage = fields.responseMessage ?? {};
+    /** @type {ThreadMessage[]} */
+    this.messages = [];
+    /** @type {string} */
+    this.text = '';
+
+    /**
+     * @type {Object.
Promise>} + */ + this.handlers = { + [AssistantStreamEvents.ThreadCreated]: this.handleThreadCreated, + [AssistantStreamEvents.ThreadRunCreated]: this.handleRunEvent, + [AssistantStreamEvents.ThreadRunQueued]: this.handleRunEvent, + [AssistantStreamEvents.ThreadRunInProgress]: this.handleRunEvent, + [AssistantStreamEvents.ThreadRunRequiresAction]: this.handleRunEvent, + [AssistantStreamEvents.ThreadRunCompleted]: this.handleRunEvent, + [AssistantStreamEvents.ThreadRunFailed]: this.handleRunEvent, + [AssistantStreamEvents.ThreadRunCancelling]: this.handleRunEvent, + [AssistantStreamEvents.ThreadRunCancelled]: this.handleRunEvent, + [AssistantStreamEvents.ThreadRunExpired]: this.handleRunEvent, + [AssistantStreamEvents.ThreadRunStepCreated]: this.handleRunStepEvent, + [AssistantStreamEvents.ThreadRunStepInProgress]: this.handleRunStepEvent, + [AssistantStreamEvents.ThreadRunStepCompleted]: this.handleRunStepEvent, + [AssistantStreamEvents.ThreadRunStepFailed]: this.handleRunStepEvent, + [AssistantStreamEvents.ThreadRunStepCancelled]: this.handleRunStepEvent, + [AssistantStreamEvents.ThreadRunStepExpired]: this.handleRunStepEvent, + [AssistantStreamEvents.ThreadRunStepDelta]: this.handleRunStepDeltaEvent, + [AssistantStreamEvents.ThreadMessageCreated]: this.handleMessageEvent, + [AssistantStreamEvents.ThreadMessageInProgress]: this.handleMessageEvent, + [AssistantStreamEvents.ThreadMessageCompleted]: this.handleMessageEvent, + [AssistantStreamEvents.ThreadMessageIncomplete]: this.handleMessageEvent, + [AssistantStreamEvents.ThreadMessageDelta]: this.handleMessageDeltaEvent, + [AssistantStreamEvents.ErrorEvent]: this.handleErrorEvent, + }; + } + + /** + * + * Sends the content data to the client via SSE. + * + * @param {StreamContentData} data + * @returns {Promise} + */ + async addContentData(data) { + const { type, index } = data; + this.finalMessage.content[index] = { type, [type]: data[type] }; + + if (type === ContentTypes.TEXT) { + this.text += data[type].value; + return; + } + + const contentData = { + index, + type, + [type]: data[type], + thread_id: this.thread_id, + messageId: this.finalMessage.messageId, + conversationId: this.finalMessage.conversationId, + }; + + sendMessage(this.res, contentData); + } + + /* <------------------ Main Event Handlers ------------------> */ + + /** + * Run the assistant and handle the events. + * @param {Object} params - + * The parameters for running the assistant. + * @param {string} params.thread_id - The thread id. + * @param {RunCreateAndStreamParams} params.body - The body of the run. + * @returns {Promise} + */ + async runAssistant({ thread_id, body }) { + const streamRun = this.openai.beta.threads.runs.createAndStream( + thread_id, + body, + this.streamOptions, + ); + for await (const event of streamRun) { + await this.handleEvent(event); + } + } + + /** + * Handle the event. + * @param {AssistantStreamEvent} event - The stream event object. + * @returns {Promise} + */ + async handleEvent(event) { + const handler = this.handlers[event.event]; + const clientHandler = this.clientHandlers[event.event]; + + if (clientHandler) { + await clientHandler.call(this, event); + } + + if (handler) { + await handler.call(this, event); + } else { + logger.warn(`Unhandled event type: ${event.event}`); + } + } + + /** + * Handle thread.created event + * @param {ThreadCreated} event - + * The thread.created event object. 
+ */ + async handleThreadCreated(event) { + logger.debug('Thread created:', event.data); + } + + /** + * Handle Run Events + * @param {ThreadRunCreated | ThreadRunQueued | ThreadRunInProgress | ThreadRunRequiresAction | ThreadRunCompleted | ThreadRunFailed | ThreadRunCancelling | ThreadRunCancelled | ThreadRunExpired} event - + * The run event object. + */ + async handleRunEvent(event) { + this.run = event.data; + logger.debug('Run event:', this.run); + if (event.event === AssistantStreamEvents.ThreadRunRequiresAction) { + await this.onRunRequiresAction(event); + } else if (event.event === AssistantStreamEvents.ThreadRunCompleted) { + logger.debug('Run completed:', this.run); + } + } + + /** + * Handle Run Step Events + * @param {ThreadRunStepCreated | ThreadRunStepInProgress | ThreadRunStepCompleted | ThreadRunStepFailed | ThreadRunStepCancelled | ThreadRunStepExpired} event - + * The run step event object. + */ + async handleRunStepEvent(event) { + logger.debug('Run step event:', event.data); + + const step = event.data; + this.steps.set(step.id, step); + + if (event.event === AssistantStreamEvents.ThreadRunStepCreated) { + this.onRunStepCreated(event); + } else if (event.event === AssistantStreamEvents.ThreadRunStepCompleted) { + this.onRunStepCompleted(event); + } + } + + /* <------------------ Delta Events ------------------> */ + + /** @param {CodeImageOutput} */ + async handleCodeImageOutput(output) { + if (this.processedFileIds.has(output.image?.file_id)) { + return; + } + + const { file_id } = output.image; + const file = await retrieveAndProcessFile({ + openai: this.openai, + client: this, + file_id, + basename: `${file_id}.png`, + }); + // toolCall.asset_pointer = file.filepath; + const prelimImage = { + file_id, + filename: path.basename(file.filepath), + filepath: file.filepath, + height: file.height, + width: file.width, + }; + // check if every key has a value before adding to content + const prelimImageKeys = Object.keys(prelimImage); + const validImageFile = prelimImageKeys.every((key) => prelimImage[key]); + + if (!validImageFile) { + return; + } + + const index = this.getStepIndex(file_id); + const image_file = { + [ContentTypes.IMAGE_FILE]: prelimImage, + type: ContentTypes.IMAGE_FILE, + index, + }; + this.addContentData(image_file); + this.processedFileIds.add(file_id); + } + + /** + * Create Tool Call Stream + * @param {number} index - The index of the tool call. + * @param {StepToolCall} toolCall - + * The current tool call object. 
+ */ + createToolCallStream(index, toolCall) { + /** @type {StepToolCall} */ + const state = toolCall; + const type = state.type; + const data = state[type]; + + /** @param {ToolCallDelta} */ + const deltaHandler = async (delta) => { + for (const key in delta) { + if (!Object.prototype.hasOwnProperty.call(data, key)) { + logger.warn(`Unhandled tool call key "${key}", delta: `, delta); + continue; + } + + if (Array.isArray(delta[key])) { + if (!Array.isArray(data[key])) { + data[key] = []; + } + + for (const d of delta[key]) { + if (typeof d === 'object' && !Object.prototype.hasOwnProperty.call(d, 'index')) { + logger.warn('Expected an object with an \'index\' for array updates but got:', d); + continue; + } + + const imageOutput = type === ToolCallTypes.CODE_INTERPRETER && d?.type === 'image'; + + if (imageOutput) { + await this.handleCodeImageOutput(d); + continue; + } + + const { index, ...updateData } = d; + // Ensure the data at index is an object or undefined before assigning + if (typeof data[key][index] !== 'object' || data[key][index] === null) { + data[key][index] = {}; + } + // Merge the updateData into data[key][index] + for (const updateKey in updateData) { + data[key][index][updateKey] = updateData[updateKey]; + } + } + } else if (typeof delta[key] === 'string' && typeof data[key] === 'string') { + // Concatenate strings + data[key] += delta[key]; + } else if ( + typeof delta[key] === 'object' && + delta[key] !== null && + !Array.isArray(delta[key]) + ) { + // Merge objects + data[key] = { ...data[key], ...delta[key] }; + } else { + // Directly set the value for other types + data[key] = delta[key]; + } + + state[type] = data; + + this.addContentData({ + [ContentTypes.TOOL_CALL]: toolCall, + type: ContentTypes.TOOL_CALL, + index, + }); + } + }; + + return deltaHandler; + } + + /** + * @param {string} stepId - + * @param {StepToolCall} toolCall - + * + */ + handleNewToolCall(stepId, toolCall) { + const stepKey = this.generateToolCallKey(stepId, toolCall); + const index = this.getStepIndex(stepKey); + this.getStepIndex(toolCall.id, index); + toolCall.progress = 0.01; + this.orderedRunSteps.set(index, toolCall); + const progressCallback = this.createToolCallStream(index, toolCall); + this.progressCallbacks.set(stepKey, progressCallback); + + this.addContentData({ + [ContentTypes.TOOL_CALL]: toolCall, + type: ContentTypes.TOOL_CALL, + index, + }); + } + + /** + * Handle Completed Tool Call + * @param {string} stepId - The id of the step the tool_call is part of. + * @param {StepToolCall} toolCall - The tool call object. + * + */ + handleCompletedToolCall(stepId, toolCall) { + if (toolCall.type === ToolCallTypes.FUNCTION) { + return; + } + + const stepKey = this.generateToolCallKey(stepId, toolCall); + const index = this.getStepIndex(stepKey); + toolCall.progress = 1; + this.orderedRunSteps.set(index, toolCall); + this.addContentData({ + [ContentTypes.TOOL_CALL]: toolCall, + type: ContentTypes.TOOL_CALL, + index, + }); + } + + /** + * Handle Run Step Delta Event + * @param {ThreadRunStepDelta} event - + * The run step delta event object. 
+ */ + async handleRunStepDeltaEvent(event) { + const { delta, id: stepId } = event.data; + + if (!delta.step_details) { + logger.warn('Undefined or unhandled run step delta:', delta); + return; + } + + /** @type {{ tool_calls: Array }} */ + const { tool_calls } = delta.step_details; + + if (!tool_calls) { + logger.warn('Unhandled run step details', delta.step_details); + return; + } + + for (const toolCall of tool_calls) { + const stepKey = this.generateToolCallKey(stepId, toolCall); + + if (!this.mappedOrder.has(stepKey)) { + this.handleNewToolCall(stepId, toolCall); + continue; + } + + const toolCallDelta = toolCall[toolCall.type]; + const progressCallback = this.progressCallbacks.get(stepKey); + await progressCallback(toolCallDelta); + } + } + + /** + * Handle Message Delta Event + * @param {ThreadMessageDelta} event - + * The Message Delta event object. + */ + async handleMessageDeltaEvent(event) { + const message = event.data; + const onProgress = this.progressCallbacks.get(message.id); + const content = message.delta.content?.[0]; + + if (content && content.type === MessageContentTypes.TEXT) { + onProgress(content.text.value); + } + } + + /** + * Handle Error Event + * @param {ErrorEvent} event - + * The Error event object. + */ + async handleErrorEvent(event) { + logger.error('Error event:', event.data); + } + + /* <------------------ Misc. Helpers ------------------> */ + + /** + * Gets the step index for a given step key, creating a new index if it doesn't exist. + * @param {string} stepKey - + * The access key for the step. Either a message.id, tool_call key, or file_id. + * @param {number | undefined} [overrideIndex] - An override index to use an alternative stepKey. + * This is necessary due to the toolCall Id being unavailable in delta stream events. + * @returns {number | undefined} index - The index of the step; `undefined` if invalid key or using overrideIndex. + */ + getStepIndex(stepKey, overrideIndex) { + if (!stepKey) { + return; + } + + if (!isNaN(overrideIndex)) { + this.mappedOrder.set(stepKey, overrideIndex); + return; + } + + let index = this.mappedOrder.get(stepKey); + + if (index === undefined) { + index = this.index; + this.mappedOrder.set(stepKey, this.index); + this.index++; + } + + return index; + } + + /** + * Generate Tool Call Key + * @param {string} stepId - The id of the step the tool_call is part of. + * @param {StepToolCall} toolCall - The tool call object. + * @returns {string} key - The generated key for the tool call. + */ + generateToolCallKey(stepId, toolCall) { + return `${stepId}_tool_call_${toolCall.index}_${toolCall.type}`; + } + + /* <------------------ Run Event handlers ------------------> */ + + /** + * Handle Run Events Requiring Action + * @param {ThreadRunRequiresAction} event - + * The run event object requiring action. 
+ */ + async onRunRequiresAction(event) { + const run = event.data; + const { submit_tool_outputs } = run.required_action; + const actions = submit_tool_outputs.tool_calls.map((item) => { + const functionCall = item.function; + const args = JSON.parse(functionCall.arguments); + return { + tool: functionCall.name, + toolInput: args, + toolCallId: item.id, + run_id: run.id, + thread_id: this.thread_id, + }; + }); + + const { tool_outputs } = await processRequiredActions(this, actions); + /** @type {AssistantStream | undefined} */ + let toolRun; + try { + toolRun = this.openai.beta.threads.runs.submitToolOutputsStream( + run.thread_id, + run.id, + { + tool_outputs, + stream: true, + }, + this.streamOptions, + ); + } catch (error) { + logger.error('Error submitting tool outputs:', error); + throw error; + } + + for await (const event of toolRun) { + await this.handleEvent(event); + } + } + + /* <------------------ RunStep Event handlers ------------------> */ + + /** + * Handle Run Step Created Events + * @param {ThreadRunStepCreated} event - + * The created run step event object. + */ + async onRunStepCreated(event) { + const step = event.data; + const isMessage = step.type === StepTypes.MESSAGE_CREATION; + + if (isMessage) { + /** @type {MessageCreationStepDetails} */ + const { message_creation } = step.step_details; + const stepKey = message_creation.message_id; + const index = this.getStepIndex(stepKey); + this.orderedRunSteps.set(index, message_creation); + // Create the Factory Function to stream the message + const { onProgress: progressCallback } = createOnProgress({ + // todo: add option to save partialText to db + // onProgress: () => {}, + }); + + // This creates a function that attaches all of the parameters + // specified here to each SSE message generated by the TextStream + const onProgress = progressCallback({ + index, + res: this.res, + messageId: this.finalMessage.messageId, + conversationId: this.finalMessage.conversationId, + thread_id: this.thread_id, + type: ContentTypes.TEXT, + }); + + this.progressCallbacks.set(stepKey, onProgress); + this.orderedRunSteps.set(index, step); + return; + } + + if (step.type !== StepTypes.TOOL_CALLS) { + logger.warn('Unhandled step creation type:', step.type); + return; + } + + /** @type {{ tool_calls: StepToolCall[] }} */ + const { tool_calls } = step.step_details; + for (const toolCall of tool_calls) { + this.handleNewToolCall(step.id, toolCall); + } + } + + /** + * Handle Run Step Completed Events + * @param {ThreadRunStepCompleted} event - + * The completed run step event object. + */ + async onRunStepCompleted(event) { + const step = event.data; + const isMessage = step.type === StepTypes.MESSAGE_CREATION; + + if (isMessage) { + logger.warn('RunStep Message completion: to be handled by Message Event.', step); + return; + } + + /** @type {{ tool_calls: StepToolCall[] }} */ + const { tool_calls } = step.step_details; + for (let i = 0; i < tool_calls.length; i++) { + const toolCall = tool_calls[i]; + toolCall.index = i; + this.handleCompletedToolCall(step.id, toolCall); + } + } + + /* <------------------ Message Event handlers ------------------> */ + + /** + * Handle Message Event + * @param {ThreadMessageCreated | ThreadMessageInProgress | ThreadMessageCompleted | ThreadMessageIncomplete} event - + * The Message event object. 
+ */ + async handleMessageEvent(event) { + if (event.event === AssistantStreamEvents.ThreadMessageCompleted) { + this.messageCompleted(event); + } + } + + /** + * Handle Message Completed Events + * @param {ThreadMessageCompleted} event - + * The Completed Message event object. + */ + async messageCompleted(event) { + const message = event.data; + const result = await processMessages({ + openai: this.openai, + client: this, + messages: [message], + }); + const index = this.mappedOrder.get(message.id); + this.addContentData({ + [ContentTypes.TEXT]: { value: result.text }, + type: ContentTypes.TEXT, + index, + }); + this.messages.push(message); + } +} + +module.exports = StreamRunManager; diff --git a/api/server/services/Runs/index.js b/api/server/services/Runs/index.js index 2cb06d46772..7327b271ff9 100644 --- a/api/server/services/Runs/index.js +++ b/api/server/services/Runs/index.js @@ -1,9 +1,11 @@ const handle = require('./handle'); const methods = require('./methods'); const RunManager = require('./RunManager'); +const StreamRunManager = require('./StreamRunManager'); module.exports = { ...handle, ...methods, RunManager, + StreamRunManager, }; diff --git a/api/server/services/Threads/manage.js b/api/server/services/Threads/manage.js index 125277860ca..12386b60a36 100644 --- a/api/server/services/Threads/manage.js +++ b/api/server/services/Threads/manage.js @@ -1,14 +1,19 @@ +const path = require('path'); const { v4 } = require('uuid'); const { - EModelEndpoint, Constants, - defaultOrderQuery, + FilePurpose, ContentTypes, + imageExtRegex, + EModelEndpoint, + defaultOrderQuery, } = require('librechat-data-provider'); +const { retrieveAndProcessFile } = require('~/server/services/Files/process'); const { recordMessage, getMessages } = require('~/models/Message'); const { saveConvo } = require('~/models/Conversation'); const spendTokens = require('~/models/spendTokens'); const { countTokens } = require('~/server/utils'); +const { logger } = require('~/config'); /** * Initializes a new thread or adds messages to an existing thread. @@ -484,9 +489,108 @@ const recordUsage = async ({ prompt_tokens, completion_tokens, model, user, conv ); }; +/** + * Sorts, processes, and flattens messages to a single string. + * + * @param {object} params - The OpenAI client instance. + * @param {OpenAIClient} params.openai - The OpenAI client instance. + * @param {RunClient} params.client - The LibreChat client that manages the run: either refers to `OpenAI` or `StreamRunManager`. + * @param {ThreadMessage[]} params.messages - An array of messages. + * @returns {Promise<{messages: ThreadMessage[], text: string}>} The sorted messages and the flattened text. + */ +async function processMessages({ openai, client, messages = [] }) { + const sorted = messages.sort((a, b) => a.created_at - b.created_at); + + let text = ''; + for (const message of sorted) { + message.files = []; + for (const content of message.content) { + const processImageFile = + content.type === 'image_file' && !client.processedFileIds.has(content.image_file?.file_id); + if (processImageFile) { + const { file_id } = content.image_file; + + const file = await retrieveAndProcessFile({ + openai, + client, + file_id, + basename: `${file_id}.png`, + }); + client.processedFileIds.add(file_id); + message.files.push(file); + continue; + } + + text += (content.text?.value ?? 
'') + ' '; + logger.debug('[processMessages] Processing message:', { value: text }); + + // Process annotations if they exist + if (!content.text?.annotations?.length) { + continue; + } + + logger.debug('[processMessages] Processing annotations:', content.text.annotations); + for (const annotation of content.text.annotations) { + logger.debug('Current annotation:', annotation); + let file; + const processFilePath = + annotation.file_path && !client.processedFileIds.has(annotation.file_path?.file_id); + + if (processFilePath) { + const basename = imageExtRegex.test(annotation.text) + ? path.basename(annotation.text) + : null; + file = await retrieveAndProcessFile({ + openai, + client, + file_id: annotation.file_path.file_id, + basename, + }); + client.processedFileIds.add(annotation.file_path.file_id); + } + + const processFileCitation = + annotation.file_citation && + !client.processedFileIds.has(annotation.file_citation?.file_id); + + if (processFileCitation) { + file = await retrieveAndProcessFile({ + openai, + client, + file_id: annotation.file_citation.file_id, + unknownType: true, + }); + client.processedFileIds.add(annotation.file_citation.file_id); + } + + if (!file && (annotation.file_path || annotation.file_citation)) { + const { file_id } = annotation.file_citation || annotation.file_path || {}; + file = await retrieveAndProcessFile({ openai, client, file_id, unknownType: true }); + client.processedFileIds.add(file_id); + } + + if (!file) { + continue; + } + + if (file.purpose && file.purpose === FilePurpose.Assistants) { + text = text.replace(annotation.text, file.filename); + } else if (file.filepath) { + text = text.replace(annotation.text, file.filepath); + } + + message.files.push(file); + } + } + } + + return { messages: sorted, text }; +} + module.exports = { initThread, recordUsage, + processMessages, saveUserMessage, checkMessageGaps, addThreadMetadata, diff --git a/api/server/services/ToolService.js b/api/server/services/ToolService.js index ec57a224f52..369ae37f7a5 100644 --- a/api/server/services/ToolService.js +++ b/api/server/services/ToolService.js @@ -10,7 +10,7 @@ const { validateAndParseOpenAPISpec, actionDelimiter, } = require('librechat-data-provider'); -const { loadActionSets, createActionTool } = require('./ActionService'); +const { loadActionSets, createActionTool, domainParser } = require('./ActionService'); const { processFileURL } = require('~/server/services/Files/process'); const { loadTools } = require('~/app/clients/tools/util'); const { redactMessage } = require('~/config/parsers'); @@ -112,26 +112,26 @@ function formatToOpenAIAssistantTool(tool) { /** * Processes return required actions from run. * - * @param {OpenAIClient} openai - OpenAI Client. + * @param {OpenAIClient} client - OpenAI or StreamRunManager Client. * @param {RequiredAction[]} requiredActions - The required actions to submit outputs for. * @returns {Promise} The outputs of the tools. * */ -async function processRequiredActions(openai, requiredActions) { +async function processRequiredActions(client, requiredActions) { logger.debug( - `[required actions] user: ${openai.req.user.id} | thread_id: ${requiredActions[0].thread_id} | run_id: ${requiredActions[0].run_id}`, + `[required actions] user: ${client.req.user.id} | thread_id: ${requiredActions[0].thread_id} | run_id: ${requiredActions[0].run_id}`, requiredActions, ); const tools = requiredActions.map((action) => action.tool); const loadedTools = await loadTools({ - user: openai.req.user.id, - model: openai.req.body.model ?? 
'gpt-3.5-turbo-1106', + user: client.req.user.id, + model: client.req.body.model ?? 'gpt-3.5-turbo-1106', tools, functions: true, options: { processFileURL, - openAIApiKey: openai.apiKey, - fileStrategy: openai.req.app.locals.fileStrategy, + openAIApiKey: client.apiKey, + fileStrategy: client.req.app.locals.fileStrategy, returnMetadata: true, }, skipSpecs: true, @@ -170,14 +170,14 @@ async function processRequiredActions(openai, requiredActions) { action: isActionTool, }; - const toolCallIndex = openai.mappedOrder.get(toolCall.id); + const toolCallIndex = client.mappedOrder.get(toolCall.id); if (imageGenTools.has(currentAction.tool)) { const imageOutput = output; toolCall.function.output = `${currentAction.tool} displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.`; // Streams the "Finished" state of the tool call in the UI - openai.addContentData({ + client.addContentData({ [ContentTypes.TOOL_CALL]: toolCall, index: toolCallIndex, type: ContentTypes.TOOL_CALL, @@ -198,10 +198,10 @@ async function processRequiredActions(openai, requiredActions) { index: toolCallIndex, }; - openai.addContentData(image_file); + client.addContentData(image_file); // Update the stored tool call - openai.seenToolCalls.set(toolCall.id, toolCall); + client.seenToolCalls && client.seenToolCalls.set(toolCall.id, toolCall); return { tool_call_id: currentAction.toolCallId, @@ -209,8 +209,8 @@ async function processRequiredActions(openai, requiredActions) { }; } - openai.seenToolCalls.set(toolCall.id, toolCall); - openai.addContentData({ + client.seenToolCalls && client.seenToolCalls.set(toolCall.id, toolCall); + client.addContentData({ [ContentTypes.TOOL_CALL]: toolCall, index: toolCallIndex, type: ContentTypes.TOOL_CALL, @@ -230,13 +230,13 @@ async function processRequiredActions(openai, requiredActions) { if (!actionSets.length) { actionSets = (await loadActionSets({ - user: openai.req.user.id, - assistant_id: openai.req.body.assistant_id, + user: client.req.user.id, + assistant_id: client.req.body.assistant_id, })) ?? 
[]; } const actionSet = actionSets.find((action) => - currentAction.tool.includes(action.metadata.domain), + currentAction.tool.includes(domainParser(client.req, action.metadata.domain, true)), ); if (!actionSet) { @@ -251,7 +251,7 @@ async function processRequiredActions(openai, requiredActions) { const validationResult = validateAndParseOpenAPISpec(actionSet.metadata.raw_spec); if (!validationResult.spec) { throw new Error( - `Invalid spec: user: ${openai.req.user.id} | thread_id: ${requiredActions[0].thread_id} | run_id: ${requiredActions[0].run_id}`, + `Invalid spec: user: ${client.req.user.id} | thread_id: ${requiredActions[0].thread_id} | run_id: ${requiredActions[0].run_id}`, ); } const { requestBuilders } = openapiToFunction(validationResult.spec); @@ -260,7 +260,7 @@ async function processRequiredActions(openai, requiredActions) { } const functionName = currentAction.tool.replace( - `${actionDelimiter}${actionSet.metadata.domain}`, + `${actionDelimiter}${domainParser(client.req, actionSet.metadata.domain, true)}`, '', ); const requestBuilder = builders[functionName]; diff --git a/api/typedefs.js b/api/typedefs.js index eb0f450d428..01ae1b9d679 100644 --- a/api/typedefs.js +++ b/api/typedefs.js @@ -8,6 +8,180 @@ * @memberof typedefs */ +/** + * @exports AssistantStreamEvent + * @typedef {import('openai').default.Beta.AssistantStreamEvent} AssistantStreamEvent + * @memberof typedefs + */ + +/** + * @exports AssistantStream + * @typedef {AsyncIterable} AssistantStream + * @memberof typedefs + */ + +/** + * @exports RunCreateAndStreamParams + * @typedef {import('openai').OpenAI.Beta.Threads.RunCreateAndStreamParams} RunCreateAndStreamParams + * @memberof typedefs + */ + +/** + * @exports OpenAIRequestOptions + * @typedef {import('openai').OpenAI.RequestOptions} OpenAIRequestOptions + * @memberof typedefs + */ + +/** + * @exports ThreadCreated + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadCreated} ThreadCreated + * @memberof typedefs + */ + +/** + * @exports ThreadRunCreated + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunCreated} ThreadRunCreated + * @memberof typedefs + */ + +/** + * @exports ThreadRunQueued + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunQueued} ThreadRunQueued + * @memberof typedefs + */ + +/** + * @exports ThreadRunInProgress + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunInProgress} ThreadRunInProgress + * @memberof typedefs + */ + +/** + * @exports ThreadRunRequiresAction + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunRequiresAction} ThreadRunRequiresAction + * @memberof typedefs + */ + +/** + * @exports ThreadRunCompleted + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunCompleted} ThreadRunCompleted + * @memberof typedefs + */ + +/** + * @exports ThreadRunFailed + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunFailed} ThreadRunFailed + * @memberof typedefs + */ + +/** + * @exports ThreadRunCancelling + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunCancelling} ThreadRunCancelling + * @memberof typedefs + */ + +/** + * @exports ThreadRunCancelled + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunCancelled} ThreadRunCancelled + * @memberof typedefs + */ + +/** + * @exports ThreadRunExpired + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunExpired} ThreadRunExpired + * @memberof typedefs + */ + +/** + * @exports 
ThreadRunStepCreated + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunStepCreated} ThreadRunStepCreated + * @memberof typedefs + */ + +/** + * @exports ThreadRunStepInProgress + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunStepInProgress} ThreadRunStepInProgress + * @memberof typedefs + */ + +/** + * @exports ThreadRunStepDelta + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunStepDelta} ThreadRunStepDelta + * @memberof typedefs + */ + +/** + * @exports ThreadRunStepCompleted + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunStepCompleted} ThreadRunStepCompleted + * @memberof typedefs + */ + +/** + * @exports ThreadRunStepFailed + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunStepFailed} ThreadRunStepFailed + * @memberof typedefs + */ + +/** + * @exports ThreadRunStepCancelled + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunStepCancelled} ThreadRunStepCancelled + * @memberof typedefs + */ + +/** + * @exports ThreadRunStepExpired + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadRunStepExpired} ThreadRunStepExpired + * @memberof typedefs + */ + +/** + * @exports ThreadMessageCreated + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadMessageCreated} ThreadMessageCreated + * @memberof typedefs + */ + +/** + * @exports ThreadMessageInProgress + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadMessageInProgress} ThreadMessageInProgress + * @memberof typedefs + */ + +/** + * @exports ThreadMessageDelta + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadMessageDelta} ThreadMessageDelta + * @memberof typedefs + */ + +/** + * @exports ThreadMessageCompleted + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadMessageCompleted} ThreadMessageCompleted + * @memberof typedefs + */ + +/** + * @exports ThreadMessageIncomplete + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ThreadMessageIncomplete} ThreadMessageIncomplete + * @memberof typedefs + */ + +/** + * @exports ErrorEvent + * @typedef {import('openai').default.Beta.AssistantStreamEvent.ErrorEvent} ErrorEvent + * @memberof typedefs + */ + +/** + * @exports ToolCallDeltaObject + * @typedef {import('openai').default.Beta.Threads.Runs.Steps.ToolCallDeltaObject} ToolCallDeltaObject + * @memberof typedefs + */ + +/** + * @exports ToolCallDelta + * @typedef {import('openai').default.Beta.Threads.Runs.Steps.ToolCallDelta} ToolCallDelta + * @memberof typedefs + */ + /** * @exports Assistant * @typedef {import('librechat-data-provider').Assistant} Assistant @@ -109,6 +283,18 @@ * @memberof typedefs */ +/** + * @exports TMessageContentParts + * @typedef {import('librechat-data-provider').TMessageContentParts} TMessageContentParts + * @memberof typedefs + */ + +/** + * @exports StreamContentData + * @typedef {import('librechat-data-provider').StreamContentData} StreamContentData + * @memberof typedefs + */ + /** * @exports ActionRequest * @typedef {import('librechat-data-provider').ActionRequest} ActionRequest @@ -698,6 +884,8 @@ * @property {Run} run - The detailed information about the run. * @property {RunStep[]} steps - An array of steps taken during the run. * @property {StepMessage[]} messages - An array of messages related to the run. + * @property {ResponseMessage} finalMessage - The final response message, with all content parts. 
+ * @property {string} text - The final response text, accumulated from message parts * @memberof typedefs */ @@ -726,7 +914,7 @@ // */ /** - * @typedef {Object} OpenAIClientType + * @typedef {Object} RunClient * * @property {Express.Request} req - The Express request object. * @property {Express.Response} res - The Express response object. @@ -754,7 +942,7 @@ * * @property {ResponseMessage} responseMessage - A message object for responses. * - * @typedef {OpenAI & OpenAIClientType} OpenAIClient + * @typedef {OpenAI & RunClient} OpenAIClient */ /** @@ -773,3 +961,50 @@ * @property {Object} [metadata] - Optional. Metadata for the run. * @memberof typedefs */ + +/** + * @typedef {Object} StreamRunManager + * Manages streaming and processing of run steps, messages, and tool calls within a thread. + * + * @property {number} index - Tracks the current index for step or message processing. + * @property {Map} steps - Stores run steps by their IDs. + * @property {Map} mappedOrder - Maps step or message IDs to their processing order index. + * @property {Map} orderedRunSteps - Stores run steps in order of processing. + * @property {Set} processedFileIds - Keeps track of file IDs that have been processed. + * @property {Map} progressCallbacks - Stores callbacks for reporting progress on step or message processing. + * @property {boolean} submittedToolOutputs - Indicates whether tool outputs have been submitted. + * @property {Object|null} run - Holds the current run object. + * @property {Object} req - The HTTP request object associated with the run. + * @property {Object} res - The HTTP response object for sending back data. + * @property {Object} openai - The OpenAI client instance. + * @property {string} apiKey - The API key used for OpenAI requests. + * @property {string} thread_id - The ID of the thread associated with the run. + * @property {Object} initialRunBody - The initial body of the run request. + * @property {Object.} clientHandlers - Custom handlers provided by the client. + * @property {Object} streamOptions - Options for streaming the run. + * @property {Object} finalMessage - The final message object to be constructed and sent. + * @property {Array} messages - An array of messages processed during the run. + * @property {string} text - Accumulated text from text content data. + * @property {Object.} handlers - Internal event handlers for different types of streaming events. + * + * @method addContentData Adds content data to the final message or sends it immediately depending on type. + * @method runAssistant Initializes and manages the streaming of a thread run. + * @method handleEvent Dispatches streaming events to the appropriate handlers. + * @method handleThreadCreated Handles the event when a thread is created. + * @method handleRunEvent Handles various run state events. + * @method handleRunStepEvent Handles events related to individual run steps. + * @method handleCodeImageOutput Processes and handles code-generated image outputs. + * @method createToolCallStream Initializes streaming for tool call outputs. + * @method handleNewToolCall Handles the creation of a new tool call within a run step. + * @method handleCompletedToolCall Handles the completion of tool call processing. + * @method handleRunStepDeltaEvent Handles updates (deltas) for run steps. + * @method handleMessageDeltaEvent Handles updates (deltas) for messages. + * @method handleErrorEvent Handles error events during streaming. 
+ * @method getStepIndex Retrieves or assigns an index for a given step or message key. + * @method generateToolCallKey Generates a unique key for a tool call within a step. + * @method onRunRequiresAction Handles actions required by a run to proceed. + * @method onRunStepCreated Handles the creation of a new run step. + * @method onRunStepCompleted Handles the completion of a run step. + * @method handleMessageEvent Handles events related to messages within the run. + * @method messageCompleted Handles the completion of a message processing. + */ diff --git a/client/src/common/types.ts b/client/src/common/types.ts index 4cbb97737df..45689332e58 100644 --- a/client/src/common/types.ts +++ b/client/src/common/types.ts @@ -210,7 +210,7 @@ export type TAdditionalProps = { export type TMessageContentProps = TInitialProps & TAdditionalProps; -export type TText = Pick; +export type TText = Pick & { className?: string }; export type TEditProps = Pick & Omit; export type TDisplayProps = TText & diff --git a/client/src/components/Chat/Input/ChatForm.tsx b/client/src/components/Chat/Input/ChatForm.tsx index 8067eded4d9..4300128b83a 100644 --- a/client/src/components/Chat/Input/ChatForm.tsx +++ b/client/src/components/Chat/Input/ChatForm.tsx @@ -24,8 +24,18 @@ const ChatForm = ({ index = 0 }) => { const [showStopButton, setShowStopButton] = useRecoilState(store.showStopButtonByIndex(index)); const { requiresKey } = useRequiresKey(); + const methods = useForm<{ text: string }>({ + defaultValues: { text: '' }, + }); + const { handlePaste, handleKeyUp, handleKeyDown, handleCompositionStart, handleCompositionEnd } = - useTextarea({ textAreaRef, submitButtonRef, disabled: !!requiresKey }); + useTextarea({ + textAreaRef, + submitButtonRef, + disabled: !!requiresKey, + setValue: methods.setValue, + getValues: methods.getValues, + }); const { ask, @@ -39,9 +49,6 @@ const ChatForm = ({ index = 0 }) => { } = useChatContext(); const assistantMap = useAssistantsMapContext(); - const methods = useForm<{ text: string }>({ - defaultValues: { text: '' }, - }); const submitMessage = useCallback( (data?: { text: string }) => { diff --git a/client/src/components/Chat/Messages/Content/ContentParts.tsx b/client/src/components/Chat/Messages/Content/ContentParts.tsx index e436ea1a81e..031795773f9 100644 --- a/client/src/components/Chat/Messages/Content/ContentParts.tsx +++ b/client/src/components/Chat/Messages/Content/ContentParts.tsx @@ -21,20 +21,20 @@ any) => { return ( <> - {content.map((part: TMessageContentParts | undefined, idx: number) => { - if (!part) { - return null; - } - return ( - - ); - })} + {content + .filter((part: TMessageContentParts | undefined) => part) + .map((part: TMessageContentParts | undefined, idx: number) => { + const showCursor = idx === content.length - 1 && isLast; + return ( + + ); + })} {!isSubmitting && unfinished && ( diff --git a/client/src/components/Chat/Messages/Content/MessageContent.tsx b/client/src/components/Chat/Messages/Content/MessageContent.tsx index 55a4f6563a3..ea761c65691 100644 --- a/client/src/components/Chat/Messages/Content/MessageContent.tsx +++ b/client/src/components/Chat/Messages/Content/MessageContent.tsx @@ -5,23 +5,21 @@ import FileContainer from '~/components/Chat/Input/Files/FileContainer'; import Plugin from '~/components/Messages/Content/Plugin'; import Error from '~/components/Messages/Content/Error'; import { DelayedRender } from '~/components/ui'; -import { useAuthContext } from '~/hooks'; import EditMessage from './EditMessage'; import Container from 
'./Container'; import Markdown from './Markdown'; import { cn } from '~/utils'; import Image from './Image'; -export const ErrorMessage = ({ text }: TText) => { - const { logout } = useAuthContext(); - - if (text.includes('ban')) { - logout(); - return null; - } +export const ErrorMessage = ({ text, className = '' }: TText) => { return ( -
+
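
For context, a minimal sketch of how the reworked `ErrorMessage` can be reused for error content parts: after this change it accepts an optional `className` and simply renders the error text, instead of logging the user out on "ban" errors. The wrapper markup and class names below are assumptions for illustration (the original JSX is not reproduced in this diff); the error-part shape follows the `TMessageContentParts` union extended later in this diff.

```tsx
import React from 'react';

// Error parts carry their message under `text`, matching the union member added later
// in this diff: { type: ContentTypes.ERROR; text: Text & PartMetadata }.
type TErrorPart = { type: 'error'; text: { value: string } };

// Simplified stand-in for the exported ErrorMessage after this change: it renders the
// text and merges an optional className. Markup and class names are illustrative only.
const ErrorMessage = ({ text, className = '' }: { text: string; className?: string }) => (
  <div role="alert" className={`error-message ${className}`}>
    {text}
  </div>
);

// Hypothetical usage, analogous to how Part.tsx maps ContentTypes.ERROR to ErrorMessage.
export const renderErrorPart = (part: TErrorPart) => (
  <ErrorMessage text={part.text.value} className="mt-2" />
);
```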
diff --git a/client/src/components/Chat/Messages/Content/Part.tsx b/client/src/components/Chat/Messages/Content/Part.tsx index e17e1f5e9c2..b96d0bcf833 100644 --- a/client/src/components/Chat/Messages/Content/Part.tsx +++ b/client/src/components/Chat/Messages/Content/Part.tsx @@ -1,6 +1,7 @@ import { ToolCallTypes, ContentTypes, imageGenTools } from 'librechat-data-provider'; import type { TMessageContentParts, TMessage } from 'librechat-data-provider'; import type { TDisplayProps } from '~/common'; +import { ErrorMessage } from './MessageContent'; import RetrievalCall from './RetrievalCall'; import CodeAnalyze from './CodeAnalyze'; import Container from './Container'; @@ -17,6 +18,7 @@ const DisplayMessage = ({ text, isCreatedByUser = false, message, showCursor }: return (
; + } else if (part.type === ContentTypes.TEXT) { // Access the value property return ( diff --git a/client/src/components/Chat/Messages/Content/ToolCall.tsx b/client/src/components/Chat/Messages/Content/ToolCall.tsx index a130e8dfcd9..29e0984ca86 100644 --- a/client/src/components/Chat/Messages/Content/ToolCall.tsx +++ b/client/src/components/Chat/Messages/Content/ToolCall.tsx @@ -1,6 +1,7 @@ // import { useState, useEffect } from 'react'; -import { actionDelimiter } from 'librechat-data-provider'; +import { actionDelimiter, actionDomainSeparator } from 'librechat-data-provider'; import * as Popover from '@radix-ui/react-popover'; +import useLocalize from '~/hooks/useLocalize'; import ProgressCircle from './ProgressCircle'; import InProgressCall from './InProgressCall'; import CancelledIcon from './CancelledIcon'; @@ -24,12 +25,14 @@ export default function ToolCall({ args: string; output?: string | null; }) { + const localize = useLocalize(); const progress = useProgress(initialProgress); const radius = 56.08695652173913; const circumference = 2 * Math.PI * radius; const offset = circumference - progress * circumference; - const [function_name, domain] = name.split(actionDelimiter); + const [function_name, _domain] = name.split(actionDelimiter); + const domain = _domain?.replaceAll(actionDomainSeparator, '.') ?? null; const error = output?.toLowerCase()?.includes('error processing tool'); return ( @@ -58,8 +61,12 @@ export default function ToolCall({ ({})} - inProgressText={'Running action'} - finishedText={domain ? `Talked to ${domain}` : `Ran ${function_name}`} + inProgressText={localize('com_assistants_running_action')} + finishedText={ + domain + ? localize('com_assistants_completed_action', domain) + : localize('com_assistants_completed_function', function_name) + } hasInput={!!args?.length} popover={true} /> diff --git a/client/src/components/Chat/Messages/Content/ToolPopover.tsx b/client/src/components/Chat/Messages/Content/ToolPopover.tsx index 971cdb7afa0..dbc203f7b62 100644 --- a/client/src/components/Chat/Messages/Content/ToolPopover.tsx +++ b/client/src/components/Chat/Messages/Content/ToolPopover.tsx @@ -1,4 +1,5 @@ import * as Popover from '@radix-ui/react-popover'; +import useLocalize from '~/hooks/useLocalize'; export default function ToolPopover({ input, @@ -11,6 +12,7 @@ export default function ToolPopover({ output?: string | null; domain?: string; }) { + const localize = useLocalize(); const formatText = (text: string) => { try { return JSON.stringify(JSON.parse(text), null, 2); @@ -31,7 +33,9 @@ export default function ToolPopover({
- {domain ? 'Assistant sent this info to ' + domain : `Assistant used ${function_name}`} + {domain + ? localize('com_assistants_domain_info', domain) + : localize('com_assistants_function_use', function_name)}
@@ -40,7 +44,9 @@ export default function ToolPopover({
{output && ( <> -
Result
+
+ {localize('com_ui_result')} +
{formatText(output)} diff --git a/client/src/components/Chat/Messages/HoverButtons.tsx b/client/src/components/Chat/Messages/HoverButtons.tsx index 18702f8b4b3..7f6b288cbbe 100644 --- a/client/src/components/Chat/Messages/HoverButtons.tsx +++ b/client/src/components/Chat/Messages/HoverButtons.tsx @@ -1,4 +1,5 @@ import { useState } from 'react'; +import { EModelEndpoint } from 'librechat-data-provider'; import type { TConversation, TMessage } from 'librechat-data-provider'; import { Clipboard, CheckMark, EditIcon, RegenerateIcon, ContinueIcon } from '~/components/svg'; import { useGenerationsByLatest, useLocalize } from '~/hooks'; @@ -55,21 +56,23 @@ export default function HoverButtons({ return (
- + {endpoint !== EModelEndpoint.assistants && ( + + )}
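
The `ToolCall` change above decodes the Azure-safe domain encoding before display: because Azure doesn't allow periods in function names, action domains are stored with `actionDomainSeparator` in place of dots and restored on the client with `replaceAll`. A small self-contained sketch of that round trip, using the constants added in this PR; `encodeDomain`/`decodeDomain` are hypothetical helpers, since the real encoding lives in `domainParser` inside `ActionService` and is not shown in this section.

```ts
// Constants as defined in packages/data-provider (see further down in this diff).
const actionDelimiter = '_action_';
const actionDomainSeparator = '---';

// Hypothetical helpers; the real logic lives in domainParser (ActionService).
const encodeDomain = (domain: string) => domain.replaceAll('.', actionDomainSeparator);
const decodeDomain = (encoded: string) => encoded.replaceAll(actionDomainSeparator, '.');

// The tool name an assistant actually sees, e.g. for an illustrative domain:
const functionName = `getWeather${actionDelimiter}${encodeDomain('api.weather.example.com')}`;
// -> 'getWeather_action_api---weather---example---com'

// The client (ToolCall.tsx above) splits on the delimiter and restores the dots for display:
const [fn, rawDomain] = functionName.split(actionDelimiter);
const domain = rawDomain ? decodeDomain(rawDomain) : null;

console.log(fn, domain); // 'getWeather' 'api.weather.example.com'
```

Note that `String.prototype.replaceAll` is an ES2021 API, which is presumably why the tsconfig change at the end of this diff adds `"ES2021.String"` to the `lib` array.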
diff --git a/client/src/components/SidePanel/Builder/ActionsPanel.tsx b/client/src/components/SidePanel/Builder/ActionsPanel.tsx index 509d38464de..4156679bc45 100644 --- a/client/src/components/SidePanel/Builder/ActionsPanel.tsx +++ b/client/src/components/SidePanel/Builder/ActionsPanel.tsx @@ -6,9 +6,11 @@ import { TokenExchangeMethodEnum, } from 'librechat-data-provider'; import type { AssistantPanelProps, ActionAuthForm } from '~/common'; +import { useAssistantsMapContext, useToastContext } from '~/Providers'; import { Dialog, DialogTrigger } from '~/components/ui'; import { useDeleteAction } from '~/data-provider'; import { NewTrashIcon } from '~/components/svg'; +import useLocalize from '~/hooks/useLocalize'; import ActionsInput from './ActionsInput'; import ActionsAuth from './ActionsAuth'; import { Panel } from '~/common'; @@ -20,12 +22,25 @@ export default function ActionsPanel({ setActivePanel, assistant_id, }: AssistantPanelProps) { + const localize = useLocalize(); + const { showToast } = useToastContext(); + const assistantMap = useAssistantsMapContext(); const [openAuthDialog, setOpenAuthDialog] = useState(false); const deleteAction = useDeleteAction({ onSuccess: () => { + showToast({ + message: localize('com_assistants_delete_actions_success'), + status: 'success', + }); setActivePanel(Panel.builder); setAction(undefined); }, + onError(error) { + showToast({ + message: (error as Error)?.message ?? localize('com_assistants_delete_actions_error'), + status: 'error', + }); + }, }); const methods = useForm({ @@ -115,6 +130,7 @@ export default function ActionsPanel({ const confirmed = confirm('Are you sure you want to delete this action?'); if (confirmed) { deleteAction.mutate({ + model: assistantMap[assistant_id].model, action_id: action.action_id, assistant_id, }); @@ -129,8 +145,7 @@ export default function ActionsPanel({ )}
{(action ? 'Edit' : 'Add') + ' ' + 'actions'}
- {/* TODO: use App title */} - Let your Assistant retrieve information or take actions outside of LibreChat. + {localize('com_assistants_actions_info')}
{/*
Learn more. @@ -141,7 +156,7 @@ export default function ActionsPanel({
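
The panel now threads the assistant's model (resolved from the assistants map) into the delete mutation so the server route `/:assistant_id/:action_id/:model` can initialize the correct client. A rough sketch of the resulting call path under the updated contract; the literal URL below is a stand-in for `request.delete(endpoints.assistants(...))`, and the react-query/toast wiring is omitted.

```ts
// Simplified variable shape; the real DeleteActionVariables type (with the new `model`
// field) is defined in packages/data-provider/src/types/mutations.ts later in this diff.
type DeleteActionVariables = { assistant_id: string; action_id: string; model: string };

// Stand-in for dataService.deleteAction, which now forwards all three params as
// request.delete(endpoints.assistants(`actions/${assistant_id}/${action_id}/${model}`)).
// The fetch URL here is illustrative only.
const deleteAction = ({ assistant_id, action_id, model }: DeleteActionVariables) =>
  fetch(`/api/assistants/actions/${assistant_id}/${action_id}/${model}`, { method: 'DELETE' });

// The panel resolves the model from the assistants map before mutating,
// mirroring assistantMap[assistant_id].model in ActionsPanel above.
declare const assistantMap: Record<string, { model: string }>;

export async function deleteActionForAssistant(assistant_id: string, action_id: string) {
  const model = assistantMap[assistant_id].model;
  await deleteAction({ assistant_id, action_id, model });
}
```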
diff --git a/client/src/components/SidePanel/Builder/AssistantSelect.tsx b/client/src/components/SidePanel/Builder/AssistantSelect.tsx index 10e1dbb5e5b..422b781cdbd 100644 --- a/client/src/components/SidePanel/Builder/AssistantSelect.tsx +++ b/client/src/components/SidePanel/Builder/AssistantSelect.tsx @@ -107,6 +107,7 @@ export default function AssistantSelect({ functions, ...actions, assistant: update, + model: update.model, }; Object.entries(assistant).forEach(([name, value]) => { diff --git a/client/src/data-provider/mutations.ts b/client/src/data-provider/mutations.ts index a15f034e9bd..f81baa2543f 100644 --- a/client/src/data-provider/mutations.ts +++ b/client/src/data-provider/mutations.ts @@ -489,7 +489,7 @@ export const useDeleteAction = ( const queryClient = useQueryClient(); return useMutation([MutationKeys.deleteAction], { mutationFn: (variables: DeleteActionVariables) => - dataService.deleteAction(variables.assistant_id, variables.action_id), + dataService.deleteAction(variables.assistant_id, variables.action_id, variables.model), onMutate: (variables) => options?.onMutate?.(variables), onError: (error, variables, context) => options?.onError?.(error, variables, context), diff --git a/client/src/hooks/Input/useTextarea.ts b/client/src/hooks/Input/useTextarea.ts index 3fd47e7f743..59bb89b4e80 100644 --- a/client/src/hooks/Input/useTextarea.ts +++ b/client/src/hooks/Input/useTextarea.ts @@ -2,6 +2,7 @@ import debounce from 'lodash/debounce'; import React, { useEffect, useRef, useCallback } from 'react'; import { EModelEndpoint } from 'librechat-data-provider'; import type { TEndpointOption } from 'librechat-data-provider'; +import type { UseFormSetValue } from 'react-hook-form'; import type { KeyboardEvent } from 'react'; import { useAssistantsMapContext } from '~/Providers/AssistantsMapContext'; import useGetSender from '~/hooks/Conversations/useGetSender'; @@ -12,7 +13,6 @@ import useLocalize from '~/hooks/useLocalize'; type KeyEvent = KeyboardEvent; function insertTextAtCursor(element: HTMLTextAreaElement, textToInsert: string) { - // Focus the element to ensure the insertion point is updated element.focus(); // Use the browser's built-in undoable actions if possible @@ -31,6 +31,25 @@ function insertTextAtCursor(element: HTMLTextAreaElement, textToInsert: string) } } +/** + * Necessary resize helper for edge cases where paste doesn't update the container height. + * + 1) Resetting the height to 'auto' forces the component to recalculate height based on its current content + + 2) Forcing a reflow. Accessing offsetHeight will cause a reflow of the page, + ensuring that the reset height takes effect before resetting back to the scrollHeight. + This step is necessary because changes to the DOM do not instantly cause reflows. 
+ + 3) Reseting back to scrollHeight reads and applies the ideal height for the current content dynamically + */ +const forceResize = (textAreaRef: React.RefObject) => { + if (textAreaRef.current) { + textAreaRef.current.style.height = 'auto'; + textAreaRef.current.offsetHeight; + textAreaRef.current.style.height = `${textAreaRef.current.scrollHeight}px`; + } +}; + const getAssistantName = ({ name, localize, @@ -48,10 +67,14 @@ const getAssistantName = ({ export default function useTextarea({ textAreaRef, submitButtonRef, + setValue, + getValues, disabled = false, }: { textAreaRef: React.RefObject; submitButtonRef: React.RefObject; + setValue: UseFormSetValue<{ text: string }>; + getValues: (field: string) => string; disabled?: boolean; }) { const assistantMap = useAssistantsMapContext(); @@ -205,6 +228,21 @@ export default function useTextarea({ isComposing.current = false; }; + /** Necessary handler to update form state when paste doesn't fire textArea input event */ + const setPastedValue = useCallback( + (textArea: HTMLTextAreaElement, pastedData: string) => { + const currentTextValue = getValues('text') || ''; + const { selectionStart, selectionEnd } = textArea; + const newValue = + currentTextValue.substring(0, selectionStart) + + pastedData + + currentTextValue.substring(selectionEnd); + + setValue('text', newValue, { shouldValidate: true }); + }, + [getValues, setValue], + ); + const handlePaste = useCallback( (e: React.ClipboardEvent) => { e.preventDefault(); @@ -214,7 +252,9 @@ export default function useTextarea({ } const pastedData = e.clipboardData.getData('text/plain'); + setPastedValue(textArea, pastedData); insertTextAtCursor(textArea, pastedData); + forceResize(textAreaRef); if (e.clipboardData && e.clipboardData.files.length > 0) { e.preventDefault(); @@ -229,7 +269,7 @@ export default function useTextarea({ handleFiles(timestampedFiles); } }, - [handleFiles, setFilesLoading, textAreaRef], + [handleFiles, setFilesLoading, setPastedValue, textAreaRef], ); return { diff --git a/client/src/hooks/SSE/useContentHandler.ts b/client/src/hooks/SSE/useContentHandler.ts index b0e015dab8a..511ee304b4a 100644 --- a/client/src/hooks/SSE/useContentHandler.ts +++ b/client/src/hooks/SSE/useContentHandler.ts @@ -22,7 +22,7 @@ export default function useContentHandler({ setMessages, getMessages }: TUseCont const messageMap = useMemo(() => new Map(), []); return useCallback( ({ data, submission }: TContentHandler) => { - const { type, messageId, thread_id, conversationId, index, stream } = data; + const { type, messageId, thread_id, conversationId, index } = data; const _messages = getMessages(); const messages = @@ -46,8 +46,9 @@ export default function useContentHandler({ setMessages, getMessages }: TUseCont } // TODO: handle streaming for non-text - const part: ContentPart = - stream && data[ContentTypes.TEXT] ? { value: data[ContentTypes.TEXT] } : data[type]; + const part: ContentPart = data[ContentTypes.TEXT] + ? { value: data[ContentTypes.TEXT] } + : data[type]; /* spreading the content array to avoid mutation */ response.content = [...(response.content ?? 
[])]; diff --git a/client/src/hooks/SSE/useSSE.ts b/client/src/hooks/SSE/useSSE.ts index dc95602667d..c5fefc83611 100644 --- a/client/src/hooks/SSE/useSSE.ts +++ b/client/src/hooks/SSE/useSSE.ts @@ -502,10 +502,7 @@ export default function useSSE(submission: TSubmission | null, index = 0) { ); useEffect(() => { - if (submission === null) { - return; - } - if (Object.keys(submission).length === 0) { + if (submission === null || Object.keys(submission).length === 0) { return; } diff --git a/client/src/localization/languages/Eng.tsx b/client/src/localization/languages/Eng.tsx index c65b2481b64..5ee9dfede25 100644 --- a/client/src/localization/languages/Eng.tsx +++ b/client/src/localization/languages/Eng.tsx @@ -24,6 +24,17 @@ export default { com_assistants_actions: 'Actions', com_assistants_add_tools: 'Add Tools', com_assistants_add_actions: 'Add Actions', + com_assistants_available_actions: 'Available Actions', + com_assistants_running_action: 'Running action', + com_assistants_completed_action: 'Talked to {0}', + com_assistants_completed_function: 'Ran {0}', + com_assistants_function_use: 'Assistant used {0}', + com_assistants_domain_info: 'Assistant sent this info to {0}', + com_assistants_delete_actions_success: 'Successfully deleted Action from Assistant', + com_assistants_update_actions_success: 'Successfully created or updated Action', + com_assistants_update_actions_error: 'There was an error creating or updating the action.', + com_assistants_delete_actions_error: 'There was an error deleting the action.', + com_assistants_actions_info: 'Let your Assistant retrieve information or take actions via API\'s', com_assistants_name_placeholder: 'Optional: The name of the assistant', com_assistants_instructions_placeholder: 'The system instructions that the assistant uses', com_assistants_description_placeholder: 'Optional: Describe your Assistant here', @@ -61,6 +72,8 @@ export default { com_ui_context: 'Context', com_ui_size: 'Size', com_ui_host: 'Host', + com_ui_update: 'Update', + com_ui_authentication: 'Authentication', com_ui_instructions: 'Instructions', com_ui_description: 'Description', com_ui_error: 'Error', @@ -106,6 +119,7 @@ export default { com_ui_chats: 'chats', com_ui_avatar: 'Avatar', com_ui_unknown: 'Unknown', + com_ui_result: 'Result', com_ui_image_gen: 'Image Gen', com_ui_assistant: 'Assistant', com_ui_assistants: 'Assistants', diff --git a/package-lock.json b/package-lock.json index d6cc40b9e30..a8cdb00de5a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -80,7 +80,7 @@ "multer": "^1.4.5-lts.1", "nodejs-gpt": "^1.37.4", "nodemailer": "^6.9.4", - "openai": "^4.28.4", + "openai": "^4.29.0", "openai-chat-tokens": "^0.2.8", "openid-client": "^5.4.2", "passport": "^0.6.0", @@ -115,9 +115,9 @@ } }, "api/node_modules/openai": { - "version": "4.28.4", - "resolved": "https://registry.npmjs.org/openai/-/openai-4.28.4.tgz", - "integrity": "sha512-RNIwx4MT/F0zyizGcwS+bXKLzJ8QE9IOyigDG/ttnwB220d58bYjYFp0qjvGwEFBO6+pvFVIDABZPGDl46RFsg==", + "version": "4.29.0", + "resolved": "https://registry.npmjs.org/openai/-/openai-4.29.0.tgz", + "integrity": "sha512-ic6C681bSow1XQdKhADthM/OOKqNL05M1gCFLx1mRqLJ+yH49v6qnvaWQ76kwqI/IieCuVTXfRfTk3sz4cB45w==", "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", @@ -4895,9 +4895,9 @@ "integrity": "sha512-iRP+QKI2+oz3UAh4nPEq14CsEjrjD6a5+fuypjScisAh9kXKFvdJOZJDwk7kikLvWVLGEs9+kIUS4LPQV7VZVw==" }, "node_modules/@firebase/app": { - "version": "0.9.27", - "resolved": 
"https://registry.npmjs.org/@firebase/app/-/app-0.9.27.tgz", - "integrity": "sha512-p2Dvl1ge4kRsyK5+wWcmdAIE9MSwZ0pDKAYB51LZgZuz6wciUZk4E1yAEdkfQlRxuHehn+Ol9WP5Qk2XQZiHGg==", + "version": "0.9.29", + "resolved": "https://registry.npmjs.org/@firebase/app/-/app-0.9.29.tgz", + "integrity": "sha512-HbKTjfmILklasIu/ij6zKnFf3SgLYXkBDVN7leJfVGmohl+zA7Ig+eXM1ZkT1pyBJ8FTYR+mlOJer/lNEnUCtw==", "dependencies": { "@firebase/component": "0.6.5", "@firebase/logger": "0.4.0", @@ -4947,11 +4947,11 @@ "integrity": "sha512-uwSUj32Mlubybw7tedRzR24RP8M8JUVR3NPiMk3/Z4bCmgEKTlQBwMXrehDAZ2wF+TsBq0SN1c6ema71U/JPyQ==" }, "node_modules/@firebase/app-compat": { - "version": "0.2.27", - "resolved": "https://registry.npmjs.org/@firebase/app-compat/-/app-compat-0.2.27.tgz", - "integrity": "sha512-SYlqocfUDKPHR6MSFC8hree0BTiWFu5o8wbf6zFlYXyG41w7TcHp4wJi4H/EL5V6cM4kxwruXTJtqXX/fRAZtw==", + "version": "0.2.29", + "resolved": "https://registry.npmjs.org/@firebase/app-compat/-/app-compat-0.2.29.tgz", + "integrity": "sha512-NqUdegXJfwphx9i/2bOE2CTZ55TC9bbDg+iwkxVShsPBJhD3CzQJkFhoDz4ccfbJaKZGsqjY3fisgX5kbDROnA==", "dependencies": { - "@firebase/app": "0.9.27", + "@firebase/app": "0.9.29", "@firebase/component": "0.6.5", "@firebase/logger": "0.4.0", "@firebase/util": "1.9.4", @@ -4964,15 +4964,15 @@ "integrity": "sha512-AeweANOIo0Mb8GiYm3xhTEBVCmPwTYAu9Hcd2qSkLuga/6+j9b1Jskl5bpiSQWy9eJ/j5pavxj6eYogmnuzm+Q==" }, "node_modules/@firebase/auth": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@firebase/auth/-/auth-1.6.0.tgz", - "integrity": "sha512-Qhl35eJTV6BwvuueTPCY6x8kUlYyzALtjp/Ws0X3fw3AnjVVfuVb7oQ3Xh5VPVfMFhaIuUAd1KXwcAuIklkSDw==", + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/@firebase/auth/-/auth-1.6.2.tgz", + "integrity": "sha512-BFo/Nj1AAbKLbFiUyXCcnT/bSqMJicFOgdTAKzlXvCul7+eUE29vWmzd1g59O3iKAxvv3+fbQYjQVJpNTTHIyw==", "dependencies": { "@firebase/component": "0.6.5", "@firebase/logger": "0.4.0", "@firebase/util": "1.9.4", "tslib": "^2.1.0", - "undici": "5.26.5" + "undici": "5.28.3" }, "peerDependencies": { "@firebase/app": "0.x", @@ -4985,16 +4985,16 @@ } }, "node_modules/@firebase/auth-compat": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/@firebase/auth-compat/-/auth-compat-0.5.2.tgz", - "integrity": "sha512-pRgje5BPCNR1vXyvGOVXwOHtv88A2WooXfklI8sV7/jWi03ExFqNfpJT26GUo/oD39NoKJ3Kt6rD5gVvdV7lMw==", + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/@firebase/auth-compat/-/auth-compat-0.5.4.tgz", + "integrity": "sha512-EtRVW9s0YsuJv3GnOGDoLUW3Pp9f3HcqWA2WK92E30Qa0FEVRwCSRLVQwn9td+SLVY3AP9gi/auC1q3osd4yCg==", "dependencies": { - "@firebase/auth": "1.6.0", + "@firebase/auth": "1.6.2", "@firebase/auth-types": "0.12.0", "@firebase/component": "0.6.5", "@firebase/util": "1.9.4", "tslib": "^2.1.0", - "undici": "5.26.5" + "undici": "5.28.3" }, "peerDependencies": { "@firebase/app-compat": "0.x" @@ -5060,9 +5060,9 @@ } }, "node_modules/@firebase/firestore": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/@firebase/firestore/-/firestore-4.4.2.tgz", - "integrity": "sha512-YaX6ypa/RzU6OkxzUQlpSxwhOIWdTraCNz7sMsbaSEjjl/pj/QvX6TqjkdWGzuBYh2S6rz7ErhDO0g39oZZw/g==", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/@firebase/firestore/-/firestore-4.5.0.tgz", + "integrity": "sha512-rXS6v4HbsN6vZQlq2fLW1ZHb+J5SnS+8Zqb/McbKFIrGYjPUZo5CyO75mkgtlR1tCYAwCebaqoEWb6JHgZv/ww==", "dependencies": { "@firebase/component": "0.6.5", "@firebase/logger": "0.4.0", @@ -5071,7 +5071,7 @@ "@grpc/grpc-js": "~1.9.0", "@grpc/proto-loader": "^0.7.8", "tslib": 
"^2.1.0", - "undici": "5.26.5" + "undici": "5.28.3" }, "engines": { "node": ">=10.10.0" @@ -5081,12 +5081,12 @@ } }, "node_modules/@firebase/firestore-compat": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@firebase/firestore-compat/-/firestore-compat-0.3.25.tgz", - "integrity": "sha512-+xI7WmsgZCBhMn/+uhDKcg+lsOUJ9FJyt5PGTzkFPbCsozWfeQZ7eVnfPh0rMkUOf0yIQ924RIe04gwvEIbcoQ==", + "version": "0.3.27", + "resolved": "https://registry.npmjs.org/@firebase/firestore-compat/-/firestore-compat-0.3.27.tgz", + "integrity": "sha512-gY2q0fCDJvPg/IurZQbBM7MIVjxA1/LsvfgFOubUTrex5KTY9qm4/2V2R79eAs8Q+b4B8soDtlEjk6L8BW1Crw==", "dependencies": { "@firebase/component": "0.6.5", - "@firebase/firestore": "4.4.2", + "@firebase/firestore": "4.5.0", "@firebase/firestore-types": "3.0.0", "@firebase/util": "1.9.4", "tslib": "^2.1.0" @@ -5105,9 +5105,9 @@ } }, "node_modules/@firebase/functions": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/@firebase/functions/-/functions-0.11.1.tgz", - "integrity": "sha512-3uUa1hB79Gmy6E1gHTfzoHeZolBeHc/I/n3+lOCDe6BOos9AHmzRjKygcFE/7VA2FJjitCE0K+OHI6+OuoY8fQ==", + "version": "0.11.2", + "resolved": "https://registry.npmjs.org/@firebase/functions/-/functions-0.11.2.tgz", + "integrity": "sha512-2NULTYOZbu0rXczwfYdqQH0w1FmmYrKjTy1YPQSHLCAkMBdfewoKmVm4Lyo2vRn0H9ZndciLY7NszKDFt9MKCQ==", "dependencies": { "@firebase/app-check-interop-types": "0.3.0", "@firebase/auth-interop-types": "0.2.1", @@ -5115,19 +5115,19 @@ "@firebase/messaging-interop-types": "0.2.0", "@firebase/util": "1.9.4", "tslib": "^2.1.0", - "undici": "5.26.5" + "undici": "5.28.3" }, "peerDependencies": { "@firebase/app": "0.x" } }, "node_modules/@firebase/functions-compat": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/@firebase/functions-compat/-/functions-compat-0.3.7.tgz", - "integrity": "sha512-uXe6Kmku5lNogp3OpPBcOJbSvnaCOn+YxS3zlXKNU6Q/NLwcvO3RY1zwYyctCos2RemEw3KEQ7YdzcECXjHWLw==", + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@firebase/functions-compat/-/functions-compat-0.3.8.tgz", + "integrity": "sha512-VDHSw6UOu8RxfgAY/q8e+Jn+9Fh60Fc28yck0yfMsi2e0BiWgonIMWkFspFGGLgOJebTHl+hc+9v91rhzU6xlg==", "dependencies": { "@firebase/component": "0.6.5", - "@firebase/functions": "0.11.1", + "@firebase/functions": "0.11.2", "@firebase/functions-types": "0.6.0", "@firebase/util": "1.9.4", "tslib": "^2.1.0" @@ -5294,26 +5294,26 @@ "integrity": "sha512-RtEH4vdcbXZuZWRZbIRmQVBNsE7VDQpet2qFvq6vwKLBIQRQR5Kh58M4ok3A3US8Sr3rubYnaGqZSurCwI8uMA==" }, "node_modules/@firebase/storage": { - "version": "0.12.1", - "resolved": "https://registry.npmjs.org/@firebase/storage/-/storage-0.12.1.tgz", - "integrity": "sha512-KJ5NV7FUh54TeTlEjdkTTX60ciCKOp9EqlbLnpdcXUYRJg0Z4810TXbilPc1z7fTIG4iPjtdi95bGE9n4dBX8A==", + "version": "0.12.2", + "resolved": "https://registry.npmjs.org/@firebase/storage/-/storage-0.12.2.tgz", + "integrity": "sha512-MzanOBcxDx9oOwDaDPMuiYxd6CxcN1xZm+os5uNE3C1itbRKLhM9rzpODDKWzcbnHHFtXk3Q3lsK/d3Xa1WYYw==", "dependencies": { "@firebase/component": "0.6.5", "@firebase/util": "1.9.4", "tslib": "^2.1.0", - "undici": "5.26.5" + "undici": "5.28.3" }, "peerDependencies": { "@firebase/app": "0.x" } }, "node_modules/@firebase/storage-compat": { - "version": "0.3.4", - "resolved": "https://registry.npmjs.org/@firebase/storage-compat/-/storage-compat-0.3.4.tgz", - "integrity": "sha512-Y0m5e2gS/wB9Ioth2X/Sgz76vcxvqgQrCmfa9qwhss/N31kxY2Gks6Frv0nrE18AjVfcSmcfDitqUwxcMOTRSg==", + "version": "0.3.5", + "resolved": 
"https://registry.npmjs.org/@firebase/storage-compat/-/storage-compat-0.3.5.tgz", + "integrity": "sha512-5dJXfY5NxCF5NAk4dLvJqC+m6cgcf0Fr29nrMHwhwI34pBheQq2PdRZqALsqZCES9dnHTuFNlqGQDpLr+Ph4rw==", "dependencies": { "@firebase/component": "0.6.5", - "@firebase/storage": "0.12.1", + "@firebase/storage": "0.12.2", "@firebase/storage-types": "0.8.0", "@firebase/util": "1.9.4", "tslib": "^2.1.0" @@ -14378,25 +14378,25 @@ } }, "node_modules/firebase": { - "version": "10.8.0", - "resolved": "https://registry.npmjs.org/firebase/-/firebase-10.8.0.tgz", - "integrity": "sha512-UJpC24vw8JFuHEOQyArBGKTUd7+kohLISCzHyn0M/prP0KOTx2io1eyLliEid330QqnWI7FOlPxoU97qecCSfQ==", + "version": "10.9.0", + "resolved": "https://registry.npmjs.org/firebase/-/firebase-10.9.0.tgz", + "integrity": "sha512-R8rDU3mg2dq0uPOoZ5Nc3BeZTbXxBPJS8HcZLtnV0f5/YrmpNsHngzmMHRVB+91T+ViJGVL/42dV23gS9w9ccw==", "dependencies": { "@firebase/analytics": "0.10.1", "@firebase/analytics-compat": "0.2.7", - "@firebase/app": "0.9.27", + "@firebase/app": "0.9.29", "@firebase/app-check": "0.8.2", "@firebase/app-check-compat": "0.3.9", - "@firebase/app-compat": "0.2.27", + "@firebase/app-compat": "0.2.29", "@firebase/app-types": "0.9.0", - "@firebase/auth": "1.6.0", - "@firebase/auth-compat": "0.5.2", + "@firebase/auth": "1.6.2", + "@firebase/auth-compat": "0.5.4", "@firebase/database": "1.0.3", "@firebase/database-compat": "1.0.3", - "@firebase/firestore": "4.4.2", - "@firebase/firestore-compat": "0.3.25", - "@firebase/functions": "0.11.1", - "@firebase/functions-compat": "0.3.7", + "@firebase/firestore": "4.5.0", + "@firebase/firestore-compat": "0.3.27", + "@firebase/functions": "0.11.2", + "@firebase/functions-compat": "0.3.8", "@firebase/installations": "0.6.5", "@firebase/installations-compat": "0.2.5", "@firebase/messaging": "0.12.6", @@ -14405,8 +14405,8 @@ "@firebase/performance-compat": "0.2.5", "@firebase/remote-config": "0.4.5", "@firebase/remote-config-compat": "0.2.5", - "@firebase/storage": "0.12.1", - "@firebase/storage-compat": "0.3.4", + "@firebase/storage": "0.12.2", + "@firebase/storage-compat": "0.3.5", "@firebase/util": "1.9.4" } }, @@ -14456,9 +14456,9 @@ "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" }, "node_modules/follow-redirects": { - "version": "1.15.5", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", - "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "funding": [ { "type": "individual", @@ -17671,9 +17671,9 @@ } }, "node_modules/jose": { - "version": "4.15.4", - "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.4.tgz", - "integrity": "sha512-W+oqK4H+r5sITxfxpSU+MMdr/YSWGvgZMQDIsNoBDGGy4i7GBPTtvFKibQzW06n3U3TqHjhvBJsirShsEJ6eeQ==", + "version": "4.15.5", + "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz", + "integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==", "funding": { "url": "https://github.com/sponsors/panva" } @@ -26476,9 +26476,9 @@ "dev": true }, "node_modules/undici": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.26.5.tgz", - "integrity": 
"sha512-cSb4bPFd5qgR7qr2jYAi0hlX9n5YKK2ONKkLFkxl+v/9BvC0sOpZjBHDBSXc5lWAf5ty9oZdRXytBIHzgUcerw==", + "version": "5.28.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", + "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", "dependencies": { "@fastify/busboy": "^2.0.0" }, @@ -27994,7 +27994,7 @@ }, "packages/data-provider": { "name": "librechat-data-provider", - "version": "0.4.7", + "version": "0.4.8", "license": "ISC", "dependencies": { "@types/js-yaml": "^4.0.9", diff --git a/packages/data-provider/package.json b/packages/data-provider/package.json index 391cd39d876..b01e3dc7238 100644 --- a/packages/data-provider/package.json +++ b/packages/data-provider/package.json @@ -1,6 +1,6 @@ { "name": "librechat-data-provider", - "version": "0.5.0", + "version": "0.5.1", "description": "data services for librechat apps", "main": "dist/index.js", "module": "dist/index.es.js", diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts index 6467cd565c3..879942ab835 100644 --- a/packages/data-provider/src/config.ts +++ b/packages/data-provider/src/config.ts @@ -527,3 +527,29 @@ export const defaultOrderQuery: { } = { order: 'asc', }; + +export enum AssistantStreamEvents { + ThreadCreated = 'thread.created', + ThreadRunCreated = 'thread.run.created', + ThreadRunQueued = 'thread.run.queued', + ThreadRunInProgress = 'thread.run.in_progress', + ThreadRunRequiresAction = 'thread.run.requires_action', + ThreadRunCompleted = 'thread.run.completed', + ThreadRunFailed = 'thread.run.failed', + ThreadRunCancelling = 'thread.run.cancelling', + ThreadRunCancelled = 'thread.run.cancelled', + ThreadRunExpired = 'thread.run.expired', + ThreadRunStepCreated = 'thread.run.step.created', + ThreadRunStepInProgress = 'thread.run.step.in_progress', + ThreadRunStepCompleted = 'thread.run.step.completed', + ThreadRunStepFailed = 'thread.run.step.failed', + ThreadRunStepCancelled = 'thread.run.step.cancelled', + ThreadRunStepExpired = 'thread.run.step.expired', + ThreadRunStepDelta = 'thread.run.step.delta', + ThreadMessageCreated = 'thread.message.created', + ThreadMessageInProgress = 'thread.message.in_progress', + ThreadMessageCompleted = 'thread.message.completed', + ThreadMessageIncomplete = 'thread.message.incomplete', + ThreadMessageDelta = 'thread.message.delta', + ErrorEvent = 'error', +} diff --git a/packages/data-provider/src/data-service.ts b/packages/data-provider/src/data-service.ts index 7fb45134cf6..2c7744124ec 100644 --- a/packages/data-provider/src/data-service.ts +++ b/packages/data-provider/src/data-service.ts @@ -275,5 +275,9 @@ export const listConversationsByQuery = ( } }; -export const deleteAction = async (assistant_id: string, action_id: string): Promise => - request.delete(endpoints.assistants(`actions/${assistant_id}/${action_id}`)); +export const deleteAction = async ( + assistant_id: string, + action_id: string, + model: string, +): Promise => + request.delete(endpoints.assistants(`actions/${assistant_id}/${action_id}/${model}`)); diff --git a/packages/data-provider/src/types/assistants.ts b/packages/data-provider/src/types/assistants.ts index 5128f252080..5ac3cc9a7e6 100644 --- a/packages/data-provider/src/types/assistants.ts +++ b/packages/data-provider/src/types/assistants.ts @@ -186,6 +186,7 @@ export enum ContentTypes { TEXT = 'text', TOOL_CALL = 'tool_call', IMAGE_FILE = 'image_file', + ERROR = 'error', } export enum StepTypes { @@ -236,6 +237,7 @@ export type ContentPart = 
(CodeToolCall | RetrievalToolCall | FunctionToolCall | PartMetadata; export type TMessageContentParts = + | { type: ContentTypes.ERROR; text: Text & PartMetadata } | { type: ContentTypes.TEXT; text: Text & PartMetadata } | { type: ContentTypes.TOOL_CALL; @@ -243,16 +245,20 @@ export type TMessageContentParts = } | { type: ContentTypes.IMAGE_FILE; image_file: ImageFile & PartMetadata }; -export type TContentData = TMessageContentParts & { +export type StreamContentData = TMessageContentParts & { + index: number; +}; + +export type TContentData = StreamContentData & { messageId: string; conversationId: string; userMessageId: string; thread_id: string; - index: number; stream?: boolean; }; export const actionDelimiter = '_action_'; +export const actionDomainSeparator = '---'; export enum AuthTypeEnum { ServiceHttp = 'service_http', diff --git a/packages/data-provider/src/types/mutations.ts b/packages/data-provider/src/types/mutations.ts index cd8937e74b5..ad149a4e4ad 100644 --- a/packages/data-provider/src/types/mutations.ts +++ b/packages/data-provider/src/types/mutations.ts @@ -56,6 +56,7 @@ export type UpdateActionVariables = { functions: FunctionTool[]; metadata: ActionMetadata; action_id?: string; + model: string; }; export type UploadAssistantAvatarOptions = { @@ -109,6 +110,7 @@ export type UpdateActionOptions = { export type DeleteActionVariables = { assistant_id: string; action_id: string; + model: string; }; export type DeleteActionOptions = { diff --git a/packages/data-provider/tsconfig.json b/packages/data-provider/tsconfig.json index 4549392e14c..4de513a4220 100644 --- a/packages/data-provider/tsconfig.json +++ b/packages/data-provider/tsconfig.json @@ -8,7 +8,7 @@ "target": "es5", "moduleResolution": "node", "allowSyntheticDefaultImports": true, - "lib": ["es2017", "dom"], + "lib": ["es2017", "dom", "ES2021.String"], "allowJs": true, "skipLibCheck": true, "esModuleInterop": true,
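
The `AssistantStreamEvents` enum added in `config.ts` pairs with the `StreamRunManager` typedef earlier in this section: the manager keeps a map of handlers keyed by event name, and its `handleEvent` method dispatches each streamed event to the matching handler, accumulating text from message deltas into `text`. A compact, self-contained sketch of that dispatch pattern; the event-data shapes and the `MiniStreamRunManager` class are simplified assumptions, and the real manager tracks far more state (steps, mappedOrder, progress callbacks, tool outputs).

```ts
// Event names mirror the AssistantStreamEvents enum added in
// packages/data-provider/src/config.ts (subset shown).
enum AssistantStreamEvents {
  ThreadCreated = 'thread.created',
  ThreadRunCreated = 'thread.run.created',
  ThreadMessageDelta = 'thread.message.delta',
  ThreadRunCompleted = 'thread.run.completed',
  ErrorEvent = 'error',
}

type StreamEvent = { event: string; data: unknown };

// Minimal stand-in for the handler-map dispatch described by the StreamRunManager typedef.
class MiniStreamRunManager {
  text = '';

  private handlers: Record<string, (event: StreamEvent) => void> = {
    [AssistantStreamEvents.ThreadMessageDelta]: (e) => this.handleMessageDelta(e),
    [AssistantStreamEvents.ThreadRunCompleted]: (e) => console.log('run completed', e.data),
    [AssistantStreamEvents.ErrorEvent]: (e) => console.error('stream error', e.data),
  };

  handleEvent(event: StreamEvent) {
    const handler = this.handlers[event.event];
    if (handler) {
      handler(event);
    } // unknown events are ignored in this sketch
  }

  private handleMessageDelta(event: StreamEvent) {
    // Shape assumed for delta events: accumulate streamed text content.
    const delta = event.data as { delta?: { content?: { text?: { value?: string } }[] } };
    for (const part of delta.delta?.content ?? []) {
      this.text += part.text?.value ?? '';
    }
  }
}

// Usage: feed events from the run stream into the manager.
const manager = new MiniStreamRunManager();
manager.handleEvent({
  event: AssistantStreamEvents.ThreadMessageDelta,
  data: { delta: { content: [{ text: { value: 'Hello' } }] } },
});
console.log(manager.text); // 'Hello'
```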