From c2458c3f32a8dcabc169a2ad34e8d9a8467b92c6 Mon Sep 17 00:00:00 2001
From: Peli de Halleux
Date: Fri, 13 Dec 2024 20:20:17 +0000
Subject: [PATCH] tool parsing fixes when string is empty

---
 packages/core/src/anthropic.ts  |  3 +-
 packages/core/src/chat.ts       | 59 ++++++++++++++++++++++++++++++---
 packages/core/src/chatrender.ts |  4 +--
 packages/core/src/json5.ts      |  1 +
 packages/core/src/promptdom.ts  | 15 ---------
 5 files changed, 60 insertions(+), 22 deletions(-)

diff --git a/packages/core/src/anthropic.ts b/packages/core/src/anthropic.ts
index d9408abc1c..98bec02ca2 100644
--- a/packages/core/src/anthropic.ts
+++ b/packages/core/src/anthropic.ts
@@ -39,6 +39,7 @@ import {
 import { HttpsProxyAgent } from "https-proxy-agent"
 import { MarkdownTrace } from "./trace"
 import { createFetch, FetchType } from "./fetch"
+import { JSONLLMTryParse } from "./json5"
 
 const convertFinishReason = (
     stopReason: Anthropic.Message["stop_reason"]
@@ -133,7 +134,7 @@ const convertToolCallMessage = (
             deleteUndefinedValues({
                 type: "tool_use",
                 id: tool.id,
-                input: JSON.parse(tool.function.arguments),
+                input: JSONLLMTryParse(tool.function.arguments),
                 name: tool.function.name,
                 cache_control: toCacheControl(msg),
             }) satisfies Anthropic.Beta.PromptCaching.PromptCachingBetaToolUseBlockParam
diff --git a/packages/core/src/chat.ts b/packages/core/src/chat.ts
index 83ab1d92d4..c1d92433e9 100644
--- a/packages/core/src/chat.ts
+++ b/packages/core/src/chat.ts
@@ -44,7 +44,6 @@ import { parseAnnotations } from "./annotations"
 import { errorMessage, isCancelError, serializeError } from "./error"
 import { estimateChatTokens } from "./chatencoder"
 import { createChatTurnGenerationContext } from "./runpromptcontext"
-import { dedent } from "./indent"
 import { parseModelIdentifier, traceLanguageModelConnection } from "./models"
 import {
     ChatCompletionAssistantMessageParam,
@@ -201,9 +200,7 @@ async function runToolCall(
     messages: ChatCompletionMessageParam[],
     options: GenerationOptions
 ) {
-    const callArgs: any = call.arguments // sometimes wrapped in ```json ...
-        ? JSONLLMTryParse(call.arguments)
-        : undefined
+    const callArgs: any = JSONLLMTryParse(call.arguments)
     trace.fence(call.arguments, "json")
     if (callArgs === undefined) trace.error("arguments failed to parse")
 
@@ -780,6 +777,59 @@ async function choicesToLogitBias(
     return res
 }
 
+function collapseChatMessages(messages: ChatCompletionMessageParam[]) {
+    /*
+    // concat the content of system messages at the start of the messages into a single message
+    const startSystem = messages.findIndex((m) => m.role === "system")
+    if (startSystem > -1) {
+        const endSystem =
+            startSystem +
+            messages
+                .slice(startSystem)
+                .findIndex((m) => m.role !== "system" || m.cacheControl)
+        if (endSystem > startSystem + 1) {
+            const systemContent = messages
+                .slice(startSystem, endSystem)
+                .map((m) => m.content)
+                .join("\n")
+            messages.splice(startSystem, endSystem - startSystem, {
+                role: "system",
+                content: systemContent,
+            })
+        }
+    }
+
+    // concat the user messages at the start into a single message
+    const startUser = messages.findIndex((m) => m.role === "user")
+    if (startUser > -1) {
+        const endUser =
+            startUser +
+            messages
+                .slice(startUser)
+                .findIndex((m) => m.role !== "user" || m.cacheControl)
+        if (endUser > startUser + 1) {
+            const msg: ChatCompletionUserMessageParam = {
+                role: "user",
+                content: messages
+                    .slice(startUser, endUser)
+                    .flatMap((m) => {
+                        const mu = m as ChatCompletionUserMessageParam
+                        return typeof mu.content === "string"
+                            ? ([
+                                  {
+                                      type: "text",
+                                      text: mu.content,
+                                  } satisfies ChatCompletionContentPartText,
+                              ] satisfies ChatCompletionContentPart[])
+                            : mu.content
+                    }),
+            }
+            messages.splice(startUser, endUser - startUser, msg)
+        }
+    }
+    */
+}
+
 export async function executeChatSession(
     connectionToken: LanguageModelConfiguration,
     cancellationToken: CancellationToken,
@@ -834,6 +884,7 @@ export async function executeChatSession(
     let genVars: Record<string, string>
     while (true) {
         stats.turns++
+        collapseChatMessages(messages)
         const tokens = estimateChatTokens(model, messages)
         if (messages)
             trace.details(
diff --git a/packages/core/src/chatrender.ts b/packages/core/src/chatrender.ts
index b53409fc40..f317496ed5 100644
--- a/packages/core/src/chatrender.ts
+++ b/packages/core/src/chatrender.ts
@@ -8,7 +8,7 @@ import {
 } from "./chattypes"
 
 // Import utility functions for JSON5 parsing, markdown formatting, and YAML stringification.
-import { JSON5TryParse } from "./json5"
+import { JSONLLMTryParse } from "./json5"
 import { details, fenceMD } from "./markdown"
 import { YAMLStringify } from "./yaml"
 
@@ -185,7 +185,7 @@ export function renderMessagesToMarkdown(
  * @returns A formatted string in YAML or JSON.
  */
 function renderToolArguments(args: string) {
-    const js = JSON5TryParse(args)
+    const js = JSONLLMTryParse(args)
     // Convert arguments to YAML if possible, otherwise keep as JSON.
     if (js) return fenceMD(YAMLStringify(js), "yaml")
     else return fenceMD(args, "json")
diff --git a/packages/core/src/json5.ts b/packages/core/src/json5.ts
index 2239afc27d..8547fc1033 100644
--- a/packages/core/src/json5.ts
+++ b/packages/core/src/json5.ts
@@ -105,6 +105,7 @@ export function JSON5TryParse(
  */
 export function JSONLLMTryParse(s: string): any {
     if (s === undefined || s === null) return s
+    if (s === "") return {}
     // Removes any fencing and then tries to parse the string.
     const cleaned = unfence(s, "json")
     return JSON5TryParse(cleaned)
diff --git a/packages/core/src/promptdom.ts b/packages/core/src/promptdom.ts
index f41da4824a..10eefbc849 100644
--- a/packages/core/src/promptdom.ts
+++ b/packages/core/src/promptdom.ts
@@ -1280,19 +1280,4 @@ ${fileOutputs.map((fo) => `   ${fo.pattern}: ${fo.description || "generated file
 `
         )
     }
-
-    // concat the content of system messages at the start of the messages into a single message
-    const endSystem = messages.findIndex(
-        (m) => m.role !== "system" || m.cacheControl
-    )
-    if (endSystem > 0) {
-        const systemContent = messages
-            .slice(0, endSystem)
-            .map((m) => m.content)
-            .join("\n")
-        messages.splice(0, endSystem, {
-            role: "system",
-            content: systemContent,
-        })
-    }
 }
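
The change is easiest to see in JSONLLMTryParse. Below is a minimal standalone sketch
of the argument-parsing behavior this patch targets, not the project's actual code:
tryParseToolArguments and unfenceJson are hypothetical stand-ins for JSONLLMTryParse,
unfence, and JSON5TryParse in packages/core/src/json5.ts, and the import assumes the
json5 npm package with esModuleInterop enabled.

    import JSON5 from "json5"

    // Rough stand-in for unfence: strip a Markdown ```json fence if the model
    // wrapped its tool arguments in one.
    function unfenceJson(s: string): string {
        const m = /^\s*```(?:json5?)?\s*\n([\s\S]*?)\n\s*```\s*$/.exec(s)
        return m ? m[1] : s
    }

    // Approximates JSONLLMTryParse after this patch:
    // empty string -> {}, unparsable input -> undefined.
    function tryParseToolArguments(s: string): any {
        if (s === undefined || s === null) return s
        if (s === "") return {} // tool calls with no arguments no longer throw
        try {
            return JSON5.parse(unfenceJson(s))
        } catch {
            return undefined
        }
    }

    // Expected behavior:
    //   tryParseToolArguments("")                        -> {}
    //   tryParseToolArguments('{"city": "Paris"}')       -> { city: "Paris" }
    //   tryParseToolArguments("```json\n{ a: 1 }\n```")  -> { a: 1 }
    //   tryParseToolArguments("not json")                -> undefined

Returning {} for an empty argument string lets tools that take no parameters run
instead of failing at parse time, which appears to be the failure mode the Anthropic
tool-call path hit when it still used JSON.parse directly.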