tool parsing fixes when string is empty
pelikhan committed Dec 13, 2024
1 parent 448e17a commit c2458c3
Showing 5 changed files with 60 additions and 22 deletions.

packages/core/src/anthropic.ts (3 changes: 2 additions & 1 deletion)

@@ -39,6 +39,7 @@ import {
 import { HttpsProxyAgent } from "https-proxy-agent"
 import { MarkdownTrace } from "./trace"
 import { createFetch, FetchType } from "./fetch"
+import { JSONLLMTryParse } from "./json5"
 
 const convertFinishReason = (
     stopReason: Anthropic.Message["stop_reason"]
@@ -133,7 +134,7 @@ const convertToolCallMessage = (
         deleteUndefinedValues({
             type: "tool_use",
             id: tool.id,
-            input: JSON.parse(tool.function.arguments),
+            input: JSONLLMTryParse(tool.function.arguments),
             name: tool.function.name,
             cache_control: toCacheControl(msg),
         }) satisfies Anthropic.Beta.PromptCaching.PromptCachingBetaToolUseBlockParam
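
The swap matters because models sometimes emit tool arguments that strict JSON.parse rejects outright: an empty string, or a payload wrapped in a ```json fence. JSONLLMTryParse (see the json5.ts change below) strips the fence, parses leniently, and, with this commit, maps the empty string to {} instead of throwing. A small illustrative sketch; the sample payloads are made up, not taken from the repository:

import { JSONLLMTryParse } from "./json5" // the helper this commit imports into anthropic.ts

// Hypothetical tool-call argument strings a model might produce.
const fenced = '```json\n{ "file": "README.md" }\n```'
const empty = ""

// Strict parsing rejects both shapes:
// JSON.parse(fenced) // throws: a fenced block is not valid JSON
// JSON.parse(empty)  // throws: unexpected end of JSON input

// The lenient helper recovers a usable argument object in both cases:
JSONLLMTryParse(fenced) // -> { file: "README.md" }
JSONLLMTryParse(empty)  // -> {} (after this commit's json5.ts change)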

packages/core/src/chat.ts (59 changes: 55 additions & 4 deletions)

@@ -44,7 +44,6 @@ import { parseAnnotations } from "./annotations"
 import { errorMessage, isCancelError, serializeError } from "./error"
 import { estimateChatTokens } from "./chatencoder"
 import { createChatTurnGenerationContext } from "./runpromptcontext"
-import { dedent } from "./indent"
 import { parseModelIdentifier, traceLanguageModelConnection } from "./models"
 import {
     ChatCompletionAssistantMessageParam,
@@ -201,9 +200,7 @@ async function runToolCall(
     messages: ChatCompletionMessageParam[],
     options: GenerationOptions
 ) {
-    const callArgs: any = call.arguments // sometimes wrapped in ```json ...
-        ? JSONLLMTryParse(call.arguments)
-        : undefined
+    const callArgs: any = JSONLLMTryParse(call.arguments)
     trace.fence(call.arguments, "json")
     if (callArgs === undefined) trace.error("arguments failed to parse")
 
@@ -780,6 +777,59 @@ async function choicesToLogitBias(
     return res
 }
 
+function collapseChatMessages(messages: ChatCompletionMessageParam[]) {
+    /*
+    // concat the content of system messages at the start of the messages into a single message
+    const startSystem = messages.findIndex((m) => m.role === "system")
+    if (startSystem > -1) {
+        const endSystem =
+            startSystem +
+            messages
+                .slice(startSystem)
+                .findIndex((m) => m.role !== "system" || m.cacheControl)
+        if (endSystem > startSystem + 1) {
+            const systemContent = messages
+                .slice(startSystem, endSystem)
+                .map((m) => m.content)
+                .join("\n")
+            messages.splice(startSystem, endSystem - startSystem, {
+                role: "system",
+                content: systemContent,
+            })
+        }
+    }
+    // concat the user messages at the start into a single message
+    const startUser = messages.findIndex((m) => m.role === "user")
+    if (startUser > -1) {
+        const endUser =
+            startUser +
+            messages
+                .slice(startUser)
+                .findIndex((m) => m.role !== "user" || m.cacheControl)
+        if (endUser > startUser + 1) {
+            const msg: ChatCompletionUserMessageParam = {
+                role: "user",
+                content: messages
+                    .slice(startUser, endUser)
+                    .flatMap<ChatCompletionContentPart>((m) => {
+                        const mu = m as ChatCompletionUserMessageParam
+                        return typeof mu.content === "string"
+                            ? ([
+                                  {
+                                      type: "text",
+                                      text: mu.content,
+                                  } satisfies ChatCompletionContentPartText,
+                              ] satisfies ChatCompletionContentPart[])
+                            : mu.content
+                    }),
+            }
+            messages.splice(startUser, endUser - startUser, msg)
+        }
+    }
+    */
+}
+
 export async function executeChatSession(
     connectionToken: LanguageModelConfiguration,
     cancellationToken: CancellationToken,
@@ -834,6 +884,7 @@ export async function executeChatSession(
     let genVars: Record<string, string>
     while (true) {
         stats.turns++
+        collapseChatMessages(messages)
         const tokens = estimateChatTokens(model, messages)
         if (messages)
             trace.details(
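
collapseChatMessages is now called at the top of the chat loop, but its body ships commented out, so for the moment it is a no-op placeholder. The commented code spells out the intent: merge a run of consecutive system messages into a single system message, and merge a run of consecutive user messages into one multi-part user message, stopping either run at a message that carries cacheControl. A minimal standalone sketch of the system-message half, assuming plain OpenAI-style message objects rather than the repository's ChatCompletion* types (and omitting the cacheControl stop condition):

// Sketch only; not the committed implementation.
type Msg = { role: "system" | "user" | "assistant"; content: string }

function collapseLeadingSystem(messages: Msg[]): Msg[] {
    // Find the end of the leading run of system messages.
    let end = 0
    while (end < messages.length && messages[end].role === "system") end++
    // Nothing to merge for zero or one system message.
    if (end <= 1) return messages
    const merged: Msg = {
        role: "system",
        content: messages
            .slice(0, end)
            .map((m) => m.content)
            .join("\n"),
    }
    return [merged, ...messages.slice(end)]
}

// Two leading system prompts collapse into one.
collapseLeadingSystem([
    { role: "system", content: "You are terse." },
    { role: "system", content: "Answer in English." },
    { role: "user", content: "Hi" },
])
// -> [{ role: "system", content: "You are terse.\nAnswer in English." }, { role: "user", content: "Hi" }]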

packages/core/src/chatrender.ts (4 changes: 2 additions & 2 deletions)

@@ -8,7 +8,7 @@ import {
 } from "./chattypes"
 
 // Import utility functions for JSON5 parsing, markdown formatting, and YAML stringification.
-import { JSON5TryParse } from "./json5"
+import { JSONLLMTryParse } from "./json5"
 import { details, fenceMD } from "./markdown"
 import { YAMLStringify } from "./yaml"
 
@@ -185,7 +185,7 @@ export function renderMessagesToMarkdown(
  * @returns A formatted string in YAML or JSON.
  */
 function renderToolArguments(args: string) {
-    const js = JSON5TryParse(args)
+    const js = JSONLLMTryParse(args)
     // Convert arguments to YAML if possible, otherwise keep as JSON.
     if (js) return fenceMD(YAMLStringify(js), "yaml")
     else return fenceMD(args, "json")
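
A side effect of the new empty-string guard shows up here: "" now parses to {}, which is truthy, so empty tool arguments take the YAML branch instead of falling through to the raw JSON fence. A self-contained sketch of the branch logic, with simplified stand-ins for the real fenceMD and YAMLStringify helpers (the stand-in implementations are not the repository's):

import JSON5 from "json5"

// Simplified stand-ins, not the helpers from ./markdown and ./yaml.
const fenceMD = (body: string, lang: string) => "```" + lang + "\n" + body + "\n```"
const YAMLStringify = (o: unknown) => JSON.stringify(o, null, 2) // the real helper emits YAML

function renderToolArgumentsSketch(args: string) {
    let js: unknown
    try {
        js = args === "" ? {} : JSON5.parse(args) // mirrors JSONLLMTryParse's empty-string guard
    } catch {
        js = undefined
    }
    // A truthy parse result (including {}) renders as YAML; otherwise keep the raw string fenced as JSON.
    return js ? fenceMD(YAMLStringify(js), "yaml") : fenceMD(args, "json")
}

renderToolArgumentsSketch('{ "url": "https://example.com" }') // yaml fence
renderToolArgumentsSketch("not json {{") // json fence around the raw string
renderToolArgumentsSketch("") // yaml fence around "{}" after this change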

packages/core/src/json5.ts (1 change: 1 addition & 0 deletions)

@@ -105,6 +105,7 @@ export function JSON5TryParse<T = unknown>(
  */
 export function JSONLLMTryParse(s: string): any {
     if (s === undefined || s === null) return s
+    if (s === "") return {}
     // Removes any fencing and then tries to parse the string.
     const cleaned = unfence(s, "json")
     return JSON5TryParse(cleaned)
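
The guard above only covers the empty string; everything else still flows through unfence and JSON5TryParse, so JSON5's leniencies (unquoted keys, single quotes, trailing commas) continue to rescue near-JSON model output. A short illustrative example; the inputs are made up, and the undefined result assumes JSON5TryParse swallows parse errors, as its callers in chat.ts and chatrender.ts suggest:

import { JSONLLMTryParse } from "./json5"

// JSON5 tolerates unquoted keys, single quotes, and trailing commas.
JSONLLMTryParse("{ file: 'README.md', dryRun: true, }")
// -> { file: "README.md", dryRun: true }

// A string that is not JSON at all still fails, which is the case chat.ts
// reports via trace.error("arguments failed to parse").
JSONLLMTryParse("call the tool on the readme")
// -> undefined (assuming JSON5TryParse returns undefined on failure)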

packages/core/src/promptdom.ts (15 changes: 0 additions & 15 deletions)

@@ -1280,19 +1280,4 @@ ${fileOutputs.map((fo) => ` ${fo.pattern}: ${fo.description || "generated file
 `
     )
 }
-
-    // concat the content of system messages at the start of the messages into a single message
-    const endSystem = messages.findIndex(
-        (m) => m.role !== "system" || m.cacheControl
-    )
-    if (endSystem > 0) {
-        const systemContent = messages
-            .slice(0, endSystem)
-            .map((m) => m.content)
-            .join("\n")
-        messages.splice(0, endSystem, {
-            role: "system",
-            content: systemContent,
-        })
-    }
 }
