feat(flows): add flows #262

Open · wants to merge 7 commits into main
78 changes: 78 additions & 0 deletions examples/flows/agent.ts
@@ -0,0 +1,78 @@
import "dotenv/config";
import { BeeAgent } from "bee-agent-framework/agents/bee/agent";
import { BAMChatLLM } from "bee-agent-framework/adapters/bam/chat";
import { z } from "zod";
import { BaseMessage } from "bee-agent-framework/llms/primitives/message";
import { JsonDriver } from "bee-agent-framework/llms/drivers/json";
import { WikipediaTool } from "bee-agent-framework/tools/search/wikipedia";
import { OpenMeteoTool } from "bee-agent-framework/tools/weather/openMeteo";
import { ReadOnlyMemory } from "bee-agent-framework/memory/base";
import { UnconstrainedMemory } from "bee-agent-framework/memory/unconstrainedMemory";
import { Flow } from "bee-agent-framework/flows";
import { createConsoleReader } from "examples/helpers/io.js";

const schema = z.object({
answer: z.instanceof(BaseMessage).optional(),
memory: z.instanceof(ReadOnlyMemory),
});

// Route: simpleAgent answers first; critique scores the answer and either ends the flow or escalates to complexAgent
const workflow = new Flow({ schema })
.addStep("simpleAgent", async (state) => {
const simpleAgent = new BeeAgent({
llm: BAMChatLLM.fromPreset("meta-llama/llama-3-1-70b-instruct"),
tools: [],
memory: state.memory,
});
const answer = await simpleAgent.run({ prompt: null });
reader.write("🤖 Simple Agent", answer.result.text);

return {
update: { answer: answer.result },
next: "critique",
};
})
.addStep("critique", schema.required(), async (state) => {
const llm = BAMChatLLM.fromPreset("meta-llama/llama-3-1-70b-instruct");
const { parsed: critiqueResponse } = await new JsonDriver(llm).generate(
z.object({ score: z.number().int().min(0).max(100) }),
[
BaseMessage.of({
role: "system",
text: `You are an evaluation assistant who scores the credibility of the last assistant's response. Chitchatting always has a score of 100. If the assistant was unable to answer the user's query, then the score will be 0.`,
}),
...state.memory.messages,
state.answer,
],
);
reader.write("🧠 Score", critiqueResponse.score.toString());

return {
next: critiqueResponse.score < 75 ? "complexAgent" : Flow.END,
};
})
.addStep("complexAgent", async (state) => {
const complexAgent = new BeeAgent({
llm: BAMChatLLM.fromPreset("meta-llama/llama-3-1-70b-instruct"),
tools: [new WikipediaTool(), new OpenMeteoTool()],
memory: state.memory,
});
const { result } = await complexAgent.run({ prompt: null });
reader.write("🤖 Complex Agent", result.text);
return { update: { answer: result } };
})
.setStart("simpleAgent");

const reader = createConsoleReader();
const memory = new UnconstrainedMemory();

for await (const { prompt } of reader) {
const userMessage = BaseMessage.of({ role: "user", text: prompt });
await memory.add(userMessage);

const response = await workflow.run({
memory: memory.asReadOnly(),
});
await memory.add(response.state.answer!);

reader.write("🤖 Final Answer", response.state.answer!.text);
}
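
The example above is interactive. To smoke-test the routing without the console reader, a one-shot driver along these lines should work (my sketch, reusing only the workflow and imports already defined in this file):

// Hypothetical non-interactive driver for the workflow above:
// seeds a fresh memory with a single question and prints the routed answer.
async function oneShot(question: string): Promise<string | undefined> {
  const memory = new UnconstrainedMemory();
  await memory.add(BaseMessage.of({ role: "user", text: question }));
  const response = await workflow.run({ memory: memory.asReadOnly() });
  return response.state.answer?.text;
}
console.log(await oneShot("What is the current weather in Prague?"));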
169 changes: 169 additions & 0 deletions examples/flows/contentCreator.ts
@@ -0,0 +1,169 @@
import "dotenv/config.js";
import { Flow } from "bee-agent-framework/flows";
import { z } from "zod";
import { BeeAgent } from "bee-agent-framework/agents/bee/agent";
import { UnconstrainedMemory } from "bee-agent-framework/memory/unconstrainedMemory";
import { BAMChatLLM } from "bee-agent-framework/adapters/bam/chat";
import { createConsoleReader } from "examples/helpers/io.js";
import { BaseMessage } from "bee-agent-framework/llms/primitives/message";
import { JsonDriver } from "bee-agent-framework/llms/drivers/json";
import { isEmpty, pick } from "remeda";
import { LLMTool } from "bee-agent-framework/tools/llm";
import { GoogleSearchTool } from "bee-agent-framework/tools/search/googleSearch";

const schema = z.object({
input: z.string(),
output: z.string().optional(),

topic: z.string().optional(),
notes: z.array(z.string()).default([]),
plan: z.string().optional(),
draft: z.string().optional(),
});

const flow = new Flow({
schema,
outputSchema: schema.required({ output: true }),
})
.addStep("preprocess", async (state) => {
const llm = BAMChatLLM.fromPreset("meta-llama/llama-3-1-70b-instruct");
const driver = new JsonDriver(llm);

const { parsed } = await driver.generate(
schema.pick({ topic: true, notes: true }).or(
z.object({
error: z
.string()
.describe("Use when the input query does not make sense or you need clarification."),
}),
),
[
BaseMessage.of({
role: `user`,
text: [
"Your task is to rewrite the user query so that it guides the content planner and editor to craft a blog post that perfectly aligns with the user's needs. Notes should be used only if the user complains about something.",
"If the user query does ",
"",
...(state.topic ? ["# Previous Topic", state.topic, ""] : []),
...(isEmpty(state.notes) ? [] : ["# Previous Notes", ...state.notes, ""]),
"# User Query",
state.input || "empty",
]
.filter(Boolean)
.join("\n"),
}),
],
);

return "error" in parsed
? { update: { output: parsed.error }, next: Flow.END }
: { update: pick(parsed, ["notes", "topic"]) };
})
.addStep("planner", schema.required({ topic: true }), async (state) => {
const llm = BAMChatLLM.fromPreset("meta-llama/llama-3-1-70b-instruct");
const agent = new BeeAgent({
llm,
memory: new UnconstrainedMemory(),
tools: [new GoogleSearchTool(), new LLMTool({ llm })],
});

agent.emitter.on("update", (data) => {
console.info(data.update);
});

const { result } = await agent.run({
prompt: [
`You are a Content Planner. Your task is to write a content plan for "${state.topic}" topic in Markdown format.`,
``,
`# Objectives`,
`1. Prioritize the latest trends, key players, and noteworthy news.`,
`2. Identify the target audience, considering their interests and pain points.`,
`3. Develop a detailed content outline including introduction, key points, and a call to action.`,
`4. Include SEO keywords and relevant sources.`,
``,
...(isEmpty(state.notes) ? [] : ["# Notes", ...state.notes, ""]),
`Provide a structured output that covers the mentioned sections.`,
].join("\n"),
});

console.info(result.text);

return {
update: {
plan: result.text,
},
};
})
.addStep("writer", schema.required({ plan: true }), async (state) => {
const llm = BAMChatLLM.fromPreset("meta-llama/llama-3-1-70b-instruct");
const output = await llm.generate([
BaseMessage.of({
role: `system`,
text: [
`You are a Content Writer. Your task is to write a compelling blog post based on the provided context.`,
``,
`# Context`,
`${state.plan}`,
``,
`# Objectives`,
`- An engaging introduction`,
`- Insightful body paragraphs (2-3 per section)`,
`- Properly named sections/subtitles`,
`- A summarizing conclusion`,
`- Format: Markdown`,
``,
...(isEmpty(state.notes) ? [] : ["# Notes", ...state.notes, ""]),
`Ensure the content flows naturally, incorporates SEO keywords, and is well-structured.`,
].join("\n"),
}),
]);

return {
update: { draft: output.getTextContent() },
};
})
.addStep("editor", schema.required({ draft: true }), async (state) => {
const llm = BAMChatLLM.fromPreset("meta-llama/llama-3-1-70b-instruct");
const output = await llm.generate([
BaseMessage.of({
role: `system`,
text: [
`You are an Editor. Your task is to transform the following draft blog post to a final version.`,
``,
`# Draft`,
`${state.draft}`,
``,
`# Objectives`,
`- Fix Grammatical errors`,
`- Journalistic best practices`,
``,
...(isEmpty(state.notes) ? [] : ["# Notes", ...state.notes, ""]),
``,
`IMPORTANT: The final version must not contain any editor's comments.`,
].join("\n"),
}),
]);

return {
update: { output: output.getTextContent() },
};
});

let lastResult = {} as Flow.output<typeof flow>;
const reader = createConsoleReader();
for await (const { prompt } of reader) {
const { result } = await flow
.run({
input: prompt,
notes: lastResult?.notes,
topic: lastResult?.topic,
})
.observe((emitter) => {
emitter.on("start", ({ step, run }) => {
reader.write(`-> ▶️ ${step}`, JSON.stringify(run.state).substring(0, 200).concat("..."));
});
});

lastResult = result;
reader.write("🤖 Answer", lastResult.output);
}
26 changes: 26 additions & 0 deletions examples/flows/simple.ts
@@ -0,0 +1,26 @@
import { Flow } from "bee-agent-framework/flows";
import { z } from "zod";

const schema = z.object({
hops: z.number().default(0),
});

const flow = new Flow({ schema })
.addStep("a", async (state) => ({})) // does nothing
.addStep("b", async (state) => ({
// adds one; execution then continues to the next step ("c")
update: { hops: state.hops + 1 },
}))
.addStep("c", async (state) => ({
update: { hops: state.hops + 1 },
next: Math.random() > 0.5 ? "b" : Flow.END,
}));

const response = await flow.run({ hops: 0 }).observe((emitter) => {
emitter.on("start", (data) => console.log(`-> start ${data.step}`));
emitter.on("error", (data) => console.log(`-> error ${data.step}`));
emitter.on("success", (data) => console.log(`-> finish ${data.step}`));
});

console.log(`Hops: ${response.result.hops}`);
console.log(`-> steps`, response.steps.map((step) => step.name).join(","));
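
For reference, assuming one "start" and one "success" event fire per executed step, a run where the coin flip in step "c" ends the flow immediately would print something like:

-> start a
-> finish a
-> start b
-> finish b
-> start c
-> finish c
Hops: 2
-> steps a,b,c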
3 changes: 2 additions & 1 deletion package.json
@@ -39,7 +39,8 @@
"serializer",
"infra",
"deps",
"instrumentation"
"instrumentation",
"flows"
]
]
}
4 changes: 2 additions & 2 deletions src/context.ts
@@ -97,8 +97,8 @@ export class RunContext<T extends RunInstance, P = any> extends Serializable {
return this.controller.signal;
}

- abort() {
- this.controller.abort();
+ abort(reason?: Error) {
+ this.controller.abort(reason);
}

constructor(
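
The widened abort() mirrors the standard AbortController API, where the value passed to abort() becomes signal.reason. A minimal usage sketch (ctx standing in for a hypothetical RunContext instance):

// Hypothetical: listeners can now see why a run was cancelled via signal.reason.
ctx.signal.addEventListener("abort", () => {
  console.error("Run aborted:", ctx.signal.reason);
});
ctx.abort(new Error("flow cancelled by user"));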