hit the backend for the chat workflow in generate_stream_response
nwaughachukwuma committed Nov 2, 2024
1 parent 9f936c3 commit 607b0d9
Showing 1 changed file with 16 additions and 10 deletions.
app/src/utils/chat_thread.py (26 changes: 16 additions & 10 deletions)
@@ -1,14 +1,17 @@
 import re
+from typing import Any, Generator
 
+import httpx
 import streamlit as st
+from src.utils.main_utils import GenerateAudioCastRequest, generate_audiocast
 from src.utils.session_state import reset_session
 
-from app.src.utils.chat_utils import (
+from env_var import SERVER_URL
+from utils_pkg.chat_utils import (
     ContentCategory,
     SessionChatMessage,
     SessionChatRequest,
 )
-from app.src.utils.main_utils import GenerateAudioCastRequest, chat, generate_audiocast
 
 termination_prefix = "Ok, thanks for clarifying!"
 termination_suffix = "Please click the button below to start generating the audiocast."
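
The import changes above bring in httpx for the HTTP call, the backend base URL SERVER_URL from env_var, and the shared chat types from utils_pkg instead of app.src.utils. Since the hunk below serializes the request with .model_dump(), SessionChatRequest and SessionChatMessage are presumably Pydantic (v2) models, roughly along these lines (a minimal sketch inferred from the fields used in the diff; the real definitions live in utils_pkg/chat_utils.py and may differ):

# Hypothetical sketch of the shared chat types; the actual definitions
# live in utils_pkg/chat_utils.py and are not part of this commit.
from pydantic import BaseModel

ContentCategory = str  # stand-in; the real ContentCategory type is defined in utils_pkg.chat_utils


class SessionChatMessage(BaseModel):
    role: str  # e.g. "user" or "assistant"
    content: str


class SessionChatRequest(BaseModel):
    message: SessionChatMessage
    content_category: ContentCategory

The second hunk, below, replaces the in-process chat() call with a POST against this request shape.
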
@@ -18,17 +21,20 @@ def generate_stream_response(
     session_id: str,
     prompt: str,
     content_category: ContentCategory,
-):
+) -> Generator[str, Any, None]:
     with st.spinner("Generating response..."):
-        response_generator = chat(
-            session_id,
-            SessionChatRequest(
-                message=SessionChatMessage(role="user", content=prompt),
-                content_category=content_category,
-            ),
+        session_chat = SessionChatRequest(
+            message=SessionChatMessage(role="user", content=prompt),
+            content_category=content_category,
         )
 
-        return response_generator
+        response = httpx.post(
+            f"{SERVER_URL}/chat/{session_id}",
+            json={**session_chat.model_dump()},
+            timeout=None,
+        )
+        response.raise_for_status()
+        return response.json()
 
 
 def handle_example_prompt(
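
Assembled from the two hunks, generate_stream_response now reads roughly as follows after this commit: instead of calling chat() in-process and returning its generator, it POSTs the serialized SessionChatRequest to the backend at {SERVER_URL}/chat/{session_id} and returns the parsed JSON body (the Generator return annotation is kept even though the function no longer returns a generator):

# generate_stream_response as it stands after this commit (reconstructed
# from the diff above; surrounding module code omitted).
from typing import Any, Generator

import httpx
import streamlit as st

from env_var import SERVER_URL
from utils_pkg.chat_utils import (
    ContentCategory,
    SessionChatMessage,
    SessionChatRequest,
)


def generate_stream_response(
    session_id: str,
    prompt: str,
    content_category: ContentCategory,
) -> Generator[str, Any, None]:  # annotation unchanged, though the body now returns parsed JSON
    with st.spinner("Generating response..."):
        session_chat = SessionChatRequest(
            message=SessionChatMessage(role="user", content=prompt),
            content_category=content_category,
        )

        # Hit the backend chat endpoint instead of calling chat() in-process.
        response = httpx.post(
            f"{SERVER_URL}/chat/{session_id}",
            json={**session_chat.model_dump()},
            timeout=None,
        )
        response.raise_for_status()
        return response.json()

Two small notes on the call: json={**session_chat.model_dump()} just passes a shallow copy of the model's dict form, so session_chat.model_dump() alone would be equivalent; and timeout=None disables httpx's default timeout, so the request waits as long as the backend takes to respond.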
