From 3da4c4e4123683f691bd2c158c19102878211ba1 Mon Sep 17 00:00:00 2001
From: Hyun-Sik Won
Date: Thu, 5 Jun 2025 16:29:50 +0900
Subject: [PATCH] fix(docs): Correct typos and grammatical inconsistencies

---
 backend/src/agent/graph.py   | 6 +++---
 backend/src/agent/prompts.py | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/backend/src/agent/graph.py b/backend/src/agent/graph.py
index dae64b7..c34dcbb 100644
--- a/backend/src/agent/graph.py
+++ b/backend/src/agent/graph.py
@@ -42,9 +42,9 @@ genai_client = Client(api_key=os.getenv("GEMINI_API_KEY"))

 # Nodes
 def generate_query(state: OverallState, config: RunnableConfig) -> QueryGenerationState:
-    """LangGraph node that generates a search queries based on the User's question.
+    """LangGraph node that generates search queries based on the User's question.

-    Uses Gemini 2.0 Flash to create an optimized search query for web research based on
+    Uses Gemini 2.0 Flash to create optimized search queries for web research based on
     the User's question.

     Args:
@@ -52,7 +52,7 @@ def generate_query(state: OverallState, config: RunnableConfig) -> QueryGenerati
         config: Configuration for the runnable, including LLM provider settings

     Returns:
-        Dictionary with state update, including search_query key containing the generated query
+        Dictionary with state update, including search_query key containing the generated queries
     """
     configurable = Configuration.from_runnable_config(config)

diff --git a/backend/src/agent/prompts.py b/backend/src/agent/prompts.py
index d8fd3b9..f979db3 100644
--- a/backend/src/agent/prompts.py
+++ b/backend/src/agent/prompts.py
@@ -87,7 +87,7 @@ Instructions:
 - You have access to all the information gathered from the previous steps.
 - You have access to the user's question.
 - Generate a high-quality answer to the user's question based on the provided summaries and the user's question.
-- you MUST include all the citations from the summaries in the answer correctly.
+- You MUST include all the citations from the summaries in the answer correctly.

 User Context:
 - {research_topic}
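
For reviewers who want to see the contract the corrected docstring describes, below is a minimal, self-contained sketch of a generate_query-style node. It is not the repository's actual implementation: the TypedDict fields, the plain dict standing in for RunnableConfig, and the placeholder query logic in place of the Gemini 2.0 Flash call are assumptions made purely for illustration; only the node signature and the search_query return key follow the docstring in the patch.

# Minimal sketch of the node contract described by the docstring above.
# NOTE: illustrative only -- the state fields and the placeholder query
# logic are assumptions, not the repository's real OverallState or the
# real Gemini 2.0 Flash call.
from typing import Any, TypedDict


class OverallState(TypedDict, total=False):
    messages: list[Any]       # conversation history; last entry is the user's question
    search_query: list[str]   # accumulated search queries


class QueryGenerationState(TypedDict):
    search_query: list[str]


def generate_query(state: OverallState, config: dict) -> QueryGenerationState:
    """Return a state update whose search_query key holds the generated queries."""
    # The real node asks Gemini 2.0 Flash for structured queries here; this
    # stub simply turns the latest user message into a single query.
    messages = state.get("messages") or ["example user question"]
    question = str(messages[-1])
    return {"search_query": [question]}


if __name__ == "__main__":
    update = generate_query({"messages": ["What is LangGraph?"]}, config={})
    print(update)  # {'search_query': ['What is LangGraph?']}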