import base64
from pathlib import Path
import asyncio
-import os
import time

from agno.agent import Agent
-from agno.document import Document
from agno.document.reader.pdf_reader import PDFReader
from agno.utils.log import logger
-from agno.agent import Agent, AgentMemory
+from agno.agent import AgentMemory
from agno.embedder.google import GeminiEmbedder
from agno.knowledge import AgentKnowledge
from agno.memory.db.postgres import PgMemoryDb
@@ -23,27 +21,24 @@

import traceback

-from typing import Optional

db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"

+
def get_agentic_rag_agent(
    model_id: str = "gemini-2.0-flash",
    user_id: Optional[str] = None,
    session_id: Optional[str] = None,
    debug_mode: bool = True,
) -> Agent:
3634 """Get an Agentic RAG Agent with Memory optimized for Deepseek and PDFs."""
-
+
    # Initialize the Gemini model
-    model = Gemini(id=model_id)
+    model = Gemini(id=model_id)

    # Define persistent memory for chat history
    memory = AgentMemory(
-        db=PgMemoryDb(
-            table_name="pdf_agent_memory",
-            db_url=db_url
-        ),
+        db=PgMemoryDb(table_name="pdf_agent_memory", db_url=db_url),
        create_user_memories=False,
        create_session_summary=False,
    )
@@ -57,10 +52,9 @@ def get_agentic_rag_agent(
            embedder=GeminiEmbedder(),
        ),
        num_documents=4,  # Optimal for PDF chunking
-        document_processor=PDFReader(chunk_size=1000
-        ),
-        batch_size=32,
-        parallel_processing=True
+        document_processor=PDFReader(chunk_size=1000),
+        batch_size=32,
+        parallel_processing=True,
    )

    # Create the PDF-focused Agent
@@ -69,10 +63,7 @@ def get_agentic_rag_agent(
        session_id=session_id,
        user_id=user_id,
        model=model,
-        storage=PostgresAgentStorage(
-            table_name="pdf_agent_sessions",
-            db_url=db_url
-        ),
+        storage=PostgresAgentStorage(table_name="pdf_agent_sessions", db_url=db_url),
        memory=memory,
        knowledge=knowledge_base,
        description="You are a helpful Agent called 'Agentic RAG' and your goal is to assist the user in the best way possible.",
@@ -123,10 +114,10 @@ def get_agentic_rag_agent(

# Styles
message_style = dict(
-    display="inline-block",
-    padding="1em",
+    display="inline-block",
+    padding="1em",
    border_radius="8px",
-    max_width=["30em", "30em", "50em", "50em", "50em", "50em"]
+    max_width=["30em", "30em", "50em", "50em", "50em", "50em"],
)

SIDEBAR_STYLE = dict(
@@ -148,14 +139,18 @@ def get_agentic_rag_agent(
    _hover={"bg": rx.color("mauve", 3)},
)

+
@dataclass
class QA:
    """A question and answer pair."""
+
    question: str
    answer: str

+
class LoadingIcon(rx.Component):
    """A custom loading icon component."""
+
    library = "react-loading-icons"
    tag = "SpinningCircles"
    stroke: rx.Var[str]
@@ -169,11 +164,13 @@ class LoadingIcon(rx.Component):
    def get_event_triggers(self) -> dict:
        return {"on_change": lambda status: [status]}

+
loading_icon = LoadingIcon.create


class State(rx.State):
    """The app state."""
+
    chats: List[List[QA]] = [[]]
    base64_pdf: str = ""
    uploading: bool = False
@@ -193,7 +190,7 @@ class Config:
        exclude = {"_temp_dir"}
        json_encoders = {
            Path: lambda v: str(v),
-            tempfile.TemporaryDirectory: lambda v: None
+            tempfile.TemporaryDirectory: lambda v: None,
        }

    def _create_agent(self) -> Agent:
@@ -202,12 +199,12 @@ def _create_agent(self) -> Agent:
            # Generate a consistent session ID based on current chat
            if not self._session_id:
                self._session_id = f"session_{int(time.time())}"
-
+
            return get_agentic_rag_agent(
                model_id="gemini-2.0-flash",
                session_id=self._session_id,
                user_id=None,
-                debug_mode=True
+                debug_mode=True,
            )
        except Exception as e:
            logger.error(f"Agent creation error: {str(e)}")
@@ -225,7 +222,7 @@ async def handle_upload(self, files: List[rx.UploadFile]):

        file = files[0]
        upload_data = await file.read()
-
+
        # Create persistent temp directory
        if self._temp_dir is None:
            self._temp_dir = Path(tempfile.mkdtemp())
@@ -255,7 +252,7 @@ async def handle_upload(self, files: List[rx.UploadFile]):
                return

            # Store base64 for preview
-            base64_pdf = base64.b64encode(upload_data).decode('utf-8')
+            base64_pdf = base64.b64encode(upload_data).decode("utf-8")
            self.base64_pdf = base64_pdf
            self.knowledge_base_files.append(file.filename)

@@ -265,15 +262,15 @@ async def handle_upload(self, files: List[rx.UploadFile]):
        finally:
            self.uploading = False
            yield
-
+
    @rx.event(background=True)
    async def process_question(self, form_data: dict):
        """Process a question using streaming responses"""
        if self.processing or not form_data.get("question"):
            return

        question = form_data["question"]
-
+
        async with self:
            self.processing = True
            self.chats[self.current_chat].append(QA(question=question, answer=""))
@@ -291,7 +288,9 @@ def run_stream():
                stream_response = agent.run(question, stream=True)
                for chunk in stream_response:
                    if chunk.content:
-                        asyncio.run_coroutine_threadsafe(queue.put(chunk.content), loop)
+                        asyncio.run_coroutine_threadsafe(
+                            queue.put(chunk.content), loop
+                        )
                asyncio.run_coroutine_threadsafe(queue.put(None), loop)
            except Exception as e:
                error_msg = f"Error: {str(e)}"
@@ -308,7 +307,7 @@ def run_stream():
            if isinstance(chunk, str) and chunk.startswith("Error: "):
                answer_content = chunk
                break
-
+
            answer_content += chunk
            async with self:
                self.chats[self.current_chat][-1].answer = answer_content
@@ -326,15 +325,14 @@ def run_stream():
        async with self:
            self.processing = False
        yield
-

    def clear_knowledge_base(self):
        """Clear knowledge base and reset state"""
        try:
            # Create temporary agent to clear vector store
            agent = self._create_agent()
            agent.knowledge.vector_db.delete()
-
+
            # Reset state
            self.loaded_files.clear()
            self.knowledge_base_files.clear()
@@ -344,26 +342,27 @@ def clear_knowledge_base(self):
            self.upload_status = "Knowledge base cleared"
        except Exception as e:
            self.upload_status = f"Error clearing knowledge base: {str(e)}"
-
+
    def create_new_chat(self):
        """Create a new chat"""
        self.chats.append([])
        self.current_chat = len(self.chats) - 1

+
def pdf_preview() -> rx.Component:
    return rx.box(
        rx.heading("PDF Preview", size="4", margin_bottom="1em"),
        rx.cond(
            State.base64_pdf != "",
            rx.html(
-                f'''
+                f"""
                <iframe
                    src="data:application/pdf;base64,{State.base64_pdf}"
                    width="100%"
                    height="600px"
                    style="border: none; border-radius: 8px;">
                </iframe>
-                '''
+                """
            ),
            rx.text("No PDF uploaded yet", color="red"),
        ),
@@ -373,6 +372,7 @@ def pdf_preview() -> rx.Component:
        overflow="hidden",
    )

+
def message(qa: QA) -> rx.Component:
    return rx.box(
        rx.box(
@@ -398,12 +398,10 @@ def message(qa: QA) -> rx.Component:
        width="100%",
    )

+
def chat() -> rx.Component:
    return rx.vstack(
-        rx.box(
-            rx.foreach(State.chats[State.current_chat], message),
-            width="100%"
-        ),
+        rx.box(rx.foreach(State.chats[State.current_chat], message), width="100%"),
        py="8",
        flex="1",
        width="100%",
@@ -414,6 +412,7 @@ def chat() -> rx.Component:
        padding_bottom="5em",
    )

+
def action_bar() -> rx.Component:
    return rx.box(
        rx.vstack(
@@ -461,6 +460,7 @@ def action_bar() -> rx.Component:
        width="100%",
    )

+
def sidebar() -> rx.Component:
    return rx.box(
        rx.vstack(
@@ -509,11 +509,7 @@ def sidebar() -> rx.Component:
                    width="100%",
                ),
            ),
-            rx.text(
-                State.upload_status,
-                color=rx.color("mauve", 11),
-                font_size="sm"
-            ),
+            rx.text(State.upload_status, color=rx.color("mauve", 11), font_size="sm"),
            align_items="stretch",
            height="100%",
        ),
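A minimal usage sketch of the reformatted module (assumptions not shown in the diff: the file is importable as `app`, a GOOGLE_API_KEY is set for Gemini, and Postgres with pgvector is reachable at the db_url above):

from app import get_agentic_rag_agent  # hypothetical module name

agent = get_agentic_rag_agent(
    model_id="gemini-2.0-flash",
    session_id="demo-session",
    user_id=None,
    debug_mode=True,
)

# Stream an answer the same way process_question consumes chunks.
for chunk in agent.run("Summarize the uploaded PDF", stream=True):
    if chunk.content:
        print(chunk.content, end="", flush=True)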