ソースを参照

added structured log to health endpoint

galo 3 ヶ月 前
コミット
fa3c7c9bf1
1 ファイル変更21 行追加3 行削除
  1 file changed: 21 additions, 3 deletions
      app/services/qa.py

+ 21 - 3
app/services/qa.py

@@ -9,6 +9,11 @@ from langchain.memory import ConversationBufferWindowMemory
 import os
 from dotenv import load_dotenv
 from tempfile import NamedTemporaryFile
+import logging
+
+# Set up logging
+logging.basicConfig(level=logging.DEBUG)
+logger = logging.getLogger(__name__)
 
 # Load environment variables
 load_dotenv()
@@ -43,8 +48,10 @@ def index_file(file_content: bytes, file_name: str):
 def get_prompt_with_history(session_id):
     memory = session_memories.get(session_id)
     if not memory:
-        memory = ConversationBufferWindowMemory(k=3)  # Keep last 3 turns
+        memory = ConversationBufferWindowMemory(k=3)
         session_memories[session_id] = memory
+        logger.debug(f"Initialized new memory for session_id: {session_id}")
+    logger.debug(f"Memory buffer for session_id {session_id}: {memory.buffer}")
     return PromptTemplate(
         input_variables=["question", "history"],
         template=f"Previous conversation:\n{{history}}\n\nResponda à seguinte pergunta: {{question}}"
@@ -55,7 +62,9 @@ def get_prompt_with_history_and_docs(session_id, docs):
     if not memory:
         memory = ConversationBufferWindowMemory(k=3)
         session_memories[session_id] = memory
-    history_text = memory.buffer if memory.buffer else "No previous conversation."
+        logger.debug(f"Initialized new memory for session_id: {session_id}")
+    logger.debug(f"Memory buffer for session_id {session_id}: {memory.buffer}")
+    history_text = memory.buffer_as_str if hasattr(memory, 'buffer_as_str') else str(memory.buffer) if memory.buffer else "No previous conversation."
     docs_text = "\n".join([f"Source: {doc.page_content}" for doc in docs]) if docs else "No relevant documents found."
     return PromptTemplate(
         input_variables=["question", "history"],
@@ -64,9 +73,14 @@ def get_prompt_with_history_and_docs(session_id, docs):
 
 def get_answer(session_id: str, question: str) -> str:
     memory = session_memories.get(session_id, ConversationBufferWindowMemory(k=3))
+    if session_id not in session_memories:
+        session_memories[session_id] = memory
+        logger.debug(f"New memory assigned for session_id: {session_id}")
     prompt = get_prompt_with_history(session_id)
     chain = LLMChain(llm=llm, prompt=prompt, memory=memory)
+    logger.debug(f"Before run - Memory buffer: {memory.buffer}")
     response = chain.run(question=question)
+    logger.debug(f"After run - Memory buffer: {memory.buffer}")
     response = response[:100] if len(response) > 100 else response
     return response
 
@@ -75,11 +89,15 @@ def ask_rag(session_id: str, question: str, file_content: bytes = None, file_nam
         index_file(file_content, file_name)
     
     memory = session_memories.get(session_id, ConversationBufferWindowMemory(k=3))
+    if session_id not in session_memories:
+        session_memories[session_id] = memory
+        logger.debug(f"New memory assigned for session_id: {session_id}")
     docs = vector_store.similarity_search(question, k=3)
     prompt = get_prompt_with_history_and_docs(session_id, docs)
     chain = LLMChain(llm=llm, prompt=prompt, memory=memory)
-    
+    logger.debug(f"Before run - Memory buffer: {memory.buffer}")
     response = chain.run(question=question)
+    logger.debug(f"After run - Memory buffer: {memory.buffer}")
     response = response[:100] if len(response) > 100 else response
     
     sources = [doc.page_content for doc in docs]