Add an expander component to the chat that displays the response's sources for citation
parent e2d3c56c52
commit 57f0da6a5d

app.py (18 changed lines)
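Taken together, the changes below thread the retrieval sources from the LangChain chain (via return_source_documents=True) through MessageManager.generate_citations and into a Streamlit expander under the assistant's reply. A minimal sketch of just the UI half, with hard-coded stand-ins for the model answer and the generated citation string (not code from this commit):

import streamlit as st

# Mock values standing in for the chain answer and the generated citations.
response_text = "…respuesta del modelo…"
citation = "**Fuente #1:**  \n'…cita…'\n(*ejemplo.pdf, P.1)*"

with st.chat_message("assistant"):
    st.write(response_text)            # the answer itself
    expander = st.expander("Fuentes")  # collapsed panel for the sources
    expander.markdown(citation)        # citations render as markdown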
@@ -6,6 +6,7 @@ from chats.streamlit_tools import import_file, clear_cache
 from streamlit_extras.add_vertical_space import add_vertical_space
 from langchain_tools.pdf_tools import PdfLangChain
 from langchain_tools.lc_tools import LangChainTools
+from chats.chat_tools import MessageManager


 # App title
@@ -121,12 +122,21 @@ if pdf_name:
     input = "\n".join([msg["content"]
                        for msg in st.session_state.messages])

-    query = qa.invoke({"question": f"{input}"},
+    query = qa.invoke({"question": f"{prompt}"},
                       return_only_outputs=True)

-    response = query["answer"]
+    response_text = query["answer"]
+    documents_source = query["source_documents"]
+
+    messageManager = MessageManager()
+
+    citation: str = messageManager.generate_citations(
+        documents_source)
+    # st.markdown(citation)

     with st.chat_message("assistant"):
-        st.write(response)
+        st.write(response_text)
         st.session_state.messages.append(
-            {"role": "assistant", "content": response})
+            {"role": "assistant", "content": response_text})
+        expander = st.expander("Fuentes")
+        expander.markdown(citation)
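For context on the unpacking above: once the chain is built with return_source_documents=True (see the lc_tools hunk further down), qa.invoke(...) returns a dict carrying both the answer text and the retrieved chunks. A sketch of the expected shape, with placeholder values only:

# Expected shape of `query` after this change (placeholder values):
query = {
    "answer": "…texto de la respuesta…",
    "source_documents": [
        # LangChain Document objects exposing .page_content and .metadata,
        # where metadata holds at least 'source' (path) and 'page' (0-based).
    ],
}
response_text = query["answer"]
documents_source = query["source_documents"]

The switch from f"{input}" (the concatenated message history) to f"{prompt}" (only the latest user message) presumably relies on ConversationBufferMemory now carrying the history, so resending past turns would duplicate them.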
Binary file not shown.

chats/chat_tools.py
@@ -24,5 +24,17 @@ class MessageManager:
         print(f'{ia_emoticon} ' + Style.BRIGHT + Fore.YELLOW +
               'IA:' + Style.RESET_ALL + f'{bot_response["answer"]}')

-    def generate_citations(self):
-        pass
+    def generate_citations(self, documents_source: list) -> str:
+
+        text_source: str = ""
+
+        for index, document in enumerate(documents_source):
+            quote: str = document.page_content
+            source: str = document.metadata['source'].replace(
+                'documents/pdfs/', '')
+            page: str = document.metadata['page'] + 1
+            fuente: str = f"**Fuente #{index + 1}:** \n '{quote}'\n(*{source}, P.{page})*"
+
+            text_source += fuente + "\n\n\n"
+
+        return text_source
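A hypothetical standalone check of the new generate_citations; SimpleNamespace mimics a LangChain Document here, since only .page_content and .metadata are accessed:

from types import SimpleNamespace

from chats.chat_tools import MessageManager

doc = SimpleNamespace(
    page_content="Se está muriendo la democracia…",
    metadata={"source": "documents/pdfs/Malamud.pdf", "page": 2},
)
print(MessageManager().generate_citations([doc]))
# **Fuente #1:**
#  'Se está muriendo la democracia…'
# (*Malamud.pdf, P.3)*

Two small things a later commit could tighten: page is annotated str but actually holds an int, and the closing emphasis marker sits outside the parenthesis in (*{source}, P.{page})*.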
documents/pdfs/1.TC_Malamud, Se está muriendo la democracia.pdf: new file (8531); file diff suppressed because it is too large.
Binary files not shown.

langchain_tools/lc_tools.py
@@ -172,14 +172,17 @@ class LangChainTools():

         # Configure the memory
         memory = ConversationBufferMemory(
-            memory_key="chat_history", return_messages=True)
+            memory_key="chat_history",
+            return_messages=True,
+            output_key='answer')

         # Define the Retrieval QA Chain to integrate the database and LLM
         conversation = ConversationalRetrievalChain.from_llm(
             _llm,
             retriever=_vectordb.as_retriever(),
             memory=memory,
-            verbose=False  # Verbose mode
+            verbose=False,  # Verbose mode
+            return_source_documents=True  # Return the source documents
         )

         return conversation
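Why output_key='answer' accompanies return_source_documents=True: with two keys in the chain output, ConversationBufferMemory can no longer infer which value to save after each turn, and in the classic LangChain memory API saving the context then fails with a ValueError about multiple output keys. The relevant configuration, isolated:

from langchain.memory import ConversationBufferMemory

memory = ConversationBufferMemory(
    memory_key="chat_history",  # key under which past turns are injected
    return_messages=True,       # yield Message objects instead of a string
    output_key="answer",        # persist only the answer, not the sources
)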