FastApiAssistant #2
								app/api_old.py
							| @@ -1,141 +0,0 @@ | ||||
| from flask import Flask, request, jsonify | ||||
| from langchain_community.tools.tavily_search import TavilySearchResults | ||||
| from typing import Annotated | ||||
| from typing_extensions import TypedDict | ||||
| from langgraph.graph.message import add_messages | ||||
| from langchain_openai import ChatOpenAI | ||||
| from dotenv import load_dotenv | ||||
| from langgraph.prebuilt import create_react_agent | ||||
| from langchain_core.prompts import ChatPromptTemplate | ||||
| from langgraph.checkpoint.memory import MemorySaver | ||||
| from langchain_tools.agent_tools import ( | ||||
|     redact_email, list_calendar_events, | ||||
|     create_calendar_event, get_company_info, | ||||
|     get_current_date_and_time | ||||
| ) | ||||
| from langchain_community.tools.gmail.utils import ( | ||||
|     build_resource_service, get_gmail_credentials) | ||||
| from langchain_community.agent_toolkits import GmailToolkit | ||||
|  | ||||
| # Cargar las variables de entorno | ||||
| load_dotenv() | ||||
|  | ||||
| # Inicializar la app Flask | ||||
| app = Flask(__name__) | ||||
|  | ||||
| # Inicializar el modelo LLM de OpenAI | ||||
| llm = ChatOpenAI( | ||||
|     model="gpt-4o-mini", | ||||
|     temperature=0 | ||||
| ) | ||||
|  | ||||
| # Configuración de Gmail | ||||
| toolkit = GmailToolkit() | ||||
| credentials = get_gmail_credentials( | ||||
|     token_file="token.json", | ||||
|     scopes=["https://mail.google.com/"], | ||||
|     client_secrets_file="credentials.json", | ||||
| ) | ||||
| api_resource = build_resource_service(credentials=credentials) | ||||
| toolkit = GmailToolkit(api_resource=api_resource) | ||||
|  | ||||
| # Crear herramientas | ||||
| tools = toolkit.get_tools() | ||||
| search = TavilySearchResults(max_results=2) | ||||
| tools.extend([ | ||||
|     search, redact_email, list_calendar_events, | ||||
|     create_calendar_event, get_company_info, | ||||
|     get_current_date_and_time]) | ||||
|  | ||||
| # Definir el sistema prompt | ||||
| system_prompt = ChatPromptTemplate.from_messages( | ||||
|     [ | ||||
|         ( | ||||
|             "system", | ||||
|             "Eres Mariana, el asistente virtual de OneCluster, una empresa de " | ||||
|             "software que ofrece soluciones personalizadas. Asume el tono de " | ||||
|             "J.A.R.V.I.S.: cordial, atento y con tacto en todo momento." | ||||
|         ), | ||||
|         ("system", | ||||
|          "Preséntate como Mariana en el primer mensaje y pregunta el nombre " | ||||
|          "del usuario si no lo tienes registrado." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Si el usuario ya ha interactuado antes, usa su nombre sin necesidad " | ||||
|          "de volver a preguntar." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Si el primer mensaje del usuario es una solicitud, pregúntale su " | ||||
|          "nombre antes de responder si aún no lo conoces." | ||||
|          ), | ||||
|         ("system", | ||||
|          "OneCluster es una empresa de software especializada en desarrollo a " | ||||
|          "medida. Solo responde a preguntas y solicitudes relacionadas con la " | ||||
|          "empresa y sus servicios." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Si necesitas información adicional sobre la empresa, usa la función " | ||||
|          "get_company_info." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Antes de enviar correos o crear eventos, muestra los detalles al " | ||||
|          "usuario para que los confirme antes de ejecutar la tarea." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Si te preguntan algo no relacionado con los servicios de OneCluster," | ||||
|          " responde que solo puedes ayudar con temas relacionados con la " | ||||
|          "empresa y sus soluciones." | ||||
|          ), | ||||
|         ( | ||||
|             "system", | ||||
|             "Evita mencionar o hacer alusión a las herramientas que utilizas " | ||||
|             "internamente. Esa información es confidencial." | ||||
|         ), | ||||
|         ("placeholder", "{messages}"), | ||||
|     ] | ||||
| ) | ||||
|  | ||||
| # Definir el estado del asistente | ||||
|  | ||||
|  | ||||
| class State(TypedDict): | ||||
|     messages: Annotated[list, add_messages] | ||||
|     is_last_step: bool | ||||
|  | ||||
|  | ||||
| # Crear el graph con el estado definido | ||||
| graph = create_react_agent( | ||||
|     model=llm, | ||||
|     tools=tools, | ||||
|     state_schema=State, | ||||
|     state_modifier=system_prompt, | ||||
|     checkpointer=MemorySaver() | ||||
| ) | ||||
|  | ||||
| # Ruta de la API para procesar texto | ||||
|  | ||||
|  | ||||
| @app.route('/process_text', methods=['POST']) | ||||
| def process_text(): | ||||
|     user_input = request.json.get('text') | ||||
|  | ||||
|     # Procesar el texto con LangChain | ||||
|     events = graph.stream( | ||||
|         {"messages": [("user", user_input)], "is_last_step": False}, | ||||
|         config={"configurable": { | ||||
|             "thread_id": "thread-1", "recursion_limit": 50}}, | ||||
|         stream_mode="updates" | ||||
|     ) | ||||
|  | ||||
|     # Preparar la respuesta | ||||
|     response = [] | ||||
|     for event in events: | ||||
|         if "agent" in event: | ||||
|             response.append(event["agent"]["messages"][-1].content) | ||||
|  | ||||
|     return jsonify({'response': response}) | ||||
|  | ||||
|  | ||||
| # Ejecutar la app Flask | ||||
| if __name__ == '__main__': | ||||
|     app.run(port=5000) | ||||
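The file above exposed the agent through a single Flask route. For reference, a minimal client sketch for that old endpoint, assuming the app is running locally on the configured port 5000:

```python
# Minimal client sketch for the deleted /process_text endpoint; assumes the
# Flask app above is running on localhost:5000.
import requests

resp = requests.post(
    "http://localhost:5000/process_text",
    json={"text": "Hola, quiero agendar una reunión la próxima semana."},
)
resp.raise_for_status()

# The route collects every "agent" update from graph.stream(), so the
# 'response' field is a list of message strings.
for chunk in resp.json()["response"]:
    print(chunk)
```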
| @@ -1,135 +0,0 @@ | ||||
| from langchain_community.tools.tavily_search import TavilySearchResults | ||||
| from typing import Annotated | ||||
| from typing_extensions import TypedDict | ||||
| from langgraph.graph.message import add_messages | ||||
| from langchain_openai import ChatOpenAI | ||||
| from dotenv import load_dotenv | ||||
| from langgraph.prebuilt import create_react_agent | ||||
| from langchain_core.prompts import ChatPromptTemplate | ||||
| from langgraph.checkpoint.memory import MemorySaver | ||||
| from langchain_tools.agent_tools import ( | ||||
|     redact_email, list_calendar_events, | ||||
|     create_calendar_event, | ||||
|     get_company_info, | ||||
|     get_current_date_and_time | ||||
| ) | ||||
| from langchain_community.tools.gmail.utils import ( | ||||
|     build_resource_service, | ||||
|     get_gmail_credentials, | ||||
| ) | ||||
| from langchain_community.agent_toolkits import GmailToolkit | ||||
|  | ||||
|  | ||||
| load_dotenv() | ||||
|  | ||||
|  | ||||
| # Inicialiamos un LLM de OpenaIA | ||||
| llm = ChatOpenAI( | ||||
|     model="gpt-4o-mini", | ||||
|     temperature=0.9 | ||||
| ) | ||||
|  | ||||
| toolkit = GmailToolkit() | ||||
|  | ||||
| # Can review scopes here https://developers.google.com/gmail/api/auth/scopes | ||||
| # For instance, readonly scope is | ||||
| # 'https://www.googleapis.com/auth/gmail.readonly' | ||||
|  | ||||
| credentials = get_gmail_credentials( | ||||
|     token_file="token.json", | ||||
|     scopes=["https://mail.google.com/"], | ||||
|     client_secrets_file="credentials.json", | ||||
| ) | ||||
| api_resource = build_resource_service(credentials=credentials) | ||||
| toolkit = GmailToolkit(api_resource=api_resource) | ||||
|  | ||||
| # creamos la lista de herramientas de gmail | ||||
| tools = toolkit.get_tools() | ||||
|  | ||||
| search = TavilySearchResults(max_results=2) | ||||
| tools.append(search) | ||||
| tools.append(redact_email) | ||||
| tools.append(list_calendar_events) | ||||
| tools.append(create_calendar_event) | ||||
| tools.append(get_company_info) | ||||
| tools.append(get_current_date_and_time) | ||||
|  | ||||
| system_prompt = ChatPromptTemplate.from_messages( | ||||
|     [ | ||||
|         ( | ||||
|             "system", | ||||
|             "Eres Mariana, el asistente virtual de OneCluster, una empresa de " | ||||
|             "software que ofrece soluciones personalizadas. Asume el tono de " | ||||
|             "J.A.R.V.I.S.: cordial, atento y con tacto en todo momento." | ||||
|         ), | ||||
|         ("system", | ||||
|          "Preséntate como Mariana en el primer mensaje y pregunta el nombre " | ||||
|          "del usuario si no lo tienes registrado." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Si el usuario ya ha interactuado antes, usa su nombre sin necesidad " | ||||
|          "de volver a preguntar." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Si el primer mensaje del usuario es una solicitud, pregúntale su " | ||||
|          "nombre antes de responder si aún no lo conoces." | ||||
|          ), | ||||
|         ("system", | ||||
|          "OneCluster es una empresa de software especializada en desarrollo a " | ||||
|          "medida. Solo responde a preguntas y solicitudes relacionadas con la " | ||||
|          "empresa y sus servicios." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Si necesitas información adicional sobre la empresa, usa la función " | ||||
|          "get_company_info." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Antes de enviar correos o crear eventos, muestra los detalles al " | ||||
|          "usuario para que los confirme antes de ejecutar la tarea." | ||||
|          ), | ||||
|         ("system", | ||||
|          "Si te preguntan algo no relacionado con los servicios de OneCluster," | ||||
|          " responde que solo puedes ayudar con temas relacionados con la " | ||||
|          "empresa y sus soluciones." | ||||
|          ), | ||||
|         ( | ||||
|             "system", | ||||
|             "Evita mencionar o hacer alusión a las herramientas que utilizas " | ||||
|             "internamente. Esa información es confidencial." | ||||
|         ), | ||||
|         ("placeholder", "{messages}"), | ||||
|     ] | ||||
| ) | ||||
|  | ||||
|  | ||||
| class State(TypedDict): | ||||
|     messages: Annotated[list, add_messages] | ||||
|     is_last_step: bool  # Cambiar a booleano si es necesario | ||||
|  | ||||
|  | ||||
| # Creamos el graph con el estado definido | ||||
| graph = create_react_agent( | ||||
|     model=llm, | ||||
|     tools=tools, | ||||
|     state_schema=State, | ||||
|     state_modifier=system_prompt, | ||||
|     checkpointer=MemorySaver() | ||||
| ) | ||||
|  | ||||
| config = {"configurable": {"thread_id": "thread-1", "recursion_limit": 50}} | ||||
|  | ||||
|  | ||||
| while True: | ||||
|     user_input = input("User: ") | ||||
|     if user_input.lower() in ["quit", "exit", "q"]: | ||||
|         print("Goodbye!") | ||||
|         break | ||||
|  | ||||
|     events = graph.stream({ | ||||
|         "messages": [("user", user_input)], | ||||
|         "is_last_step": False}, | ||||
|         config, stream_mode="updates") | ||||
|  | ||||
|     for event in events: | ||||
|         if "agent" in event: | ||||
|             print(f"\nAsistente: {event['agent']['messages'][-1].content}\n") | ||||
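This second file drove the same agent from a console loop instead of Flask. Worth noting: MemorySaver keys conversation state by thread_id, and in current LangGraph releases recursion_limit is normally a top-level RunnableConfig key rather than an entry under "configurable". A hedged sketch of how the invocation could be written with per-user threads (the chat helper below is illustrative and not part of the repo):

```python
# Illustrative helper (not from the diff): one MemorySaver-backed graph,
# separate conversation histories per thread_id.
def chat(graph, thread_id: str, text: str) -> str:
    config = {
        "configurable": {"thread_id": thread_id},
        "recursion_limit": 50,  # top-level config key in recent LangGraph
    }
    result = graph.invoke(
        {"messages": [("user", text)], "is_last_step": False},
        config=config,
    )
    # The returned state carries the full message list; the last entry is the reply.
    return result["messages"][-1].content


# Two thread ids -> two independent conversations on the same checkpointer.
# print(chat(graph, "user-ana", "Hola, soy Ana"))
# print(chat(graph, "user-luis", "¿Qué servicios ofrece OneCluster?"))
```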
| @@ -1,109 +0,0 @@ | ||||
| import telebot | ||||
| from dotenv import load_dotenv | ||||
| import os | ||||
| from api_openai.whisper import whisper_api, tts_api | ||||
| from langchain_tools.agent_tools import LangChainTools | ||||
| from langchain_tools.agents import AgentTools | ||||
| from langchain_core.messages import AIMessage, HumanMessage | ||||
| # from tools.scaped import scaped | ||||
|  | ||||
| # Configuración del bot | ||||
| load_dotenv() | ||||
| API_TOKEN_BOT = os.getenv("API_TOKEN_BOT") | ||||
| bot = telebot.TeleBot(API_TOKEN_BOT) | ||||
|  | ||||
| # Handle '/start' and '/help' | ||||
| wellcome = "¡Bienvenido! ¿Cómo puedo ayudarte?" | ||||
|  | ||||
|  | ||||
| @bot.message_handler(commands=['help', 'start']) | ||||
| def send_welcome(message): | ||||
|     bot.reply_to(message, wellcome, parse_mode="Markdown") | ||||
|  | ||||
|  | ||||
| # Creamos una lista para el historial fuera de las funciones | ||||
| history = [] | ||||
|  | ||||
|  | ||||
| @bot.message_handler(content_types=["text", "voice"]) | ||||
| def bot_mensajes(message): | ||||
|     global history  # Para acceder a la variable global 'history' | ||||
|  | ||||
|     # Si el mensaje es una nota de voz | ||||
|     if message.voice: | ||||
|         # user_ = message.from_user.first_name | ||||
|         file_info = bot.get_file(message.voice.file_id) | ||||
|         downloaded_file = bot.download_file(file_info.file_path) | ||||
|         file_path = "audios/nota_de_voz.ogg" | ||||
|  | ||||
|         with open(file_path, 'wb') as new_file: | ||||
|             new_file.write(downloaded_file) | ||||
|  | ||||
|         pregunta_usuario = whisper_api(file_path) | ||||
|         print(f"Pregunta del usuario: {pregunta_usuario}") | ||||
|         langChainTools = LangChainTools() | ||||
|         llm = langChainTools.load_llm_openai() | ||||
|  | ||||
|         agentTools = AgentTools() | ||||
|         tools = agentTools.load_tools() | ||||
|         agent_executor = agentTools.load_agent(llm, tools) | ||||
|  | ||||
|         respuesta_agente = agent_executor.invoke( | ||||
|             { | ||||
|                 "input": pregunta_usuario, | ||||
|                 "chat_history": history, | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|         bot.send_message(message.chat.id, respuesta_agente["output"], | ||||
|                          parse_mode="Markdown") | ||||
|  | ||||
|         path_voice: str = tts_api(respuesta_agente["output"]) | ||||
|         with open(path_voice, 'rb') as voice: | ||||
|             bot.send_voice(message.chat.id, voice=voice) | ||||
|  | ||||
|         history.append(HumanMessage(content=pregunta_usuario)) | ||||
|         history.append(AIMessage(content=respuesta_agente["output"])) | ||||
|  | ||||
|     # Si el mensaje es de texto | ||||
|     if message.text: | ||||
|         pregunta_usuario = message.text | ||||
|         langChainTools = LangChainTools() | ||||
|         llm = langChainTools.load_llm_openai() | ||||
|  | ||||
|         agentTools = AgentTools() | ||||
|         tools = agentTools.load_tools() | ||||
|         agent_executor = agentTools.load_agent(llm, tools) | ||||
|  | ||||
|         respuesta_agente = agent_executor.invoke( | ||||
|             { | ||||
|                 "input": pregunta_usuario, | ||||
|                 "chat_history": history, | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|         # texto_respuesta: str = scaped(respuesta_agente["output"]) | ||||
|         texto_respuesta: str = respuesta_agente["output"] | ||||
|         bot.send_message( | ||||
|             message.chat.id, texto_respuesta, | ||||
|             parse_mode="Markdown") | ||||
|  | ||||
|         # Mandar mensaje de voz | ||||
|         # path_voice: str = tts_api(respuesta_agente["output"]) | ||||
|         # with open(path_voice, 'rb') as voice: | ||||
|         #     bot.send_voice(message.chat.id, voice=voice) | ||||
|  | ||||
|         history.append(HumanMessage(content=pregunta_usuario)) | ||||
|         history.append(AIMessage(content=respuesta_agente["output"])) | ||||
|         # print(history) | ||||
|  | ||||
|     # Enviar el historial después de cada interacción | ||||
|     # bot.send_message(message.chat.id, history) | ||||
|  | ||||
|  | ||||
| # while True: | ||||
| #     time.sleep(60) | ||||
| #     mensaje = 'Que mas pues!!' | ||||
| #     bot.send_message('5076346205', mensaje) | ||||
|  | ||||
| bot.infinity_polling() | ||||
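The bot delegates speech handling to whisper_api and tts_api from api_openai.whisper, which are not part of this diff. A hypothetical sketch of what those helpers might look like on top of OpenAI's audio endpoints follows; the function names, output path, and return conventions are assumptions chosen to match how the bot calls them:

```python
# Hypothetical implementations of the api_openai.whisper helpers used by the
# bot; the real module is not shown in this diff.
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment


def whisper_api(file_path: str) -> str:
    """Transcribe a downloaded voice note to text with Whisper."""
    with open(file_path, "rb") as audio:
        transcript = client.audio.transcriptions.create(
            model="whisper-1",
            file=audio,
        )
    return transcript.text


def tts_api(text: str, out_path: str = "audios/respuesta.mp3") -> str:
    """Synthesize the agent's reply as speech and return the audio file path."""
    speech = client.audio.speech.create(
        model="tts-1",
        voice="alloy",
        input=text,
    )
    with open(out_path, "wb") as f:
        f.write(speech.content)  # binary response body (mp3 bytes)
    return out_path
```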
| @@ -1,33 +0,0 @@ | ||||
| from rag.split_docs import load_split_docs | ||||
| from rag.llm import load_llm_openai | ||||
| from rag.embeddings import load_embeddins | ||||
| from rag.retriever import create_retriever | ||||
| from rag.vectorstore import create_verctorstore | ||||
| from rag.rag_chain import create_rag_chain | ||||
|  | ||||
| dir_pdfs: str = 'documents/pdfs/' | ||||
| file_name: str = 'onecluster_info.pdf' | ||||
| file_path: str = 'onecluster_info.pdf' | ||||
|  | ||||
| docs_split: list = load_split_docs(file_path) | ||||
| embeddings_model = load_embeddins() | ||||
| llm = load_llm_openai() | ||||
| create_verctorstore( | ||||
|     docs_split, | ||||
|     embeddings_model, | ||||
|     file_path | ||||
| ) | ||||
| retriever = create_retriever( | ||||
|     embeddings_model, | ||||
|     persist_directory="embeddings/onecluster_info" | ||||
| ) | ||||
| qa = create_rag_chain( | ||||
|     llm, retriever) | ||||
|  | ||||
| prompt: str =\ | ||||
|     "Dame información detallada sobre los sercivios que ofrese OneCluster." | ||||
| respuesta = qa.invoke( | ||||
|     {"input": prompt, "chat_history": []} | ||||
| ) | ||||
|  | ||||
| print(respuesta["answer"]) | ||||
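This smoke-test script assembles the whole RAG pipeline from the rag package, whose modules are not included in this diff. For orientation, a plausible sketch of create_rag_chain built on LangChain's retrieval-chain helpers; the prompt wording is an assumption, and only the {"input": ...} / ["answer"] contract is taken from the script above:

```python
# Hypothetical sketch of rag/rag_chain.py; the real implementation is not in
# this diff. It only needs to accept {"input", "chat_history"} and return "answer".
from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate


def create_rag_chain(llm, retriever):
    prompt = ChatPromptTemplate.from_messages([
        ("system",
         "Responde usando únicamente el siguiente contexto:\n\n{context}"),
        ("placeholder", "{chat_history}"),
        ("human", "{input}"),
    ])
    # Stuff the retrieved documents into the prompt, then answer with the LLM.
    docs_chain = create_stuff_documents_chain(llm, prompt)
    # invoke({"input": ...}) on the result returns a dict with an "answer" key.
    return create_retrieval_chain(retriever, docs_chain)
```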
| @@ -1 +1 @@ | ||||
| {"token": "ya29.a0AeDClZCwppzEBj1JXTYUVQz2C4XdMhwXUJ_5QFMjwccd4xsXl3HhHuio_OouTtnY2uH64MNEfuT-IK0xD6gX_W6_snvPMzq1JELtIzVvqb4eSCNrAVTp0qy4sQwPKKWnEi2HTOV6ZNyy3vOj4VmywjLpW53b5wyuYDC9hoU41waCgYKAQISARASFQHGX2MiAU9Ploii4o7cTRKSiRUyzg0177", "refresh_token": "1//05Hc2PufwLWe2CgYIARAAGAUSNwF-L9IrU2s--kkp6Q2fCzN5uf2vLBvEcYHyJOFZjCQAUpBSC0FWiCUg9wgm8cCMccEGe4Q7MPg", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "19011937557-bi5nh4afvg4tuqr87v6dp55qj9a9o1h2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://mail.google.com/"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-11-07T22:33:09.497974Z"} | ||||
| {"token": "ya29.a0AeDClZCbHA_Ubcy6IrJJZgcHOb95xrUDMIjoNryaK6DaV7RH93INeBAeKIAfaDw73grkGnBD2Ub2QuvqvRMJO6DvivonA6yoLj7hFvDgSLsfNXo5xcLJqsgiIiOfExr74m4WcT5jnpZswxk3ZuEtAoXuPDW7hzLXnTwk6sFDmAaCgYKAeoSARASFQHGX2MiPDgm1tQCv4YbN3Bq9f4zvg0177", "refresh_token": "1//05Hc2PufwLWe2CgYIARAAGAUSNwF-L9IrU2s--kkp6Q2fCzN5uf2vLBvEcYHyJOFZjCQAUpBSC0FWiCUg9wgm8cCMccEGe4Q7MPg", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "19011937557-bi5nh4afvg4tuqr87v6dp55qj9a9o1h2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://mail.google.com/"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-11-07T23:37:58.891983Z"} | ||||
| @@ -1 +0,0 @@ | ||||
| {"installed":{"client_id":"19011937557-bi5nh4afvg4tuqr87v6dp55qj9a9o1h2.apps.googleusercontent.com","project_id":"oc-aassistent","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"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","redirect_uris":["http://localhost"]}} | ||||
| @@ -1 +0,0 @@ | ||||
| {"installed":{"client_id":"629922809906-pl9l1ipout6d5hh19ku50sfvnqgu8ir2.apps.googleusercontent.com","project_id":"calendar-424503","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"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","redirect_uris":["http://localhost"]}} | ||||
| @@ -1 +0,0 @@ | ||||
| {"token": "ya29.a0AeDClZCwppzEBj1JXTYUVQz2C4XdMhwXUJ_5QFMjwccd4xsXl3HhHuio_OouTtnY2uH64MNEfuT-IK0xD6gX_W6_snvPMzq1JELtIzVvqb4eSCNrAVTp0qy4sQwPKKWnEi2HTOV6ZNyy3vOj4VmywjLpW53b5wyuYDC9hoU41waCgYKAQISARASFQHGX2MiAU9Ploii4o7cTRKSiRUyzg0177", "refresh_token": "1//05Hc2PufwLWe2CgYIARAAGAUSNwF-L9IrU2s--kkp6Q2fCzN5uf2vLBvEcYHyJOFZjCQAUpBSC0FWiCUg9wgm8cCMccEGe4Q7MPg", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "19011937557-bi5nh4afvg4tuqr87v6dp55qj9a9o1h2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://mail.google.com/"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-11-07T22:33:09.497974Z"} | ||||
| @@ -1 +0,0 @@ | ||||
| {"token": "ya29.a0AeDClZC30_gM1YE3u8XHKpmLqIcABYBZzJOVQfrNyUJrFuOPxNpt96IbTeCIM266Es--v34aMA70oZG1GIELbcuExSSQ25_pzdHd9yS18vDOgdz8OtuG24_2DGnaCy7FOQBZzFr5GxgLJ_657Zal3RF8lreEFLyVLKdGf6TiaCgYKATMSARASFQHGX2MiirXKmKxKPjAWuSG0af_xmw0175", "refresh_token": "1//05akITlwASya5CgYIARAAGAUSNwF-L9IryPh1Y647Eim5KZZhD3DKCuUtkAp28N30ed1TALtax5TkflyKGxDtBmayKowuMZkaT9k", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "629922809906-pl9l1ipout6d5hh19ku50sfvnqgu8ir2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://www.googleapis.com/auth/calendar"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-11-02T23:47:30.521929Z"} | ||||