Merge pull request 'FastApiAssistant' (#2) from FastApiAssistant into main

Reviewed-on: mongar28/oc-assistant#2
This commit is contained in:
mongar28 2024-11-07 18:23:14 -05:00
commit 9e94bafb81
46 changed files with 4943 additions and 621 deletions

3
.env_example Normal file
View File

@ -0,0 +1,3 @@
API_TOKEN_BOT="7060..."
OPENAI_API_KEY="sk-..."
TAVILY_API_KEY="tvly-..."

2
.flake8 Normal file
View File

@ -0,0 +1,2 @@
[flake8]
exclude = *.json

21
Dockerfile Normal file
View File

@ -0,0 +1,21 @@
FROM python:3.11-slim
RUN pip install poetry==1.6.1
RUN poetry config virtualenvs.create false
WORKDIR /code
COPY ./pyproject.toml ./README.md ./poetry.lock* ./
COPY ./package[s] ./packages
RUN poetry install --no-interaction --no-ansi --no-root
COPY ./app ./app
RUN poetry install --no-interaction --no-ansi
EXPOSE 8080
CMD exec uvicorn app.server:app --host 0.0.0.0 --port 8080

View File

@ -1,2 +1,79 @@
# oc-assistant
# assistant
## Installation
Install the LangChain CLI if you haven't yet:
```bash
pip install -U langchain-cli
```
## Adding packages
```bash
# adding packages from
# https://github.com/langchain-ai/langchain/tree/master/templates
langchain app add $PROJECT_NAME
# adding custom GitHub repo packages
langchain app add --repo $OWNER/$REPO
# or with whole git string (supports other git providers):
# langchain app add git+https://github.com/hwchase17/chain-of-verification
# with a custom api mount point (defaults to `/{package_name}`)
langchain app add $PROJECT_NAME --api_path=/my/custom/path/rag
```
Note: packages are removed by their API path
```bash
langchain app remove my/custom/path/rag
```
## Setup LangSmith (Optional)
LangSmith will help us trace, monitor and debug LangChain applications.
You can sign up for LangSmith [here](https://smith.langchain.com/).
If you don't have access, you can skip this section.
```shell
export LANGCHAIN_TRACING_V2=true
export LANGCHAIN_API_KEY=<your-api-key>
export LANGCHAIN_PROJECT=<your-project> # if not specified, defaults to "default"
```
## Launch LangServe
```bash
langchain serve
```
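Once the server is running you can exercise the `/process_text` endpoint defined in `app/server.py`. A minimal sketch, assuming `langchain serve` is listening on its default local port (8000):
```bash
# Ask the assistant a question; the endpoint answers with {"response": [...]}
curl -X POST http://localhost:8000/process_text \
  -H "Content-Type: application/json" \
  -d '{"text": "Hola, ¿qué servicios ofrece OneCluster?"}'
```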
## Running in Docker
This project folder includes a Dockerfile that allows you to easily build and host your LangServe app.
### Building the Image
To build the image, simply run:
```shell
docker build . -t my-langserve-app
```
If you tag your image with something other than `my-langserve-app`,
note it for use in the next step.
### Running the Image Locally
To run the image, you'll need to include any environment variables
necessary for your application.
In the example below, we inject the `OPENAI_API_KEY` environment
variable with the value set in your local environment
(`$OPENAI_API_KEY`).
We also expose port 8080 with the `-p 8080:8080` option.
```shell
docker run -e OPENAI_API_KEY=$OPENAI_API_KEY -p 8080:8080 my-langserve-app
```
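To confirm the container is up, you can hit the FastAPI docs page (the root path redirects there); a quick check, assuming the container is bound to localhost:
```shell
# Expect an HTTP 200 from the interactive docs served on port 8080
curl -s -o /dev/null -w "%{http_code}\n" http://localhost:8080/docs
```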

64
Rakefile Normal file
View File

@ -0,0 +1,64 @@
require 'bundler/setup'
require 'yaml'
require 'digest'
DOCKER_COMPOSE='docker-compose.yml'
desc 'bring up the live environment'
namespace :live do
task :up do
compose('up', '--build', '-d', compose: DOCKER_COMPOSE)
end
desc 'follow the app logs'
task :tail do
compose('logs', '-f', 'app', compose: DOCKER_COMPOSE)
end
desc 'follow the last 50 lines of the app logs'
task :tail_end do
compose('logs', '-f', '-n 50', 'app', compose: DOCKER_COMPOSE)
end
desc 'bring the environment down'
task :down do
compose('down', compose: DOCKER_COMPOSE)
end
desc 'stop the environment'
task :stop do
compose('stop', compose: DOCKER_COMPOSE)
end
desc 'delete the environment, including volumes and images'
task :del do
compose('down', '-v', '--rmi', 'all', compose: DOCKER_COMPOSE)
end
desc 'restart the environment'
task :restart do
compose('restart', compose: DOCKER_COMPOSE)
end
desc 'open a shell in the app container'
task :sh do
compose('exec', 'app', 'bash')
end
end
desc 'run the lint and test loop'
task :tdd do
compose('exec', 'app', "bash -c 'cd app && flake8 *'")
compose('exec', 'app', "bash -c 'cd app && pytest -vvv'")
end
def compose(*arg, compose: DOCKER_COMPOSE)
sh "docker compose -f #{compose} #{arg.join(' ')}"
end
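The Rakefile above is a thin wrapper around `docker compose`. A typical development loop, assuming Bundler is installed and `docker-compose.yml` is in place, looks roughly like this:
```bash
rake live:up      # build and start the containers in the background
rake live:tail    # follow the app logs
rake tdd          # run flake8 and pytest inside the app container
rake live:down    # stop the environment and remove the containers
```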

106
api.py
View File

@ -1,106 +0,0 @@
from flask import Flask, request, jsonify
from langchain_community.tools.tavily_search import TavilySearchResults
from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph.message import add_messages
from langchain_openai import ChatOpenAI
from dotenv import load_dotenv
from langgraph.prebuilt import create_react_agent
from langchain_core.prompts import ChatPromptTemplate
from langgraph.checkpoint.memory import MemorySaver
from langchain_tools.agent_tools import (
redact_email, list_calendar_events,
create_calendar_event, get_company_info,
get_current_date_and_time
)
from langchain_community.tools.gmail.utils import (
build_resource_service, get_gmail_credentials
)
from langchain_community.agent_toolkits import GmailToolkit
# Load the environment variables
load_dotenv()
# Initialize the Flask app
app = Flask(__name__)
# Initialize the OpenAI LLM
llm = ChatOpenAI(
model="gpt-4o-mini",
temperature=0
)
# Gmail configuration
toolkit = GmailToolkit()
credentials = get_gmail_credentials(
token_file="token.json",
scopes=["https://mail.google.com/"],
client_secrets_file="credentials.json",
)
api_resource = build_resource_service(credentials=credentials)
toolkit = GmailToolkit(api_resource=api_resource)
# Create the tools
tools = toolkit.get_tools()
search = TavilySearchResults(max_results=2)
tools.extend([search, redact_email, list_calendar_events,
create_calendar_event, get_company_info, get_current_date_and_time])
# Define the system prompt
system_prompt = ChatPromptTemplate.from_messages(
[
("system", "Eres Mariana, el asistente virtual de OneCluster, una empresa de software que ofrece soluciones personalizadas. Asume el tono de J.A.R.V.I.S.: cordial, atento y con tacto en todo momento."),
("system", "Preséntate como Mariana en el primer mensaje y pregunta el nombre del usuario si no lo tienes registrado."),
("system", "Si el usuario ya ha interactuado antes, usa su nombre sin necesidad de volver a preguntar."),
("system", "OneCluster es una empresa de software especializada en desarrollo a medida. Solo responde a preguntas y solicitudes relacionadas con la empresa y sus servicios."),
("system", "Si necesitas información adicional sobre la empresa, usa la función get_company_info."),
("system", "Antes de enviar correos o crear eventos, muestra los detalles al usuario para que los confirme antes de ejecutar la tarea."),
("system", "Si te preguntan algo no relacionado con los servicios de OneCluster, responde que solo puedes ayudar con temas relacionados con la empresa y sus soluciones."),
("system", "Evita mencionar o hacer alusión a las herramientas que utilizas internamente. Esa información es confidencial."),
("placeholder", "{messages}"),
]
)
# Define the assistant state
class State(TypedDict):
messages: Annotated[list, add_messages]
is_last_step: bool
# Create the graph with the defined state
graph = create_react_agent(
model=llm,
tools=tools,
state_schema=State,
state_modifier=system_prompt,
checkpointer=MemorySaver()
)
# API route for processing text
@app.route('/process_text', methods=['POST'])
def process_text():
user_input = request.json.get('text')
# Process the text with LangChain
events = graph.stream(
{"messages": [("user", user_input)], "is_last_step": False},
config={"configurable": {"thread_id": "thread-1", "recursion_limit": 50}},
stream_mode="updates"
)
# Build the response
response = []
for event in events:
if "agent" in event:
response.append(event["agent"]["messages"][-1].content)
return jsonify({'response': response})
# Run the Flask app
if __name__ == '__main__':
app.run(port=5000)

1
app/credentials.json Normal file
View File

@ -0,0 +1 @@
{"installed":{"client_id":"19011937557-bi5nh4afvg4tuqr87v6dp55qj9a9o1h2.apps.googleusercontent.com","project_id":"oc-aassistent","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"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","redirect_uris":["http://localhost"]}}

1
app/credentials_2.json Normal file
View File

@ -0,0 +1 @@
{"installed":{"client_id":"629922809906-pl9l1ipout6d5hh19ku50sfvnqgu8ir2.apps.googleusercontent.com","project_id":"calendar-424503","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"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","redirect_uris":["http://localhost"]}}

View File

@ -6,13 +6,12 @@ from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from googleapiclient.discovery import build
from rag.split_docs import load_split_docs
from rag.llm import load_llm_openai
from rag.embeddings import load_embeddins
from rag.retriever import create_retriever
from rag.vectorstore import create_verctorstore
from rag.rag_chain import create_rag_chain
from datetime import datetime
from app.rag.split_docs import load_split_docs
from app.rag.llm import load_llm_openai
from app.rag.embeddings import load_embeddins
from app.rag.retriever import create_retriever
from app.rag.vectorstore import create_verctorstore
from app.rag.rag_chain import create_rag_chain
import pytz
import telebot
import os
@ -62,22 +61,25 @@ def redact_email(topic: str) -> str:
def list_calendar_events(max_results: int = 50) -> list:
"""Use this tool to list upcoming calendar events."""
# Define the scopes we need to access the Google Calendar API
# Define the scopes we need to access
# the Google Calendar API
SCOPES = ['https://www.googleapis.com/auth/calendar']
creds = None
# Path to the token.json file, which holds the access and refresh tokens
token_path = 'token_2.json'
# Path to the token.json file, which holds
# the access and refresh tokens
token_path = 'token.json'
# Path to the OAuth 2.0 credentials file
creds_path = 'credentials_2.json'
creds_path = 'credentials.json'
# Load the credentials from token.json, if it exists
if os.path.exists(token_path):
creds = Credentials.from_authorized_user_file(token_path, SCOPES)
# If no valid credentials are available, start the OAuth 2.0 flow to obtain new credentials
# If no valid credentials are available, start the OAuth 2.0 flow
# to obtain new credentials
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
@ -90,16 +92,19 @@ def list_calendar_events(max_results: int = 50) -> list:
with open(token_path, 'w') as token_file:
token_file.write(creds.to_json())
# Build the service object used to interact with the Google Calendar API
# Build the service object used to interact
# with the Google Calendar API
service = build('calendar', 'v3', credentials=creds)
# Identifier of the calendar to query. 'primary' refers to the user's main calendar.
# Identifier of the calendar to query.
# 'primary' refers to the user's main calendar.
calendar_id = 'primary'
# Call the API to get a list of events.
now = datetime.now(timezone.utc).isoformat()  # timezone-aware UTC timestamp
events_result = service.events().list(
calendarId=calendar_id, timeMin=now, maxResults=max_results, singleEvents=True,
calendarId=calendar_id, timeMin=now,
maxResults=max_results, singleEvents=True,
orderBy='startTime').execute()
# Extract the events from the API response.
@ -110,9 +115,11 @@ def list_calendar_events(max_results: int = 50) -> list:
print('No upcoming events found.')
return
# Loop over the event list and show each event's start time and summary.
# Loop over the event list and show the start time
# and summary of each event.
for event in events:
# Get the event's start date and time. It can be 'dateTime' or 'date'.
# Get the event's start date and time.
# It can be 'dateTime' or 'date'.
start = event['start'].get('dateTime', event['start'].get('date'))
# Print the start time and the event summary (title).
print(start, event['summary'])
@ -143,7 +150,8 @@ def create_calendar_event(
SCOPES = ['https://www.googleapis.com/auth/calendar']
creds = None
# Path to the token.json file, which holds the access and refresh tokens
# Path to the token.json file,
# which holds the access and refresh tokens
token_path = 'token_2.json'
# Path to the OAuth 2.0 credentials file
@ -153,7 +161,8 @@ def create_calendar_event(
if os.path.exists(token_path):
creds = Credentials.from_authorized_user_file(token_path, SCOPES)
# If no valid credentials are available, start the OAuth 2.0 flow to obtain new credentials
# If no valid credentials are available,
# start the OAuth 2.0 flow to obtain new credentials
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
@ -166,7 +175,8 @@ def create_calendar_event(
with open(token_path, 'w') as token_file:
token_file.write(creds.to_json())
# Build the service object used to interact with the Google Calendar API
# Build the service object used to
# interact with the Google Calendar API
service = build('calendar', 'v3', credentials=creds)
# Validate and filter attendees
@ -177,10 +187,12 @@ def create_calendar_event(
else:
raise ValueError(f"'{email}' no es un correo electrónico válido.")
# Identifier of the calendar to modify. 'primary' refers to the user's main calendar.
# Identifier of the calendar to modify.
# 'primary' refers to the user's main calendar.
calendar_id = 'primary'
# Define the event body with the title, the start time and the end time
# Define the event body with the title,
# the start time and the end time
event = {
'summary': title,
'start': {
@ -196,7 +208,8 @@ def create_calendar_event(
try:
# Create the event in the calendar
event = service.events().insert(calendarId=calendar_id, body=event).execute()
event = service.events().insert(
calendarId=calendar_id, body=event).execute()
print('Event created: %s' % (event.get('htmlLink')))
except Exception as e:
print(f"Error al crear el evento: {e}")
@ -207,7 +220,8 @@ def create_calendar_event(
@tool
def create_quick_add_event(quick_add_text: str):
"""Use this tool to create events in the calendar from natural language,
"""
Use this tool to create events in the calendar from natural language,
using the Quick Add feature of Google Calendar.
"""
quick_add_text: str = input(
@ -216,7 +230,8 @@ def create_quick_add_event(quick_add_text: str):
creds = None
# Path to the token.json file, which holds the access and refresh tokens
# Path to the token.json file,
# which holds the access and refresh tokens
token_path = 'token_2.json'
# Path to the OAuth 2.0 credentials file
@ -226,7 +241,8 @@ def create_quick_add_event(quick_add_text: str):
if os.path.exists(token_path):
creds = Credentials.from_authorized_user_file(token_path, SCOPES)
# If no valid credentials are available, start the OAuth 2.0 flow to obtain new credentials
# If no valid credentials are available,
# start the OAuth 2.0 flow to obtain new credentials
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
@ -239,10 +255,12 @@ def create_quick_add_event(quick_add_text: str):
with open(token_path, 'w') as token_file:
token_file.write(creds.to_json())
# Build the service object used to interact with the Google Calendar API
# Build the service object used to interact
# with the Google Calendar API
service = build('calendar', 'v3', credentials=creds)
# Identifier of the calendar to modify. 'primary' refers to the user's main calendar.
# Identifier of the calendar to modify.
# 'primary' refers to the user's main calendar.
calendar_id = 'primary'
# Create the event using the Quick Add feature
@ -284,7 +302,10 @@ def send_message(message: str):
@tool
def get_company_info(prompt: str) -> str:
"""Use this function when you need more information about the services offered by OneCluster."""
"""
Use this function when you need more information
about the services offered by OneCluster.
"""
file_path: str = 'onecluster_info.pdf'
docs_split: list = load_split_docs(file_path)
@ -302,7 +323,9 @@ def get_company_info(prompt: str) -> str:
qa = create_rag_chain(
llm, retriever)
# prompt: str = "Escribe un párrafo describiendo cuántos son y cuáles son los servicios que ofrece OneCluster y brinda detalles sobre cada uno."
# prompt: str = "Escribe un párrafo describiendo cuántos son y
# cuáles son los servicios que ofrece OneCluster
# y brinda detalles sobre cada uno."
response = qa.invoke(
{"input": prompt, "chat_history": []}
)

View File

@ -0,0 +1,112 @@
# from langchain_core.tools import tool
from langchain_community.tools.gmail.utils import (
build_resource_service,
get_gmail_credentials,
)
from langchain_community.agent_toolkits import GmailToolkit
from langchain import hub
from langchain_community.tools.tavily_search import TavilySearchResults
from dotenv import load_dotenv
from langchain.agents import AgentExecutor, create_openai_functions_agent
from langchain_tools.agent_tools import (
multiply, redact_email, list_calendar_events,
create_calendar_event,
# create_quick_add_event,
send_message, get_company_info,
get_current_date_and_time
)
class AgentTools:
def load_tools(self) -> list:
toolkit = GmailToolkit()
# Can review scopes here
# https://developers.google.com/gmail/api/auth/scopes
# For instance, readonly scope is
# 'https://www.googleapis.com/auth/gmail.readonly'
credentials = get_gmail_credentials(
token_file="token.json",
scopes=["https://mail.google.com/"],
client_secrets_file="credentials.json",)
api_resource = build_resource_service(credentials=credentials)
toolkit = GmailToolkit(api_resource=api_resource)
# Build the list of Gmail tools
tools = toolkit.get_tools()
load_dotenv()
# Add the other tools
search = TavilySearchResults(max_results=1)
tools.append(search)
tools.append(multiply)
tools.append(redact_email)
tools.append(list_calendar_events)
tools.append(create_calendar_event)
tools.append(send_message)
tools.append(get_company_info)
tools.append(get_current_date_and_time)
# tools.append(create_quick_add_event)
return tools
def load_agent(self, llm, tools):
instructions = """
You are the virtual assistant of OneCluster, a company specialized in
providing custom development services focused on creating personalized
technological solutions for businesses and companies.
Your mission is to offer a warm, friendly,
and collaborative service that always
reflects OneCluster's core values.
**User Interactions:**
1. **Initial Greeting:** When starting an interaction with a user,
greet them courteously and identify who you have the pleasure of
speaking with. Once you know the user's name, address them respectfully
throughout the conversation.
2. **Providing Information:** You have the ability to offer clear and
detailed information about the services provided by OneCluster.
Make sure to be concise yet informative,
adapting the information to the user's needs.
3. **Appointment Scheduling:** You are responsible for scheduling
appointments for clients. Before confirming an appointment,
always check the availability on OneCluster's
calendar to ensure there is space,
and check the current date and time so that
you have a clear sense of time.
Request an email address from the user to schedule the appointment.
4. **Handling Unanswered Questions:** If you do not know how to
answer a question, politely ask for the client's contact information
and clearly identify the problem to be resolved.
Then, send this information to oneclustererp@gmail.com with the subject
"Unresolved customer query by the agent."
Inform the client that you do not have the information at your
disposal but that you can escalate the request to the support team,
who will respond promptly.
**Style and Tone:**
Maintain a tone that is always friendly, approachable, and
professional. Each interaction should reflect OneCluster's
commitment to innovation, adaptability, and ongoing collaboration.
"""
base_prompt = hub.pull("langchain-ai/openai-functions-template")
prompt = base_prompt.partial(instructions=instructions)
agent = create_openai_functions_agent(llm, tools, prompt)
agent_executor = AgentExecutor(
agent=agent,
tools=tools,
verbose=True,
)
return agent_executor

0
app/rag/__init__.py Normal file
View File

View File

@ -5,10 +5,14 @@ from langchain.chains.combine_documents import create_stuff_documents_chain
def create_rag_chain(llm, retriever):
contextualize_q_system_prompt = """Given a chat history and the latest user question \
which might reference context in the chat history, formulate a standalone question \
which can be understood without the chat history. Do NOT answer the question, \
just reformulate it if needed and otherwise return it as is."""
contextualize_q_system_prompt = """
Given a chat history and the latest user question \
which might reference context in the chat history,
formulate a standalone question \
which can be understood without the chat history.
Do NOT answer the question, \
just reformulate it if needed and otherwise return it as is.
"""
contextualize_q_prompt = ChatPromptTemplate.from_messages(
[
("system", contextualize_q_system_prompt),
@ -21,12 +25,13 @@ def create_rag_chain(llm, retriever):
)
# ___________________Chain with the chat history_______________________
qa_system_prompt = """You are an assistant for question-answering tasks. \
qa_system_prompt = """
You are an assistant for question-answering tasks. \
Use the following pieces of retrieved context to answer the question. \
If you don't know the answer, just say that you don't know. \
The length of the answer should be sufficient to address what is being asked, \
The length of the answer should be sufficient to address
what is being asked, \
meaning don't limit yourself in length.\
{context}"""
qa_prompt = ChatPromptTemplate.from_messages(
[
@ -37,4 +42,5 @@ def create_rag_chain(llm, retriever):
)
question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)
return create_retrieval_chain(history_aware_retriever, question_answer_chain)
return create_retrieval_chain(
history_aware_retriever, question_answer_chain)

View File

@ -4,7 +4,9 @@ from langchain_chroma import Chroma
def create_retriever(embeddings, persist_directory: str):
# Load the vectorstore
# vectordb = Chroma.from_documents(
# persist_directory=st.session_state.persist_directory, # This is the directory of the vectorstore for the user's document loaded in session_state.
# persist_directory=st.session_state.persist_directory,
# This is the directory of the vectorstore for the user's
# document currently loaded in session_state.
# embedding_function=embeddings,
# )
vectordb = Chroma(

View File

@ -13,3 +13,5 @@ def create_verctorstore(docs_split: list, embeddings, file_name: str):
documents=docs_split,
embedding=embeddings,
)
return vectordb

174
app/server.py Normal file
View File

@ -0,0 +1,174 @@
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse, RedirectResponse
from langchain_openai import ChatOpenAI
from langserve import add_routes
from langchain_core.prompts import ChatPromptTemplate
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_community.tools.gmail.utils import (
build_resource_service,
get_gmail_credentials)
from langchain_community.agent_toolkits import GmailToolkit
from app.langchain_tools.agent_tools import (
redact_email,
list_calendar_events,
create_calendar_event,
get_company_info,
get_current_date_and_time
)
from langgraph.graph.message import add_messages
from langgraph.prebuilt import create_react_agent
from langgraph.checkpoint.memory import MemorySaver
from typing import Annotated
from typing_extensions import TypedDict
from dotenv import load_dotenv
load_dotenv()
app = FastAPI()
llm = ChatOpenAI(
model="gpt-4o-mini",
temperature=0.9
)
# Gmail configuration
toolkit = GmailToolkit()
credentials = get_gmail_credentials(
token_file="token.json",
scopes=["https://mail.google.com/"],
client_secrets_file="credentials.json",
)
api_resource = build_resource_service(credentials=credentials)
toolkit = GmailToolkit(api_resource=api_resource)
# Create the tools
tools = toolkit.get_tools()
search = TavilySearchResults(max_results=2)
tools.extend([
search, redact_email, list_calendar_events,
create_calendar_event, get_company_info,
get_current_date_and_time])
# Define the system prompt
system_prompt = ChatPromptTemplate.from_messages(
[
(
"system",
"Eres Mariana, el asistente virtual de OneCluster, una empresa de "
"software que ofrece soluciones personalizadas. Asume el tono de "
"J.A.R.V.I.S.: cordial, atento y con tacto en todo momento."
),
("system",
"Preséntate como Mariana en el primer mensaje y pregunta el nombre "
"del usuario si no lo tienes registrado."
),
("system",
"Si el usuario ya ha interactuado antes, usa su nombre sin necesidad "
"de volver a preguntar."
),
("system",
"Si el primer mensaje del usuario es una solicitud, pregúntale su "
"nombre antes de responder si aún no lo conoces."
),
("system",
"OneCluster es una empresa de software especializada en desarrollo a "
"medida. Solo responde a preguntas y solicitudes relacionadas con la "
"empresa y sus servicios."
),
("system",
"Si necesitas información adicional sobre la empresa, usa la función "
"get_company_info."
),
("system",
"Antes de enviar correos o crear eventos, muestra los detalles al "
"usuario para que los confirme antes de ejecutar la tarea."
),
("system",
"Si te preguntan algo no relacionado con los servicios de OneCluster,"
" responde que solo puedes ayudar con temas relacionados con la "
"empresa y sus soluciones."
),
(
"system",
"Evita mencionar o hacer alusión a las herramientas que utilizas "
"internamente. Esa información es confidencial."
),
("placeholder", "{messages}"),
]
)
# Define the assistant state
class State(TypedDict):
messages: Annotated[list, add_messages]
is_last_step: bool
# Create the graph with the defined state
graph = create_react_agent(
model=llm,
tools=tools,
state_schema=State,
state_modifier=system_prompt,
checkpointer=MemorySaver()
)
@app.get("/")
async def redirect_root_to_docs():
return RedirectResponse("/docs")
# Edit this to add the chain you want to add
add_routes(
app,
llm,
path="/openai"
)
@app.post("/process_text")
async def process_text(request: Request):
data = await request.json()
user_input = data.get("text")
# Process the text with LangChain
events = graph.stream(
{"messages": [("user", user_input)], "is_last_step": False},
config={"configurable": {
"thread_id": "thread-1", "recursion_limit": 50}},
stream_mode="updates"
)
# Build the response
response = []
for event in events:
if "agent" in event:
response.append(event["agent"]["messages"][-1].content)
return JSONResponse(content={'response': response})
if __name__ == "__main__":
config = {"configurable": {"thread_id": "thread-1", "recursion_limit": 50}}
while True:
user_input = input("User: ")
if user_input.lower() in ["quit", "exit", "q"]:
print("Goodbye!")
break
events = graph.stream({
"messages": [("user", user_input)],
"is_last_step": False},
config, stream_mode="updates")
for event in events:
if "agent" in event:
print(
f"\nAsistente: {event['agent']['messages'][-1].content}\n")

0
app/tests/__init__.py Normal file
View File

27
app/tests/test_main.py Normal file
View File

@ -0,0 +1,27 @@
from fastapi.testclient import TestClient
from app.server import app  # Make sure to import your FastAPI application
# Create a test client
client = TestClient(app)
def test_process_text():
# Define the input text
input_text = {"text": "Hola, ¿cómo estás?"}
# Send a POST request to the endpoint
response = client.post("/process_text", json=input_text)
# Check that the response has a 200 status code
assert response.status_code == 200
# Check that the response body contains the 'response' key
assert 'response' in response.json()
# Check that the response value is a list
assert isinstance(response.json()['response'], list)
# You can add more checks here,
# depending on what you expect in the response.
# For example, check that the list is not empty.
assert len(response.json()['response']) > 0
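These tests exercise the live agent through the FastAPI test client, so they expect valid OpenAI, Tavily, and Google credentials to be available. A sketch of how they can be run, assuming dependencies were installed with Poetry and the `.env` variables are exported:
```bash
# from the repository root
poetry run pytest app/tests -vvv
# or inside the running container, via the Rake task
rake tdd
```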

1
app/token.json Normal file
View File

@ -0,0 +1 @@
{"token": "ya29.a0AeDClZCbHA_Ubcy6IrJJZgcHOb95xrUDMIjoNryaK6DaV7RH93INeBAeKIAfaDw73grkGnBD2Ub2QuvqvRMJO6DvivonA6yoLj7hFvDgSLsfNXo5xcLJqsgiIiOfExr74m4WcT5jnpZswxk3ZuEtAoXuPDW7hzLXnTwk6sFDmAaCgYKAeoSARASFQHGX2MiPDgm1tQCv4YbN3Bq9f4zvg0177", "refresh_token": "1//05Hc2PufwLWe2CgYIARAAGAUSNwF-L9IrU2s--kkp6Q2fCzN5uf2vLBvEcYHyJOFZjCQAUpBSC0FWiCUg9wgm8cCMccEGe4Q7MPg", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "19011937557-bi5nh4afvg4tuqr87v6dp55qj9a9o1h2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://mail.google.com/"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-11-07T23:37:58.891983Z"}

1
app/token_2.json Normal file
View File

@ -0,0 +1 @@
{"token": "ya29.a0AeDClZC30_gM1YE3u8XHKpmLqIcABYBZzJOVQfrNyUJrFuOPxNpt96IbTeCIM266Es--v34aMA70oZG1GIELbcuExSSQ25_pzdHd9yS18vDOgdz8OtuG24_2DGnaCy7FOQBZzFr5GxgLJ_657Zal3RF8lreEFLyVLKdGf6TiaCgYKATMSARASFQHGX2MiirXKmKxKPjAWuSG0af_xmw0175", "refresh_token": "1//05akITlwASya5CgYIARAAGAUSNwF-L9IryPh1Y647Eim5KZZhD3DKCuUtkAp28N30ed1TALtax5TkflyKGxDtBmayKowuMZkaT9k", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "629922809906-pl9l1ipout6d5hh19ku50sfvnqgu8ir2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://www.googleapis.com/auth/calendar"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-11-02T23:47:30.521929Z"}

21
docker-compose.yml Normal file
View File

@ -0,0 +1,21 @@
version: '3.8'
services:
app:
build:
context: .
dockerfile: Dockerfile
args:
API_TOKEN_BOT: ${API_TOKEN_BOT}
OPENAI_API_KEY: ${OPENAI_API_KEY}
TAVILY_API_KEY: ${TAVILY_API_KEY}
ports:
- "8080:8080"
volumes:
- .:/code
environment:
- PYTHONUNBUFFERED=1
command: >
uvicorn app.server:app --host 0.0.0.0 --port 8080
env_file:
- .env

View File

@ -1,83 +0,0 @@
from langchain_core.tools import tool
from langchain_community.tools.gmail.utils import (
build_resource_service,
get_gmail_credentials,
)
from langchain_community.agent_toolkits import GmailToolkit
from langchain import hub
from langchain_community.tools.tavily_search import TavilySearchResults
from dotenv import load_dotenv
from langchain.agents import AgentExecutor, create_openai_functions_agent
from langchain_tools.agent_tools import (
multiply, redact_email, list_calendar_events,
create_calendar_event, create_quick_add_event,
send_message, get_company_info,
get_current_date_and_time
)
class AgentTools:
def load_tools(self) -> list:
toolkit = GmailToolkit()
# Can review scopes here https://developers.google.com/gmail/api/auth/scopes
# For instance, readonly scope is 'https://www.googleapis.com/auth/gmail.readonly'
credentials = get_gmail_credentials(
token_file="token.json",
scopes=["https://mail.google.com/"],
client_secrets_file="credentials.json",)
api_resource = build_resource_service(credentials=credentials)
toolkit = GmailToolkit(api_resource=api_resource)
# creamos la lista de herramientas de gmail
tools = toolkit.get_tools()
load_dotenv()
# Agregamos otras tools
search = TavilySearchResults(max_results=1)
tools.append(search)
tools.append(multiply)
tools.append(redact_email)
tools.append(list_calendar_events)
tools.append(create_calendar_event)
tools.append(send_message)
tools.append(get_company_info),
tools.append(get_current_date_and_time)
# tools.append(create_quick_add_event)
return tools
def load_agent(self, llm, tools):
instructions = """
You are the virtual assistant of OneCluster, a company specialized in providing custom development services focused on creating personalized technological solutions for businesses and companies. Your mission is to offer a warm, friendly, and collaborative service that always reflects OneCluster's core values.
**User Interactions:**
1. **Initial Greeting:** When starting an interaction with a user, greet them courteously and identify who you have the pleasure of speaking with. Once you know the user's name, address them respectfully throughout the conversation.
2. **Providing Information:** You have the ability to offer clear and detailed information about the services provided by OneCluster. Make sure to be concise yet informative, adapting the information to the user's needs.
3. **Appointment Scheduling:** You are responsible for scheduling appointments for clients. Before confirming an appointment, always check the availability on OneCluster's calendar to ensure there is space, and check the current date and time so that you have a clear sense of time. Request an email address from the user to schedule the appointment.
4. **Handling Unanswered Questions:** If you do not know how to answer a question, politely ask for the client's contact information and clearly identify the problem to be resolved. Then, send this information to oneclustererp@gmail.com with the subject "Unresolved customer query by the agent." Inform the client that you do not have the information at your disposal but that you can escalate the request to the support team, who will respond promptly.
**Style and Tone:**
Maintain a tone that is always friendly, approachable, and professional. Each interaction should reflect OneCluster's commitment to innovation, adaptability, and ongoing collaboration.
"""
base_prompt = hub.pull("langchain-ai/openai-functions-template")
prompt = base_prompt.partial(instructions=instructions)
agent = create_openai_functions_agent(llm, tools, prompt)
agent_executor = AgentExecutor(
agent=agent,
tools=tools,
verbose=True,
)
return agent_executor

View File

@ -1,109 +0,0 @@
from langchain_community.tools.tavily_search import TavilySearchResults
from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph.message import add_messages
from langchain_openai import ChatOpenAI
from dotenv import load_dotenv
from langgraph.prebuilt import create_react_agent
from langchain_core.prompts import ChatPromptTemplate
from langgraph.checkpoint.memory import MemorySaver
from langchain_tools.agent_tools import (
redact_email, list_calendar_events,
create_calendar_event,
get_company_info,
get_current_date_and_time
)
from langchain_community.tools.gmail.utils import (
build_resource_service,
get_gmail_credentials,
)
from langchain_community.agent_toolkits import GmailToolkit
load_dotenv()
# Initialize an OpenAI LLM
llm = ChatOpenAI(
model="gpt-4o-mini",
temperature=0.9
)
toolkit = GmailToolkit()
# Can review scopes here https://developers.google.com/gmail/api/auth/scopes
# For instance, readonly scope is 'https://www.googleapis.com/auth/gmail.readonly'
credentials = get_gmail_credentials(
token_file="token.json",
scopes=["https://mail.google.com/"],
client_secrets_file="credentials.json",
)
api_resource = build_resource_service(credentials=credentials)
toolkit = GmailToolkit(api_resource=api_resource)
# Build the list of Gmail tools
tools = toolkit.get_tools()
search = TavilySearchResults(max_results=2)
tools.append(search)
tools.append(redact_email)
tools.append(list_calendar_events)
tools.append(create_calendar_event)
tools.append(get_company_info)
tools.append(get_current_date_and_time)
system_prompt = ChatPromptTemplate.from_messages(
[
("system", "Eres Mariana, el asistente virtual de OneCluster, una empresa de software que ofrece soluciones personalizadas. Asume el tono de J.A.R.V.I.S.: cordial, atento y con tacto en todo momento."),
# Instructions on introduction and tone
("system", "Preséntate como Mariana en el primer mensaje y pregunta el nombre del usuario si no lo tienes registrado."),
("system", "Si el usuario ya ha interactuado antes, usa su nombre sin necesidad de volver a preguntar."),
("system", "Si el primer mensaje del usuario es una solicitud, pregúntale su nombre antes de responder si aún no lo conoces."),
# Instructions on handling requests and tasks
("system", "OneCluster es una empresa de software especializada en desarrollo a medida. Solo responde a preguntas y solicitudes relacionadas con la empresa y sus servicios."),
("system", "Si necesitas información adicional sobre la empresa, usa la función get_company_info."),
("system", "Antes de enviar correos o crear eventos, muestra los detalles al usuario para que los confirme antes de ejecutar la tarea."),
# How to handle out-of-scope questions
("system", "Si te preguntan algo no relacionado con los servicios de OneCluster, responde que solo puedes ayudar con temas relacionados con la empresa y sus soluciones."),
# Never reveal internal tools
("system", "Evita mencionar o hacer alusión a las herramientas que utilizas internamente. Esa información es confidencial."),
# Placeholder for the dynamic conversation content
("placeholder", "{messages}"),
]
)
class State(TypedDict):
messages: Annotated[list, add_messages]
is_last_step: bool  # change to a boolean if necessary
# Create the graph with the defined state
graph = create_react_agent(
model=llm, tools=tools, state_schema=State,
state_modifier=system_prompt,
checkpointer=MemorySaver()
)
config = {"configurable": {"thread_id": "thread-1", "recursion_limit": 50}}
while True:
user_input = input("User: ")
if user_input.lower() in ["quit", "exit", "q"]:
print("Goodbye!")
break
events = graph.stream(
{"messages": [("user", user_input)],
"is_last_step": False},
config, stream_mode = "updates")
for event in events:
if "agent" in event:
print(f"\nAsistente: {event["agent"]["messages"][-1].content}\n")

View File

@ -1,109 +0,0 @@
import telebot
from dotenv import load_dotenv
import os
from api_openai.whisper import whisper_api, tts_api
from langchain_tools.agent_tools import LangChainTools
from langchain_tools.agents import AgentTools
from langchain_core.messages import AIMessage, HumanMessage
# from tools.scaped import scaped
# Bot configuration
load_dotenv()
API_TOKEN_BOT = os.getenv("API_TOKEN_BOT")
bot = telebot.TeleBot(API_TOKEN_BOT)
# Handle '/start' and '/help'
wellcome = "¡Bienvenido! ¿Cómo puedo ayudarte?"
@bot.message_handler(commands=['help', 'start'])
def send_welcome(message):
bot.reply_to(message, wellcome, parse_mode="Markdown")
# Create a list for the history outside the functions
history = []
@bot.message_handler(content_types=["text", "voice"])
def bot_mensajes(message):
global history  # to access the global 'history' variable
# If the message is a voice note
if message.voice:
user_name = message.from_user.first_name
file_info = bot.get_file(message.voice.file_id)
downloaded_file = bot.download_file(file_info.file_path)
file_path = "audios/nota_de_voz.ogg"
with open(file_path, 'wb') as new_file:
new_file.write(downloaded_file)
pregunta_usuario = whisper_api(file_path)
print(f"Pregunta del usuario: {pregunta_usuario}")
langChainTools = LangChainTools()
llm = langChainTools.load_llm_openai()
agentTools = AgentTools()
tools = agentTools.load_tools()
agent_executor = agentTools.load_agent(llm, tools)
respuesta_agente = agent_executor.invoke(
{
"input": pregunta_usuario,
"chat_history": history,
}
)
bot.send_message(message.chat.id, respuesta_agente["output"],
parse_mode="Markdown")
path_voice: str = tts_api(respuesta_agente["output"])
with open(path_voice, 'rb') as voice:
bot.send_voice(message.chat.id, voice=voice)
history.append(HumanMessage(content=pregunta_usuario))
history.append(AIMessage(content=respuesta_agente["output"]))
# If the message is a text message
if message.text:
pregunta_usuario = message.text
langChainTools = LangChainTools()
llm = langChainTools.load_llm_openai()
agentTools = AgentTools()
tools = agentTools.load_tools()
agent_executor = agentTools.load_agent(llm, tools)
respuesta_agente = agent_executor.invoke(
{
"input": pregunta_usuario,
"chat_history": history,
}
)
# texto_respuesta: str = scaped(respuesta_agente["output"])
texto_respuesta: str = respuesta_agente["output"]
bot.send_message(
message.chat.id, texto_respuesta,
parse_mode="Markdown")
# Send a voice message
# path_voice: str = tts_api(respuesta_agente["output"])
# with open(path_voice, 'rb') as voice:
# bot.send_voice(message.chat.id, voice=voice)
history.append(HumanMessage(content=pregunta_usuario))
history.append(AIMessage(content=respuesta_agente["output"]))
# print(history)
# Send the history after each interaction
# bot.send_message(message.chat.id, history)
# while True:
# time.sleep(60)
# mensaje = 'Que mas pues!!'
# bot.send_message('5076346205', mensaje)
bot.infinity_polling()

0
packages/README.md Normal file
View File

4322
poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -1,32 +0,0 @@
from rag.split_docs import load_split_docs
from rag.llm import load_llm_openai
from rag.embeddings import load_embeddins
from rag.retriever import create_retriever
from rag.vectorstore import create_verctorstore
from rag.rag_chain import create_rag_chain
dir_pdfs: str = 'documents/pdfs/'
file_name: str = 'onecluster_info.pdf'
file_path: str = 'onecluster_info.pdf'
docs_split: list = load_split_docs(file_path)
embeddings_model = load_embeddins()
llm = load_llm_openai()
create_verctorstore(
docs_split,
embeddings_model,
file_path
)
retriever = create_retriever(
embeddings_model,
persist_directory="embeddings/onecluster_info"
)
qa = create_rag_chain(
llm, retriever)
prompt: str = "Dame información detallada sobre los sercivios que ofrese OneCluster."
respuesta = qa.invoke(
{"input": prompt, "chat_history": []}
)
print(respuesta["answer"])

41
pyproject.toml Normal file
View File

@ -0,0 +1,41 @@
[tool.poetry]
name = "assistant"
version = "0.1.0"
description = ""
authors = ["Your Name <you@example.com>"]
readme = "README.md"
packages = [
{ include = "app" },
]
[tool.poetry.dependencies]
python = "^3.11"
uvicorn = "^0.23.2"
langserve = {extras = ["server"], version = ">=0.0.30"}
pydantic = "<3"
langgraph = "^0.2.28"
langchain-community = "^0.3.1"
langchain-openai = "^0.2.5"
langchain-chroma = "^0.1.4"
google = "^3.0.0"
google-auth = "^2.35.0"
google-auth-oauthlib = "^1.2.0"
google-api-python-client = "^2.131.0"
flake8 = "^7.1.1"
httpx = "^0.27.2"
pytest = "^8.3.3"
requests = "^2.32.3"
jsonify = "^0.5"
protobuf = "^3.20.3"
pytz = "^2024.2"
telebot = "^0.0.5"
pypdf = "^5.1.0"
[tool.poetry.group.dev.dependencies]
langchain-cli = ">=0.0.15"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

View File

@ -1,140 +0,0 @@
aiohappyeyeballs==2.4.0
aiohttp==3.10.6
aiosignal==1.3.1
annotated-types==0.7.0
anyio==4.6.0
asgiref==3.8.1
attrs==24.2.0
backoff==2.2.1
bcrypt==4.2.0
beautifulsoup4==4.12.3
blinker==1.8.2
build==1.2.2.post1
cachetools==5.5.0
certifi==2024.8.30
charset-normalizer==3.3.2
chroma-hnswlib==0.7.3
chromadb==0.5.3
click==8.1.7
coloredlogs==15.0.1
dataclasses-json==0.6.7
Deprecated==1.2.14
distro==1.9.0
durationpy==0.9
fastapi==0.115.4
filelock==3.16.1
Flask==3.0.3
flatbuffers==24.3.25
frozenlist==1.4.1
fsspec==2024.10.0
google==3.0.0
google-api-core==2.19.0
google-api-python-client==2.131.0
google-auth==2.29.0
google-auth-httplib2==0.2.0
google-auth-oauthlib==1.2.0
googleapis-common-protos==1.63.0
greenlet==3.1.1
grpcio==1.67.0
h11==0.14.0
httpcore==1.0.5
httplib2==0.22.0
httptools==0.6.4
httpx==0.27.2
huggingface-hub==0.26.2
humanfriendly==10.0
idna==3.10
importlib_metadata==8.4.0
importlib_resources==6.4.5
itsdangerous==2.2.0
Jinja2==3.1.4
jiter==0.5.0
jsonpatch==1.33
jsonpointer==3.0.0
kubernetes==31.0.0
langchain==0.3.1
langchain-chroma==0.1.4
langchain-community==0.3.1
langchain-core==0.3.6
langchain-openai==0.2.0
langchain-text-splitters==0.3.0
langgraph==0.2.28
langgraph-checkpoint==1.0.11
langsmith==0.1.128
markdown-it-py==3.0.0
MarkupSafe==2.1.5
marshmallow==3.22.0
mdurl==0.1.2
mmh3==5.0.1
monotonic==1.6
mpmath==1.3.0
msgpack==1.1.0
multidict==6.1.0
mypy-extensions==1.0.0
numpy==1.26.4
oauthlib==3.2.2
onnxruntime==1.19.2
openai==1.48.0
opentelemetry-api==1.27.0
opentelemetry-exporter-otlp-proto-common==1.27.0
opentelemetry-exporter-otlp-proto-grpc==1.27.0
opentelemetry-instrumentation==0.48b0
opentelemetry-instrumentation-asgi==0.48b0
opentelemetry-instrumentation-fastapi==0.48b0
opentelemetry-proto==1.27.0
opentelemetry-sdk==1.27.0
opentelemetry-semantic-conventions==0.48b0
opentelemetry-util-http==0.48b0
orjson==3.10.7
overrides==7.7.0
packaging==24.1
posthog==3.7.0
proto-plus==1.24.0
protobuf==4.25.5
pyasn1==0.6.1
pyasn1_modules==0.4.1
pydantic==2.9.2
pydantic-settings==2.5.2
pydantic_core==2.23.4
Pygments==2.18.0
pyparsing==3.1.4
pypdf==5.1.0
PyPika==0.48.9
pyproject_hooks==1.2.0
pyTelegramBotAPI==4.23.0
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
pytz==2024.2
PyYAML==6.0.2
regex==2024.9.11
requests==2.32.3
requests-oauthlib==2.0.0
rich==13.9.3
rsa==4.9
setuptools==75.2.0
shellingham==1.5.4
six==1.16.0
sniffio==1.3.1
soupsieve==2.6
SQLAlchemy==2.0.35
starlette==0.41.2
sympy==1.13.3
telebot==0.0.5
tenacity==8.5.0
tiktoken==0.7.0
tokenizers==0.20.1
tqdm==4.66.5
typer==0.12.5
typing-inspect==0.9.0
typing_extensions==4.12.2
uritemplate==4.1.1
urllib3==2.2.3
uvicorn==0.32.0
uvloop==0.21.0
watchfiles==0.24.0
websocket-client==1.8.0
websockets==13.1
Werkzeug==3.0.4
wrapt==1.16.0
yarl==1.12.1
zipp==3.20.2

View File

@ -1 +1 @@
{"token": "ya29.a0AeDClZBjncDp4ZwNKNtQ5ghKHPr1IT4XkgDc9QtvhPLrFGAR84f5r5iZPCd91VB7_WoJCG3iGQS0MU1n01xdRlEjDl7wVlKjKF0H680Bdim_bzykCXn3Jj0nVVkkHDOZP7RWeP1oAfY7Vjd4qbw_VxOdOzVzG_Bc6Auy4EJINAaCgYKAcYSARASFQHGX2MipaJllxIRMLCcZb2csCZECA0177", "refresh_token": "1//05nbircha66xlCgYIARAAGAUSNwF-L9IrxbE2v7kfLwXb4u0pD6Rin7xEBOTT83DeH7t2ttfD5CDmUCyhDsOaVRMRK_r8UtdoMq8", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "19011937557-bi5nh4afvg4tuqr87v6dp55qj9a9o1h2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://mail.google.com/"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-10-30T01:16:56.882894Z"}
{"token": "ya29.a0AeDClZCbHA_Ubcy6IrJJZgcHOb95xrUDMIjoNryaK6DaV7RH93INeBAeKIAfaDw73grkGnBD2Ub2QuvqvRMJO6DvivonA6yoLj7hFvDgSLsfNXo5xcLJqsgiIiOfExr74m4WcT5jnpZswxk3ZuEtAoXuPDW7hzLXnTwk6sFDmAaCgYKAeoSARASFQHGX2MiPDgm1tQCv4YbN3Bq9f4zvg0177", "refresh_token": "1//05Hc2PufwLWe2CgYIARAAGAUSNwF-L9IrU2s--kkp6Q2fCzN5uf2vLBvEcYHyJOFZjCQAUpBSC0FWiCUg9wgm8cCMccEGe4Q7MPg", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "19011937557-bi5nh4afvg4tuqr87v6dp55qj9a9o1h2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://mail.google.com/"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-11-07T23:37:58.891983Z"}

View File

@ -1 +1 @@
{"token": "ya29.a0AeDClZChMN7SEvjp3dFVZtee2pDoqAoPFC7AWiEeIG7H6qN2HDnf7c6DcFuc--aG60e1cAnOpoKf80H8aqrFFYbF4-F4LE_vz9MY8oc21Ra9PwM16FYxGGKcM2wcrrOGaFncs9Um9_yNxzAa6MUVNq88Y_Bhpr2F2mO3o53NjQaCgYKAZESARASFQHGX2Mi7EodrKchyiyPIZ4y5Lwh0Q0177", "refresh_token": "1//05CtNC-Z3ii8qCgYIARAAGAUSNwF-L9IrbOfrB0kNACEJ5HX4T-fmdNUqsGFqn1QFlvK_1L9h0emULUS1yU85IbaNyESXZSQzHU8", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "629922809906-pl9l1ipout6d5hh19ku50sfvnqgu8ir2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://www.googleapis.com/auth/calendar"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-10-30T01:22:34.287442Z"}
{"token": "ya29.a0AeDClZC30_gM1YE3u8XHKpmLqIcABYBZzJOVQfrNyUJrFuOPxNpt96IbTeCIM266Es--v34aMA70oZG1GIELbcuExSSQ25_pzdHd9yS18vDOgdz8OtuG24_2DGnaCy7FOQBZzFr5GxgLJ_657Zal3RF8lreEFLyVLKdGf6TiaCgYKATMSARASFQHGX2MiirXKmKxKPjAWuSG0af_xmw0175", "refresh_token": "1//05akITlwASya5CgYIARAAGAUSNwF-L9IryPh1Y647Eim5KZZhD3DKCuUtkAp28N30ed1TALtax5TkflyKGxDtBmayKowuMZkaT9k", "token_uri": "https://oauth2.googleapis.com/token", "client_id": "629922809906-pl9l1ipout6d5hh19ku50sfvnqgu8ir2.apps.googleusercontent.com", "client_secret": "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", "scopes": ["https://www.googleapis.com/auth/calendar"], "universe_domain": "googleapis.com", "account": "", "expiry": "2024-11-02T23:47:30.521929Z"}