import gradio as gr
import os
import random

import datasets
from dotenv import load_dotenv
from huggingface_hub import list_models, login
from langchain.docstore.document import Document
from langchain_community.retrievers import BM25Retriever
from smolagents import GradioUI, CodeAgent, InferenceClientModel, Tool, DuckDuckGoSearchTool

# Load environment variables from the .env file
load_dotenv()


class GuestInfoRetrieverTool(Tool):
    name = "guest_info_retriever"
    description = "Retrieves detailed information about gala guests based on their name or relation."
    inputs = {
        "query": {
            "type": "string",
            "description": "The name or relation of the guest you want information about."
        }
    }
    output_type = "string"

    def __init__(self, docs):
        self.is_initialized = False
        # Build a BM25 retriever over the guest documents
        self.retriever = BM25Retriever.from_documents(docs)

    def forward(self, query: str):
        results = self.retriever.get_relevant_documents(query)
        if results:
            # Return the top 3 matching guest entries
            return "\n\n".join([doc.page_content for doc in results[:3]])
        else:
            return "No matching guest information found."


def load_guest_dataset():
    # Load the dataset
    guest_dataset = datasets.load_dataset("agents-course/unit3-invitees", split="train")

    # Convert dataset entries into Document objects
    docs = [
        Document(
            page_content="\n".join([
                f"Name: {guest['name']}",
                f"Relation: {guest['relation']}",
                f"Description: {guest['description']}",
                f"Email: {guest['email']}"
            ]),
            metadata={"name": guest["name"]}
        )
        for guest in guest_dataset
    ]

    # Return the tool
    return GuestInfoRetrieverTool(docs)


class WeatherInfoTool(Tool):
    name = "weather_info"
    description = "Fetches dummy weather information for a given location."
    inputs = {
        "location": {
            "type": "string",
            "description": "The location to get weather information for."
        }
    }
    output_type = "string"

    def forward(self, location: str):
        # Dummy weather data
        weather_conditions = [
            {"condition": "Rainy", "temp_c": 15},
            {"condition": "Clear", "temp_c": 25},
            {"condition": "Windy", "temp_c": 20}
        ]
        # Randomly select a weather condition
        data = random.choice(weather_conditions)
        return f"Weather in {location}: {data['condition']}, {data['temp_c']}°C"


class HubStatsTool(Tool):
    name = "hub_stats"
    description = "Fetches the most downloaded model from a specific author on the Hugging Face Hub."
    inputs = {
        "author": {
            "type": "string",
            "description": "The username of the model author/organization to find models from."
        }
    }
    output_type = "string"

    def forward(self, author: str):
        try:
            # List models from the specified author, sorted by downloads
            models = list(list_models(author=author, sort="downloads", direction=-1, limit=1))
            if models:
                model = models[0]
                return f"The most downloaded model by {author} is {model.id} with {model.downloads:,} downloads."
            else:
                return f"No models found for author {author}."
        except Exception as e:
            return f"Error fetching models for {author}: {str(e)}"


# Log in to the Hugging Face Hub and initialize the model
token = os.getenv("HUGGINGFACE_TOKEN")
if not token:
    raise ValueError("Please set HUGGINGFACE_TOKEN in the .env file")
login(token=token)

model = InferenceClientModel()

# Initialize the web search tool
search_tool = DuckDuckGoSearchTool()

# Initialize the weather tool
weather_info_tool = WeatherInfoTool()

# Initialize the Hub stats tool
hub_stats_tool = HubStatsTool()

# Load the guest dataset and initialize the guest info tool
guest_info_tool = load_guest_dataset()

# Create Alfred with all the tools
alfred = CodeAgent(
    tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool],
    model=model,
    add_base_tools=True,  # Add any additional base tools
    planning_interval=3,  # Enable planning every 3 steps
    max_steps=5           # Limit the maximum number of steps
)

if __name__ == "__main__":
    GradioUI(alfred).launch(share=False)
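
# Optional usage sketch (commented out): Alfred can also be queried programmatically
# instead of through the Gradio UI, via the agent's run() method. The question below
# is purely illustrative and assumes the agent above was constructed successfully.
#
# response = alfred.run("What's the weather like in Paris tonight?")
# print(response)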