# NOTE: removed web-page scrape residue ("Spaces: / Sleeping / Sleeping") that is not part of the source file.
| # aura-mind-glow/tools.py | |
| import torch | |
| from PIL import Image | |
| from transformers import AutoProcessor | |
| from unsloth import FastVisionModel | |
| from langchain_community.vectorstores import FAISS | |
| from vector_store import search_documents | |
| from bigquery_search import search_bigquery_for_remedy | |
def create_plant_diagnosis_tool(model: "FastVisionModel", processor: "AutoProcessor"):
    """Factory function to create the plant diagnosis tool.

    Args:
        model: A loaded unsloth ``FastVisionModel`` (or ``None`` if unavailable).
        processor: The matching HF ``AutoProcessor`` (or ``None`` if unavailable).

    Returns:
        A ``diagnose_plant(file_path) -> str`` callable that runs the vision
        model on an image and returns the diagnosis text (or an error string).
    """
    def diagnose_plant(file_path: str) -> str:
        """
        Analyzes an image of a maize plant from a given file path and returns a diagnosis.

        Args:
            file_path: The local path to the image file.
        """
        if model is None or processor is None or file_path is None:
            return "Error: Vision model is not loaded or no file path was provided."
        try:
            # Use a context manager so the underlying file handle is closed
            # (the original leaked it). convert("RGB") returns a detached copy
            # that remains usable after the file is closed.
            with Image.open(file_path) as img:
                image = img.convert("RGB")
        except Exception as e:
            return f"Error opening image file: {e}"
        messages = [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": "What is the condition of this maize plant?"},
                    {"type": "image", "image": image},
                ],
            }
        ]
        text_prompt = processor.tokenizer.apply_chat_template(
            messages, tokenize=False, add_generation_prompt=True
        )
        inputs = processor(text=text_prompt, images=image, return_tensors="pt").to(model.device)
        # inference_mode disables autograd bookkeeping for generation.
        with torch.inference_mode():
            outputs = model.generate(**inputs, max_new_tokens=48, use_cache=True)
        response = processor.batch_decode(outputs, skip_special_tokens=True)[0]
        # The decoded text echoes the whole chat; the answer follows the last
        # "model\n" turn marker (Gemma-style chat template — TODO confirm this
        # marker matches the processor actually used).
        answer_start_index = response.rfind("model\n")
        if answer_start_index != -1:
            return response[answer_start_index + len("model\n"):].strip()
        return "Could not parse diagnosis from model output."
    return diagnose_plant
def create_remedy_retrieval_tool(retriever: "FAISS"):
    """Factory function to create the remedy retrieval tool.

    Args:
        retriever: An object exposing ``invoke(query) -> list[Document]``
            (e.g. ``FAISS.as_retriever()``). NOTE(review): a raw FAISS store
            has no ``invoke``; the annotation is kept (quoted, so it is not
            evaluated eagerly) for backward compatibility, but the object
            must follow the LangChain retriever API.

    Returns:
        A ``retrieve_remedy(diagnosis) -> str`` callable.
    """
    def retrieve_remedy(diagnosis: str) -> str:
        """
        Takes a plant health diagnosis (e.g., 'Phosphorus Deficiency') and returns a suggested remedy from a local knowledge base.
        """
        if retriever is None:
            return "Error: Knowledge base is not loaded."
        retrieved_docs = retriever.invoke(diagnosis)
        if retrieved_docs:
            # Only the top-ranked document is surfaced to keep the answer short.
            return retrieved_docs[0].page_content
        return "No specific remedy found in the knowledge base for this condition."
    return retrieve_remedy
def create_chroma_db_search_tool():
    """Factory function to create the ChromaDB search tool."""
    def search_chroma_db(query: str) -> str:
        """
        Searches the local ChromaDB vector store for a remedy based on a diagnosis query.
        """
        # Delegate to the shared vector-store helper and surface only the top hit.
        hits = search_documents(query)
        if not hits:
            return "No remedy found in local knowledge base."
        return hits[0]
    return search_chroma_db
def create_bigquery_search_tool():
    """Factory function to create the BigQuery search tool."""
    def search_bigquery(query: str) -> str:
        """
        Searches BigQuery for a remedy based on a diagnosis query.
        """
        # Thin pass-through to the BigQuery helper module.
        result = search_bigquery_for_remedy(query)
        return result
    return search_bigquery