Commit 20950eb
Parent(s): f9d4dc5

rm sqlite

- app.py +21 -12
- bigquery_uploader.py +19 -42
- local_database.py +0 -136
app.py
CHANGED

@@ -24,8 +24,7 @@ from agent_setup import initialize_adk
 from google.genai import types
 from story_generator import create_story_prompt_from_pdf, generate_video_from_prompt
 from langchain_huggingface import HuggingFaceEndpoint
-from bigquery_uploader import upload_to_bigquery
-import local_database
+from bigquery_uploader import upload_diagnosis_to_bigquery
 
 print("✅ All libraries imported successfully.")
 
@@ -106,6 +105,23 @@ def create_field_mode_ui():
         {remedy}
         """
         print("Workflow complete. Returning response.")
+
+        # Prepare data for BigQuery upload
+        diagnosis_data = {
+            "ai_diagnosis": report_title,
+            "recommended_action": remedy,
+            "confidence_score": None,  # Placeholder, as confidence score is not calculated here
+            "farmer_id": "unknown",  # Placeholder
+            "gps_latitude": None,  # Placeholder
+            "gps_longitude": None,  # Placeholder
+            "crop_type": "Maize",  # Assuming maize for this app
+            "crop_variety": None,  # Placeholder
+            "farmer_feedback": None,  # Placeholder
+            "treatment_applied": None,  # Placeholder
+            "outcome_image_id": None,  # Placeholder
+        }
+        upload_diagnosis_to_bigquery(diagnosis_data)
+
         return final_response
 
     except Exception as e:
@@ -309,13 +325,7 @@ def create_settings_ui():
     status_output = gr.Textbox(label="Sync Status", interactive=False, lines=5)
 
     def sync_data_to_cloud():
-        yield "
-        try:
-            # Assuming your bigquery_uploader has a function that returns a summary
-            result_message = upload_to_bigquery()
-            yield f"Sync successful!\n{result_message}"
-        except Exception as e:
-            yield f"Sync failed!\nError: {e}"
+        yield "Local data sync is no longer required as diagnoses are uploaded directly to BigQuery."
 
     sync_btn.click(
         sync_data_to_cloud,
@@ -382,8 +392,7 @@ def check_internet_connection(host="8.8.8.8", port=53, timeout=3):
 
 
 if __name__ == "__main__":
-    #
-    local_database.init_db()
+    # No local database initialization needed
 
    field_mode_ui = create_field_mode_ui()
    interface_list = [field_mode_ui]
@@ -429,4 +438,4 @@ if __name__ == "__main__":
    else:
        ui = field_mode_ui
 
-    ui.launch(share=True, debug=True)
+    ui.launch(share=True, debug=True)
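Taken together, the app.py changes make the upload happen inline at the end of the field-mode workflow. For reference, a minimal sketch of exercising the new direct-upload path outside the Gradio app; the field values below are illustrative placeholders, and the sketch assumes Google Cloud credentials are already configured in the environment (e.g. via GOOGLE_APPLICATION_CREDENTIALS):

    from bigquery_uploader import upload_diagnosis_to_bigquery

    # Illustrative record: only the keys app.py actually fills are set here;
    # analysis_id and timestamp are added by the uploader when missing.
    sample_diagnosis = {
        "ai_diagnosis": "Maize Leaf Blight",               # hypothetical value
        "recommended_action": "Apply a copper fungicide",  # hypothetical value
        "crop_type": "Maize",
        "farmer_id": "unknown",
        "confidence_score": None,
    }

    # The uploader returns a human-readable status string.
    print(upload_diagnosis_to_bigquery(sample_diagnosis))
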
bigquery_uploader.py
CHANGED

@@ -2,7 +2,8 @@
 
 from google.cloud import bigquery
 from google.cloud.exceptions import NotFound
-import local_database
+import uuid
+from datetime import datetime
 
 PROJECT_ID = "gem-creation"
 DATASET_ID = "aura_mind_glow_data"
@@ -59,54 +60,30 @@ def create_table_if_not_exists(client):
     table = client.create_table(table)  # Make an API request.
     print(f"Created table {table.project}.{table.dataset_id}.{table.table_id}")
 
-
-def upload_to_bigquery():
-    """Uploads data from the local SQLite database to BigQuery."""
-    conn = local_database.get_db_connection()
-    if conn is None:
-        print("Could not connect to the local database.")
-        return
-
-    rows = local_database.get_all_analysis(conn)
-    if not rows:
-        print("No data to upload from the local database.")
-        conn.close()
-        return
-
+def upload_diagnosis_to_bigquery(diagnosis_data: dict):
+    """Uploads a single diagnosis record to BigQuery."""
     client = get_bigquery_client()
     if client is None:
-
-        return
+        print("BigQuery client not available. Cannot upload diagnosis.")
+        return "BigQuery client not available."
 
     create_dataset_if_not_exists(client)
     create_table_if_not_exists(client)
 
     table_id = f"{PROJECT_ID}.{DATASET_ID}.{TABLE_ID}"
-    # Convert rows to list of dictionaries
-    rows_to_insert = []
-    for row in rows:
-        rows_to_insert.append({
-            "analysis_id": row[0],
-            "timestamp": row[1],
-            "farmer_id": row[2],
-            "gps_latitude": row[3],
-            "gps_longitude": row[4],
-            "crop_type": row[5],
-            "crop_variety": row[6],
-            "ai_diagnosis": row[7],
-            "confidence_score": row[8],
-            "recommended_action": row[9],
-            "farmer_feedback": row[10],
-            "treatment_applied": row[11],
-            "outcome_image_id": row[12],
-        })
 
-    errors = client.insert_rows_json(table_id, rows_to_insert)
+    # Add required fields if not present
+    if "analysis_id" not in diagnosis_data:
+        diagnosis_data["analysis_id"] = str(uuid.uuid4())
+    if "timestamp" not in diagnosis_data:
+        diagnosis_data["timestamp"] = datetime.now().isoformat()
+
+    rows_to_insert = [diagnosis_data]
+
+    errors = client.insert_rows_json(table_id, rows_to_insert)
     if errors == []:
-        print("
-        local_database.clear_all_analysis(conn)
-        print("Local database cleared.")
+        print(f"Diagnosis record {diagnosis_data.get('analysis_id')} uploaded successfully to BigQuery.")
+        return "Diagnosis uploaded successfully."
     else:
-        print(f"Encountered errors while inserting
-
-        conn.close()
+        print(f"Encountered errors while inserting diagnosis record: {errors}")
+        return f"Error uploading diagnosis: {errors}"
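A note on the semantics here: client.insert_rows_json performs a streaming insert and reports per-row problems by returning a list of errors rather than raising, which is why the function tests errors == []. A minimal sketch of wrapping that call in a simple retry; the attempt count and backoff are illustrative assumptions, not behavior this repo implements:

    import time

    def insert_with_retry(client, table_id, rows, attempts=3, backoff_seconds=2.0):
        """Retry a streaming insert a few times; returns [] on success."""
        errors = client.insert_rows_json(table_id, rows)  # [] means every row landed
        for _ in range(attempts - 1):
            if not errors:
                break
            time.sleep(backoff_seconds)  # brief pause before retrying
            errors = client.insert_rows_json(table_id, rows)
        return errors

Retrying a partially failed batch can duplicate the rows that did land; for the single-row inserts this commit performs that risk is small, and BigQuery can deduplicate best-effort if row_ids are supplied to insert_rows_json.
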
local_database.py
DELETED

@@ -1,136 +0,0 @@
-import sqlite3
-import os
-from sqlite3 import Error
-
-DB_FILE = "auramind_local.db"
-INDEX_FILE = "auramind_faiss.index"
-
-def get_db_connection():
-    """Establishes a connection to the database."""
-    conn = None
-    print(f"Attempting to connect to database: {DB_FILE}")
-    try:
-        conn = sqlite3.connect(DB_FILE)
-        conn.row_factory = sqlite3.Row
-        print(f"Successfully connected to database: {DB_FILE}")
-    except Error as e:
-        print(f"Error connecting to database: {e}")
-    return conn
-
-def init_db():
-    """
-    Initializes a more robust database schema for multimodal data.
-    - 'documents' table tracks the source files.
-    - 'chunks' table stores the individual encrypted text/image chunks.
-    - 'farm_analysis' table stores analysis data.
-    """
-    print("Initializing database...")
-    # Ensure a clean slate for development/testing
-    delete_database_and_index()
-
-    conn = get_db_connection()
-    if conn is None:
-        print("Database connection failed, cannot initialize.")
-        return
-    cursor = conn.cursor()
-
-    # Table to track the source documents (e.g., 'healthy_maize.txt', 'user_guide.pdf')
-    cursor.execute("""
-        CREATE TABLE IF NOT EXISTS documents (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            name TEXT NOT NULL UNIQUE
-        )
-    """
-    )
-    print("Table 'documents' checked/created.")
-
-    # Table to store each chunk of content (text or image)
-    # The faiss_id will correspond to the row number in the FAISS index
-    cursor.execute("""
-        CREATE TABLE IF NOT EXISTS chunks (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            doc_id INTEGER,
-            content_type TEXT NOT NULL, -- 'text' or 'image'
-            encrypted_content BLOB NOT NULL,
-            page_num INTEGER,
-            FOREIGN KEY (doc_id) REFERENCES documents (id)
-        )
-    """
-    )
-    print("Table 'chunks' checked/created.")
-
-    # Table for farm analysis data
-    sql_create_table = """ CREATE TABLE IF NOT EXISTS farm_analysis (
-                               analysis_id TEXT PRIMARY KEY,
-                               timestamp TEXT NOT NULL,
-                               farmer_id TEXT,
-                               gps_latitude REAL,
-                               gps_longitude REAL,
-                               crop_type TEXT,
-                               crop_variety TEXT,
-                               ai_diagnosis TEXT,
-                               confidence_score REAL,
-                               recommended_action TEXT,
-                               farmer_feedback TEXT,
-                               treatment_applied TEXT,
-                               outcome_image_id TEXT
-                           ); """
-    cursor.execute(sql_create_table)
-    print("Table 'farm_analysis' checked/created.")
-
-    conn.commit()
-    print("Database changes committed.")
-    conn.close()
-    print("Database connection closed.")
-
-def check_if_indexed():
-    """Checks if the initial database and index file exist."""
-    # A basic check. A more robust check might query the db for content.
-    return os.path.exists(DB_FILE) and os.path.exists(INDEX_FILE)
-
-def delete_database_and_index():
-    """Deletes existing db and index files for a clean rebuild."""
-    if os.path.exists(DB_FILE):
-        os.remove(DB_FILE)
-        print(f"Removed old database: {DB_FILE}")
-    if os.path.exists(INDEX_FILE):
-        os.remove(INDEX_FILE)
-        print(f"Removed old index: {INDEX_FILE}")
-
-def insert_analysis(conn, analysis_data):
-    """
-    Create a new analysis record into the farm_analysis table
-    :param conn:
-    :param analysis_data:
-    :return: project id
-    """
-    sql = """ INSERT INTO farm_analysis(analysis_id,timestamp,farmer_id,gps_latitude,gps_longitude,crop_type,crop_variety,ai_diagnosis,confidence_score,recommended_action,farmer_feedback,treatment_applied,outcome_image_id)
-              VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?) """
-    cur = conn.cursor()
-    cur.execute(sql, analysis_data)
-    conn.commit()
-    return cur.lastrowid
-
-def get_all_analysis(conn):
-    """
-    Query all rows in the farm_analysis table
-    :param conn: the Connection object
-    :return:
-    """
-    cur = conn.cursor()
-    cur.execute("SELECT * FROM farm_analysis")
-
-    rows = cur.fetchall()
-
-    return rows
-
-def clear_all_analysis(conn):
-    """
-    Delete all rows in the farm_analysis table
-    :param conn: the Connection object
-    :return:
-    """
-    sql = 'DELETE FROM farm_analysis'
-    cur = conn.cursor()
-    cur.execute(sql)
-    conn.commit()
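One migration detail visible in this deletion: insert_analysis took a positional tuple in the exact column order of its INSERT statement, while the new upload_diagnosis_to_bigquery takes a dict keyed by those same column names. A small illustrative helper (not part of the repo) that bridges the two shapes:

    # Illustrative only: map an old positional farm_analysis row onto the
    # dict shape upload_diagnosis_to_bigquery expects. The column order
    # matches the deleted INSERT statement above.
    FARM_ANALYSIS_COLUMNS = [
        "analysis_id", "timestamp", "farmer_id", "gps_latitude",
        "gps_longitude", "crop_type", "crop_variety", "ai_diagnosis",
        "confidence_score", "recommended_action", "farmer_feedback",
        "treatment_applied", "outcome_image_id",
    ]

    def row_to_diagnosis_dict(row):
        return dict(zip(FARM_ANALYSIS_COLUMNS, row))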