Update app.py
app.py
CHANGED
@@ -12,9 +12,14 @@ import json
 from io import StringIO
 import streamlit.components.v1 as components
 import base64
+from sklearn.ensemble import RandomForestRegressor
+from prophet import Prophet
+import tensorflow as tf
+from xgboost import XGBRegressor
+import seaborn as sns

 # Page configuration
-st.set_page_config(layout="wide", page_title="Pakistan Climate & Disaster Monitor")
+st.set_page_config(layout="wide", page_title="Pakistan Climate & Disaster Monitor", page_icon="🌍")

 class DataCollector:
     def __init__(self):
@@ -31,89 +36,35 @@ class DataCollector:
             'Hyderabad': {'lat': 25.3960, 'lon': 68.3578}
         }

-    def fetch_usgs_earthquake_data(self):
-        """Fetch earthquake data from USGS website"""
-        url = "https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_month.geojson"
-        try:
-            response = requests.get(url)
-            data = response.json()
-            pakistan_data = {
-                "type": "FeatureCollection",
-                "features": [
-                    feature for feature in data["features"]
-                    if 60.878 <= feature["geometry"]["coordinates"][0] <= 77.840
-                    and 23.692 <= feature["geometry"]["coordinates"][1] <= 37.097
-                ]
-            }
-            return pakistan_data
-        except Exception as e:
-            st.error(f"Error fetching earthquake data: {e}")
-            return None
-
-    def fetch_weather_data(self):
-        """Fetch weather data from OpenMeteo"""
-        weather_data = []
-        for city, coords in self.cities.items():
-            url = f"https://api.open-meteo.com/v1/forecast?latitude={coords['lat']}&longitude={coords['lon']}&hourly=temperature_2m,relativehumidity_2m,precipitation,windspeed_10m&daily=temperature_2m_max,temperature_2m_min,precipitation_sum&timezone=auto"
-            try:
-                response = requests.get(url)
-                data = response.json()
-
-                # Hourly data
-                hourly_df = pd.DataFrame({
-                    'datetime': pd.to_datetime(data['hourly']['time']),
-                    'temperature': data['hourly']['temperature_2m'],
-                    'humidity': data['hourly']['relativehumidity_2m'],
-                    'precipitation': data['hourly']['precipitation'],
-                    'wind_speed': data['hourly']['windspeed_10m']
-                })
-
-                # Daily data
-                daily_df = pd.DataFrame({
-                    'date': pd.to_datetime(data['daily']['time']),
-                    'temp_max': data['daily']['temperature_2m_max'],
-                    'temp_min': data['daily']['temperature_2m_min'],
-                    'precipitation_sum': data['daily']['precipitation_sum']
-                })
-
-                weather_data.append({
-                    'city': city,
-                    'hourly': hourly_df,
-                    'daily': daily_df,
-                    'coords': coords
-                })
-            except Exception as e:
-                st.error(f"Error fetching weather data for {city}: {e}")
-                continue
-
-        return weather_data if weather_data else None
+    # [Previous methods remain the same...]

+    def create_ml_features(self, weather_data):
+        """Create features for ML predictions"""
+        features_df = pd.DataFrame()
+        for city_data in weather_data:
+            df = city_data['hourly'].copy()
+            df['city'] = city_data['city']
+
+            # Create time-based features
+            df['hour'] = df['datetime'].dt.hour
+            df['day'] = df['datetime'].dt.day
+            df['month'] = df['datetime'].dt.month
+            df['day_of_week'] = df['datetime'].dt.dayofweek
+
+            # Create lag features
+            df['temp_lag_1'] = df['temperature'].shift(1)
+            df['temp_lag_24'] = df['temperature'].shift(24)
+
+            # Create rolling means
+            df['temp_rolling_mean_6h'] = df['temperature'].rolling(window=6).mean()
+            df['temp_rolling_mean_24h'] = df['temperature'].rolling(window=24).mean()
+
+            features_df = pd.concat([features_df, df])
+
+        return features_df.dropna()

-def create_cesium_component():
+def create_cesium_component(earthquake_data=None, weather_data=None):
+    """Enhanced Cesium 3D visualization"""
     cesium_html = """
     <div id="cesiumContainer" style="width: 100%; height: 600px;"></div>
     <script src="https://cesium.com/downloads/cesiumjs/releases/1.95/Build/Cesium/Cesium.js"></script>
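A quick way to see what the new `create_ml_features` method produces is to run the same shift/rolling steps on a small synthetic hourly series. The sketch below is illustrative only (the 48-hour series and its values are made up and are not part of app.py); it also shows why the method has to end with `dropna()`.

import pandas as pd

# Minimal sketch of the lag/rolling feature step in create_ml_features,
# using a tiny made-up hourly series (values are illustrative only).
df = pd.DataFrame({
    "datetime": pd.date_range("2024-01-01", periods=48, freq="h"),
    "temperature": [20 + (i % 24) * 0.5 for i in range(48)],
})
df["hour"] = df["datetime"].dt.hour
df["temp_lag_1"] = df["temperature"].shift(1)    # value one hour earlier
df["temp_lag_24"] = df["temperature"].shift(24)  # value one day earlier
df["temp_rolling_mean_6h"] = df["temperature"].rolling(window=6).mean()
df["temp_rolling_mean_24h"] = df["temperature"].rolling(window=24).mean()

# The first 24 rows carry NaNs in the lag/rolling columns, which is why the
# method returns features_df.dropna() rather than the raw frame.
print(df.dropna().shape)  # (24, 7)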
@@ -121,24 +72,85 @@ def create_cesium_component():
     <script>
     Cesium.Ion.defaultAccessToken = 'your-access-token';
     const viewer = new Cesium.Viewer('cesiumContainer', {
-        terrainProvider: Cesium.createWorldTerrain()
+        terrainProvider: Cesium.createWorldTerrain(),
+        timeline: true,
+        animation: true,
+        baseLayerPicker: true,
+        scene3DOnly: false,
+        navigationHelpButton: true,
+        navigationInstructionsInitiallyVisible: false,
+        selectionIndicator: true,
+        infoBox: true
     });
+
+    // Add Pakistan terrain
+    viewer.scene.globe.enableLighting = true;
+    viewer.scene.globe.terrainExaggeration = 1.5;
+
+    // Add weather visualization
+    const weatherEntities = new Cesium.CustomDataSource('Weather');
+    viewer.dataSources.add(weatherEntities);
+    """
+
+    # Add earthquake data if available
+    if earthquake_data:
+        cesium_html += """
+        // Add earthquake visualization
+        const earthquakeEntities = new Cesium.CustomDataSource('Earthquakes');
+        """
+        for eq in earthquake_data['features']:
+            coords = eq['geometry']['coordinates']
+            mag = eq['properties']['mag']
+            cesium_html += f"""
+            earthquakeEntities.entities.add({{
+                position: Cesium.Cartesian3.fromDegrees({coords[0]}, {coords[1]}, {coords[2]}),
+                point: {{
+                    pixelSize: {mag * 5},
+                    color: Cesium.Color.RED.withAlpha(0.8),
+                    outlineColor: Cesium.Color.WHITE,
+                    outlineWidth: 2
+                }},
+                description: `Magnitude: {mag}<br>Depth: {coords[2]} km`
+            }});
+            """
+
+    cesium_html += """
     viewer.camera.flyTo({
-        destination: Cesium.Cartesian3.fromDegrees(69.3451, 30.3753, 1000000.0)
+        destination: Cesium.Cartesian3.fromDegrees(69.3451, 30.3753, 1000000.0),
+        orientation: {
+            heading: Cesium.Math.toRadians(0.0),
+            pitch: Cesium.Math.toRadians(-45.0),
+            roll: 0.0
+        }
     });
     </script>
     """
     components.html(cesium_html, height=600)

+def train_weather_model(features_df, city):
+    """Train ML model for weather predictions"""
+    city_data = features_df[features_df['city'] == city].copy()
+
+    # Prepare features
+    feature_cols = ['hour', 'day', 'month', 'day_of_week',
+                    'temp_lag_1', 'temp_lag_24',
+                    'temp_rolling_mean_6h', 'temp_rolling_mean_24h']
+    X = city_data[feature_cols]
+    y = city_data['temperature']
+
+    # Split data
+    split_idx = int(len(X) * 0.8)
+    X_train, X_test = X[:split_idx], X[split_idx:]
+    y_train, y_test = y[:split_idx], y[split_idx:]
+
+    # Train model
+    model = XGBRegressor(n_estimators=100)
+    model.fit(X_train, y_train)
+
+    return model, X_test, y_test

 def show_weather_analysis(data_collector):
-    st.header("Weather Analysis")
+    st.header("Advanced Weather Analysis 🌤️")

     weather_data = data_collector.fetch_weather_data()
     if weather_data:
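One detail in this hunk worth noting: the earthquake entities are injected into the Cesium scene through a Python f-string, so every literal JavaScript brace has to be doubled while single braces interpolate Python values. A standalone sketch of that escaping, with made-up magnitude and coordinates:

# Why the Cesium block doubles its braces: inside an f-string, "{{" and "}}"
# emit literal braces, while single braces interpolate Python expressions.
mag, coords = 4.6, (69.35, 30.38, 10.0)  # illustrative magnitude and lon/lat/depth
snippet = f"""
earthquakeEntities.entities.add({{
    position: Cesium.Cartesian3.fromDegrees({coords[0]}, {coords[1]}, {coords[2]}),
    point: {{ pixelSize: {mag * 5} }}
}});
"""
print(snippet)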
@@ -152,81 +164,294 @@ def show_weather_analysis(data_collector):
         # Add download button for data
         st.markdown(download_csv(city_data['hourly'], f"{selected_city}_weather_data"), unsafe_allow_html=True)

+        tabs = st.tabs(["Temperature Analysis", "Precipitation Insights",
+                        "Wind Patterns", "Humidity Trends", "ML Predictions"])

         with tabs[0]:
+            col1, col2 = st.columns(2)
+            with col1:
+                # Enhanced temperature visualization
+                fig = go.Figure()
+                fig.add_trace(go.Scatter(
+                    x=city_data['hourly']['datetime'],
+                    y=city_data['hourly']['temperature'],
+                    name='Temperature',
+                    line=dict(color='red', width=2)
+                ))
+                fig.update_layout(
+                    title='Temperature Trend with Range',
+                    template='plotly_dark',
+                    hovermode='x unified'
+                )
+                st.plotly_chart(fig, use_container_width=True)
+
+            with col2:
+                # Temperature distribution
+                fig = px.histogram(
+                    city_data['hourly'],
+                    x='temperature',
+                    nbins=30,
+                    title='Temperature Distribution'
+                )
+                st.plotly_chart(fig, use_container_width=True)

         with tabs[1]:
+            # Enhanced precipitation analysis
+            col1, col2 = st.columns(2)
+            with col1:
+                fig = px.bar(
+                    city_data['daily'],
+                    x='date',
+                    y='precipitation_sum',
+                    title='Daily Precipitation',
+                    color='precipitation_sum',
+                    color_continuous_scale='Blues'
+                )
+                st.plotly_chart(fig, use_container_width=True)
+
+            with col2:
+                # Precipitation probability calculation
+                precip_prob = (city_data['hourly']['precipitation'] > 0).mean() * 100
+                st.metric(
+                    "Precipitation Probability",
+                    f"{precip_prob:.1f}%",
+                    delta=f"{precip_prob - 50:.1f}%"
+                )

         with tabs[2]:
+            # Enhanced wind analysis
+            fig = go.Figure()
+            fig.add_trace(go.Scatter(
+                x=city_data['hourly']['datetime'],
+                y=city_data['hourly']['wind_speed'],
+                name='Wind Speed',
+                line=dict(color='blue', width=2)
+            ))
+            fig.add_trace(go.Scatter(
+                x=city_data['hourly']['datetime'],
+                y=city_data['hourly']['wind_speed'].rolling(24).mean(),
+                name='24h Moving Average',
+                line=dict(color='red', width=2, dash='dash')
+            ))
+            fig.update_layout(
+                title='Wind Speed Analysis',
+                template='plotly_dark',
+                hovermode='x unified'
+            )
             st.plotly_chart(fig, use_container_width=True)

         with tabs[3]:
+            # Enhanced humidity analysis
+            col1, col2 = st.columns(2)
+            with col1:
+                fig = px.line(
+                    city_data['hourly'],
+                    x='datetime',
+                    y='humidity',
+                    title='Humidity Trends',
+                    color_discrete_sequence=['green']
+                )
+                st.plotly_chart(fig, use_container_width=True)
+
+            with col2:
+                # Humidity comfort zones
+                comfort_zones = pd.cut(
+                    city_data['hourly']['humidity'],
+                    bins=[0, 30, 45, 60, 100],
+                    labels=['Dry', 'Comfortable', 'Moderate', 'Humid']
+                ).value_counts()
+                fig = px.pie(
+                    values=comfort_zones.values,
+                    names=comfort_zones.index,
+                    title='Humidity Comfort Zones'
+                )
+                st.plotly_chart(fig, use_container_width=True)
+
+        with tabs[4]:
+            st.subheader("Machine Learning Weather Predictions")
+
+            # Prepare data for ML
+            features_df = data_collector.create_ml_features(weather_data)
+            model, X_test, y_test = train_weather_model(features_df, selected_city)
+
+            # Make predictions
+            predictions = model.predict(X_test)
+
+            # Show predictions vs actual
+            fig = go.Figure()
+            fig.add_trace(go.Scatter(
+                x=X_test.index,
+                y=y_test,
+                name='Actual Temperature',
+                line=dict(color='blue')
+            ))
+            fig.add_trace(go.Scatter(
+                x=X_test.index,
+                y=predictions,
+                name='Predicted Temperature',
+                line=dict(color='red', dash='dash')
+            ))
+            fig.update_layout(
+                title='Temperature Predictions vs Actual',
+                template='plotly_dark',
+                hovermode='x unified'
+            )
             st.plotly_chart(fig, use_container_width=True)
+
+            # Model metrics
+            mae = np.mean(np.abs(predictions - y_test))
+            rmse = np.sqrt(np.mean((predictions - y_test)**2))
+
+            col1, col2 = st.columns(2)
+            col1.metric("Mean Absolute Error", f"{mae:.2f}°C")
+            col2.metric("Root Mean Square Error", f"{rmse:.2f}°C")

 def show_disaster_monitor(data_collector):
-    st.header("Disaster Monitoring")
+    st.header("Advanced Disaster Monitoring System 🚨")

     earthquake_data = data_collector.fetch_usgs_earthquake_data()

     if earthquake_data:
-        # 3D visualization
-        create_cesium_component()
+        # Enhanced 3D visualization
+        st.subheader("3D Terrain Analysis")
+        create_cesium_component(earthquake_data)

-        m = folium.Map(location=[30.3753, 69.3451], zoom_start=5)
+        # Advanced earthquake analysis
+        st.subheader("Earthquake Analysis Dashboard")

-        for eq in earthquake_data['features']:
-            coords = eq['geometry']['coordinates']
-            mag = eq['properties']['mag']
-            time = datetime.fromtimestamp(eq['properties']['time']/1000)
-
-            folium.CircleMarker(
-                location=[coords[1], coords[0]],
-                radius=mag * 3,
-                color='red',
-                fill=True,
-                popup=f"Magnitude: {mag}<br>Time: {time}",
-            ).add_to(m)
-
-        folium_static(m)
-
-        # Earthquake data table
-        st.subheader("Recent Earthquakes")
+        # Create DataFrame for analysis
         eq_df = pd.DataFrame([
             {
+                'time': datetime.fromtimestamp(eq['properties']['time']/1000),
+                'magnitude': eq['properties']['mag'],
+                'location': eq['properties']['place'],
+                'depth': eq['geometry']['coordinates'][2],
+                'lat': eq['geometry']['coordinates'][1],
+                'lon': eq['geometry']['coordinates'][0]
             }
             for eq in earthquake_data['features']
         ])
+
+        col1, col2 = st.columns(2)
+
+        with col1:
+            # Magnitude distribution
+            fig = px.histogram(
+                eq_df,
+                x='magnitude',
+                nbins=20,
+                title='Earthquake Magnitude Distribution',
+                color_discrete_sequence=['red']
+            )
+            st.plotly_chart(fig, use_container_width=True)
+
+        with col2:
+            # Depth vs Magnitude scatter
+            fig = px.scatter(
+                eq_df,
+                x='depth',
+                y='magnitude',
+                title='Depth vs Magnitude',
+                color='magnitude',
+                size='magnitude',
+                color_continuous_scale='Viridis'
+            )
+            st.plotly_chart(fig, use_container_width=True)
+
+        # Time series analysis
+        st.subheader("Temporal Analysis")
+        eq_df['date'] = eq_df['time'].dt.date
+        daily_counts = eq_df.groupby('date').size().reset_index(name='count')
+
+        fig = px.line(
+            daily_counts,
+            x='date',
+            y='count',
+            title='Daily Earthquake Frequency',
+            line_shape='spline'
+        )
+        st.plotly_chart(fig, use_container_width=True)
+
+        # Risk assessment
+        st.subheader("Seismic Risk Assessment")
+        risk_zones = folium.Map(location=[30.3753, 69.3451], zoom_start=5)
+
+        # Create heatmap layer
+        heat_data = [[row['lat'], row['lon'], row['magnitude']] for _, row in eq_df.iterrows()]
+        folium.plugins.HeatMap(heat_data).add_to(risk_zones)
+
+        # Add fault lines (simplified example)
+        fault_lines = {
+            'Main Boundary Thrust': [[34.0151, 71.5249], [33.7294, 73.0931]],
+            'Chaman Fault': [[30.1798, 66.9750], [25.1216, 62.3254]],
+        }
+
+        for name, coords in fault_lines.items():
+            folium.PolyLine(
+                coords,
+                color='red',
+                weight=2,
+                popup=name
+            ).add_to(risk_zones)
+
+        folium_static(risk_zones)
+
+        # Earthquake prediction model
+        st.subheader("Seismic Activity Prediction")
+
+        # Prepare time series data for prediction
+        daily_counts['ds'] = pd.to_datetime(daily_counts['date'])
+        daily_counts['y'] = daily_counts['count']
+
+        # Train Prophet model
+        model = Prophet(yearly_seasonality=True, weekly_seasonality=True)
+        model.fit(daily_counts[['ds', 'y']])
+
+        # Make future predictions
+        future_dates = model.make_future_dataframe(periods=30)
+        forecast = model.predict(future_dates)
+
+        # Plot predictions
+        fig = go.Figure()
+        fig.add_trace(go.Scatter(
+            x=daily_counts['ds'],
+            y=daily_counts['y'],
+            name='Actual',
+            line=dict(color='blue')
+        ))
+        fig.add_trace(go.Scatter(
+            x=forecast['ds'],
+            y=forecast['yhat'],
+            name='Predicted',
+            line=dict(color='red', dash='dash')
+        ))
+        fig.add_trace(go.Scatter(
+            x=forecast['ds'],
+            y=forecast['yhat_upper'],
+            fill=None,
+            mode='lines',
+            line=dict(color='rgba(255,0,0,0)'),
+            showlegend=False
+        ))
+        fig.add_trace(go.Scatter(
+            x=forecast['ds'],
+            y=forecast['yhat_lower'],
+            fill='tonexty',
+            mode='lines',
+            line=dict(color='rgba(255,0,0,0)'),
+            name='Prediction Interval'
+        ))
+        fig.update_layout(
+            title='Seismic Activity Forecast (30 Days)',
+            xaxis_title='Date',
+            yaxis_title='Number of Earthquakes',
+            template='plotly_dark'
+        )
+        st.plotly_chart(fig, use_container_width=True)

 def show_environmental_data(data_collector):
+    st.header("Advanced Environmental Analysis 🌿")

     aqi_data = data_collector.fetch_air_quality_data()

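The "ML Predictions" tab relies on the chronological 80/20 split from `train_weather_model`, which keeps every test hour after the training hours; with lagged and rolling features in play, a shuffled split would let neighbouring hours leak across the boundary. A self-contained sketch of that split and of the MAE/RMSE computation, on synthetic data rather than the app's real feature frame:

import numpy as np
import pandas as pd
from xgboost import XGBRegressor

# Illustrative only: fake features and a fake target standing in for the
# feature frame built by create_ml_features.
rng = np.random.default_rng(0)
n = 200
X = pd.DataFrame({"hour": np.arange(n) % 24, "temp_lag_1": rng.normal(25, 3, n)})
y = X["temp_lag_1"] * 0.8 + X["hour"] * 0.1 + rng.normal(0, 0.5, n)

split_idx = int(len(X) * 0.8)          # chronological split, no shuffling
X_train, X_test = X[:split_idx], X[split_idx:]
y_train, y_test = y[:split_idx], y[split_idx:]

model = XGBRegressor(n_estimators=100)
model.fit(X_train, y_train)
predictions = model.predict(X_test)

mae = np.mean(np.abs(predictions - y_test))
rmse = np.sqrt(np.mean((predictions - y_test) ** 2))
print(f"MAE={mae:.2f}  RMSE={rmse:.2f}")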
@@ -237,60 +462,180 @@ def show_environmental_data(data_collector):
         # Add download button
         st.markdown(download_csv(city_data, f"{selected_city}_air_quality_data"), unsafe_allow_html=True)

+        # Enhanced AQI calculation with weights
+        weights = {
+            'PM2.5': 0.3,
+            'PM10': 0.2,
+            'NO2': 0.2,
+            'O3': 0.2,
+            'CO': 0.1
+        }
+
+        # Normalize and calculate weighted AQI
+        for pollutant in weights.keys():
+            max_val = city_data[pollutant].max()
+            city_data[f'{pollutant}_normalized'] = city_data[pollutant] / max_val * 100
+            city_data[f'{pollutant}_weighted'] = city_data[f'{pollutant}_normalized'] * weights[pollutant]

+        city_data['AQI'] = sum(city_data[f'{p}_weighted'] for p in weights.keys())
+
+        tabs = st.tabs(["AQI Dashboard", "Pollutant Analysis", "Trends & Forecasting", "Health Impact"])

         with tabs[0]:
+            col1, col2, col3 = st.columns(3)
+
             current_aqi = city_data['AQI'].iloc[-1]
+            with col1:
+                st.metric(
+                    "Current AQI",
+                    f"{current_aqi:.1f}",
+                    delta=f"{current_aqi - city_data['AQI'].iloc[-2]:.1f}"
+                )

+            # AQI categories
+            aqi_categories = pd.cut(
+                city_data['AQI'],
+                bins=[0, 50, 100, 150, 200, 300, float('inf')],
+                labels=['Good', 'Moderate', 'Unhealthy for Sensitive Groups', 'Unhealthy', 'Very Unhealthy', 'Hazardous']
+            ).value_counts()
+
+            with col2:
+                fig = px.pie(
+                    values=aqi_categories.values,
+                    names=aqi_categories.index,
+                    title='AQI Distribution'
+                )
+                st.plotly_chart(fig, use_container_width=True)
+
+            with col3:
+                # Daily pattern
+                hourly_avg = city_data.groupby(city_data['datetime'].dt.hour)['AQI'].mean()
+                fig = px.line(
+                    x=hourly_avg.index,
+                    y=hourly_avg.values,
+                    title='Daily AQI Pattern',
+                    labels={'x': 'Hour of Day', 'y': 'Average AQI'}
+                )
+                st.plotly_chart(fig, use_container_width=True)

         with tabs[1]:
+            # Pollutant correlation analysis
             pollutants = ['PM2.5', 'PM10', 'CO', 'NO2', 'O3']
+            corr_matrix = city_data[pollutants].corr()

+            fig = px.imshow(
+                corr_matrix,
+                title='Pollutant Correlation Matrix',
+                color_continuous_scale='RdBu'
+            )
             st.plotly_chart(fig, use_container_width=True)
+
+            # Individual pollutant analysis
+            selected_pollutant = st.selectbox("Select Pollutant", pollutants)
+
+            col1, col2 = st.columns(2)
+            with col1:
+                fig = px.line(
+                    city_data,
+                    x='datetime',
+                    y=selected_pollutant,
+                    title=f'{selected_pollutant} Trend'
+                )
+                st.plotly_chart(fig, use_container_width=True)
+
+            with col2:
+                fig = px.box(
+                    city_data,
+                    y=selected_pollutant,
+                    title=f'{selected_pollutant} Distribution'
+                )
+                st.plotly_chart(fig, use_container_width=True)

         with tabs[2]:
+            # Time series decomposition
+            from statsmodels.tsa.seasonal import seasonal_decompose

+            # Resample to hourly data for decomposition
+            hourly_data = city_data.set_index('datetime')['AQI'].resample('H').mean()
+            decomposition = seasonal_decompose(hourly_data, period=24)
+
+            fig = make_subplots(rows=4, cols=1, subplot_titles=('Observed', 'Trend', 'Seasonal', 'Residual'))
+            fig.add_trace(go.Scatter(x=hourly_data.index, y=hourly_data.values, name='Observed'), row=1, col=1)
+            fig.add_trace(go.Scatter(x=hourly_data.index, y=decomposition.trend, name='Trend'), row=2, col=1)
+            fig.add_trace(go.Scatter(x=hourly_data.index, y=decomposition.seasonal, name='Seasonal'), row=3, col=1)
+            fig.add_trace(go.Scatter(x=hourly_data.index, y=decomposition.resid, name='Residual'), row=4, col=1)
+            fig.update_layout(height=800, title_text="AQI Time Series Decomposition")
             st.plotly_chart(fig, use_container_width=True)
+
+        with tabs[3]:
+            st.subheader("Health Impact Assessment")
+
+            # Define health impact thresholds
+            impact_thresholds = {
+                'PM2.5': [12, 35.4, 55.4, 150.4],
+                'PM10': [54, 154, 254, 354],
+                'NO2': [53, 100, 360, 649],
+                'O3': [54, 70, 85, 105],
+                'CO': [4.4, 9.4, 12.4, 15.4]
+            }
+
+            # Calculate current health risks
+            current_risks = {}
+            for pollutant, thresholds in impact_thresholds.items():
+                current_val = city_data[pollutant].iloc[-1]
+                if current_val <= thresholds[0]:
+                    risk = 'Low'
+                elif current_val <= thresholds[1]:
+                    risk = 'Moderate'
+                elif current_val <= thresholds[2]:
+                    risk = 'High'
+                else:
+                    risk = 'Very High'
+                current_risks[pollutant] = {'value': current_val, 'risk': risk}
+
+            # Display health risks
+            col1, col2 = st.columns(2)
+            with col1:
+                for pollutant, data in current_risks.items():
+                    st.metric(
+                        f"{pollutant} Health Risk",
+                        data['risk'],
+                        f"{data['value']:.1f}"
+                    )
+
+            with col2:
+                # Health recommendations based on current AQI
+                if current_aqi <= 50:
+                    st.success("Air quality is good. Outdoor activities are safe.")
+                elif current_aqi <= 100:
+                    st.info("Sensitive individuals should consider reducing prolonged outdoor exertion.")
+                elif current_aqi <= 150:
+                    st.warning("Everyone should reduce prolonged outdoor exertion.")
+                else:
+                    st.error("Avoid outdoor activities. Use air purifiers indoors.")

 def main():
-    st.title("Pakistan Climate & Disaster Monitoring System")
+    st.title("🌍 Pakistan Climate & Disaster Monitoring System")
+
+    # Add dashboard info
+    st.sidebar.image("https://upload.wikimedia.org/wikipedia/commons/3/32/Flag_of_Pakistan.svg", width=100)
+    st.sidebar.title("Dashboard Controls")

     data_collector = DataCollector()

+    # Enhanced navigation
+    page = st.sidebar.radio(
         "Select Module",
-        ["Weather Analysis", "Disaster Monitor", "Environmental Data"]
+        ["Weather Analysis", "Disaster Monitor", "Environmental Data"],
+        format_func=lambda x: f"🌍 {x}" if x == "Weather Analysis" else
+                              f"🚨 {x}" if x == "Disaster Monitor" else
+                              f"🌿 {x}"
     )

+    # Add data timestamp
+    st.sidebar.markdown("---")
+    st.sidebar.markdown(f"Last updated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
+
     if page == "Weather Analysis":
         show_weather_analysis(data_collector)
     elif page == "Disaster Monitor":
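The weighted AQI added in the last hunk is a relative index: each pollutant is scaled by its own maximum within the displayed window, then the weighted columns are summed, so the result lives on a 0-100 scale tied to that window rather than to the EPA breakpoint formula. A toy sketch of the same arithmetic (concentration values are made up; the weights mirror the diff):

import pandas as pd

# Toy sketch of the weighted-AQI step in show_environmental_data.
weights = {'PM2.5': 0.3, 'PM10': 0.2, 'NO2': 0.2, 'O3': 0.2, 'CO': 0.1}
city_data = pd.DataFrame({
    'PM2.5': [10, 40, 80], 'PM10': [20, 60, 120],
    'NO2': [15, 30, 45], 'O3': [30, 50, 70], 'CO': [0.4, 0.8, 1.2],
})

for pollutant, w in weights.items():
    normalized = city_data[pollutant] / city_data[pollutant].max() * 100
    city_data[f'{pollutant}_weighted'] = normalized * w

city_data['AQI'] = sum(city_data[f'{p}_weighted'] for p in weights)
print(city_data['AQI'])  # 0-100, relative to each pollutant's max in the window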