Spaces: Running on Zero
hengli committed
Commit · d8b386f
1 Parent(s): a8f42dc

update hf link
app.py CHANGED
@@ -141,8 +141,7 @@ def run_model(target_dir, model, anchor_size=100) -> dict:
 
     # Clean up
     torch.cuda.empty_cache()
-
-    print(f"{k}: {v.shape if isinstance(v, np.ndarray) else type(v)}")
+
     return predictions
 
 
@@ -271,7 +270,8 @@ def gradio_demo(
     print("Running run_model...")
     with torch.no_grad():
         predictions = run_model(target_dir, model)
-
+    for k,v in predictions.items():
+        print(f"{k}: {v.shape if isinstance(v, np.ndarray) else type(v)}")
     # Save predictions
     prediction_save_path = os.path.join(target_dir, "predictions.npz")
     np.savez(prediction_save_path, **predictions)
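For context, the lines added in gradio_demo follow a simple debugging pattern: iterate over the predictions dictionary returned by run_model, print each array's shape (or the value's type when it is not an ndarray), and then persist everything with np.savez. Below is a minimal, self-contained sketch of that pattern; the keys, shapes, and the target_dir are made-up stand-ins, not the app's real outputs.

import os
import numpy as np

# Stand-in for the dict returned by run_model(); the real keys, shapes,
# and dtypes come from the model and are only illustrative here.
predictions = {
    "depth": np.zeros((2, 480, 640), dtype=np.float32),
    "extrinsics": np.eye(4, dtype=np.float32),
    "num_frames": 2,  # a non-array value, to exercise the type() branch
}

# Debug printout, mirroring the loop added in gradio_demo()
for k, v in predictions.items():
    print(f"{k}: {v.shape if isinstance(v, np.ndarray) else type(v)}")

# Save predictions to an .npz archive, as in the diff
target_dir = "."  # placeholder; the app uses its own per-run target_dir
prediction_save_path = os.path.join(target_dir, "predictions.npz")
np.savez(prediction_save_path, **predictions)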