Update app.py
app.py CHANGED
@@ -25,8 +25,8 @@ from pathlib import Path
 
 from huggingface_hub import hf_hub_download
 
-# Add parent directory to path to allow importing from utils
-sys.path.insert(0, os.path.dirname(os.path.
+# Add parent directory to path to allow importing from utils - updated for new structure
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
 
 # Import utilities
 from utils.image_processing import process_image, batch_process_images
@@ -34,6 +34,11 @@ from utils.file_utils import save_tags_to_file, get_default_save_locations
 from utils.ui_components import display_progress_bar, show_example_images, display_batch_results
 from utils.onnx_processing import batch_process_images_onnx
 
+# Add environment variables for HF Spaces permissions
+os.environ['MPLCONFIGDIR'] = '/tmp/matplotlib'
+os.environ['HF_HOME'] = '/tmp/huggingface'
+os.environ['TRANSFORMERS_CACHE'] = '/tmp/transformers'
+
 # Constants - matching your v1 pattern
 MODEL_REPO = "Camais03/camie-tagger-v2"
 ONNX_MODEL_FILE = "camie-tagger-v2.onnx"
@@ -42,25 +47,65 @@ METADATA_FILE = "camie-tagger-v2-metadata.json"
 VALIDATION_FILE = "full_validation_results.json"
 
 def get_model_files():
-    """Download model files from HF Hub and return paths -
+    """Download model files from HF Hub and return paths - optimized for HF Spaces"""
     try:
-        # Use /tmp directory
-        cache_dir = "/tmp/
+        # Use smaller /tmp directory and be more careful with large files
+        cache_dir = "/tmp/hf_cache"
         os.makedirs(cache_dir, exist_ok=True)
 
-        # Download
-
-
+        # Download metadata first (small file)
+        metadata_path = hf_hub_download(
+            repo_id=MODEL_REPO,
+            filename=METADATA_FILE,
+            cache_dir=cache_dir,
+            resume_download=True  # Allow resuming if interrupted
+        )
+
+        # Try streaming download for large ONNX file
+        try:
+            onnx_path = hf_hub_download(
+                repo_id=MODEL_REPO,
+                filename=ONNX_MODEL_FILE,
+                cache_dir=cache_dir,
+                resume_download=True,
+                force_download=False  # Use cached version if available
+            )
+        except Exception as e:
+            print(f"ONNX download failed: {e}")
+            # Fallback: try direct URL download with requests
+            import requests
+            onnx_url = f"https://huggingface.co/{MODEL_REPO}/resolve/main/{ONNX_MODEL_FILE}"
+            onnx_path = os.path.join(cache_dir, ONNX_MODEL_FILE)
+
+            print(f"Trying direct download from: {onnx_url}")
+            response = requests.get(onnx_url, stream=True)
+            response.raise_for_status()
+
+            with open(onnx_path, 'wb') as f:
+                for chunk in response.iter_content(chunk_size=8192):
+                    if chunk:
+                        f.write(chunk)
+            print(f"Direct download successful: {onnx_path}")
 
-        # Try
+        # Try optional files
         try:
-            safetensors_path = hf_hub_download(
+            safetensors_path = hf_hub_download(
+                repo_id=MODEL_REPO,
+                filename=SAFETENSORS_MODEL_FILE,
+                cache_dir=cache_dir,
+                resume_download=True
+            )
         except Exception as e:
             print(f"SafeTensors model not available: {e}")
             safetensors_path = None
 
         try:
-            validation_path = hf_hub_download(
+            validation_path = hf_hub_download(
+                repo_id=MODEL_REPO,
+                filename=VALIDATION_FILE,
+                cache_dir=cache_dir,
+                resume_download=True
+            )
         except Exception as e:
            print(f"Validation results not available: {e}")
            validation_path = None
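For reference, below is a minimal standalone sketch of the download pattern this change introduces in get_model_files(): try hf_hub_download first, then fall back to a streamed direct download with requests. It is illustrative only and not part of the commit; the helper name download_with_fallback and the timeout value are assumptions, while the repo id, filenames, and cache directory come from the constants in the diff.

import os
import requests
from huggingface_hub import hf_hub_download

def download_with_fallback(repo_id: str, filename: str, cache_dir: str) -> str:
    """Try hf_hub_download first; fall back to a streamed direct download."""
    os.makedirs(cache_dir, exist_ok=True)
    try:
        # Preferred path: huggingface_hub handles caching and resumable downloads.
        return hf_hub_download(repo_id=repo_id, filename=filename, cache_dir=cache_dir)
    except Exception as exc:
        print(f"hf_hub_download failed ({exc}); falling back to direct download")
        url = f"https://huggingface.co/{repo_id}/resolve/main/{filename}"
        local_path = os.path.join(cache_dir, filename)
        with requests.get(url, stream=True, timeout=60) as response:
            response.raise_for_status()
            with open(local_path, "wb") as fh:
                # Stream in 8 KiB chunks so the large ONNX file never sits fully in memory.
                for chunk in response.iter_content(chunk_size=8192):
                    if chunk:
                        fh.write(chunk)
        return local_path

# Example usage with the constants from the diff:
# onnx_path = download_with_fallback("Camais03/camie-tagger-v2", "camie-tagger-v2.onnx", "/tmp/hf_cache")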