Spaces:
Running
Running
Add health check for the virtual try-on model
Browse files- app.py +5 -0
- mcp_host/ui.py +2 -2
- utils.py +13 -0
app.py
CHANGED
|
@@ -1,8 +1,11 @@
|
|
| 1 |
from __future__ import annotations
|
| 2 |
|
| 3 |
import os
|
|
|
|
| 4 |
from typing import TYPE_CHECKING, Any
|
| 5 |
|
|
|
|
|
|
|
| 6 |
|
| 7 |
IS_HF_ZERO_GPU = os.getenv("SPACE_ID", "").startswith("sitatech/")
|
| 8 |
IS_LOCAL = os.getenv("LOCALE_RUN") is not None
|
|
@@ -146,6 +149,8 @@ def set_client_for_session(request: gr.Request):
|
|
| 146 |
raise WebRTCError(
|
| 147 |
f"Inference server is not available. Status code: {health_check_response.status_code}"
|
| 148 |
)
|
|
|
|
|
|
|
| 149 |
|
| 150 |
if not vibe_shopping_agent.clients_connected:
|
| 151 |
vibe_shopping_agent.connect_clients()
|
|
|
|
| 1 |
from __future__ import annotations
|
| 2 |
|
| 3 |
import os
|
| 4 |
+
import threading
|
| 5 |
from typing import TYPE_CHECKING, Any
|
| 6 |
|
| 7 |
+
from utils import health_check_virtual_try_model
|
| 8 |
+
|
| 9 |
|
| 10 |
IS_HF_ZERO_GPU = os.getenv("SPACE_ID", "").startswith("sitatech/")
|
| 11 |
IS_LOCAL = os.getenv("LOCALE_RUN") is not None
|
|
|
|
| 149 |
raise WebRTCError(
|
| 150 |
f"Inference server is not available. Status code: {health_check_response.status_code}"
|
| 151 |
)
|
| 152 |
+
|
| 153 |
+
threading.Thread(target=health_check_virtual_try_model).start()
|
| 154 |
|
| 155 |
if not vibe_shopping_agent.clients_connected:
|
| 156 |
vibe_shopping_agent.connect_clients()
|
mcp_host/ui.py
CHANGED
|
@@ -67,7 +67,7 @@ def ProductList(products: list[dict[str, str]]):
|
|
| 67 |
object-fit: contain;
|
| 68 |
border-radius: 8px 8px 0 0;
|
| 69 |
" />
|
| 70 |
-
<div style="width: 100%; padding: 8px; display: flex; flex-direction: column; align-items: center; justify-content: space-between; height:
|
| 71 |
<h5 style="
|
| 72 |
opacity: 0.7;
|
| 73 |
margin: 0 5px;
|
|
@@ -90,7 +90,7 @@ def ProductList(products: list[dict[str, str]]):
|
|
| 90 |
display: grid;
|
| 91 |
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
| 92 |
gap: 0.8rem;
|
| 93 |
-
padding:
|
| 94 |
height: 600px;
|
| 95 |
width: 100%;
|
| 96 |
overflow-y: auto;
|
|
|
|
| 67 |
object-fit: contain;
|
| 68 |
border-radius: 8px 8px 0 0;
|
| 69 |
" />
|
| 70 |
+
<div style="width: 100%; padding: 8px; display: flex; flex-direction: column; align-items: center; justify-content: space-between; height: 52px;">
|
| 71 |
<h5 style="
|
| 72 |
opacity: 0.7;
|
| 73 |
margin: 0 5px;
|
|
|
|
| 90 |
display: grid;
|
| 91 |
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
| 92 |
gap: 0.8rem;
|
| 93 |
+
padding: 0.5rem 0;
|
| 94 |
height: 600px;
|
| 95 |
width: 100%;
|
| 96 |
overflow-y: auto;
|
utils.py
CHANGED
|
@@ -1,6 +1,8 @@
|
|
| 1 |
import os
|
| 2 |
from io import BytesIO
|
| 3 |
from PIL import Image
|
|
|
|
|
|
|
| 4 |
|
| 5 |
|
| 6 |
class ImageUploader:
|
|
@@ -49,3 +51,14 @@ def pil_to_bytes(image, format: str = "PNG") -> bytes:
|
|
| 49 |
def get_hf_space_file_url_prefix() -> str:
|
| 50 |
space_host = os.getenv("SPACE_HOST")
|
| 51 |
return f"https://{space_host}/gradio_api/file="
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import os
|
| 2 |
from io import BytesIO
|
| 3 |
from PIL import Image
|
| 4 |
+
from fastrtc import WebRTCError
|
| 5 |
+
import requests
|
| 6 |
|
| 7 |
|
| 8 |
class ImageUploader:
|
|
|
|
| 51 |
def get_hf_space_file_url_prefix() -> str:
    """Build the base URL for files served through this HF Space's Gradio API.

    Reads the ``SPACE_HOST`` environment variable (set by Hugging Face at
    runtime) and returns the ``/gradio_api/file=`` prefix that file paths
    are appended to.
    """
    host = os.getenv("SPACE_HOST")
    return "https://{}/gradio_api/file=".format(host)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def health_check_virtual_try_model():
    """Ping the virtual try-on inference server and fail loudly if it is down.

    Sends a GET to the Modal health-check endpoint. Any network error or
    non-2xx status is logged and surfaced to the UI as a ``WebRTCError`` so
    the user knows the try-on backend is unavailable. Intended to run in a
    background thread (see the ``threading.Thread`` call site in app.py).

    Raises:
        WebRTCError: if the health-check request fails for any reason.
    """
    try:
        # Bounded timeout so a stalled server cannot hang this thread
        # forever — requests.get has NO default timeout. 60s allows for a
        # Modal cold start while still bounding the wait.
        virtual_try_hc_response = requests.get(
            "https://sita-berete-3-vibe-shopping--health-check.modal.run",
            timeout=60,
        )
        virtual_try_hc_response.raise_for_status()
    except Exception as e:
        print(f"Virtual try-on model health check failed: {e}")
        # Chain the original exception so server logs keep the root cause.
        raise WebRTCError("Error: Virtual try-on server failed to start") from e
|