Spaces:
Running
on
Zero
Running
on
Zero
Vladyslav Humennyy
Claude
committed on
Commit
·
e6d0602
1
Parent(s):
f5ccf3a
Fix Gradio validation and empty history errors
Browse files
- Filter invalid alt_text fields in _clean_history_for_display to prevent Pydantic validation errors
- Add empty history check before apply_chat_template to prevent IndexError
- Add early return when no valid input exists
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <[email protected]>
app.py
CHANGED
|
@@ -274,15 +274,28 @@ def _clean_history_for_display(history: list[dict[str, Any]]) -> list[dict[str,
|
|
| 274 |
cleaned_content = []
|
| 275 |
for item in content:
|
| 276 |
if isinstance(item, dict):
|
| 277 |
-
# Remove _pil_image
|
| 278 |
-
cleaned_item = {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 279 |
cleaned_content.append(cleaned_item)
|
| 280 |
else:
|
| 281 |
cleaned_content.append(item)
|
| 282 |
cleaned_message["content"] = cleaned_content
|
| 283 |
elif isinstance(content, dict):
|
| 284 |
-
# Remove _pil_image
|
| 285 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 286 |
else:
|
| 287 |
cleaned_message["content"] = content
|
| 288 |
|
|
@@ -312,18 +325,26 @@ def bot(
|
|
| 312 |
|
| 313 |
text_history = _prepare_text_history(history)
|
| 314 |
|
| 315 |
-
|
| 316 |
-
|
| 317 |
-
|
| 318 |
-
|
| 319 |
-
|
| 320 |
-
|
| 321 |
-
|
| 322 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 323 |
input_text = input_text.replace(tokenizer.bos_token, "", 1)
|
| 324 |
print(input_text)
|
| 325 |
model_inputs = None
|
| 326 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 327 |
if processor is not None and any(_message_contains_image(msg) for msg in history):
|
| 328 |
try:
|
| 329 |
processor_history = _prepare_processor_history(history)
|
|
|
|
| 274 |
cleaned_content = []
|
| 275 |
for item in content:
|
| 276 |
if isinstance(item, dict):
|
| 277 |
+
# Remove _pil_image and ensure alt_text is string or absent
|
| 278 |
+
cleaned_item = {}
|
| 279 |
+
for k, v in item.items():
|
| 280 |
+
if k == "_pil_image":
|
| 281 |
+
continue
|
| 282 |
+
if k == "alt_text" and not isinstance(v, str):
|
| 283 |
+
continue
|
| 284 |
+
cleaned_item[k] = v
|
| 285 |
cleaned_content.append(cleaned_item)
|
| 286 |
else:
|
| 287 |
cleaned_content.append(item)
|
| 288 |
cleaned_message["content"] = cleaned_content
|
| 289 |
elif isinstance(content, dict):
|
| 290 |
+
# Remove _pil_image and ensure alt_text is string or absent
|
| 291 |
+
cleaned_item = {}
|
| 292 |
+
for k, v in content.items():
|
| 293 |
+
if k == "_pil_image":
|
| 294 |
+
continue
|
| 295 |
+
if k == "alt_text" and not isinstance(v, str):
|
| 296 |
+
continue
|
| 297 |
+
cleaned_item[k] = v
|
| 298 |
+
cleaned_message["content"] = cleaned_item
|
| 299 |
else:
|
| 300 |
cleaned_message["content"] = content
|
| 301 |
|
|
|
|
| 325 |
|
| 326 |
text_history = _prepare_text_history(history)
|
| 327 |
|
| 328 |
+
# Handle empty history case
|
| 329 |
+
if not text_history:
|
| 330 |
+
input_text = ""
|
| 331 |
+
else:
|
| 332 |
+
input_text: str = tokenizer.apply_chat_template(
|
| 333 |
+
text_history,
|
| 334 |
+
tokenize=False,
|
| 335 |
+
add_generation_prompt=True,
|
| 336 |
+
# enable_thinking=True,
|
| 337 |
+
)
|
| 338 |
+
|
| 339 |
+
if input_text and tokenizer.bos_token:
|
| 340 |
input_text = input_text.replace(tokenizer.bos_token, "", 1)
|
| 341 |
print(input_text)
|
| 342 |
model_inputs = None
|
| 343 |
|
| 344 |
+
# Early return if no input
|
| 345 |
+
if not input_text and not any(_message_contains_image(msg) for msg in history):
|
| 346 |
+
return
|
| 347 |
+
|
| 348 |
if processor is not None and any(_message_contains_image(msg) for msg in history):
|
| 349 |
try:
|
| 350 |
processor_history = _prepare_processor_history(history)
|