Upload 2 files
- app.py +2 -4
- llava_inference.py +4 -3
app.py
CHANGED
@@ -36,10 +36,8 @@ demo = gr.Interface(
     outputs=gr.Textbox(label="Answer"),
     title="UK Public Transport Assistant",
     description="Upload an image of UK public transport signage (like train timetables or metro maps), and ask a question related to it. Powered by LLaVA-1.5.",
-    examples=[
-
-        ["assets/example.jpg", "Where is platform 3?"] if os.path.exists("assets/example.jpg") else None
-    ]
+    # Fix examples format - either provide valid examples or set to None
+    examples=None  # Remove examples to avoid format errors
 )

 if __name__ == "__main__":
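For reference, gr.Interface expects examples to be a list of rows with one value per input component, and a None entry inside that list is the kind of thing the "format errors" comment refers to. Below is a minimal sketch of a restored, guarded example; the answer_question function and the input components are placeholders rather than the Space's actual code.

import os
import gradio as gr

def answer_question(image_path, question):
    # Placeholder handler; the real Space would call its LLaVA helper here.
    return "..."

# Guard the whole list, so a missing asset yields examples=None
# rather than a None row inside the list.
example_rows = [["assets/example.jpg", "Where is platform 3?"]]

demo = gr.Interface(
    fn=answer_question,
    inputs=[gr.Image(type="filepath"), gr.Textbox(label="Question")],
    outputs=gr.Textbox(label="Answer"),
    examples=example_rows if os.path.exists("assets/example.jpg") else None,
)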
llava_inference.py
CHANGED
@@ -30,10 +30,11 @@ class LLaVAHelper:
             legacy=True
         )

-        # Load model with same cache directory
+        # Load model with same cache directory - fix parameter order
         self.model, self.image_processor, _ = load_pretrained_model(
-            model_name,
-
+            model_path=model_name,
+            model_name=model_name,  # Add the missing model_name parameter
+            model_base=None,
             cache_dir="./model_cache",
             load_8bit=False,
             load_4bit=False,
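For context, a sketch of the same loader call written against the upstream LLaVA API, assuming load_pretrained_model here is the one from llava.model.builder; that upstream version returns four values (tokenizer, model, image_processor, context_len), and the model id below is a placeholder.

from llava.model.builder import load_pretrained_model
from llava.mm_utils import get_model_name_from_path

model_path = "liuhaotian/llava-v1.5-7b"  # placeholder model id, not from this Space

# Passing everything by keyword avoids the positional mix-up the commit fixes:
# model_path, model_base and model_name are all strings and easy to swap.
tokenizer, model, image_processor, context_len = load_pretrained_model(
    model_path=model_path,
    model_base=None,
    model_name=get_model_name_from_path(model_path),
    load_8bit=False,
    load_4bit=False,
)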