Update app.py
app.py CHANGED
@@ -83,7 +83,7 @@ def generate_activations(image):
         result.scatter_(-1, topk.indices, topk.values)
         cached_list.append(result.detach().cpu())
         topk_indices = (
-            latents.squeeze(0).mean(dim=0).topk(k=
+            latents.squeeze(0).mean(dim=0).topk(k=2000).indices.detach().cpu()
         )
 
     handles = [hooked_module.register_forward_hook(hook)]
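For readers skimming this hunk: `generate_activations` registers a forward hook that sparsifies the SAE latents with `topk` + `scatter_`, then ranks features by their mean activation over tokens; the change sets the image-level `k` to 2000. A minimal runnable sketch of that pattern, with a toy module and toy shapes standing in for the Space's real ones:

```python
import torch
import torch.nn as nn

# Hypothetical stand-ins: the real Space hooks an SAE over LLaVA
# activations; here a small Linear plays the hooked module.
hooked_module = nn.Linear(32, 128)
cached_list = []

def hook(module, inputs, outputs):
    latents = outputs
    # Keep only the k largest latents per token and zero the rest,
    # as the topk + scatter_ lines in the hunk do.
    topk = latents.topk(k=8, dim=-1)
    result = torch.zeros_like(latents)
    result.scatter_(-1, topk.indices, topk.values)
    cached_list.append(result.detach().cpu())

handles = [hooked_module.register_forward_hook(hook)]
try:
    latents = hooked_module(torch.randn(1, 5, 32))  # (batch, tokens, features)
    # Rank features for the whole image by mean activation over tokens;
    # the updated line does the same with k=2000.
    topk_indices = latents.squeeze(0).mean(dim=0).topk(k=16).indices.detach().cpu()
finally:
    for h in handles:
        h.remove()  # detach hooks when done, mirroring the handles list above

print(cached_list[0].shape, topk_indices.shape)
```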
@@ -209,7 +209,7 @@ with gr.Blocks() as demo:
         """
         # Large Multi-modal Models Can Interpret Features in Large Multi-modal Models
 
-        [ArXiv Paper](https://arxiv.org/abs/2411.14982) | [LMMs-Lab Homepage](https://lmms-lab.framer.ai) | 🤗 [Huggingface Collections](https://huggingface.co/collections/lmms-lab/llava-sae-674026e4e7bc8c29c70bc3a3) | [GitHub Repo](https://github.com/EvolvingLMMs-Lab/multimodal-sae)
+        [A Database for Interpreted 5K Features](https://huggingface.co/datasets/lmms-lab/llava-sae-explanations-5k) | [ArXiv Paper](https://arxiv.org/abs/2411.14982) | [LMMs-Lab Homepage](https://lmms-lab.framer.ai) | 🤗 [Huggingface Collections](https://huggingface.co/collections/lmms-lab/llava-sae-674026e4e7bc8c29c70bc3a3) | [GitHub Repo](https://github.com/EvolvingLMMs-Lab/multimodal-sae)
         """
     )
     with gr.Accordion("ℹ️ Instructions", open=False):
@@ -234,13 +234,17 @@ with gr.Blocks() as demo:
 
     dummy_text = gr.Textbox(visible=False, label="Explanation")
     gr.Examples(
-        [
+        [   ["assets/eyes.png", 2274, "Eyes"],
            ["assets/sunglasses.jpg", 10, "Sunglasses"],
-           ["assets/greedy.jpg", 14, "Greedy eating"],
-           ["assets/railway.jpg", 28, "Railway tracks"],
            ["assets/bird.png", 1803, "The seagull feathers."],
-
+
+           ["assets/railway.jpg", 28, "Railway tracks"],
+           ["assets/sunglasses.jpg", 1085, "Human Faces"],
            ["assets/monkey.png", 2692, "Monkey"],
+
+           ["assets/greedy.jpg", 14, "Greedy eating"],
+           ["assets/happy.jpg", 19379, "Happy"],
+           ["assets/sad.jpg", 108692, "Sad"],
         ],
         inputs=[image, feature_num, dummy_text],
         label="Examples",
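The last hunk only regroups and extends the example gallery; each row fills the three components named in `inputs` when clicked. A minimal sketch of how such a `gr.Examples` gallery is wired in a Blocks app (the component types and click handler are guesses for illustration, and the asset paths assume the Space's `assets/` directory is present):

```python
import gradio as gr

def generate_activations(image, feature_num):
    # Placeholder: the real Space renders activation maps for the feature.
    return f"Feature {feature_num} on {image}"

with gr.Blocks() as demo:
    image = gr.Image(type="filepath", label="Image")
    feature_num = gr.Number(label="Feature Number", precision=0)
    dummy_text = gr.Textbox(visible=False, label="Explanation")
    out = gr.Textbox(label="Output")
    btn = gr.Button("Run")
    btn.click(generate_activations, inputs=[image, feature_num], outputs=out)

    # Clicking a row copies its values into the three input components.
    gr.Examples(
        [
            ["assets/sunglasses.jpg", 10, "Sunglasses"],
            ["assets/monkey.png", 2692, "Monkey"],
        ],
        inputs=[image, feature_num, dummy_text],
        label="Examples",
    )

if __name__ == "__main__":
    demo.launch()
```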