Updates the title
- README.md +1 -1
- llava/conversation.py +1 -1
- llava/serve/gradio_web_server.py +2 -2
README.md
CHANGED
@@ -1,5 +1,5 @@
 ---
-title: LLaVA++
+title: LLaVA++ (Phi-3-V)
 emoji: π
 colorFrom: green
 colorTo: yellow
llava/conversation.py
CHANGED
@@ -371,7 +371,7 @@ Answer the questions.""",
 )
 
 conv_phi3_instruct = Conversation(
-    system="""<|system|>\
+    system="""<|system|>\nA chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.""",
     roles=("\n<|user|>\n", "\n<|assistant|>\n"),
     version="phi3",
     messages=(),
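For context, below is a minimal sketch of how the system string and role tags defined in conv_phi3_instruct above could be assembled into a Phi-3-style prompt. This is an assumption for illustration only: the actual formatting is performed by the Conversation class elsewhere in llava/conversation.py and may insert additional separators (e.g. end-of-turn tokens) not visible in this diff; build_prompt is a hypothetical helper, not part of the repository.

# Minimal sketch (assumption): concatenates the system block, a role-tagged user
# turn, and an open assistant turn, using the exact strings from the diff above.
system = ("<|system|>\nA chat between a curious user and an artificial intelligence "
          "assistant. The assistant gives helpful, detailed, and polite answers to "
          "the user's questions.")
roles = ("\n<|user|>\n", "\n<|assistant|>\n")

def build_prompt(user_message: str) -> str:
    # System block, then the user turn, then an open assistant turn for generation.
    return system + roles[0] + user_message + roles[1]

print(build_prompt("What is shown in the image?"))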
llava/serve/gradio_web_server.py
CHANGED
@@ -311,8 +311,8 @@ def build_demo(embed_mode, cur_dir=None, concurrency_count=10):
     ], inputs=[imagebox, textbox])
 
     with gr.Accordion("Parameters", open=False) as parameter_row:
-        temperature = gr.Slider(minimum=0.0, maximum=1.0, value=0.
-        top_p = gr.Slider(minimum=0.0, maximum=1.0, value=0.
+        temperature = gr.Slider(minimum=0.0, maximum=1.0, value=0.1, step=0.1, interactive=True, label="Temperature",)
+        top_p = gr.Slider(minimum=0.0, maximum=1.0, value=0.7, step=0.1, interactive=True, label="Top P",)
         max_output_tokens = gr.Slider(minimum=0, maximum=1024, value=512, step=64, interactive=True, label="Max output tokens",)
 
     with gr.Column(scale=8):
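As a standalone illustration of the parameter controls touched by this commit, here is a small self-contained Gradio sketch that reproduces the "Parameters" accordion with the new slider defaults (temperature 0.1, top_p 0.7, max output tokens 512). It is an assumption for illustration only: echo_settings is a hypothetical placeholder for the real model request, and the actual web server wires these sliders into a larger demo with image input and a model worker.

# Minimal standalone sketch (assumption): rebuilds the "Parameters" accordion
# from the diff above, outside the full LLaVA web server.
import gradio as gr

def echo_settings(message, temperature, top_p, max_output_tokens):
    # Placeholder: the real server forwards these sampling settings to the model worker.
    return (f"temperature={temperature}, top_p={top_p}, "
            f"max_output_tokens={max_output_tokens}, prompt={message!r}")

with gr.Blocks() as demo:
    textbox = gr.Textbox(label="Prompt")
    with gr.Accordion("Parameters", open=False):
        temperature = gr.Slider(minimum=0.0, maximum=1.0, value=0.1, step=0.1,
                                interactive=True, label="Temperature")
        top_p = gr.Slider(minimum=0.0, maximum=1.0, value=0.7, step=0.1,
                          interactive=True, label="Top P")
        max_output_tokens = gr.Slider(minimum=0, maximum=1024, value=512, step=64,
                                      interactive=True, label="Max output tokens")
    output = gr.Textbox(label="Output")
    textbox.submit(echo_settings,
                   [textbox, temperature, top_p, max_output_tokens],
                   output)

if __name__ == "__main__":
    demo.launch()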