Update Gradio_UI.py
Gradio_UI.py CHANGED (+2 -24)
@@ -141,9 +141,9 @@ def stream_to_gradio(
 
     for step_log in agent.run(task, stream=True, reset=reset_agent_memory, additional_args=additional_args):
         # Track tokens if model provides them
-        if hasattr(agent.model, "last_input_token_count"):
+        if hasattr(agent.model, "last_input_token_count") and agent.model.last_input_token_count is not None:
             total_input_tokens += agent.model.last_input_token_count
-            total_output_tokens += agent.model.last_output_token_count
+            total_output_tokens += agent.model.last_output_token_count or 0
             if isinstance(step_log, ActionStep):
                 step_log.input_token_count = agent.model.last_input_token_count
                 step_log.output_token_count = agent.model.last_output_token_count
@@ -153,28 +153,6 @@ def stream_to_gradio(
         ):
             yield message
 
-    final_answer = step_log  # Last log is the run's final_answer
-    final_answer = handle_agent_output_types(final_answer)
-
-    if isinstance(final_answer, AgentText):
-        yield gr.ChatMessage(
-            role="assistant",
-            content=f"**Final answer:**\n{final_answer.to_string()}\n",
-        )
-    elif isinstance(final_answer, AgentImage):
-        yield gr.ChatMessage(
-            role="assistant",
-            content={"path": final_answer.to_string(), "mime_type": "image/png"},
-        )
-    elif isinstance(final_answer, AgentAudio):
-        yield gr.ChatMessage(
-            role="assistant",
-            content={"path": final_answer.to_string(), "mime_type": "audio/wav"},
-        )
-    else:
-        yield gr.ChatMessage(role="assistant", content=f"**Final answer:** {str(final_answer)}")
-
-
 class GradioUI:
     """A one-line interface to launch your agent in Gradio"""
 
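
The first hunk tightens the token bookkeeping: hasattr alone is not enough when a backend defines last_input_token_count but leaves it as None, in which case the += raises a TypeError, and the output count is now also defaulted to 0. A minimal sketch of the failure mode and of the patched guard (DummyModel and its values are made up for illustration, not taken from the repository):

# Sketch of the failure the guard prevents, assuming a model backend that
# defines the counter attributes but has not populated them yet.
class DummyModel:
    last_input_token_count = None   # hypothetical: attribute exists but is unset
    last_output_token_count = None

model = DummyModel()
total_input_tokens = 0
total_output_tokens = 0

# Old code: hasattr() is True, so accumulation runs and raises
# TypeError: unsupported operand type(s) for +=: 'int' and 'NoneType'
# total_input_tokens += model.last_input_token_count

# Patched code: skip accumulation until the counters hold real numbers,
# and treat a missing output count as 0.
if hasattr(model, "last_input_token_count") and model.last_input_token_count is not None:
    total_input_tokens += model.last_input_token_count
    total_output_tokens += model.last_output_token_count or 0

print(total_input_tokens, total_output_tokens)  # prints "0 0": counters simply stay at zero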
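The second hunk drops the post-loop final-answer handling, so stream_to_gradio now ends after the last step's messages and no longer yields a separate "**Final answer:**" ChatMessage (and no longer needs handle_agent_output_types, AgentText, AgentImage, or AgentAudio for that purpose). A caller that still wants an explicit closing bubble can append one where the generator is consumed; a hedged sketch, assuming it sits alongside stream_to_gradio in Gradio_UI.py (collect_chat and its argument names are illustrative, only stream_to_gradio's parameters and gr.ChatMessage come from the patched file):

import gradio as gr

def collect_chat(agent, task: str):
    # Drain the generator; after this change the last yielded message is simply
    # the last step's output, with no trailing "Final answer" ChatMessage.
    chat = []
    for msg in stream_to_gradio(agent, task=task, reset_agent_memory=False):
        chat.append(msg)
    # Optionally restore a closing bubble on the caller side, echoing the
    # formatting of the removed fallback branch. Note that content can be a
    # dict for image/audio messages, so a real UI would format it accordingly.
    if chat:
        chat.append(gr.ChatMessage(role="assistant", content=f"**Final answer:** {chat[-1].content}"))
    return chat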