Jonathan Bejarano committed · Commit b76be56 · 1 Parent(s): daf7e93
make this easier to use
app.py CHANGED
@@ -207,11 +207,8 @@ def respond(
         except Exception as e:
             return f"Error connecting to local model: {str(e)}"
     else:
-
-
-        return "Please log in with your HuggingFace account to play the geography game!"
-
-        client = InferenceClient(token=hf_token.token, model=MODEL_NAME)
+        hf_token = os.environ["HF_TOKEN"]
+        client = InferenceClient(token=hf_token, model=MODEL_NAME)
 
         response = ""
         output_token_count = 0
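The change above drops the per-user OAuth token in favor of reading HF_TOKEN from the environment (a Space secret) and building the client directly. A minimal sketch of that flow; the MODEL_NAME value and the streaming loop after client creation are assumptions for illustration, not taken from this diff:

import os
from huggingface_hub import InferenceClient

MODEL_NAME = "meta-llama/Llama-3.1-8B-Instruct"  # placeholder; the real MODEL_NAME is defined elsewhere in app.py

hf_token = os.environ["HF_TOKEN"]  # Space secret, no gr.LoginButton required
client = InferenceClient(token=hf_token, model=MODEL_NAME)

# Assumed continuation of respond(): stream a chat completion and accumulate the reply
response = ""
output_token_count = 0
for chunk in client.chat_completion(
    messages=[{"role": "user", "content": "Is the location in Europe?"}],
    max_tokens=4000,
    temperature=0.3,
    top_p=0.6,
    stream=True,
):
    delta = chunk.choices[0].delta.content or ""
    response += delta
    output_token_count += 1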
@@ -286,9 +283,7 @@ def custom_respond(message, history, game_mode_selection):
     return respond(message, history, "", 4000, 0.3, 0.6, game_mode_selection, None)
 
 with gr.Blocks() as demo:
-
-    with gr.Sidebar():
-        gr.LoginButton()
+
 
     gr.Markdown("# 20 Questions Geography Game")
     gr.Markdown(description)
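For context, custom_respond is the thin wrapper the simplified interface now calls. Its signature comes from the hunk header and its body is the unchanged line above; the meanings of the fixed arguments mirror the hidden inputs removed in the next hunk, and the trailing None is presumably the old OAuth-token parameter:

def custom_respond(message, history, game_mode_selection):
    # Forward fixed defaults instead of exposing them as hidden Gradio inputs:
    # system_message="", max_tokens=4000, temperature=0.3, top_p=0.6, token=None
    return respond(message, history, "", 4000, 0.3, 0.6, game_mode_selection, None)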
@@ -301,28 +296,15 @@ with gr.Blocks() as demo:
         info="Choose what type of location to guess"
     )
 
-
-
-
-
-
-
-
-
-
-    chatbot = gr.ChatInterface(
-        respond,
-        type="messages",
-        examples=examples,
-        cache_examples=False,
-        additional_inputs=[
-            gr.Textbox(value="", visible=False), # system_message (hidden)
-            gr.Slider(minimum=1, maximum=4096, value=4000, visible=False), # max_tokens (hidden)
-            gr.Slider(minimum=0.1, maximum=2.0, value=0.3, visible=False), # temperature (hidden)
-            gr.Slider(minimum=0.1, maximum=1.0, value=0.6, visible=False), # top_p (hidden)
-            game_mode_dropdown,
-        ],
-    )
+
+    chatbot = gr.ChatInterface(
+        custom_respond,
+        type="messages",
+        examples=examples,
+        cache_examples=False,
+        additional_inputs=[game_mode_dropdown],
+    )
+
 
 if __name__ == "__main__":
     demo.launch()
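The net effect of the interface hunks is that all generation settings live inside custom_respond and the only additional input surfaced to Gradio is the game-mode dropdown. A minimal, self-contained sketch of the resulting layout; the dropdown choices, description text, examples, and the stand-in reply are illustrative placeholders, not the Space's actual values:

import gradio as gr

description = "Think of a place; the model asks yes/no questions to guess it."  # placeholder text
examples = [["Let's play! I'm thinking of a country."]]  # placeholder examples

def custom_respond(message, history, game_mode_selection):
    return f"(model reply for mode: {game_mode_selection})"  # stand-in for the real respond() call

with gr.Blocks() as demo:
    gr.Markdown("# 20 Questions Geography Game")
    gr.Markdown(description)

    game_mode_dropdown = gr.Dropdown(
        choices=["Country", "City", "Landmark"],  # assumed choices
        value="Country",
        label="Game mode",
        info="Choose what type of location to guess",
    )

    chatbot = gr.ChatInterface(
        custom_respond,
        type="messages",
        examples=examples,
        cache_examples=False,
        additional_inputs=[game_mode_dropdown],  # dropdown value arrives as the third argument
    )

if __name__ == "__main__":
    demo.launch()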
|