import random
import re

import gradio as gr
from huggingface_hub import InferenceClient

# List of countries for the game
COUNTRIES = [
"Afghanistan", "Albania", "Algeria", "Angola", "Argentina", "Armenia", "Australia", "Austria",
"Bangladesh", "Belgium", "Belize", "Bolivia", "Bosnia and Herzegovina", "Botswana", "Brazil",
"Bulgaria", "Burma", "Burundi", "Cambodia", "Canada", "Central African Republic", "Chad", "Chile",
"China", "Colombia", "Costa Rica", "Croatia", "Cuba", "Czech Republic", "Democratic Republic of the Congo",
"Denmark", "Dominican Republic", "Ecuador", "Egypt", "El Salvador", "Estonia", "Ethiopia", "Fiji",
"Finland", "France", "Georgia", "Germany", "Ghana", "Greece", "Grenada", "Guatemala", "Guinea",
"Guyana", "Haiti", "Honduras", "Hungary", "Iceland", "India", "Indonesia", "Iran", "Iraq", "Ireland",
"Israel", "Italy", "Jamaica", "Japan", "Jordan", "Kenya", "Kuwait", "Kyrgyzstan", "Laos", "Latvia",
"Lebanon", "Liberia", "Libya", "Liechtenstein", "Lithuania", "Luxembourg", "Macedonia", "Madagascar",
"Malaysia", "Mali", "Malta", "Mexico", "Moldova", "Monaco", "Mongolia", "Montenegro", "Morocco",
"Mozambique", "Nepal", "Netherlands", "New Zealand", "Nicaragua", "Niger", "Nigeria", "North Korea",
"Norway", "Oman", "Pakistan", "Palestine", "Panama", "Papua New Guinea", "Paraguay", "Peru",
"Philippines", "Poland", "Portugal", "Qatar", "Republic of the Congo", "Romania", "Russia", "Rwanda",
"Samoa", "Saudi Arabia", "Serbia", "Singapore", "Slovakia", "South Korea", "Slovenia", "Somalia",
"South Africa", "Spain", "Sri Lanka", "Sudan", "Suriname", "Swaziland", "Sweden", "Switzerland",
"Syria", "Tajikistan", "Tanzania", "Thailand", "Tonga", "Trinidad and Tobago", "Tunisia", "Turkey",
"Turkmenistan", "Uganda", "Ukraine", "United Arab Emirates", "United Kingdom", "United States",
"Uruguay", "Uzbekistan", "Vanuatu", "Vatican City", "Venezuela", "Vietnam", "Yemen", "Zambia", "Zimbabwe"
]

def get_system_message_with_country():
    """Generate a system message with a randomly selected country."""
    selected_country = random.choice(COUNTRIES)
    return f"""You are a friendly geography game host playing 20 questions with students. You are thinking of the country: {selected_country}
RULES:
1. NEVER reveal the country name ({selected_country}) in your responses
2. Answer only 'Yes' or 'No' to their questions
3. Keep track of how many questions they've asked
4. When they correctly guess {selected_country}, respond with: 'Congratulations! The country was <<{selected_country}>>'
5. If they reach 20 questions without guessing correctly, respond with: 'Game over! The country was <<{selected_country}>>'
6. Be encouraging and give helpful hints through your yes/no answers"""

def extract_country_name(response):
    """Extract the country name from a response using the <<COUNTRY_NAME>> format."""
    match = re.search(r'<<(.+?)>>', response)
    if match:
        return match.group(1)
    return None
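
# Illustrative calls (hypothetical sample strings, not actual model output):
#   extract_country_name("Congratulations! The country was <<Brazil>>")  # -> "Brazil"
#   extract_country_name("Is it in Europe? Yes.")                        # -> None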

def format_game_result(response):
    """Format the game result with proper styling."""
    country = extract_country_name(response)
    if not country:
        return response
    if "Congratulations" in response:
        return f"🎉 **Congratulations!** You correctly guessed **{country}**! Well done! 🎉\n\nWould you like to play another round?"
    elif "Game over" in response:
        return f"😔 **Game Over!** You've used all 20 questions. The country I was thinking of was **{country}**. 😔\n\nBetter luck next time! Would you like to try again?"
    return response

# System message for the game currently in progress. A module-level variable keeps
# the secret country stable across turns (the hidden system-message textbox only
# holds a placeholder). All sessions in one process share this state, which is
# acceptable for a single-player demo.
current_system_message = None


def respond(
    message,
    history: list[dict[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    hf_token: gr.OAuthToken,
):
    """
    For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
    """
    global current_system_message

    client = InferenceClient(token=hf_token.token, model="meta-llama/Llama-3.2-3B-Instruct")

    # Start a new game with a fresh random country when the conversation is empty;
    # otherwise reuse the stored system message so the secret country does not change mid-game.
    if not history or current_system_message is None:
        current_system_message = get_system_message_with_country()
    system_message = current_system_message

    messages = [{"role": "system", "content": system_message}]
    messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        choices = chunk.choices
        token = ""
        if len(choices) and choices[0].delta.content:
            token = choices[0].delta.content
        response += token

        # If the model emitted the <<COUNTRY>> end-of-game marker, format the result nicely.
        if "<<" in response and ">>" in response:
            yield format_game_result(response)
        else:
            yield response
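
# A minimal local smoke-test sketch (assumes a valid Hugging Face token in the
# HF_TOKEN environment variable; SimpleNamespace stands in for the gr.OAuthToken
# object that Gradio normally injects after login):
#
#   import os
#   from types import SimpleNamespace
#
#   for partial in respond("Is the country located in Europe?", [], "", 64, 0.7, 0.95,
#                          SimpleNamespace(token=os.environ["HF_TOKEN"])):
#       pass
#   print(partial)  # the final (possibly formatted) answer
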
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""

chatbot = gr.ChatInterface(
    respond,
    type="messages",
    description="I am thinking of a country. You have 20 yes-or-no questions to figure out which country it is.",
    examples=[
        ["Is the country located in Europe?"],
        ["Is it in the Northern Hemisphere?"],
        ["Is the official language Spanish?"],
        ["Is the capital city Rome?"],
        ["Is this country bordered by an ocean?"],
        ["Does this country have more than 100 million people?"],
        ["Is this country known for producing coffee?"],
        ["Was this country ever a colony of the United Kingdom?"],
        ["Is this country located on an island?"],
        ["Is the currency the Euro?"],
    ],
    additional_inputs=[
        gr.Textbox(visible=False, value="Geography game placeholder - will be replaced with random country", label="System message"),
        gr.Slider(visible=False, minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(visible=False, minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            visible=False,
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)

with gr.Blocks() as demo:
    with gr.Sidebar():
        gr.LoginButton()
    chatbot.render()

if __name__ == "__main__":
    demo.launch()