Jonathan Bejarano committed on
Commit
c0f9112
·
1 Parent(s): 036d0fc

Good enough for now

Browse files
Files changed (1) hide show
  1. app.py +59 -37
app.py CHANGED
@@ -39,7 +39,10 @@ COUNTRIES = [
39
 
40
  def get_system_message_with_country():
41
  """Generate a system message with a randomly selected country"""
 
42
  selected_country = random.choice(COUNTRIES)
 
 
43
  return f"""You are a friendly geography game host playing 20 questions with students. You are thinking of the country: {selected_country}
44
 
45
  RULES:
@@ -49,26 +52,26 @@ RULES:
49
  4. When they correctly guess {selected_country}, respond with: 'Congratulations! The country was <<{selected_country}>>'
50
  5. If they reach 20 questions without guessing correctly, respond with: 'Game over! The country was <<{selected_country}>>'
51
  6. Be encouraging and give helpful hints through your yes/no answers
52
- 7. If they want to play again tell them they need to reload the page."""
 
 
 
53
 
54
-
55
- def extract_country_name(response):
56
- """Extract country name from response using the <<COUNTRY_NAME>> format"""
57
- match = re.search(r'<<(.+?)>>', response)
58
- if match:
59
- return match.group(1)
60
- return None
61
 
62
  def format_game_result(response):
63
  """Format the game result with proper styling"""
64
- country = extract_country_name(response)
65
- if not country:
66
- return response
67
-
 
68
  if "Congratulations" in response:
69
- return f"πŸŽ‰ **Congratulations!** You correctly guessed **{country}**! Well done! πŸŽ‰\n\nWould you like to play another round?"
70
  elif "Game over" in response:
71
- return f"πŸ˜” **Game Over!** You've used all 20 questions. The country I was thinking of was **{country}**. πŸ˜”\n\nBetter luck next time! Would you like to try again?"
 
 
72
 
73
  return response
74
 
@@ -85,14 +88,31 @@ def respond(
85
  """
86
  For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
87
  """
88
- # Generate a new system message with random country for new conversations
89
- if not history:
90
- system_message = get_system_message_with_country()
91
 
92
- messages = [{"role": "system", "content": system_message}]
 
93
  messages.extend(history)
94
  messages.append({"role": "user", "content": message})
95
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
96
  # Choose client based on whether we're running locally or in the cloud
97
  if LOCAL_MODE:
98
  # Running locally with custom model settings
@@ -109,6 +129,7 @@ def respond(
109
  client = InferenceClient(token=hf_token.token, model=MODEL_NAME)
110
 
111
  response = ""
 
112
 
113
  try:
114
  for message_chunk in client.chat_completion(
@@ -123,13 +144,23 @@ def respond(
123
  token = ""
124
  if len(choices) and choices[0].delta.content:
125
  token = choices[0].delta.content
 
126
 
127
  response += token
128
 
 
 
 
 
 
 
 
129
  # Check if this is a game end response and format it nicely
130
- if "<<" in response and ">>" in response:
 
131
  return format_game_result(response)
132
  else:
 
133
  return response
134
  except Exception as e:
135
  return f"Error during inference: {str(e)}"
@@ -159,31 +190,22 @@ examples = [
159
  ["Is the currency the Euro?"],
160
  ]
161
 
162
- additional_inputs = [
163
- gr.Textbox(visible=False, value="Geography game placeholder - will be replaced with random country", label="System message"),
164
- gr.Slider(visible=False, minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
165
- gr.Slider(visible=False, minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
166
- gr.Slider(
167
- visible=False,
168
- minimum=0.1,
169
- maximum=1.0,
170
- value=0.95,
171
- step=0.05,
172
- label="Top-p (nucleus sampling)",
173
- ),
174
- ]
175
-
176
  # Create wrapper function for local mode that doesn't expect OAuth token
177
- def respond_local(message, history, system_message, max_tokens, temperature, top_p):
178
- return respond(message, history, system_message, max_tokens, temperature, top_p, None)
 
 
 
 
 
179
 
180
  chatbot = gr.ChatInterface(
181
- (respond_local if LOCAL_MODE else respond),
182
  type="messages",
183
  description=description,
184
  examples=examples,
185
  cache_examples=False,
186
- additional_inputs=additional_inputs,
187
  )
188
 
189
  with gr.Blocks() as demo:
 
39
 
40
  def get_system_message_with_country():
41
  """Generate a system message with a randomly selected country"""
42
+ global selected_country
43
  selected_country = random.choice(COUNTRIES)
44
+ COUNTRIES.remove(selected_country) # Ensure the same country isn't picked again
45
+ print(selected_country)
46
  return f"""You are a friendly geography game host playing 20 questions with students. You are thinking of the country: {selected_country}
47
 
48
  RULES:
 
52
  4. When they correctly guess {selected_country}, respond with: 'Congratulations! The country was <<{selected_country}>>'
53
  5. If they reach 20 questions without guessing correctly, respond with: 'Game over! The country was <<{selected_country}>>'
54
  6. Be encouraging and give helpful hints through your yes/no answers
55
+ 7. If they want to play again tell them they need to reload the page.
56
+ 8. IMPORTANT: Only accept the country name "{selected_country}" as correct, but Spelling is not important and they can ask a question like it is? Do NOT accept neighboring countries, similar countries, or regions that contain this country.
57
+ 9. If they guess a neighboring country or similar country, respond with "No" and continue the game.
58
+ 10. Be very strict about the exact country match - only "{selected_country}" is the correct answer."""
59
 
60
+ current_system = get_system_message_with_country()
 
 
 
 
 
 
61
 
62
def format_game_result(response):
    """Format the game result with proper styling.

    Scans the raw assistant response for the game-end phrases emitted per the
    system prompt ("Congratulations! The country was ..." / "Game over! The
    country was ...") and, when the game has ended, returns a styled result
    message and starts a fresh round by regenerating the system message.

    Relies on two module-level globals set by get_system_message_with_country():
    ``selected_country`` (the answer for the current round) and
    ``current_system`` (the system prompt used by respond()).

    Args:
        response: Raw assistant response text.

    Returns:
        A formatted end-of-game message, or ``response`` unchanged while the
        game is still in progress.
    """
    global current_system  # reassigned below when a round ends

    if "The country was" in response:
        print(f"🔍 DEBUG - Game end detected! Country extracted: {selected_country}")
    else:
        print("🔍 DEBUG - Regular response (no game end)")

    if "Congratulations" in response:
        # Capture the announced country BEFORE regenerating the system
        # message, because get_system_message_with_country() rebinds the
        # global ``selected_country`` to the next round's answer.
        result = f"🎉 **Congratulations!** You correctly guessed **{selected_country}**! Well done! 🎉\n\nWould you like to play another round?"
        # BUG FIX: the original reset was placed after a return statement and
        # never executed; reset here so the next message starts a new round.
        current_system = get_system_message_with_country()
        return result
    elif "Game over" in response:
        result = f"😔 **Game Over!** You've used all 20 questions. The country I was thinking of was **{selected_country}**. 😔\n\nBetter luck next time! Would you like to try again?"
        current_system = get_system_message_with_country()
        return result

    # Mid-game response: pass through untouched.
    return response
77
 
 
88
  """
89
  For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
90
  """
 
 
 
91
 
92
+
93
+ messages = [{"role": "system", "content": current_system}]
94
  messages.extend(history)
95
  messages.append({"role": "user", "content": message})
96
 
97
+ # Debug: Calculate approximate input token count
98
+ total_input_chars = sum(len(str(msg.get("content", ""))) for msg in messages)
99
+ estimated_input_tokens = total_input_chars // 4 # Rough approximation: 4 chars per token
100
+ print(f"πŸ” DEBUG - Estimated input tokens: {estimated_input_tokens}")
101
+ print(f"πŸ” DEBUG - Messages count: {len(messages)}")
102
+ print(f"πŸ” DEBUG - Max tokens setting: {max_tokens}")
103
+
104
+ # Debug: Show each message type and length
105
+ for i, msg in enumerate(messages):
106
+ role = msg.get("role", "unknown")
107
+ content = str(msg.get("content", ""))
108
+ print(f"πŸ” DEBUG - Message {i+1} ({role}): {len(content)} chars")
109
+ if role == "system":
110
+ print(f"πŸ” DEBUG - System message preview: {content[:100]}...")
111
+ elif role == "user":
112
+ print(f"πŸ” DEBUG - User message: {content}")
113
+ elif role == "assistant":
114
+ print(f"πŸ” DEBUG - Assistant message: {content[:50]}...")
115
+
116
  # Choose client based on whether we're running locally or in the cloud
117
  if LOCAL_MODE:
118
  # Running locally with custom model settings
 
129
  client = InferenceClient(token=hf_token.token, model=MODEL_NAME)
130
 
131
  response = ""
132
+ output_token_count = 0
133
 
134
  try:
135
  for message_chunk in client.chat_completion(
 
144
  token = ""
145
  if len(choices) and choices[0].delta.content:
146
  token = choices[0].delta.content
147
+ output_token_count += 1
148
 
149
  response += token
150
 
151
+ # Debug: Show output token statistics
152
+ estimated_output_tokens = len(response) // 4 # Rough approximation
153
+ print(f"πŸ” DEBUG - Output token chunks received: {output_token_count}")
154
+ print(f"πŸ” DEBUG - Estimated output tokens (by chars): {estimated_output_tokens}")
155
+ print(f"πŸ” DEBUG - Response length: {len(response)} characters")
156
+ print(f"πŸ” DEBUG - Raw response: {response}")
157
+
158
  # Check if this is a game end response and format it nicely
159
+ if "The country was" in response:
160
+ print(f"πŸ” DEBUG - Game end detected! Country extracted: {selected_country}")
161
  return format_game_result(response)
162
  else:
163
+ print("πŸ” DEBUG - Regular response (no game end)")
164
  return response
165
  except Exception as e:
166
  return f"Error during inference: {str(e)}"
 
190
  ["Is the currency the Euro?"],
191
  ]
192
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
193
# Create wrapper function for local mode that doesn't expect OAuth token
def custom_respond(message, history):
    """Adapt Gradio's (message, history) callback signature to respond().

    Generation settings are fixed here rather than exposed as extra Gradio
    inputs; the system_message argument is passed empty because respond()
    builds its own system prompt from the module-level ``current_system``.

    NOTE(review): in cloud mode this forwards a module-level ``hf_token`` —
    confirm that name is actually bound at module scope before deploying.
    """
    # Hardcoded values - no additional inputs needed
    return respond(
        message,
        history,
        "",     # system_message (unused by respond)
        2048,   # max_tokens
        0.3,    # temperature - lower for more consistent responses
        0.7,    # top_p - lower for more deterministic behavior
        None if LOCAL_MODE else hf_token,
    )
201
 
202
# Build the chat UI. Generation settings are hardcoded inside custom_respond,
# so no additional_inputs widgets are exposed to the user.
chatbot = gr.ChatInterface(
    custom_respond,
    type="messages",
    description=description,
    examples=examples,
    cache_examples=False,
)
210
 
211
  with gr.Blocks() as demo: