umerforsure committed on
Commit 9584b5a · 1 Parent(s): f2b8ccd

πŸ› Fix: improved model output post-processing to avoid empty answers

Files changed (1)
  1. app.py  +15 -13
app.py CHANGED
@@ -151,23 +151,25 @@ def ask_question(question):
 
     context = "\n".join([doc.page_content for doc in docs])
     prompt = generate_prompt(context, question)
-    response = reasoning_pipeline(prompt)[0]['generated_text']
+    output = reasoning_pipeline(prompt)[0]['generated_text'].strip()
 
-    # Clean out prompt leakage
-    for token in ["Context:", "Question:", "Instructions:", "Use structured academic language"]:
-        response = response.replace(token, "").strip()
+    # Clean unwanted leftovers
+    for marker in ["Context:", "Question:", "Instructions:", "Use structured academic language"]:
+        output = output.replace(marker, "").strip()
 
-    # Remove leading/trailing junk
-    if "Answer:" in response:
-        response = response.split("Answer:")[-1].strip()
-    if "." in response:
-        response = response.rsplit(".", 1)[0] + "."
+    # Remove leading "Answer:" if present
+    if output.lower().startswith("answer:"):
+        output = output[len("answer:"):].strip()
 
-    # ❗ Fallback if answer is empty or nonsense
-    if len(response.strip()) < 10:
-        return "❌ The model could not generate a meaningful answer based on the provided context."
+    # Ensure proper sentence ending
+    if "." in output:
+        output = output.rsplit(".", 1)[0] + "."
 
-    return post_process_output(response.strip(), question)
+    # Fallback if answer too short or generic
+    if len(output) < 10 or output.lower() in ["", ".", "use structured academic language.", "use structured academic language"]:
+        return "❌ The model could not generate a meaningful answer from the provided material."
+
+    return post_process_output(output, question)
 
 
 # Gradio UI
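For quick verification outside the app, the added post-processing can be mirrored as a standalone helper. The function name clean_model_output and the constant FALLBACK below are introduced here for illustration and do not exist in app.py; the body follows the added lines above.

# Hypothetical standalone extraction of the new post-processing steps in app.py.
FALLBACK = "❌ The model could not generate a meaningful answer from the provided material."

def clean_model_output(generated_text: str) -> str:
    output = generated_text.strip()

    # Strip prompt scaffolding that the model may echo back.
    for marker in ["Context:", "Question:", "Instructions:", "Use structured academic language"]:
        output = output.replace(marker, "").strip()

    # Drop a leading "Answer:" prefix, but leave trailing occurrences alone.
    if output.lower().startswith("answer:"):
        output = output[len("answer:"):].strip()

    # Cut any unfinished trailing sentence fragment.
    if "." in output:
        output = output.rsplit(".", 1)[0] + "."

    # Fall back when nothing substantive is left.
    if len(output) < 10 or output.lower() in ["", ".", "use structured academic language.", "use structured academic language"]:
        return FALLBACK
    return output

# Quick checks of the two paths:
print(clean_model_output("Answer: Photosynthesis converts light energy into chemical energy."))
print(clean_model_output("Use structured academic language."))  # -> fallback message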