# apif/main.py
from flask import Flask, request, jsonify
from sentence_transformers import SentenceTransformer
from pinecone import Pinecone
import google.generativeai as genai
from langdetect import detect
# Initialize the embedding model, Pinecone index, and Gemini model
embedding_model = SentenceTransformer("intfloat/multilingual-e5-large")
pc = Pinecone(api_key="pcsk_3ax4D8_PH7vWF1KWAMRpyjmEnXhwxswmHSjvqgwovna3xGGbfsgZsMRtRyFi9uCpPyi4B9")
index = pc.Index("newindex")
genai.configure(api_key="AIzaSyBXtRzMkpm9RNDO09A9N3XoG_vfjgUe5Vw")
model = genai.GenerativeModel("gemini-2.0-flash")
app = Flask(__name__)
chat_history = []  # In-memory conversation history, shared across all requests

def detect_language(text):
    """Detect the language code of the given text; return "unknown" if detection fails."""
    try:
        return detect(text)
    except Exception:
        return "unknown"

def get_answer_from_pinecone(user_question, embedding_model, index, top_k=5, similarity_threshold=0.7):
    """Embed the question, query Pinecone, and return formatted matches above the similarity threshold."""
    try:
        question_vector = embedding_model.encode(user_question).tolist()
    except Exception as e:
        return [f"❌ Error embedding question: {e}"]
    try:
        search_result = index.query(
            vector=question_vector,
            top_k=top_k,
            include_metadata=True
        )
    except Exception as e:
        return [f"❌ Error querying Pinecone: {e}"]
    # Keep only matches above the threshold, highest score first.
    matches = [m for m in search_result.matches if m.score >= similarity_threshold]
    sorted_matches = sorted(matches, key=lambda x: x.score, reverse=True)
    answers = []
    for m in sorted_matches:
        answer = m.metadata.get('answer', '').strip()
        source = m.metadata.get('source', 'unknown')
        score = round(m.score, 3)
        if answer:
            answers.append(f"• ({score}) from [{source}]:\n{answer}")
    return answers if answers else ["⚠️ No similar answers found."]
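
# A minimal sketch (not part of the original app) of the data layout the retrieval step above assumes:
# each Pinecone record carries "answer" and "source" metadata fields. The record below and the
# "passage:" prefix (the usual convention for e5-family embeddings) are illustrative assumptions,
# not something defined elsewhere in this file.
#
#   index.upsert(vectors=[{
#       "id": "faq-001",
#       "values": embedding_model.encode("passage: How do I reset my password?").tolist(),
#       "metadata": {
#           "answer": "Use the 'Forgot password' link on the login page.",
#           "source": "faq",
#       },
#   }])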

def ask_gemini_with_combined_answer(user_question, pinecone_answers=None, history=None):
    """Build a language-matched prompt from the chat history and Pinecone answers, then ask Gemini."""
    pinecone_answers = pinecone_answers or []
    history = history or []
    context = "\n".join([f"👤 {q}\n🤖 {a}" for q, a in history])
    extracted_info = "\n".join([f"• {ans}" for ans in pinecone_answers]) if pinecone_answers else "None"
    lang = detect_language(user_question)
    if lang == "ar":
        greeting = "مرحبًا! 😊"
        # Arabic prompt: "Important: use only the information from the database", followed by the
        # previous conversation, the user's question, the retrieved info, and a "Response:" cue.
        instructions = """
❗ هام: استخدم فقط المعلومات من قاعدة البيانات.
📜 المحادثة السابقة:
{context}
👤 المستخدم يسأل: {user_question}
📚 معلومات من قاعدة البيانات:
{extracted_info}
📌 الرد:
"""
    else:
        greeting = "Hello! 😊"
        instructions = """
❗ Important: Use only database information.
📜 Previous conversation:
{context}
👤 User asks: {user_question}
📚 Retrieved info:
{extracted_info}
📌 Response:
"""
    prompt = instructions.format(
        context=context or ("لا يوجد" if lang == "ar" else "None"),
        user_question=user_question,
        extracted_info=extracted_info
    )
    response = model.generate_content(prompt)
    return f"{greeting}\n{response.text.strip()}"
@app.route("/ask", methods=["POST"])
def ask():
data = request.json
question = data.get("question")
if not question:
return jsonify({"error": "Missing question"}), 400
pinecone_answer = get_answer_from_pinecone(question, embedding_model, index)
final_answer = ask_gemini_with_combined_answer(question, pinecone_answer, chat_history)
chat_history.append((question, final_answer))
return jsonify({
"question": question,
"pinecone_matches": pinecone_answer,
"answer": final_answer
})
@app.route("/")
def home():
return "๐Ÿค– API is running. Use POST /ask with {'question': '...'}"

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)
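
# Example request (an illustrative sketch only; assumes the server above is running locally on
# port 7860, and the question text is made up):
#
#   import requests
#   resp = requests.post(
#       "http://localhost:7860/ask",
#       json={"question": "What are the working hours?"},
#   )
#   print(resp.json()["answer"])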