File size: 4,525 Bytes
0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 9b2d93a 0e43c2e 9b2d93a 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 4195aed 0e43c2e 9b2d93a |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 |
import os
# Set the Hugging Face cache folders inside Docker (/tmp is writable in the
# container, unlike the default home-directory cache location).
os.environ['TRANSFORMERS_CACHE'] = '/tmp/huggingface_cache'
os.environ['HF_HOME'] = '/tmp/huggingface'
import re

from flask import Flask, request, jsonify
from sentence_transformers import SentenceTransformer
from pinecone import Pinecone
import google.generativeai as genai
from langdetect import detect
# Canned replies for common greetings (Arabic and English).
# NOTE(review): the Arabic literals below were reconstructed from a
# mis-encoded (mojibake) copy of this file — verify the exact wording
# against the original source before merging.
greetings_dict = {
    "السلام عليكم": "وعليكم السلام",
    "صباح الخير": "صباح النور",
    "مساء الخير": "مساء النور",
    "أهلا": "أهلا بيك",
    "أهلاً": "أهلاً وسهلاً",
    "هاي": "هاي",
    "هلا": "هلا فيك",
    "hello": "hello!",
    "hi": "hi!",
    "hey": "hey there!",
    "ازيك": "الحمد لله، انت عامل ايه؟",
    "ازيك؟": "الحمد لله، انت عامل ايه؟",
}
def check_greeting(question, greetings=None):
    """Return the canned reply if *question* contains a known greeting.

    Matching is case-insensitive and anchored at word boundaries, fixing the
    previous substring matching where short greetings such as "hi" fired
    inside ordinary words ("this", "which", ...).

    Args:
        question: The user's raw question text.
        greetings: Optional mapping of greeting -> reply; defaults to the
            module-level ``greetings_dict`` (keeps the old call signature
            working unchanged).

    Returns:
        The reply string for the first matching greeting, or ``None`` when
        no greeting is found.
    """
    table = greetings_dict if greetings is None else greetings
    for greeting, reply in table.items():
        # (?<!\w)/(?!\w) act as word boundaries that also work for Arabic
        # letters (which are \w under Python's Unicode-aware regex).
        pattern = r"(?<!\w)" + re.escape(greeting) + r"(?!\w)"
        if re.search(pattern, question, re.IGNORECASE):
            return reply
    return None
# --- Model / service initialisation (runs once at import time) ---

# Multilingual embedding model used to vectorise user questions.
embedding_model = SentenceTransformer("intfloat/multilingual-e5-large")

# SECURITY: these API keys were hard-coded in version control and MUST be
# rotated. They are now read from the environment; the old values remain as
# fallbacks only so the container keeps working until rotation is complete.
pc = Pinecone(api_key=os.environ.get(
    "PINECONE_API_KEY",
    "pcsk_3ax4D8_PH7vWF1KWAMRpyjmEnXhwxswmHSjvqgwovna3xGGbfsgZsMRtRyFi9uCpPyi4B9",
))
index = pc.Index("newindex")

genai.configure(api_key=os.environ.get(
    "GEMINI_API_KEY",
    "AIzaSyBXtRzMkpm9RNDO09A9N3XoG_vfjgUe5Vw",
))
model = genai.GenerativeModel("gemini-2.0-flash")
app = Flask(__name__)
# In-memory log of (question, answer) tuples shared by all requests.
# NOTE(review): grows without bound and is shared across clients — presumably
# fine for a demo, but confirm before production use.
chat_history = []
def detect_language(text):
    """Best-effort language code for *text* (e.g. "ar", "en").

    Returns:
        The code reported by ``langdetect.detect``, or ``"unknown"`` when
        detection fails (empty or ambiguous input raises inside detect()).
    """
    try:
        return detect(text)
    except Exception:  # was a bare except; never mask SystemExit/KeyboardInterrupt
        return "unknown"
def get_answer_from_pinecone(user_question, embedding_model, index, top_k=5, similarity_threshold=0.7):
    """Embed *user_question* and fetch the best-matching stored answers.

    Args:
        user_question: Raw question text.
        embedding_model: Object with ``encode(str)`` returning a vector
            whose ``tolist()`` yields a plain list (SentenceTransformer in
            production).
        index: Pinecone index exposing ``query(...)``.
        top_k: Maximum number of matches to retrieve.
        similarity_threshold: Minimum similarity score a match must reach.

    Returns:
        A list of formatted answer strings sorted by descending score, or a
        single-element list carrying an error / "no results" message.
        (Errors are reported in-band as strings, matching the original
        contract relied on by callers.)
    """
    try:
        question_vector = embedding_model.encode(user_question).tolist()
    except Exception as e:
        return [f"❌ Error embedding question: {e}"]

    try:
        search_result = index.query(
            vector=question_vector,
            top_k=top_k,
            include_metadata=True
        )
    except Exception as e:
        return [f"❌ Error querying Pinecone: {e}"]

    # Keep only sufficiently similar matches, best first.
    matches = [m for m in search_result.matches if m.score >= similarity_threshold]
    matches.sort(key=lambda m: m.score, reverse=True)

    answers = []
    for m in matches:
        answer = m.metadata.get('answer', '').strip()
        source = m.metadata.get('source', 'unknown')
        score = round(m.score, 3)
        if answer:  # skip matches whose stored answer is empty
            answers.append(f"• ({score}) from [{source}]:\n{answer}")

    return answers if answers else ["⚠️ No similar answers found."]
def ask_gemini_with_combined_answer(user_question, pinecone_answers=None, history=None):
    """Compose a prompt from history + retrieved answers and ask Gemini.

    Args:
        user_question: The question to answer.
        pinecone_answers: Pre-retrieved answer snippets (list of str), as
            produced by ``get_answer_from_pinecone``.
        history: List of (question, answer) tuples from the conversation.

    Returns:
        Gemini's reply text, stripped of surrounding whitespace.
    """
    # Mutable default arguments ([]) replaced with None sentinels to avoid
    # the shared-across-calls default-argument pitfall.
    pinecone_answers = pinecone_answers if pinecone_answers is not None else []
    history = history if history is not None else []

    context = "\n".join(f"👤 {q}\n🤖 {a}" for q, a in history)
    extracted_info = "\n".join(f"• {ans}" for ans in pinecone_answers) if pinecone_answers else "None"

    # Prompt template mirrors the user's language (Arabic vs. everything else).
    # NOTE(review): the Arabic template was reconstructed from a mis-encoded
    # copy of this file — verify the wording against the original source.
    lang = detect_language(user_question)
    if lang == "ar":
        instructions = """
⚠ هام: استخدم فقط المعلومات من قاعدة البيانات.
📜 المحادثة السابقة:
{context}
👤 المستخدم يسأل: {user_question}
📌 معلومات من قاعدة البيانات:
{extracted_info}
📝 الرد:
"""
    else:
        instructions = """
⚠ Important: Use only database information.
📜 Previous conversation:
{context}
👤 User asks: {user_question}
📌 Retrieved info:
{extracted_info}
📝 Response:
"""
    prompt = instructions.format(
        context=context or ("لا يوجد" if lang == "ar" else "None"),
        user_question=user_question,
        extracted_info=extracted_info
    )
    response = model.generate_content(prompt)
    return response.text.strip()
@app.route("/ask", methods=["POST"])
def ask():
    """POST /ask — answer a user question.

    Expects a JSON body ``{"question": "..."}``; returns ``{"answer": "..."}``.
    Responds 400 when the body is missing, not JSON, or has no question.
    """
    # silent=True yields None (instead of raising / aborting) on a non-JSON
    # body, so we can return our own consistent error payload.
    data = request.get_json(silent=True) or {}
    question = data.get("question")
    if not question:
        return jsonify({"error": "Missing question"}), 400

    # Short-circuit plain greetings without touching Pinecone or Gemini.
    greeting_response = check_greeting(question)
    if greeting_response:
        return jsonify({"answer": greeting_response})

    pinecone_answer = get_answer_from_pinecone(question, embedding_model, index)
    final_answer = ask_gemini_with_combined_answer(question, pinecone_answer, chat_history)

    # NOTE(review): chat_history is global, unbounded, and shared by every
    # client — consider per-session storage with a length cap.
    chat_history.append((question, final_answer))

    return jsonify({
        "answer": final_answer
    })
@app.route("/")
def home():
    """Health-check / usage hint for the root URL.

    The leading robot emoji was restored from a mis-encoded copy of this file.
    """
    return "🤖 API is running. Use POST /ask with {'question': '...'}"
if __name__ == "__main__":
    # Bind to 0.0.0.0 so the server is reachable through Docker's port
    # mapping; 7860 matches the Hugging Face Spaces default port — presumably
    # deployed there (TODO confirm).
    app.run(host="0.0.0.0", port=7860)