hadheedo commited on
Commit
4195aed
·
verified ·
1 Parent(s): 9b2d93a

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +45 -18
main.py CHANGED
@@ -1,39 +1,61 @@
1
  import os
2
-
3
  # ุชุญุฏูŠุฏ ู…ุฌู„ุฏ ู„ู„ูƒุงุด ุฏุงุฎู„ Docker
4
  os.environ['TRANSFORMERS_CACHE'] = '/tmp/huggingface_cache'
5
  os.environ['HF_HOME'] = '/tmp/huggingface'
6
-
7
  from flask import Flask, request, jsonify
8
  from sentence_transformers import SentenceTransformer
9
  from pinecone import Pinecone
10
  import google.generativeai as genai
11
  from langdetect import detect
12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  # ุชู‡ูŠุฆุฉ ุงู„ู…ูˆุฏูŠู„ุงุช
14
  embedding_model = SentenceTransformer("intfloat/multilingual-e5-large")
15
-
16
  pc = Pinecone(api_key="pcsk_3ax4D8_PH7vWF1KWAMRpyjmEnXhwxswmHSjvqgwovna3xGGbfsgZsMRtRyFi9uCpPyi4B9")
17
  index = pc.Index("newindex")
18
-
19
  genai.configure(api_key="AIzaSyBXtRzMkpm9RNDO09A9N3XoG_vfjgUe5Vw")
20
  model = genai.GenerativeModel("gemini-2.0-flash")
21
-
22
  app = Flask(__name__)
23
  chat_history = []
24
-
25
  def detect_language(text):
26
  try:
27
  return detect(text)
28
  except:
29
  return "unknown"
30
-
31
  def get_answer_from_pinecone(user_question, embedding_model, index, top_k=5, similarity_threshold=0.7):
32
  try:
33
  question_vector = embedding_model.encode(user_question).tolist()
34
  except Exception as e:
35
  return [f"โŒ Error embedding question: {e}"]
36
-
37
  try:
38
  search_result = index.query(
39
  vector=question_vector,
@@ -42,10 +64,10 @@ def get_answer_from_pinecone(user_question, embedding_model, index, top_k=5, sim
42
  )
43
  except Exception as e:
44
  return [f"โŒ Error querying Pinecone: {e}"]
45
-
46
  matches = [m for m in search_result.matches if m.score >= similarity_threshold]
47
  sorted_matches = sorted(matches, key=lambda x: x.score, reverse=True)
48
-
49
  answers = []
50
  for m in sorted_matches:
51
  answer = m.metadata.get('answer', '').strip()
@@ -54,12 +76,12 @@ def get_answer_from_pinecone(user_question, embedding_model, index, top_k=5, sim
54
  if answer:
55
  answers.append(f"โ€ข ({score}) from [{source}]:\n{answer}")
56
  return answers if answers else ["โš ๏ธ No similar answers found."]
57
-
58
  def ask_gemini_with_combined_answer(user_question, pinecone_answers=[], history=[]):
59
  context = "\n".join([f"๐Ÿ‘ค {q}\n๐Ÿค– {a}" for q, a in history])
60
  extracted_info = "\n".join([f"โ€ข {ans}" for ans in pinecone_answers]) if pinecone_answers else "None"
61
  lang = detect_language(user_question)
62
-
63
  if lang == "ar":
64
 
65
  instructions = """
@@ -82,32 +104,37 @@ def ask_gemini_with_combined_answer(user_question, pinecone_answers=[], history=
82
  {extracted_info}
83
  ๐Ÿ“Œ Response:
84
  """
85
-
86
  prompt = instructions.format(
87
  context=context or ("ู„ุง ูŠูˆุฌุฏ" if lang == "ar" else "None"),
88
  user_question=user_question,
89
  extracted_info=extracted_info
90
  )
91
  response = model.generate_content(prompt)
92
- return response.text.strip() # โ† ุชู… ุชุนุฏูŠู„ ุงู„ุณุทุฑ ุฏู‡ ูู‚ุท
93
-
94
  @app.route("/ask", methods=["POST"])
95
  def ask():
96
  data = request.json
97
  question = data.get("question")
98
  if not question:
99
  return jsonify({"error": "Missing question"}), 400
 
 
 
 
 
 
100
  pinecone_answer = get_answer_from_pinecone(question, embedding_model, index)
101
  final_answer = ask_gemini_with_combined_answer(question, pinecone_answer, chat_history)
102
  chat_history.append((question, final_answer))
103
  return jsonify({
104
-
105
  "answer": final_answer
106
  })
107
-
108
  @app.route("/")
109
  def home():
110
  return "๐Ÿค– API is running. Use POST /ask with {'question': '...'}"
111
-
112
  if __name__ == "__main__":
113
  app.run(host="0.0.0.0", port=7860)
 
1
import os
import re

# Point the Hugging Face caches at a writable directory inside the Docker
# container.  These must be set BEFORE sentence_transformers is imported below.
os.environ['TRANSFORMERS_CACHE'] = '/tmp/huggingface_cache'
os.environ['HF_HOME'] = '/tmp/huggingface'

from flask import Flask, request, jsonify
from sentence_transformers import SentenceTransformer
from pinecone import Pinecone
import google.generativeai as genai
from langdetect import detect
 
13
# Canned replies for common greetings (Arabic and English).
greetings_dict = {
    "ุงู„ุณู„ุงู… ุนู„ูŠูƒู…": "ูˆุนู„ูŠูƒู… ุงู„ุณู„ุงู…",
    "ุตุจุงุญ ุงู„ุฎูŠุฑ": "ุตุจุงุญ ุงู„ู†ูˆุฑ",
    "ู…ุณุงุก ุงู„ุฎูŠุฑ": "ู…ุณุงุก ุงู„ู†ูˆุฑ",
    "ุฃู‡ู„ุง": "ุฃู‡ู„ุง ุจูŠูƒ",
    "ุฃู‡ู„ุงู‹": "ุฃู‡ู„ุงู‹ ูˆุณู‡ู„ุงู‹",
    "ู‡ุงูŠ": "ู‡ุงูŠ",
    "ู‡ู„ุง": "ู‡ู„ุง ููŠูƒ",
    "hello": "hello!",
    "hi": "hi!",
    "hey": "hey there!",
    "ุงุฒูŠูƒ": "ุงู„ุญู…ุฏ ู„ู„ู‡ุŒ ุงู†ุช ุนุงู…ู„ ุงูŠู‡ุŸ",
    "ุงุฒูŠูƒุŸ": "ุงู„ุญู…ุฏ ู„ู„ู‡ุŒ ุงู†ุช ุนุงู…ู„ ุงูŠู‡ุŸ"
}

def check_greeting(question):
    """Return the canned reply when *question* contains a known greeting.

    Matching is case-insensitive and anchored at word boundaries, fixing the
    previous plain-substring test that fired on short English greetings
    embedded in unrelated words (e.g. "hi" inside "think" or "history").

    Returns:
        The reply string for the first matching greeting (dict insertion
        order), or None when no greeting is found.
    """
    for greeting, reply in greetings_dict.items():
        # (?<!\w)/(?!\w) instead of \b so the pattern behaves sensibly
        # whether the greeting ends in a word character or punctuation
        # (e.g. the "ุงุฒูŠูƒุŸ" key, which ends in the Arabic question mark).
        pattern = r"(?<!\w)" + re.escape(greeting) + r"(?!\w)"
        if re.search(pattern, question, re.IGNORECASE):
            return reply
    return None
34
+
35
# Model and service initialization.
embedding_model = SentenceTransformer("intfloat/multilingual-e5-large")

# SECURITY: the Pinecone and Gemini API keys were previously hard-coded here
# and are therefore leaked in version-control history — rotate them.  They
# are now read from the environment; deployment must set PINECONE_API_KEY
# and GEMINI_API_KEY (a missing variable fails fast with KeyError at startup).
pc = Pinecone(api_key=os.environ["PINECONE_API_KEY"])
index = pc.Index("newindex")

genai.configure(api_key=os.environ["GEMINI_API_KEY"])
model = genai.GenerativeModel("gemini-2.0-flash")

app = Flask(__name__)
# In-memory conversation history shared by ALL clients; grows without bound
# for the lifetime of the process.
chat_history = []
46
+
47
def detect_language(text):
    """Best-effort language detection via langdetect.

    Args:
        text: the text to classify.

    Returns:
        A language code as produced by langdetect (e.g. "ar", "en"),
        or "unknown" when detection fails (empty or ambiguous input).
    """
    try:
        return detect(text)
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit and masked unrelated bugs.
    except Exception:
        return "unknown"
52
+
53
  def get_answer_from_pinecone(user_question, embedding_model, index, top_k=5, similarity_threshold=0.7):
54
  try:
55
  question_vector = embedding_model.encode(user_question).tolist()
56
  except Exception as e:
57
  return [f"โŒ Error embedding question: {e}"]
58
+
59
  try:
60
  search_result = index.query(
61
  vector=question_vector,
 
64
  )
65
  except Exception as e:
66
  return [f"โŒ Error querying Pinecone: {e}"]
67
+
68
  matches = [m for m in search_result.matches if m.score >= similarity_threshold]
69
  sorted_matches = sorted(matches, key=lambda x: x.score, reverse=True)
70
+
71
  answers = []
72
  for m in sorted_matches:
73
  answer = m.metadata.get('answer', '').strip()
 
76
  if answer:
77
  answers.append(f"โ€ข ({score}) from [{source}]:\n{answer}")
78
  return answers if answers else ["โš ๏ธ No similar answers found."]
79
+
80
  def ask_gemini_with_combined_answer(user_question, pinecone_answers=[], history=[]):
81
  context = "\n".join([f"๐Ÿ‘ค {q}\n๐Ÿค– {a}" for q, a in history])
82
  extracted_info = "\n".join([f"โ€ข {ans}" for ans in pinecone_answers]) if pinecone_answers else "None"
83
  lang = detect_language(user_question)
84
+
85
  if lang == "ar":
86
 
87
  instructions = """
 
104
  {extracted_info}
105
  ๐Ÿ“Œ Response:
106
  """
107
+
108
  prompt = instructions.format(
109
  context=context or ("ู„ุง ูŠูˆุฌุฏ" if lang == "ar" else "None"),
110
  user_question=user_question,
111
  extracted_info=extracted_info
112
  )
113
  response = model.generate_content(prompt)
114
+ return response.text.strip()
115
+
116
@app.route("/ask", methods=["POST"])
def ask():
    """Answer a user question posted as JSON: {"question": "..."}.

    Flow: canned greeting short-circuit -> Pinecone retrieval -> Gemini
    composition.  Responds with {"answer": ...} or a 400 error payload.
    """
    # silent=True returns None for a missing/invalid JSON body instead of
    # raising (request.json would make data None / raise 415, and the old
    # data.get(...) then crashed with a 500 AttributeError).
    data = request.get_json(silent=True)
    question = data.get("question") if data else None
    if not question:
        return jsonify({"error": "Missing question"}), 400

    # Greeting check: reply immediately without touching Pinecone/Gemini.
    greeting_response = check_greeting(question)
    if greeting_response:
        return jsonify({"answer": greeting_response})

    pinecone_answer = get_answer_from_pinecone(question, embedding_model, index)
    final_answer = ask_gemini_with_combined_answer(question, pinecone_answer, chat_history)
    # NOTE(review): chat_history is global, shared across all clients, and
    # unbounded — consider per-session storage and a size cap.
    chat_history.append((question, final_answer))
    return jsonify({
        "answer": final_answer
    })
134
+
135
@app.route("/")
def home():
    """Health-check endpoint: confirms the API is up and shows usage."""
    usage_hint = "🤖 API is running. Use POST /ask with {'question': '...'}"
    return usage_hint
138
+
139
  if __name__ == "__main__":
140
  app.run(host="0.0.0.0", port=7860)