Update app.py
app.py CHANGED

@@ -86,8 +86,7 @@ YOUR_OPENAI_TOKEN = os.getenv('OPENAI_API_KEY')
 claude_client = anthropic.Anthropic(api_key=YOUR_ANTHROPIC_TOKEN)
 openai_client = openai.OpenAI(api_key=YOUR_OPENAI_TOKEN)
 
-
-async def try_claude_api(system_message, claude_messages, timeout=15):  # timeout increased to 15 seconds
+async def try_claude_api(system_message, claude_messages, timeout=15):
     try:
         start_time = time.time()
         with claude_client.messages.stream(
@@ -97,14 +96,15 @@ async def try_claude_api(system_message, claude_messages, timeout=15):
             messages=claude_messages
         ) as stream:
             collected_content = ""
-            async for chunk in stream:
+            for chunk in stream:  # removed async for, use a plain for loop
                 current_time = time.time()
                 if current_time - start_time > timeout:
                     print(f"Claude API response time: {current_time - start_time:.2f} seconds")
                     raise TimeoutError("Claude API timeout")
                 if chunk.type == "content_block_delta":
                     collected_content += chunk.delta.text
-                    yield collected_content
+                    yield collected_content  # yield immediately for each chunk
+                    await asyncio.sleep(0)  # async yield so streaming stays responsive
 
                 # reset the timeout counter for each chunk
                 start_time = current_time
@@ -112,6 +112,8 @@ async def try_claude_api(system_message, claude_messages, timeout=15):
     except Exception as e:
         print(f"Claude API error: {str(e)}")
         raise e
+
+
 
 async def try_openai_api(openai_messages):
     try:
@@ -161,6 +163,7 @@ async def generation_code(query: Optional[str], _setting: Dict[str, str], _histo
     openai_messages.append({"role": "user", "content": query})
 
     try:
+        # open the code viewer first
         yield [
             "Generating code...",
             _history,
@@ -168,6 +171,7 @@ async def generation_code(query: Optional[str], _setting: Dict[str, str], _histo
             gr.update(active_key="loading"),
             gr.update(open=True)
         ]
+        await asyncio.sleep(0)  # yield to the event loop so the UI can update
 
         collected_content = None
         # try the Claude API first
@@ -180,7 +184,8 @@ async def generation_code(query: Optional[str], _setting: Dict[str, str], _histo
                     gr.update(active_key="loading"),
                    gr.update(open=True)
                ]
-                collected_content = content
+                await asyncio.sleep(0)  # yield to the event loop so the UI can update
+                collected_content = content
 
        except Exception as claude_error:
            print(f"Falling back to OpenAI API due to Claude error: {str(claude_error)}")
@@ -194,7 +199,8 @@ async def generation_code(query: Optional[str], _setting: Dict[str, str], _histo
                    gr.update(active_key="loading"),
                    gr.update(open=True)
                ]
-                collected_content = content
+                await asyncio.sleep(0)  # yield to the event loop so the UI can update
+                collected_content = content
 
        if collected_content:
            _history = messages_to_history([