Update app.py
app.py CHANGED
@@ -281,93 +281,92 @@ class Demo:
     def __init__(self):
         pass
 
+    async def generation_code(self, query: Optional[str], _setting: Dict[str, str], _history: Optional[History]):
+        if not query or query.strip() == '':
+            query = random.choice(DEMO_LIST)['description']
+
+        if _history is None:
+            _history = []
+
+        messages = history_to_messages(_history, _setting['system'])
+        system_message = messages[0]['content']
+
+        claude_messages = [
+            {"role": msg["role"] if msg["role"] != "system" else "user", "content": msg["content"]}
+            for msg in messages[1:] + [{'role': Role.USER, 'content': query}]
+            if msg["content"].strip() != ''
+        ]
+
+        openai_messages = [{"role": "system", "content": system_message}]
+        for msg in messages[1:]:
+            openai_messages.append({
+                "role": msg["role"],
+                "content": msg["content"]
+            })
+        openai_messages.append({"role": "user", "content": query})
+
+        try:
+            yield [
+                "Generating code...",
+                _history,
+                None,
+                gr.update(active_key="loading"),
+                gr.update(open=True)
+            ]
+            await asyncio.sleep(0)
+
+            collected_content = None
+            try:
+                async for content in try_claude_api(system_message, claude_messages):
+                    code = content
+                    analysis = analyze_code(code)
+                    yield [
+                        code,
+                        _history,
+                        analysis,  # pass the analysis result to the UI as HTML
+                        gr.update(active_key="loading"),
+                        gr.update(open=True)
+                    ]
+                    await asyncio.sleep(0)
+                    collected_content = code
+
+            except Exception as claude_error:
+                print(f"Falling back to OpenAI API due to Claude error: {str(claude_error)}")
+
+                async for content in try_openai_api(openai_messages):
+                    code = content
+                    analysis = analyze_code(code)
+                    yield [
+                        code,
+                        _history,
+                        analysis,  # pass the analysis result to the UI as HTML
+                        gr.update(active_key="loading"),
+                        gr.update(open=True)
+                    ]
+                    await asyncio.sleep(0)
+                    collected_content = code
+
+            if collected_content:
+                _history = messages_to_history([
+                    {'role': Role.SYSTEM, 'content': system_message}
+                ] + claude_messages + [{
+                    'role': Role.ASSISTANT,
+                    'content': collected_content
+                }])
+
+                yield [
+                    collected_content,
+                    _history,
+                    analyze_code(collected_content),  # pass the final analysis result to the UI as HTML
+                    gr.update(active_key="render"),
+                    gr.update(open=True)
+                ]
+            else:
+                raise ValueError("No content was generated from either API")
+
+        except Exception as e:
+            print(f"Error details: {str(e)}")
+            raise ValueError(f'Error calling APIs: {str(e)}')
 
     def clear_history(self):
         return []
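
The diff calls try_claude_api and try_openai_api but does not show their definitions. As a rough, hypothetical sketch only (not this app's actual implementation), streaming helpers with that shape are commonly written as async generators that yield the accumulated text after each chunk, which is the contract generation_code appears to rely on when it reassigns code = content on every iteration. The client setup, model names, and parameters below are placeholder assumptions based on the current Anthropic and OpenAI Python SDKs.

# Hypothetical sketch -- the real try_claude_api / try_openai_api are not shown in this diff.
# Assumes the official `anthropic` and `openai` async clients; model names are placeholders.
import os
from anthropic import AsyncAnthropic
from openai import AsyncOpenAI

claude_client = AsyncAnthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))
openai_client = AsyncOpenAI(api_key=os.getenv("OPENAI_API_KEY"))

async def try_claude_api(system_message, claude_messages):
    # Stream tokens from Claude and yield the accumulated text after each chunk,
    # so the caller can re-render the partial code on every yield.
    collected = ""
    async with claude_client.messages.stream(
        model="claude-3-5-sonnet-20241022",   # placeholder model name
        max_tokens=4096,
        system=system_message,
        messages=claude_messages,
    ) as stream:
        async for text in stream.text_stream:
            collected += text
            yield collected

async def try_openai_api(openai_messages):
    # Same contract as try_claude_api: yield the accumulated text so far.
    collected = ""
    stream = await openai_client.chat.completions.create(
        model="gpt-4o",                       # placeholder model name
        messages=openai_messages,
        stream=True,
    )
    async for chunk in stream:
        delta = chunk.choices[0].delta.content
        if delta:
            collected += delta
            yield collected

Because both helpers would yield cumulative text with the same meaning, generation_code can treat them interchangeably: it streams from Claude first and, if that raises, replays the request through the OpenAI helper without changing any of the UI-update logic around the yields.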