Update app.py
Browse files
app.py
CHANGED
@@ -172,92 +172,202 @@ async def try_openai_api(openai_messages):
|
|
172 |
print(f"OpenAI API error: {str(e)}")
|
173 |
raise e
|
174 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
175 |
class Demo:
|
176 |
def __init__(self):
|
177 |
pass
|
178 |
|
179 |
-
|
180 |
-
|
181 |
-
|
182 |
-
|
183 |
-
|
184 |
-
|
185 |
-
|
186 |
-
|
187 |
-
|
188 |
-
|
189 |
-
|
190 |
-
|
191 |
-
|
192 |
-
|
193 |
-
|
194 |
-
|
195 |
-
|
196 |
-
|
197 |
-
|
198 |
-
|
199 |
-
|
200 |
-
|
201 |
-
|
202 |
-
|
203 |
-
|
204 |
-
|
205 |
-
|
206 |
-
|
207 |
-
None,
|
208 |
-
gr.update(active_key="loading"),
|
209 |
-
gr.update(open=True)
|
210 |
-
]
|
211 |
-
await asyncio.sleep(0)
|
212 |
-
|
213 |
-
collected_content = None
|
214 |
-
try:
|
215 |
-
async for content in try_claude_api(system_message, claude_messages):
|
216 |
-
yield [
|
217 |
-
content,
|
218 |
-
_history,
|
219 |
-
None,
|
220 |
-
gr.update(active_key="loading"),
|
221 |
-
gr.update(open=True)
|
222 |
-
]
|
223 |
-
await asyncio.sleep(0)
|
224 |
-
collected_content = content
|
225 |
-
|
226 |
-
except Exception as claude_error:
|
227 |
-
print(f"Falling back to OpenAI API due to Claude error: {str(claude_error)}")
|
228 |
-
|
229 |
-
async for content in try_openai_api(openai_messages):
|
230 |
-
yield [
|
231 |
-
content,
|
232 |
-
_history,
|
233 |
-
None,
|
234 |
-
gr.update(active_key="loading"),
|
235 |
-
gr.update(open=True)
|
236 |
-
]
|
237 |
-
await asyncio.sleep(0)
|
238 |
-
collected_content = content
|
239 |
-
|
240 |
-
if collected_content:
|
241 |
-
_history = messages_to_history([
|
242 |
-
{'role': Role.SYSTEM, 'content': system_message}
|
243 |
-
] + claude_messages + [{
|
244 |
-
'role': Role.ASSISTANT,
|
245 |
-
'content': collected_content
|
246 |
-
}])
|
247 |
-
|
248 |
-
yield [
|
249 |
-
collected_content,
|
250 |
_history,
|
251 |
-
|
252 |
-
gr.update(active_key="
|
253 |
gr.update(open=True)
|
254 |
-
|
255 |
-
|
256 |
-
|
257 |
-
|
258 |
-
|
259 |
-
|
260 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
261 |
|
262 |
def clear_history(self):
|
263 |
return []
|
|
|
172 |
print(f"OpenAI API error: {str(e)}")
|
173 |
raise e
|
174 |
|
175 |
def analyze_code(code: str) -> str:
    """Return an HTML-formatted analysis report for *code*.

    The report contains up to four sections: imported libraries, top-level
    functions (described via ``get_function_description``), Gradio UI
    components referenced as ``gr.Something``, and a fixed "how to run"
    section.

    NOTE(review): the Korean UI strings below are mojibake'd in the extracted
    source; the re-joined text should be verified against the original file's
    encoding before shipping.

    :param code: Python source text to analyze.
    :return: HTML fragments joined with newlines.
    """
    analysis = []
    lines = code.split('\n')

    # 1. Imported libraries: any line that starts with `import` or `from`.
    imports = [line.strip() for line in lines
               if line.startswith('import ') or line.startswith('from ')]
    if imports:
        analysis.append("<h2>๐ ์ฌ์ฉ๋ ๋ผ์ด๋ธ๋ฌ๋ฆฌ</h2>")
        analysis.append("<ul>")
        for imp in imports:
            analysis.append(f"<li><code>{imp}</code></li>")
        analysis.append("</ul>")

    # 2. Function blocks: a block starts at a `def` line and ends at the
    # first blank line.
    functions = []
    current_func = []
    in_function = False
    for line in lines:
        if line.strip().startswith('def '):
            # A new `def` while still collecting closes the previous block.
            if current_func:
                functions.append('\n'.join(current_func))
                current_func = []
            in_function = True
        if in_function:
            current_func.append(line)
        if in_function and not line.strip():
            in_function = False
            if current_func:
                functions.append('\n'.join(current_func))
                current_func = []
    # FIX: the original only flushed on a blank line, silently dropping the
    # last function when the source did not end with one.
    if current_func:
        functions.append('\n'.join(current_func))

    if functions:
        analysis.append("<h2>๐ง ์ฃผ์ ํจ์</h2>")
        for func in functions:
            func_name = func.split('def ')[1].split('(')[0]
            analysis.append(f"<h3><code>{func_name}</code></h3>")
            analysis.append(f"<p>{get_function_description(func)}</p>")

    # 3. Gradio UI components referenced as `gr.Something(...)`.
    ui_components = []
    for line in lines:
        if 'gr.' in line:
            component = line.split('gr.')[1].split('(')[0]
            # FIX: skip empty matches (e.g. a line ending in `gr.`).
            if component and component not in ui_components:
                ui_components.append(component)

    if ui_components:
        analysis.append("<h2>๐จ UI ์ปดํฌ๋ํธ</h2>")
        analysis.append("<ul>")
        for component in ui_components:
            analysis.append(f"<li><strong>{component}</strong>: {get_component_description(component)}</li>")
        analysis.append("</ul>")

    # 4. Fixed "how to run" instructions (deploy to a HF Space, open link).
    analysis.append("<h2>โถ๏ธ ์คํ ๋ฐฉ๋ฒ</h2>")
    analysis.append("<ol>")
    analysis.append("<li>'์คํํ๊ธฐ' ๋ฒํผ์ ํด๋ฆญํ์ฌ Hugging Face Space์ ๋ฐฐํฌ</li>")
    analysis.append("<li>์์ฑ๋ ๋งํฌ๋ฅผ ํด๋ฆญํ์ฌ ์ ํ๋ฆฌ์ผ์ด์ ์คํ</li>")
    analysis.append("</ol>")

    return "\n".join(analysis)
|
242 |
def get_function_description(func: str) -> str:
    """Return a short description of a function's purpose.

    Known function names are matched by substring against the function's
    source text; anything unrecognized gets a generic description.
    (Korean strings reproduced as-is from the source file.)

    :param func: source text of a single function definition.
    :return: a one-line description string.
    """
    known_functions = (
        ('get_multiplication_table', "์๋ ฅ๋ฐ์ ์ซ์์ ๊ตฌ๊ตฌ๋จ์ ๊ณ์ฐํ์ฌ ๋ฌธ์์ด๋ก ๋ฐํ"),
        ('get_all_tables', "2๋จ๋ถํฐ 9๋จ๊น์ง ์ ์ฒด ๊ตฌ๊ตฌ๋จ์ ์์ฑํ์ฌ ๋ฐํ"),
    )
    for needle, description in known_functions:
        if needle in func:
            return description
    # TODO: add descriptions for other functions.
    return "ํจ์์ ๊ธฐ๋ฅ ์ค๋ช"
|
251 |
+
|
252 |
+
def get_component_description(component: str) -> str:
|
253 |
+
"""UI ์ปดํฌ๋ํธ์ ๋ํ ์ค๋ช
๋ฐํ"""
|
254 |
+
descriptions = {
|
255 |
+
'Number': '์ซ์ ์
๋ ฅ ํ๋',
|
256 |
+
'Button': 'ํด๋ฆญ ๊ฐ๋ฅํ ๋ฒํผ',
|
257 |
+
'Textbox': 'ํ
์คํธ ์ถ๋ ฅ ์์ญ',
|
258 |
+
'Markdown': '๋งํฌ๋ค์ด ํ์์ ํ
์คํธ ํ์',
|
259 |
+
'Row': '์ํ ๋ฐฉํฅ ๋ ์ด์์',
|
260 |
+
'Column': '์์ง ๋ฐฉํฅ ๋ ์ด์์',
|
261 |
+
'Blocks': '์ ์ฒด UI ์ปจํ
์ด๋',
|
262 |
+
'Image': '์ด๋ฏธ์ง ํ์ ์ปดํฌ๋ํธ',
|
263 |
+
'File': 'ํ์ผ ์
๋ก๋ ์ปดํฌ๋ํธ',
|
264 |
+
'Slider': '์ฌ๋ผ์ด๋ ์
๋ ฅ ์ปดํฌ๋ํธ',
|
265 |
+
'Dropdown': '๋๋กญ๋ค์ด ์ ํ ์ปดํฌ๋ํธ',
|
266 |
+
'Radio': '๋ผ๋์ค ๋ฒํผ ๊ทธ๋ฃน',
|
267 |
+
'Checkbox': '์ฒดํฌ๋ฐ์ค ์ปดํฌ๋ํธ',
|
268 |
+
'Audio': '์ค๋์ค ์ฌ์/๋
น์ ์ปดํฌ๋ํธ',
|
269 |
+
'Video': '๋น๋์ค ์ฌ์ ์ปดํฌ๋ํธ',
|
270 |
+
'HTML': 'HTML ์ฝํ
์ธ ํ์',
|
271 |
+
'JSON': 'JSON ๋ฐ์ดํฐ ํ์',
|
272 |
+
'DataFrame': '๋ฐ์ดํฐํ๋ ์ ํ์',
|
273 |
+
'Plot': '๊ทธ๋ํ/์ฐจํธ ํ์',
|
274 |
+
'Label': '๋ ์ด๋ธ ํ
์คํธ ํ์'
|
275 |
+
}
|
276 |
+
return descriptions.get(component, '์ปดํฌ๋ํธ ์ค๋ช
')
|
277 |
+
|
278 |
+
|
279 |
+
|
280 |
class Demo:
|
281 |
def __init__(self):
|
282 |
pass
|
283 |
|
284 |
+
# Demo ํด๋์ค์ generation_code ๋ฉ์๋ ์์
|
285 |
+
async def generation_code(self, query: Optional[str], _setting: Dict[str, str], _history: Optional[History]):
|
286 |
+
if not query or query.strip() == '':
|
287 |
+
query = random.choice(DEMO_LIST)['description']
|
288 |
+
|
289 |
+
if _history is None:
|
290 |
+
_history = []
|
291 |
+
|
292 |
+
messages = history_to_messages(_history, _setting['system'])
|
293 |
+
system_message = messages[0]['content']
|
294 |
+
|
295 |
+
claude_messages = [
|
296 |
+
{"role": msg["role"] if msg["role"] != "system" else "user", "content": msg["content"]}
|
297 |
+
for msg in messages[1:] + [{'role': Role.USER, 'content': query}]
|
298 |
+
if msg["content"].strip() != ''
|
299 |
+
]
|
300 |
+
|
301 |
+
openai_messages = [{"role": "system", "content": system_message}]
|
302 |
+
for msg in messages[1:]:
|
303 |
+
openai_messages.append({
|
304 |
+
"role": msg["role"],
|
305 |
+
"content": msg["content"]
|
306 |
+
})
|
307 |
+
openai_messages.append({"role": "user", "content": query})
|
308 |
+
|
309 |
+
try:
|
310 |
+
yield [
|
311 |
+
"Generating code...",
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
312 |
_history,
|
313 |
+
None,
|
314 |
+
gr.update(active_key="loading"),
|
315 |
gr.update(open=True)
|
316 |
+
]
|
317 |
+
await asyncio.sleep(0)
|
318 |
+
|
319 |
+
collected_content = None
|
320 |
+
try:
|
321 |
+
async for content in try_claude_api(system_message, claude_messages):
|
322 |
+
code = content
|
323 |
+
analysis = analyze_code(code)
|
324 |
+
yield [
|
325 |
+
code,
|
326 |
+
_history,
|
327 |
+
analysis, # ๋ถ์ ๊ฒฐ๊ณผ๋ฅผ HTML๋ก ์ ๋ฌ
|
328 |
+
gr.update(active_key="loading"),
|
329 |
+
gr.update(open=True)
|
330 |
+
]
|
331 |
+
await asyncio.sleep(0)
|
332 |
+
collected_content = code
|
333 |
+
|
334 |
+
except Exception as claude_error:
|
335 |
+
print(f"Falling back to OpenAI API due to Claude error: {str(claude_error)}")
|
336 |
+
|
337 |
+
async for content in try_openai_api(openai_messages):
|
338 |
+
code = content
|
339 |
+
analysis = analyze_code(code)
|
340 |
+
yield [
|
341 |
+
code,
|
342 |
+
_history,
|
343 |
+
analysis, # ๋ถ์ ๊ฒฐ๊ณผ๋ฅผ HTML๋ก ์ ๋ฌ
|
344 |
+
gr.update(active_key="loading"),
|
345 |
+
gr.update(open=True)
|
346 |
+
]
|
347 |
+
await asyncio.sleep(0)
|
348 |
+
collected_content = code
|
349 |
+
|
350 |
+
if collected_content:
|
351 |
+
_history = messages_to_history([
|
352 |
+
{'role': Role.SYSTEM, 'content': system_message}
|
353 |
+
] + claude_messages + [{
|
354 |
+
'role': Role.ASSISTANT,
|
355 |
+
'content': collected_content
|
356 |
+
}])
|
357 |
+
|
358 |
+
yield [
|
359 |
+
collected_content,
|
360 |
+
_history,
|
361 |
+
analyze_code(collected_content), # ์ต์ข
๋ถ์ ๊ฒฐ๊ณผ๋ฅผ HTML๋ก ์ ๋ฌ
|
362 |
+
gr.update(active_key="render"),
|
363 |
+
gr.update(open=True)
|
364 |
+
]
|
365 |
+
else:
|
366 |
+
raise ValueError("No content was generated from either API")
|
367 |
+
|
368 |
+
except Exception as e:
|
369 |
+
print(f"Error details: {str(e)}")
|
370 |
+
raise ValueError(f'Error calling APIs: {str(e)}')
|
371 |
|
372 |
def clear_history(self):
|
373 |
return []
|