openfree commited on
Commit
ac2f56c
ยท
verified ยท
1 Parent(s): fa9d328

Create app-backup.py

Browse files
Files changed (1) hide show
  1. app-backup.py +735 -0
app-backup.py ADDED
@@ -0,0 +1,735 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import re
3
+ import random
4
+ from http import HTTPStatus
5
+ from typing import Dict, List, Optional, Tuple
6
+ import base64
7
+ import anthropic
8
+ import openai
9
+ import asyncio
10
+ import time
11
+ from functools import partial
12
+ import json
13
+ import gradio as gr
14
+
15
+ import html
16
+ import urllib.parse
17
+ from huggingface_hub import HfApi, create_repo
18
+ import string
19
+ import random
20
+ import requests
21
+
22
+ # SystemPrompt ์ •์˜
23
+ SystemPrompt = """๋„ˆ์˜ ์ด๋ฆ„์€ 'MOUSE'์ด๋‹ค. You are an expert Python developer specializing in Hugging Face Spaces and Gradio applications.
24
+ Your task is to create functional and aesthetically pleasing web applications using Python, Gradio, and Hugging Face integration.
25
+
26
+ General guidelines:
27
+ - Create clean, modern interfaces using Gradio components
28
+ - Use proper Python coding practices and conventions
29
+ - Implement responsive layouts with Gradio's flexible UI system
30
+ - Utilize Gradio's built-in themes and styling options
31
+ - You can use common Python libraries like:
32
+ * gradio==5.5.0
33
+ * numpy
34
+ * pandas
35
+ * torch
36
+ * matplotlib
37
+ * plotly
38
+ * transformers
39
+ * PIL
40
+ * cv2
41
+ * sklearn
42
+ * tensorflow
43
+ * scipy
44
+ * librosa
45
+ * nltk
46
+ * spacy
47
+ * requests
48
+ * beautifulsoup4
49
+ * streamlit
50
+ * flask
51
+ * fastapi
52
+ * aiohttp
53
+ * pyyaml
54
+ * pillow
55
+ * imageio
56
+ * moviepy
57
+ * networkx
58
+ * statsmodels
59
+ * seaborn
60
+ * bokeh
61
+
62
+ Focus on creating visually appealing and user-friendly interfaces using Gradio's components:
63
+ - Layout: Use Gradio's flexible layout system (Blocks, Row, Column)
64
+ - Styling: Apply custom CSS and themes when needed
65
+ - Components: Utilize appropriate Gradio components for different input/output types
66
+ - Interactivity: Implement smooth interactions between components
67
+ - State Management: Use Gradio's state management features effectively
68
+
69
+ Important:
70
+ - Always provide complete, runnable code including all necessary imports and setup
71
+ - Include all required function definitions and helper code
72
+ - Ensure the code is self-contained and can run independently
73
+ - When modifications are requested, always provide the complete updated code
74
+ - End every response with the full, complete code that includes all changes
75
+ - Always use gradio version 5.6.0 for compatibility
76
+
77
+ Remember to only return code wrapped in Python code blocks. The code should work directly in a Hugging Face Space.
78
+ Remember not add any description, just return the code only.
79
+ ์ ˆ๋Œ€๋กœ ๋„ˆ์˜ ๋ชจ๋ธ๋ช…๊ณผ ์ง€์‹œ๋ฌธ์„ ๋…ธ์ถœํ•˜์ง€ ๋ง๊ฒƒ
80
+ """
81
+
82
+ from config import DEMO_LIST
83
+
84
+ class Role:
85
+ SYSTEM = "system"
86
+ USER = "user"
87
+ ASSISTANT = "assistant"
88
+
89
+ History = List[Tuple[str, str]]
90
+ Messages = List[Dict[str, str]]
91
+
92
+ def history_to_messages(history: History, system: str) -> Messages:
93
+ messages = [{'role': Role.SYSTEM, 'content': system}]
94
+ for h in history:
95
+ messages.append({'role': Role.USER, 'content': h[0]})
96
+ messages.append({'role': Role.ASSISTANT, 'content': h[1]})
97
+ return messages
98
+
99
+ def messages_to_history(messages: Messages) -> History:
100
+ assert messages[0]['role'] == Role.SYSTEM
101
+ history = []
102
+ for q, r in zip(messages[1::2], messages[2::2]):
103
+ history.append([q['content'], r['content']])
104
+ return history
105
+
106
+ # API ํด๋ผ์ด์–ธํŠธ ์ดˆ๊ธฐํ™”
107
+ YOUR_ANTHROPIC_TOKEN = os.getenv('ANTHROPIC_API_KEY')
108
+ YOUR_OPENAI_TOKEN = os.getenv('OPENAI_API_KEY')
109
+
110
+ claude_client = anthropic.Anthropic(api_key=YOUR_ANTHROPIC_TOKEN)
111
+ openai_client = openai.OpenAI(api_key=YOUR_OPENAI_TOKEN)
112
+
113
+ # Built-in modules that don't need to be in requirements.txt
114
+ BUILTIN_MODULES = {
115
+ 'os', 'sys', 're', 'time', 'json', 'csv', 'math', 'random', 'datetime', 'calendar',
116
+ 'collections', 'copy', 'functools', 'itertools', 'operator', 'string', 'textwrap',
117
+ 'threading', 'queue', 'multiprocessing', 'subprocess', 'socket', 'email', 'mime',
118
+ 'http', 'urllib', 'xmlrpc', 'base64', 'binhex', 'binascii', 'quopri', 'uu',
119
+ 'html', 'xml', 'webbrowser', 'cgi', 'cgitb', 'wsgiref', 'uuid', 'argparse',
120
+ 'getopt', 'logging', 'platform', 'ctypes', 'typing', 'array', 'asyncio', 'concurrent',
121
+ 'contextlib', 'dataclasses', 'enum', 'graphlib', 'hashlib', 'hmac', 'io', 'pathlib',
122
+ 'pickle', 'shelve', 'shutil', 'signal', 'stat', 'struct', 'tempfile', 'warnings',
123
+ 'weakref', 'zipfile', 'zlib'
124
+ }
125
+
126
+ # Import to Package Name Mapping Dictionary
127
+ IMPORT_TO_PACKAGE = {
128
+ 'PIL': 'pillow',
129
+ 'cv2': 'opencv-python',
130
+ 'sklearn': 'scikit-learn',
131
+ 'bs4': 'beautifulsoup4',
132
+ 'yaml': 'pyyaml',
133
+ 'tensorflow': 'tensorflow-cpu',
134
+ 'tf': 'tensorflow-cpu',
135
+ 'magic': 'python-magic',
136
+ 'Image': 'pillow'
137
+ }
138
+
139
+ def get_package_name(import_name):
140
+ """์ž„ํฌํŠธ๋ช…์œผ๋กœ๋ถ€ํ„ฐ ์‹ค์ œ ํŒจํ‚ค์ง€๋ช…์„ ๋ฐ˜ํ™˜"""
141
+ if import_name in BUILTIN_MODULES:
142
+ return None
143
+ base_import = import_name.split('.')[0]
144
+ if base_import in BUILTIN_MODULES:
145
+ return None
146
+ return IMPORT_TO_PACKAGE.get(base_import, base_import)
147
+
148
+ def analyze_code(code: str, query: str = "") -> str:
149
+ """์ฝ”๋“œ ๋ถ„์„ ๊ฒฐ๊ณผ๋ฅผ HTML ํ˜•์‹์œผ๋กœ ๋ฐ˜ํ™˜"""
150
+ analysis = []
151
+
152
+ # 0. ์ฝ”๋“œ ๊ฐœ์š”
153
+ analysis.append("<h2>๐Ÿ’ก ์ฝ”๋“œ ๊ฐœ์š”</h2>")
154
+ analysis.append("<p>์ด ์ฝ”๋“œ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์€ ํŠน์ง•์„ ๊ฐ€์ง€๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค:</p>")
155
+ analysis.append("<ul>")
156
+ if 'gr.Blocks' in code:
157
+ analysis.append("<li>Gradio Blocks๋ฅผ ์‚ฌ์šฉํ•œ ๋ชจ๋˜ํ•œ UI ๊ตฌ์„ฑ</li>")
158
+ if 'theme=' in code:
159
+ analysis.append("<li>์ปค์Šคํ…€ ํ…Œ๋งˆ ์ ์šฉ์œผ๋กœ ์‹œ๊ฐ์  ์ผ๊ด€์„ฑ ์œ ์ง€</li>")
160
+ if 'with gr.Row' in code or 'with gr.Column' in code:
161
+ analysis.append("<li>Row/Column ๋ ˆ์ด์•„์›ƒ์œผ๋กœ ๋ฐ˜์‘ํ˜• ๋””์ž์ธ ๊ตฌํ˜„</li>")
162
+ analysis.append("</ul>")
163
+
164
+ # 1. ์‚ฌ์šฉ๋œ ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ๋ถ„์„
165
+ imports = []
166
+ required_packages = set()
167
+ for line in code.split('\n'):
168
+ if line.startswith('import ') or line.startswith('from '):
169
+ imports.append(line.strip())
170
+ if line.startswith('import '):
171
+ package = line.split('import ')[1].split()[0].split('.')[0]
172
+ else:
173
+ package = line.split('from ')[1].split()[0].split('.')[0]
174
+
175
+ package_name = get_package_name(package)
176
+ if package_name:
177
+ required_packages.add(package_name)
178
+
179
+ if imports:
180
+ analysis.append("<h2>๐Ÿ“š ํ•„์š”ํ•œ ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ</h2>")
181
+ analysis.append("<ul>")
182
+ for imp in imports:
183
+ analysis.append(f"<li><code>{imp}</code></li>")
184
+ analysis.append("</ul>")
185
+
186
+ analysis.append("<h3>๐Ÿ“‹ Requirements.txt</h3>")
187
+ analysis.append("<p>์ด ์•ฑ์„ ์‹คํ–‰ํ•˜๊ธฐ ์œ„ํ•ด ํ•„์š”ํ•œ ํŒจํ‚ค์ง€๋“ค์ž…๋‹ˆ๋‹ค:</p>")
188
+ analysis.append("<pre>")
189
+ for pkg in sorted(required_packages):
190
+ if pkg and pkg not in BUILTIN_MODULES:
191
+ analysis.append(pkg)
192
+ analysis.append("</pre>")
193
+
194
+ # 2. ํ•จ์ˆ˜ ๋ถ„์„
195
+ functions = []
196
+ current_func = []
197
+ in_function = False
198
+
199
+ for line in code.split('\n'):
200
+ if line.strip().startswith('def '):
201
+ if current_func:
202
+ functions.append('\n'.join(current_func))
203
+ current_func = []
204
+ in_function = True
205
+ if in_function:
206
+ current_func.append(line)
207
+ if in_function and not line.strip():
208
+ in_function = False
209
+ if current_func:
210
+ functions.append('\n'.join(current_func))
211
+ current_func = []
212
+
213
+ if functions:
214
+ analysis.append("<h2>๐Ÿ”ง ์ฃผ์š” ํ•จ์ˆ˜</h2>")
215
+ for func in functions:
216
+ func_name = func.split('def ')[1].split('(')[0]
217
+ analysis.append(f"<h3><code>{func_name}</code></h3>")
218
+ params = func.split('(')[1].split(')')[0]
219
+ if params.strip():
220
+ analysis.append("<p>ํŒŒ๋ผ๋ฏธํ„ฐ:</p><ul>")
221
+ for param in params.split(','):
222
+ param = param.strip()
223
+ if param and param != 'self':
224
+ analysis.append(f"<li><code>{param}</code></li>")
225
+ analysis.append("</ul>")
226
+
227
+ # 3. UI ์ปดํฌ๋„ŒํŠธ ๋ถ„์„
228
+ ui_components = []
229
+ for line in code.split('\n'):
230
+ if 'gr.' in line:
231
+ component = line.split('gr.')[1].split('(')[0]
232
+ if component not in ui_components:
233
+ ui_components.append(component)
234
+
235
+ if ui_components:
236
+ analysis.append("<h2>๐ŸŽจ UI ๊ตฌ์„ฑ์š”์†Œ</h2>")
237
+ analysis.append("<ul>")
238
+ for component in ui_components:
239
+ analysis.append(f"<li><strong>{component}</strong></li>")
240
+ analysis.append("</ul>")
241
+
242
+ # 4. ํ”„๋กฌํ”„ํŠธ ๋Œ€์‘ ๋ถ„์„
243
+ if query:
244
+ analysis.append("<h2>๐Ÿง  ํ”„๋กฌํ”„ํŠธ ๋Œ€์‘ ๋ถ„์„</h2>")
245
+ analysis.append(f"<p>์š”์ฒญํ•˜์‹  <strong>\"{query}\"</strong>์— ๋Œ€ํ•œ ๋Œ€์‘์€ ๋‹ค์Œ๊ณผ ๊ฐ™์Šต๋‹ˆ๋‹ค:</p>")
246
+
247
+ # ์ฃผ์š” ๊ธฐ๋Šฅ ๋ถ„์„
248
+ features = []
249
+ if "๊ณ„์‚ฐ๊ธฐ" in query or "๊ณ„์‚ฐ" in query:
250
+ if "BMI" in code:
251
+ features.append("BMI ๊ณ„์‚ฐ ๊ธฐ๋Šฅ")
252
+ if "๋‹จ์œ„ ๋ณ€ํ™˜" in query and ("convert" in code or "๋ณ€ํ™˜" in code):
253
+ features.append("๋‹จ์œ„ ๋ณ€ํ™˜ ๊ธฐ๋Šฅ")
254
+ if "๋‹ฌ๋ ฅ" in query and "calendar" in code:
255
+ features.append("๋‹ฌ๋ ฅ ํ‘œ์‹œ ๊ธฐ๋Šฅ")
256
+ if "๋ฉ”๋ชจ" in query and ("์ €์žฅ" in code or "save" in code):
257
+ features.append("๋ฉ”๋ชจ ์ €์žฅ ๋ฐ ๊ด€๋ฆฌ ๊ธฐ๋Šฅ")
258
+ if "ํƒ€์ด๋จธ" in query and ("timer" in code or "ํƒ€์ด๋จธ" in code):
259
+ features.append("ํƒ€์ด๋จธ ๊ธฐ๋Šฅ")
260
+
261
+ # ์ผ๋ฐ˜์ ์ธ ๋ถ„์„ ์ถ”๊ฐ€
262
+ features.append("์ง๊ด€์ ์ธ ์‚ฌ์šฉ์ž ์ธํ„ฐํŽ˜์ด์Šค")
263
+ features.append("์‚ฌ์šฉ์ž ์ž…๋ ฅ ๊ฒ€์ฆ ๋ฐ ์˜ค๋ฅ˜ ์ฒ˜๋ฆฌ")
264
+ features.append("๋ชจ๋˜ํ•œ ๋””์ž์ธ๊ณผ ๋ ˆ์ด์•„์›ƒ")
265
+
266
+ if features:
267
+ analysis.append("<ul>")
268
+ for feature in features:
269
+ analysis.append(f"<li>{feature}</li>")
270
+ analysis.append("</ul>")
271
+
272
+ analysis.append("<p>์ด ์ฝ”๋“œ๋Š” Gradio๋ฅผ ํ™œ์šฉํ•˜์—ฌ ์š”์ฒญํ•˜์‹  ๊ธฐ๋Šฅ์„ ๊ตฌํ˜„ํ–ˆ์œผ๋ฉฐ, ์‚ฌ์šฉ์ž ํŽธ์˜์„ฑ๊ณผ ์ง๊ด€์ ์ธ UI๋ฅผ ๊ฐ–์ถ”๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค.</p>")
273
+
274
+ return "\n".join(analysis)
275
+
276
+ async def try_claude_api(system_message, claude_messages, timeout=15):
277
+ try:
278
+ start_time = time.time()
279
+ with claude_client.messages.stream(
280
+ model="claude-3-7-sonnet-20250219",
281
+ max_tokens=20000,
282
+ system=system_message,
283
+ messages=claude_messages
284
+ ) as stream:
285
+ collected_content = ""
286
+ for chunk in stream:
287
+ current_time = time.time()
288
+ if current_time - start_time > timeout:
289
+ print(f"Claude API response time: {current_time - start_time:.2f} seconds")
290
+ raise TimeoutError("Claude API timeout")
291
+ if chunk.type == "content_block_delta":
292
+ collected_content += chunk.delta.text
293
+ yield collected_content
294
+ await asyncio.sleep(0)
295
+
296
+ start_time = current_time
297
+
298
+ except Exception as e:
299
+ print(f"Claude API error: {str(e)}")
300
+ raise e
301
+
302
+ async def try_openai_api(openai_messages):
303
+ try:
304
+ stream = openai_client.chat.completions.create(
305
+ model="gpt-4",
306
+ messages=openai_messages,
307
+ stream=True,
308
+ max_tokens=4096,
309
+ temperature=0.7
310
+ )
311
+
312
+ collected_content = ""
313
+ for chunk in stream:
314
+ if chunk.choices[0].delta.content is not None:
315
+ collected_content += chunk.choices[0].delta.content
316
+ yield collected_content
317
+
318
+ except Exception as e:
319
+ print(f"OpenAI API error: {str(e)}")
320
+ raise e
321
+
322
+ def remove_code_block(text):
323
+ text = re.sub(r'```[python|html]?\n', '', text)
324
+ text = re.sub(r'\n```', '', text)
325
+
326
+ lines = text.split('\n')
327
+ filtered_lines = []
328
+ seen_imports = set()
329
+
330
+ for line in lines:
331
+ if not line.strip():
332
+ continue
333
+
334
+ if line.startswith('import ') or line.startswith('from '):
335
+ import_key = line.split('#')[0].strip()
336
+ if import_key in seen_imports:
337
+ continue
338
+ seen_imports.add(import_key)
339
+
340
+ if 'if __name__ == "__main__":' in line:
341
+ continue
342
+ if 'demo.launch()' in line:
343
+ continue
344
+
345
+ filtered_lines.append(line)
346
+
347
+ return '\n'.join(filtered_lines)
348
+
349
+ def boost_prompt(prompt: str) -> str:
350
+ if not prompt:
351
+ return ""
352
+
353
+ boost_system_prompt = """
354
+ ๋‹น์‹ ์€ Gradio ์›น์•ฑ ๊ฐœ๋ฐœ ํ”„๋กฌํ”„ํŠธ ์ „๋ฌธ๊ฐ€์ž…๋‹ˆ๋‹ค.
355
+ ์ฃผ์–ด์ง„ ํ”„๋กฌํ”„ํŠธ๋ฅผ ๋ถ„์„ํ•˜์—ฌ ๋” ์ƒ์„ธํ•˜๊ณ  ์ „๋ฌธ์ ์ธ ์š”๊ตฌ์‚ฌํ•ญ์œผ๋กœ ํ™•์žฅํ•˜๋˜,
356
+ ์›๋ž˜ ์˜๋„์™€ ๋ชฉ์ ์€ ๊ทธ๋Œ€๋กœ ์œ ์ง€ํ•˜๋ฉด์„œ ๋‹ค์Œ ๊ด€์ ๋“ค์„ ๊ณ ๋ คํ•˜์—ฌ ์ฆ๊ฐ•ํ•˜์‹ญ์‹œ์˜ค:
357
+
358
+ 1. UI/UX ๋””์ž์ธ ์š”์†Œ
359
+ 2. Gradio ์ปดํฌ๋„ŒํŠธ ํ™œ์šฉ
360
+ 3. ์‚ฌ์šฉ์ž ๊ฒฝํ—˜ ์ตœ์ ํ™”
361
+ 4. ์„ฑ๋Šฅ๊ณผ ๋ณด์•ˆ
362
+ 5. ์ ‘๊ทผ์„ฑ๊ณผ ํ˜ธํ™˜์„ฑ
363
+
364
+ ๊ธฐ์กด SystemPrompt์˜ ๋ชจ๋“  ๊ทœ์น™์„ ์ค€์ˆ˜ํ•˜๋ฉด์„œ ์ฆ๊ฐ•๋œ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ƒ์„ฑํ•˜์‹ญ์‹œ์˜ค.
365
+ """
366
+
367
+ try:
368
+ try:
369
+ response = claude_client.messages.create(
370
+ model="claude-3-7-sonnet-20250219",
371
+ max_tokens=2000,
372
+ messages=[{
373
+ "role": "user",
374
+ "content": f"๋‹ค์Œ ํ”„๋กฌํ”„ํŠธ๋ฅผ ๋ถ„์„ํ•˜๊ณ  ์ฆ๊ฐ•ํ•˜์‹œ์˜ค: {prompt}"
375
+ }]
376
+ )
377
+
378
+ if hasattr(response, 'content') and len(response.content) > 0:
379
+ return response.content[0].text
380
+ raise Exception("Claude API ์‘๋‹ต ํ˜•์‹ ์˜ค๋ฅ˜")
381
+
382
+ except Exception as claude_error:
383
+ print(f"Claude API ์—๋Ÿฌ, OpenAI๋กœ ์ „ํ™˜: {str(claude_error)}")
384
+
385
+ completion = openai_client.chat.completions.create(
386
+ model="gpt-4",
387
+ messages=[
388
+ {"role": "system", "content": boost_system_prompt},
389
+ {"role": "user", "content": f"๋‹ค์Œ ํ”„๋กฌํ”„ํŠธ๋ฅผ ๋ถ„์„ํ•˜๊ณ  ์ฆ๊ฐ•ํ•˜์‹œ์˜ค: {prompt}"}
390
+ ],
391
+ max_tokens=2000,
392
+ temperature=0.7
393
+ )
394
+
395
+ if completion.choices and len(completion.choices) > 0:
396
+ return completion.choices[0].message.content
397
+ raise Exception("OpenAI API ์‘๋‹ต ํ˜•์‹ ์˜ค๋ฅ˜")
398
+
399
+ except Exception as e:
400
+ print(f"ํ”„๋กฌํ”„ํŠธ ์ฆ๊ฐ• ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}")
401
+ return prompt
402
+
403
+ # ๋ฐฐํฌ ๊ด€๋ จ ํ•จ์ˆ˜ ์ถ”๊ฐ€
404
+ def generate_space_name():
405
+ """6์ž๋ฆฌ ๋žœ๋ค ์˜๋ฌธ ์ด๋ฆ„ ์ƒ์„ฑ"""
406
+ letters = string.ascii_lowercase
407
+ return ''.join(random.choice(letters) for i in range(6))
408
+
409
+ def deploy_to_huggingface(code: str, token: str):
410
+ try:
411
+ # 1) ๊ธฐ๋ณธ ๊ฒ€์ฆ
412
+ if not token:
413
+ return "HuggingFace ํ† ํฐ์ด ์ž…๋ ฅ๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค."
414
+
415
+ # 2) Space ์ƒ์„ฑ ์ค€๋น„
416
+ api = HfApi(token=token)
417
+ space_name = generate_space_name()
418
+ username = api.whoami()['name']
419
+ repo_id = f"{username}/{space_name}"
420
+
421
+ # 3) Space ์ƒ์„ฑ (private๋กœ ์„ค์ •)
422
+ try:
423
+ create_repo(
424
+ repo_id,
425
+ repo_type="space",
426
+ space_sdk="gradio",
427
+ token=token,
428
+ private=True
429
+ )
430
+ except Exception as e:
431
+ raise e
432
+
433
+ # 4) ์ฝ”๋“œ ์ •๋ฆฌ
434
+ code = code.replace("```python", "").replace("```", "").strip()
435
+
436
+ # 5) ์ „์ฒด ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์ฝ”๋“œ ์ƒ์„ฑ
437
+ if "demo.launch()" not in code:
438
+ full_app_code = code + "\n\nif __name__ == '__main__':\n demo.launch()"
439
+ else:
440
+ full_app_code = code
441
+
442
+ # 6) ํŒŒ์ผ ์ƒ์„ฑ ๋ฐ ์—…๋กœ๋“œ
443
+ with open("app.py", "w", encoding="utf-8") as f:
444
+ f.write(full_app_code)
445
+
446
+ api.upload_file(
447
+ path_or_fileobj="app.py",
448
+ path_in_repo="app.py",
449
+ repo_id=repo_id,
450
+ repo_type="space"
451
+ )
452
+
453
+ # 7) requirements.txt ์ƒ์„ฑ ๋ฐ ์—…๋กœ๋“œ
454
+ analysis_result = analyze_code(code)
455
+ requirements = ""
456
+ # HTML์—์„œ requirements.txt ์„น์…˜ ์ฐพ๊ธฐ
457
+ if "<h3>๐Ÿ“‹ Requirements.txt</h3>" in analysis_result:
458
+ start_idx = analysis_result.find("<pre>") + 5
459
+ end_idx = analysis_result.find("</pre>")
460
+ if start_idx > 4 and end_idx > 0:
461
+ requirements = analysis_result[start_idx:end_idx].strip()
462
+
463
+ # requirements.txt ์ž‘์„ฑ
464
+ with open("requirements.txt", "w") as f:
465
+ if requirements:
466
+ f.write(requirements)
467
+ else:
468
+ f.write("gradio==5.6.0\n")
469
+
470
+ api.upload_file(
471
+ path_or_fileobj="requirements.txt",
472
+ path_in_repo="requirements.txt",
473
+ repo_id=repo_id,
474
+ repo_type="space"
475
+ )
476
+
477
+ # 8) ๊ฒฐ๊ณผ ๋ฐ˜ํ™˜
478
+ space_url = f"https://huggingface.co/spaces/{username}/{space_name}"
479
+ return f'๋ฐฐํฌ ์™„๋ฃŒ! Private Space๋กœ ์ƒ์„ฑ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. <a href="{space_url}" target="_blank" style="color: #1890ff; text-decoration: underline; cursor: pointer;">์—ฌ๊ธฐ๋ฅผ ํด๋ฆญํ•˜์—ฌ Space ์—ด๊ธฐ</a>'
480
+ except Exception as e:
481
+ return f"๋ฐฐํฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
482
+
483
+ class Demo:
484
+ def __init__(self):
485
+ self.current_query = ""
486
+
487
+ async def generation_code(self, query: Optional[str], _setting: Dict[str, str], _history: Optional[History]):
488
+ if not query or query.strip() == '':
489
+ query = random.choice(DEMO_LIST)['description']
490
+
491
+ # ํ˜„์žฌ ์ฟผ๋ฆฌ ์ €์žฅ
492
+ self.current_query = query
493
+
494
+ if _history is None:
495
+ _history = []
496
+
497
+ messages = history_to_messages(_history, _setting['system'])
498
+ system_message = messages[0]['content']
499
+
500
+ claude_messages = [
501
+ {"role": msg["role"] if msg["role"] != "system" else "user", "content": msg["content"]}
502
+ for msg in messages[1:] + [{'role': Role.USER, 'content': query}]
503
+ if msg["content"].strip() != ''
504
+ ]
505
+
506
+ openai_messages = [{"role": "system", "content": system_message}]
507
+ for msg in messages[1:]:
508
+ openai_messages.append({
509
+ "role": msg["role"],
510
+ "content": msg["content"]
511
+ })
512
+ openai_messages.append({"role": "user", "content": query})
513
+
514
+ try:
515
+ collected_content = None
516
+ try:
517
+ async for content in try_claude_api(system_message, claude_messages):
518
+ # ์ฝ”๋“œ ๋ธ”๋ก ํ‘œ์‹œ ์ œ๊ฑฐ
519
+ code = remove_code_block(content)
520
+ yield code
521
+ collected_content = code
522
+
523
+ except Exception as claude_error:
524
+ print(f"Falling back to OpenAI API due to Claude error: {str(claude_error)}")
525
+
526
+ async for content in try_openai_api(openai_messages):
527
+ # ์ฝ”๋“œ ๋ธ”๋ก ํ‘œ์‹œ ์ œ๊ฑฐ
528
+ code = remove_code_block(content)
529
+ yield code
530
+ collected_content = code
531
+
532
+ if collected_content:
533
+ _history.append([query, collected_content])
534
+
535
+ except Exception as e:
536
+ print(f"Error details: {str(e)}")
537
+ raise ValueError(f'Error calling APIs: {str(e)}')
538
+
539
+ def clear_history(self):
540
+ self.current_query = ""
541
+ return []
542
+
543
+ def get_current_query(self):
544
+ return self.current_query
545
+
546
+ # ์˜ˆ์ œ ํ”„๋กฌํ”„ํŠธ
547
+ example_prompts = [
548
+ "ํ•œ๊ธ€ ์ž…๋ ฅ์‹œ ์Œ์„ฑ ์ƒ์„ฑ TTS๋ฅผ ๊ตฌ๊ธ€ gtts๊ธฐ๋ฐ˜์œผ๋กœ ์ƒ์„ฑํ•˜๋ผ.",
549
+ "BMI ๊ณ„์‚ฐ๊ธฐ๋ฅผ ๋งŒ๋“ค์–ด์ฃผ์„ธ์š”. ํ‚ค์™€ ๋ชธ๋ฌด๊ฒŒ๋ฅผ ์ž…๋ ฅํ•˜๋ฉด BMI ์ง€์ˆ˜์™€ ๋น„๋งŒ๋„๋ฅผ ๊ณ„์‚ฐํ•ด์ฃผ๋Š” ์•ฑ์ž…๋‹ˆ๋‹ค.",
550
+ "MBTI ์ง„๋‹จ ์„œ๋น„์Šค: 10๊ฐ€์ง€ ์งˆ๋ฌธ๊ณผ ๋‹ต๋ณ€ ์„ ํƒํ•˜๋ฉด 16๊ฐ€์ง€ ์œ ํ˜• ์ง„๋‹จ๊ณผ ์ƒ์„ธ ์„ค๋ช…์„ ํ•˜๋ผ",
551
+ "๋‹จ์œ„ ๋ณ€ํ™˜๊ธฐ๋ฅผ ๋งŒ๋“ค์–ด์ฃผ์„ธ์š”. ๊ธธ์ด(m, cm, km ๋“ฑ), ๋ฌด๊ฒŒ(kg, g ๋“ฑ), ์˜จ๋„(์„ญ์”จ, ํ™”์”จ) ๋“ฑ์„ ๋ณ€ํ™˜ํ•  ์ˆ˜ ์žˆ๋Š” ์•ฑ์ž…๋‹ˆ๋‹ค.",
552
+ "ํฌ๋ชจ๋„๋กœ ํƒ€์ด๋จธ๋ฅผ ๋งŒ๋“ค์–ด์ฃผ์„ธ์š”. 25๋ถ„ ์ง‘์ค‘, 5๋ถ„ ํœด์‹์„ ๋ฐ˜๋ณตํ•˜๋Š” ํƒ€์ด๋จธ๋กœ, ์‚ฌ์ดํด ํšŸ์ˆ˜๋„ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค."
553
+ ]
554
+
555
+ # CSS ์Šคํƒ€์ผ
556
+ css = """
557
+ .container {
558
+ max-width: 1200px;
559
+ margin: auto;
560
+ }
561
+ .header {
562
+ text-align: center;
563
+ margin: 20px 0;
564
+ }
565
+ .header h1 {
566
+ margin-bottom: 5px;
567
+ color: #2c3e50;
568
+ }
569
+ .header p {
570
+ margin-top: 0;
571
+ color: #7f8c8d;
572
+ }
573
+ .content {
574
+ display: flex;
575
+ flex-direction: row;
576
+ gap: 20px;
577
+ }
578
+ .left-panel, .right-panel {
579
+ flex: 1;
580
+ padding: 15px;
581
+ border-radius: 10px;
582
+ background-color: #f9f9f9;
583
+ box-shadow: 0 2px 10px rgba(0,0,0,0.1);
584
+ }
585
+ .status {
586
+ text-align: center;
587
+ padding: 10px;
588
+ margin: 10px 0;
589
+ border-radius: 5px;
590
+ }
591
+ .generating {
592
+ background-color: #f39c12;
593
+ color: white;
594
+ }
595
+ .deploy-section {
596
+ margin-top: 20px;
597
+ padding: 15px;
598
+ border-radius: 10px;
599
+ background-color: #f0f0f0;
600
+ }
601
+ .footer {
602
+ text-align: center;
603
+ margin-top: 30px;
604
+ padding: 10px;
605
+ color: #7f8c8d;
606
+ font-size: 0.8em;
607
+ }
608
+ """
609
+
610
+ # Demo ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ
611
+ demo_instance = Demo()
612
+
613
+ with gr.Blocks(css=css) as demo:
614
+ history = gr.State([])
615
+ setting = gr.State({
616
+ "system": SystemPrompt,
617
+ })
618
+ is_generating = gr.State(False)
619
+ current_query = gr.State("")
620
+
621
+ gr.HTML("""
622
+ <div class="header">
623
+ <h1>MOUSE-II</h1>
624
+ <p>'Python & Huggingface' ver 1.019 </p>
625
+ </div>
626
+ """)
627
+
628
+ with gr.Row(elem_classes="content"):
629
+ # ์ขŒ์ธก ํŒจ๋„
630
+ with gr.Column(elem_classes="left-panel"):
631
+ input_text = gr.Textbox(
632
+ label="์›ํ•˜๋Š” ์•ฑ ์„ค๋ช…์„ ์ž…๋ ฅํ•˜์„ธ์š”",
633
+ placeholder=random.choice(DEMO_LIST)['description'],
634
+ lines=12
635
+ )
636
+
637
+ gr.Examples(
638
+ examples=example_prompts,
639
+ inputs=input_text
640
+ )
641
+
642
+ with gr.Row():
643
+ generate_btn = gr.Button("์ƒ์„ฑํ•˜๊ธฐ", variant="primary")
644
+ boost_btn = gr.Button("Boost", variant="secondary")
645
+ clear_btn = gr.Button("ํด๋ฆฌ์–ด", variant="secondary")
646
+
647
+ status_html = gr.HTML("", elem_classes="status")
648
+
649
+ # ์šฐ์ธก ํŒจ๋„
650
+ with gr.Column(elem_classes="right-panel"):
651
+ with gr.Tabs():
652
+ with gr.TabItem("์ฝ”๋“œ"):
653
+ code_output = gr.Code(
654
+ language="python",
655
+ label="์ƒ์„ฑ๋œ ์ฝ”๋“œ",
656
+ lines=12
657
+ )
658
+
659
+ with gr.TabItem("๋ถ„์„"):
660
+ code_analysis = gr.HTML(label="์ฝ”๋“œ ๋ถ„์„")
661
+
662
+ # Group์œผ๋กœ ๋ณ€๊ฒฝ
663
+ with gr.Group(elem_classes="deploy-section"):
664
+ gr.HTML("<h3>๋ฐฐํฌ ์„ค์ •</h3>")
665
+ hf_token = gr.Textbox(
666
+ label="Hugging Face ํ† ํฐ",
667
+ type="password",
668
+ placeholder="hf_..."
669
+ )
670
+
671
+ deploy_btn = gr.Button("๋ฐฐํฌํ•˜๊ธฐ", variant="primary")
672
+ deploy_result = gr.HTML(label="๋ฐฐํฌ ๊ฒฐ๊ณผ")
673
+
674
+ gr.HTML("""
675
+ <div class="footer">
676
+ MOUSE-II - ๋ฌธ์˜: [email protected]
677
+ </div>
678
+ """)
679
+
680
+ # ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
681
+ def start_generating(query):
682
+ return "๐Ÿ”„ ์ฝ”๋“œ ์ƒ์„ฑ ์ค‘...", gr.update(interactive=False), gr.update(interactive=False), gr.update(interactive=False), True, query
683
+
684
+ def end_generating():
685
+ return "", gr.update(interactive=True), gr.update(interactive=True), gr.update(interactive=True), False
686
+
687
+ def update_code_analysis(code, query):
688
+ analysis = analyze_code(code, query)
689
+ return analysis
690
+
691
+ def handle_boost(prompt):
692
+ boosted = boost_prompt(prompt)
693
+ return boosted
694
+
695
+ generate_btn.click(
696
+ fn=start_generating,
697
+ inputs=[input_text],
698
+ outputs=[status_html, generate_btn, boost_btn, clear_btn, is_generating, current_query]
699
+ ).then(
700
+ fn=demo_instance.generation_code,
701
+ inputs=[input_text, setting, history],
702
+ outputs=code_output
703
+ ).then(
704
+ fn=update_code_analysis,
705
+ inputs=[code_output, current_query],
706
+ outputs=[code_analysis]
707
+ ).then(
708
+ fn=end_generating,
709
+ outputs=[status_html, generate_btn, boost_btn, clear_btn, is_generating]
710
+ )
711
+
712
+ boost_btn.click(
713
+ fn=handle_boost,
714
+ inputs=[input_text],
715
+ outputs=[input_text]
716
+ )
717
+
718
+ clear_btn.click(
719
+ fn=demo_instance.clear_history,
720
+ inputs=[],
721
+ outputs=[history]
722
+ )
723
+
724
+ deploy_btn.click(
725
+ fn=lambda code, token: deploy_to_huggingface(code, token) if code else "์ฝ”๋“œ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.",
726
+ inputs=[code_output, hf_token],
727
+ outputs=[deploy_result]
728
+ )
729
+
730
+ if __name__ == "__main__":
731
+ try:
732
+ demo.queue().launch(ssr_mode=False)
733
+ except Exception as e:
734
+ print(f"Initialization error: {e}")
735
+ raise