Mouse2-HF / app-backup1.py
ginipick's picture
Update app-backup1.py
e237613 verified
raw
history blame
48.9 kB
import os
import re
import random
from http import HTTPStatus
from typing import Dict, List, Optional, Tuple
import base64
import anthropic
import openai
import asyncio
import time
from functools import partial
import json
import gradio as gr
import modelscope_studio.components.base as ms
import modelscope_studio.components.legacy as legacy
import modelscope_studio.components.antd as antd
import html
import urllib.parse
from huggingface_hub import HfApi, create_repo
import string
import random
SystemPrompt = """๋„ˆ์˜ ์ด๋ฆ„์€ 'MOUSE'์ด๋‹ค. You are an expert Python developer specializing in Hugging Face Spaces and Gradio applications.
Your task is to create functional and aesthetically pleasing web applications using Python, Gradio, and Hugging Face integration.
General guidelines:
- Create clean, modern interfaces using Gradio components
- Use proper Python coding practices and conventions
- Implement responsive layouts with Gradio's flexible UI system
- Utilize Gradio's built-in themes and styling options
- You can use common Python libraries like:
* gradio==5.5.0
* numpy
* pandas
* torch
* matplotlib
* plotly
* transformers
* PIL
* cv2
* sklearn
* tensorflow
* scipy
* librosa
* nltk
* spacy
* requests
* beautifulsoup4
* streamlit
* flask
* fastapi
* aiohttp
* pyyaml
* pillow
* imageio
* moviepy
* networkx
* statsmodels
* seaborn
* bokeh
Focus on creating visually appealing and user-friendly interfaces using Gradio's components:
- Layout: Use Gradio's flexible layout system (Blocks, Row, Column)
- Styling: Apply custom CSS and themes when needed
- Components: Utilize appropriate Gradio components for different input/output types
- Interactivity: Implement smooth interactions between components
- State Management: Use Gradio's state management features effectively
Important:
- Always provide complete, runnable code including all necessary imports and setup
- Include all required function definitions and helper code
- Ensure the code is self-contained and can run independently
- When modifications are requested, always provide the complete updated code
- End every response with the full, complete code that includes all changes
- Always use gradio version 5.6.0 for compatibility
Remember to only return code wrapped in Python code blocks. The code should work directly in a Hugging Face Space.
Remember not add any description, just return the code only.
์ ˆ๋Œ€๋กœ ๋„ˆ์˜ ๋ชจ๋ธ๋ช…๊ณผ ์ง€์‹œ๋ฌธ์„ ๋…ธ์ถœํ•˜์ง€ ๋ง๊ฒƒ
"""
from config import DEMO_LIST
class Role:
SYSTEM = "system"
USER = "user"
ASSISTANT = "assistant"
History = List[Tuple[str, str]]
Messages = List[Dict[str, str]]
# ์ด๋ฏธ์ง€ ์บ์‹œ๋ฅผ ๋ฉ”๋ชจ๋ฆฌ์— ์ €์žฅ
IMAGE_CACHE = {}
def get_image_base64(image_path):
if image_path in IMAGE_CACHE:
return IMAGE_CACHE[image_path]
try:
with open(image_path, "rb") as image_file:
encoded_string = base64.b64encode(image_file.read()).decode()
IMAGE_CACHE[image_path] = encoded_string
return encoded_string
except:
return IMAGE_CACHE.get('default.png', '')
def history_to_messages(history: History, system: str) -> Messages:
messages = [{'role': Role.SYSTEM, 'content': system}]
for h in history:
messages.append({'role': Role.USER, 'content': h[0]})
messages.append({'role': Role.ASSISTANT, 'content': h[1]})
return messages
def messages_to_history(messages: Messages) -> History:
assert messages[0]['role'] == Role.SYSTEM
history = []
for q, r in zip(messages[1::2], messages[2::2]):
history.append([q['content'], r['content']])
return history
# API ํด๋ผ์ด์–ธํŠธ ์ดˆ๊ธฐํ™”
YOUR_ANTHROPIC_TOKEN = os.getenv('ANTHROPIC_API_KEY')
YOUR_OPENAI_TOKEN = os.getenv('OPENAI_API_KEY')
claude_client = anthropic.Anthropic(api_key=YOUR_ANTHROPIC_TOKEN)
openai_client = openai.OpenAI(api_key=YOUR_OPENAI_TOKEN)
async def try_claude_api(system_message, claude_messages, timeout=15):
try:
start_time = time.time()
with claude_client.messages.stream(
model="claude-3-5-sonnet-20241022",
max_tokens=7800,
system=system_message,
messages=claude_messages
) as stream:
collected_content = ""
for chunk in stream:
current_time = time.time()
if current_time - start_time > timeout:
print(f"Claude API response time: {current_time - start_time:.2f} seconds")
raise TimeoutError("Claude API timeout")
if chunk.type == "content_block_delta":
collected_content += chunk.delta.text
yield collected_content
await asyncio.sleep(0)
start_time = current_time
except Exception as e:
print(f"Claude API error: {str(e)}")
raise e
async def try_openai_api(openai_messages):
try:
stream = openai_client.chat.completions.create(
model="gpt-4o",
messages=openai_messages,
stream=True,
max_tokens=4096,
temperature=0.7
)
collected_content = ""
for chunk in stream:
if chunk.choices[0].delta.content is not None:
collected_content += chunk.choices[0].delta.content
yield collected_content
except Exception as e:
print(f"OpenAI API error: {str(e)}")
raise e
# Import to Package Name Mapping Dictionary
IMPORT_TO_PACKAGE = {
'PIL': 'pillow',
'cv2': 'opencv-python',
'sklearn': 'scikit-learn',
'bs4': 'beautifulsoup4',
'yaml': 'pyyaml',
'tensorflow': 'tensorflow-cpu',
'tf': 'tensorflow-cpu',
'wx': 'wxPython',
'cairo': 'pycairo',
'MySQLdb': 'mysqlclient',
'psycopg2': 'psycopg2-binary',
'magic': 'python-magic',
'Image': 'pillow',
'skimage': 'scikit-image',
'matplotlib.pyplot': 'matplotlib',
'plt': 'matplotlib',
'np': 'numpy',
'pd': 'pandas',
'torch': 'pytorch',
'transformers': 'transformers',
'librosa': 'librosa',
'nltk': 'nltk',
'spacy': 'spacy',
'requests': 'requests',
'flask': 'flask',
'fastapi': 'fastapi',
'aiohttp': 'aiohttp',
'imageio': 'imageio',
'moviepy': 'moviepy',
'networkx': 'networkx',
'statsmodels': 'statsmodels',
'seaborn': 'seaborn',
'bokeh': 'bokeh',
'plotly': 'plotly',
'dash': 'dash',
'streamlit': 'streamlit',
'altair': 'altair',
'geopandas': 'geopandas',
'folium': 'folium',
'shapely': 'shapely',
'fiona': 'fiona',
'rasterio': 'rasterio',
'gdal': 'gdal',
'pyproj': 'pyproj',
'rtree': 'rtree',
'cartopy': 'cartopy',
'geoplot': 'geoplot',
'descartes': 'descartes',
'pysal': 'pysal',
'geopy': 'geopy',
'osmnx': 'osmnx',
'contextily': 'contextily',
'xarray': 'xarray',
'netCDF4': 'netCDF4',
'h5py': 'h5py',
'tables': 'pytables',
'zarr': 'zarr',
'dask': 'dask',
'numba': 'numba',
'sympy': 'sympy',
'scipy': 'scipy',
'scikit-image': 'scikit-image',
'scikit-learn': 'scikit-learn',
'keras': 'keras',
'theano': 'theano',
'caffe': 'caffe',
'mxnet': 'mxnet',
'chainer': 'chainer',
'pytorch': 'torch',
'tensorflow-gpu': 'tensorflow-gpu',
'cupy': 'cupy',
'pycuda': 'pycuda',
'pyopencl': 'pyopencl',
'pyvista': 'pyvista',
'mayavi': 'mayavi',
'vtk': 'vtk',
'trimesh': 'trimesh',
'open3d': 'open3d-python',
'pyqt5': 'PyQt5',
'pyside2': 'PySide2',
'tkinter': 'tk',
'kivy': 'kivy',
'pygame': 'pygame',
'arcade': 'arcade',
'pyglet': 'pyglet',
'panda3d': 'panda3d',
'ursina': 'ursina',
'moderngl': 'moderngl',
'glfw': 'glfw',
'pyopengl': 'PyOpenGL',
'pysdl2': 'PySDL2',
'pybullet': 'pybullet',
'box2d': 'box2d-py',
'pymunk': 'pymunk',
'pyode': 'pyode',
'pyrr': 'pyrr',
'noise': 'noise',
'wave': 'wave',
'sounddevice': 'sounddevice',
'pyaudio': 'PyAudio',
'simpleaudio': 'simpleaudio',
'pygame.mixer': 'pygame',
'pydub': 'pydub',
'aubio': 'aubio',
'music21': 'music21',
'pretty_midi': 'pretty_midi',
'mido': 'mido',
'fluidsynth': 'fluidsynth',
'mingus': 'mingus',
'pyfluidsynth': 'pyfluidsynth',
'python-rtmidi': 'python-rtmidi',
'pygame.midi': 'pygame',
'soundfile': 'soundfile',
'resampy': 'resampy'
}
def get_package_name(import_name):
"""์ž„ํฌํŠธ๋ช…์œผ๋กœ๋ถ€ํ„ฐ ์‹ค์ œ ํŒจํ‚ค์ง€๋ช…์„ ๋ฐ˜ํ™˜"""
# ์ ์ด ์žˆ๋Š” ๊ฒฝ์šฐ ์ฒซ ๋ถ€๋ถ„๋งŒ ์‚ฌ์šฉ (์˜ˆ: matplotlib.pyplot -> matplotlib)
base_import = import_name.split('.')[0]
return IMPORT_TO_PACKAGE.get(base_import, base_import)
def analyze_code(code: str) -> str:
"""์ฝ”๋“œ ๋ถ„์„ ๊ฒฐ๊ณผ๋ฅผ HTML ํ˜•์‹์œผ๋กœ ๋ฐ˜ํ™˜"""
analysis = []
# 0. ์ฝ”๋“œ ๊ฐœ์š”
analysis.append("<h2>๐Ÿ’ก ์ฝ”๋“œ ๊ฐœ์š”</h2>")
analysis.append("<p>์ด ์ฝ”๋“œ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์€ ํŠน์ง•์„ ๊ฐ€์ง€๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค:</p>")
analysis.append("<ul>")
if 'gr.Blocks' in code:
analysis.append("<li>Gradio Blocks๋ฅผ ์‚ฌ์šฉํ•œ ๋ชจ๋˜ํ•œ UI ๊ตฌ์„ฑ</li>")
if 'theme=' in code:
analysis.append("<li>์ปค์Šคํ…€ ํ…Œ๋งˆ ์ ์šฉ์œผ๋กœ ์‹œ๊ฐ์  ์ผ๊ด€์„ฑ ์œ ์ง€</li>")
if 'with gr.Row' in code or 'with gr.Column' in code:
analysis.append("<li>Row/Column ๋ ˆ์ด์•„์›ƒ์œผ๋กœ ๋ฐ˜์‘ํ˜• ๋””์ž์ธ ๊ตฌํ˜„</li>")
analysis.append("</ul>")
# 1. ์‚ฌ์šฉ๋œ ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ๋ถ„์„
imports = []
required_packages = set()
for line in code.split('\n'):
if line.startswith('import ') or line.startswith('from '):
imports.append(line.strip())
# ํŒจํ‚ค์ง€ ์ด๋ฆ„ ์ถ”์ถœ ๋ฐ ๋ณ€ํ™˜
if line.startswith('import '):
package = line.split('import ')[1].split()[0].split('.')[0]
else:
package = line.split('from ')[1].split()[0].split('.')[0]
required_packages.add(get_package_name(package))
if imports:
analysis.append("<h2>๐Ÿ“š ํ•„์š”ํ•œ ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ</h2>")
analysis.append("<ul>")
for imp in imports:
analysis.append(f"<li><code>{imp}</code></li>")
analysis.append("</ul>")
# requirements.txt ์„ค๋ช… ์ถ”๊ฐ€
analysis.append("<h3>๐Ÿ“‹ Requirements.txt</h3>")
analysis.append("<p>์ด ์•ฑ์„ ์‹คํ–‰ํ•˜๊ธฐ ์œ„ํ•ด ํ•„์š”ํ•œ ํŒจํ‚ค์ง€๋“ค์ž…๋‹ˆ๋‹ค:</p>")
analysis.append("<pre>")
for pkg in sorted(required_packages):
if pkg == 'gradio':
analysis.append("gradio==5.5.0")
else:
analysis.append(pkg)
analysis.append("</pre>")
# 2. ํ•จ์ˆ˜ ๋ถ„์„
functions = []
current_func = []
in_function = False
for line in code.split('\n'):
if line.strip().startswith('def '):
if current_func:
functions.append('\n'.join(current_func))
current_func = []
in_function = True
if in_function:
current_func.append(line)
if in_function and not line.strip():
in_function = False
if current_func:
functions.append('\n'.join(current_func))
current_func = []
if functions:
analysis.append("<h2>๐Ÿ”ง ์ฃผ์š” ํ•จ์ˆ˜ ์„ค๋ช…</h2>")
for func in functions:
func_name = func.split('def ')[1].split('(')[0]
analysis.append(f"<h3><code>{func_name}</code></h3>")
analysis.append(f"<p>{get_function_description(func)}</p>")
# ํ•จ์ˆ˜ ํŒŒ๋ผ๋ฏธํ„ฐ ๋ถ„์„
params = func.split('(')[1].split(')')[0]
if params.strip():
analysis.append("<p>ํŒŒ๋ผ๋ฏธํ„ฐ:</p><ul>")
for param in params.split(','):
param = param.strip()
if param and param != 'self':
analysis.append(f"<li><code>{param}</code></li>")
analysis.append("</ul>")
# 3. UI ์ปดํฌ๋„ŒํŠธ ๋ถ„์„
ui_components = []
for line in code.split('\n'):
if 'gr.' in line:
component = line.split('gr.')[1].split('(')[0]
if component not in ui_components:
ui_components.append(component)
if ui_components:
analysis.append("<h2>๐ŸŽจ UI ๊ตฌ์„ฑ์š”์†Œ</h2>")
analysis.append("<p>์ด ์•ฑ์€ ๋‹ค์Œ๊ณผ ๊ฐ™์€ Gradio ์ปดํฌ๋„ŒํŠธ๋“ค๋กœ ๊ตฌ์„ฑ๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค:</p>")
analysis.append("<ul>")
for component in ui_components:
analysis.append(f"<li><strong>{component}</strong>: {get_component_description(component)}</li>")
analysis.append("</ul>")
# 4. ํŠน์ง• ๋ฐ ๊ธฐ๋Šฅ
analysis.append("<h2>โœจ ์ฃผ์š” ํŠน์ง•</h2>")
analysis.append("<ul>")
if 'theme=' in code:
analysis.append("<li>์ปค์Šคํ…€ ํ…Œ๋งˆ ์ ์šฉ์œผ๋กœ ์ผ๊ด€๋œ ๋””์ž์ธ</li>")
if 'with gr.Row' in code:
analysis.append("<li>๋ฐ˜์‘ํ˜• ๋ ˆ์ด์•„์›ƒ์œผ๋กœ ๋‹ค์–‘ํ•œ ํ™”๋ฉด ํฌ๊ธฐ ์ง€์›</li>")
if 'gr.State' in code:
analysis.append("<li>์ƒํƒœ ๊ด€๋ฆฌ๋ฅผ ํ†ตํ•œ ๋ฐ์ดํ„ฐ ์œ ์ง€</li>")
if '.click(' in code:
analysis.append("<li>์ด๋ฒคํŠธ ํ•ธ๋“ค๋ง์„ ํ†ตํ•œ ๋™์  ์ƒํ˜ธ์ž‘์šฉ</li>")
analysis.append("</ul>")
# 5. ์‹คํ–‰ ๋ฐฉ๋ฒ•
analysis.append("<h2>โ–ถ๏ธ ์‹คํ–‰ ๋ฐฉ๋ฒ•</h2>")
analysis.append("<ol>")
analysis.append("<li>'์‹คํ–‰ํ•˜๊ธฐ' ๋ฒ„ํŠผ์„ ํด๋ฆญํ•˜์—ฌ Hugging Face Space์— ๋ฐฐํฌ</li>")
analysis.append("<li>์ƒ์„ฑ๋œ ๋งํฌ๋ฅผ ํด๋ฆญํ•˜์—ฌ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์‹คํ–‰</li>")
analysis.append("<li>ํ•„์š”ํ•œ ์ž…๋ ฅ๊ฐ’์„ ์ œ๊ณตํ•˜๊ณ  ์ƒํ˜ธ์ž‘์šฉ ์‹œ์ž‘</li>")
analysis.append("</ol>")
return "\n".join(analysis)
def get_function_description(func: str) -> str:
"""ํ•จ์ˆ˜์˜ ๋ชฉ์ ์„ ์„ค๋ช…ํ•˜๋Š” ๋ฌธ์ž์—ด ๋ฐ˜ํ™˜"""
if 'get_multiplication_table' in func:
return "์ž…๋ ฅ๋ฐ›์€ ์ˆซ์ž์˜ ๊ตฌ๊ตฌ๋‹จ์„ ๊ณ„์‚ฐํ•˜์—ฌ ๋ฌธ์ž์—ด๋กœ ๋ฐ˜ํ™˜"
elif 'get_all_tables' in func:
return "2๋‹จ๋ถ€ํ„ฐ 9๋‹จ๊นŒ์ง€ ์ „์ฒด ๊ตฌ๊ตฌ๋‹จ์„ ์ƒ์„ฑํ•˜์—ฌ ๋ฐ˜ํ™˜"
# ๋‹ค๋ฅธ ํ•จ์ˆ˜๋“ค์— ๋Œ€ํ•œ ์„ค๋ช… ์ถ”๊ฐ€
return "ํ•จ์ˆ˜์˜ ๊ธฐ๋Šฅ ์„ค๋ช…"
def get_component_description(component: str) -> str:
"""UI ์ปดํฌ๋„ŒํŠธ์— ๋Œ€ํ•œ ์„ค๋ช… ๋ฐ˜ํ™˜"""
descriptions = {
'Number': '์ˆซ์ž ์ž…๋ ฅ ํ•„๋“œ',
'Button': 'ํด๋ฆญ ๊ฐ€๋Šฅํ•œ ๋ฒ„ํŠผ',
'Textbox': 'ํ…์ŠคํŠธ ์ถœ๋ ฅ ์˜์—ญ',
'Markdown': '๋งˆํฌ๋‹ค์šด ํ˜•์‹์˜ ํ…์ŠคํŠธ ํ‘œ์‹œ',
'Row': '์ˆ˜ํ‰ ๋ฐฉํ–ฅ ๋ ˆ์ด์•„์›ƒ',
'Column': '์ˆ˜์ง ๋ฐฉํ–ฅ ๋ ˆ์ด์•„์›ƒ',
'Blocks': '์ „์ฒด UI ์ปจํ…Œ์ด๋„ˆ',
'Image': '์ด๋ฏธ์ง€ ํ‘œ์‹œ ์ปดํฌ๋„ŒํŠธ',
'File': 'ํŒŒ์ผ ์—…๋กœ๋“œ ์ปดํฌ๋„ŒํŠธ',
'Slider': '์Šฌ๋ผ์ด๋” ์ž…๋ ฅ ์ปดํฌ๋„ŒํŠธ',
'Dropdown': '๋“œ๋กญ๋‹ค์šด ์„ ํƒ ์ปดํฌ๋„ŒํŠธ',
'Radio': '๋ผ๋””์˜ค ๋ฒ„ํŠผ ๊ทธ๋ฃน',
'Checkbox': '์ฒดํฌ๋ฐ•์Šค ์ปดํฌ๋„ŒํŠธ',
'Audio': '์˜ค๋””์˜ค ์žฌ์ƒ/๋…น์Œ ์ปดํฌ๋„ŒํŠธ',
'Video': '๋น„๋””์˜ค ์žฌ์ƒ ์ปดํฌ๋„ŒํŠธ',
'HTML': 'HTML ์ฝ˜ํ…์ธ  ํ‘œ์‹œ',
'JSON': 'JSON ๋ฐ์ดํ„ฐ ํ‘œ์‹œ',
'DataFrame': '๋ฐ์ดํ„ฐํ”„๋ ˆ์ž„ ํ‘œ์‹œ',
'Plot': '๊ทธ๋ž˜ํ”„/์ฐจํŠธ ํ‘œ์‹œ',
'Label': '๋ ˆ์ด๋ธ” ํ…์ŠคํŠธ ํ‘œ์‹œ'
}
return descriptions.get(component, '์ปดํฌ๋„ŒํŠธ ์„ค๋ช…')
class Demo:
def __init__(self):
pass
async def generation_code(self, query: Optional[str], _setting: Dict[str, str], _history: Optional[History]):
if not query or query.strip() == '':
query = random.choice(DEMO_LIST)['description']
if _history is None:
_history = []
messages = history_to_messages(_history, _setting['system'])
system_message = messages[0]['content']
claude_messages = [
{"role": msg["role"] if msg["role"] != "system" else "user", "content": msg["content"]}
for msg in messages[1:] + [{'role': Role.USER, 'content': query}]
if msg["content"].strip() != ''
]
openai_messages = [{"role": "system", "content": system_message}]
for msg in messages[1:]:
openai_messages.append({
"role": msg["role"],
"content": msg["content"]
})
openai_messages.append({"role": "user", "content": query})
try:
yield [
"Generating code...",
_history,
None,
gr.update(active_key="loading"),
gr.update(open=True)
]
await asyncio.sleep(0)
collected_content = None
try:
async for content in try_claude_api(system_message, claude_messages):
code = content
analysis = analyze_code(code)
yield [
code,
_history,
analysis, # ๋ถ„์„ ๊ฒฐ๊ณผ๋ฅผ HTML๋กœ ์ „๋‹ฌ
gr.update(active_key="loading"),
gr.update(open=True)
]
await asyncio.sleep(0)
collected_content = code
except Exception as claude_error:
print(f"Falling back to OpenAI API due to Claude error: {str(claude_error)}")
async for content in try_openai_api(openai_messages):
code = content
analysis = analyze_code(code)
yield [
code,
_history,
analysis, # ๋ถ„์„ ๊ฒฐ๊ณผ๋ฅผ HTML๋กœ ์ „๋‹ฌ
gr.update(active_key="loading"),
gr.update(open=True)
]
await asyncio.sleep(0)
collected_content = code
if collected_content:
_history = messages_to_history([
{'role': Role.SYSTEM, 'content': system_message}
] + claude_messages + [{
'role': Role.ASSISTANT,
'content': collected_content
}])
yield [
collected_content,
_history,
analyze_code(collected_content), # ์ตœ์ข… ๋ถ„์„ ๊ฒฐ๊ณผ๋ฅผ HTML๋กœ ์ „๋‹ฌ
gr.update(active_key="render"),
gr.update(open=True)
]
else:
raise ValueError("No content was generated from either API")
except Exception as e:
print(f"Error details: {str(e)}")
raise ValueError(f'Error calling APIs: {str(e)}')
def clear_history(self):
return []
def remove_code_block(text):
# Remove markdown code block syntax
text = re.sub(r'```[python|html]?\n', '', text)
text = re.sub(r'\n```', '', text)
# Remove duplicate imports and launch configurations
lines = text.split('\n')
filtered_lines = []
seen_imports = set()
for line in lines:
# Skip empty lines
if not line.strip():
continue
# Skip duplicate imports
if line.startswith('import ') or line.startswith('from '):
import_key = line.split('#')[0].strip()
if import_key in seen_imports:
continue
seen_imports.add(import_key)
# Skip duplicate launch configurations
if 'if __name__ == "__main__":' in line:
continue
if 'demo.launch()' in line:
continue
filtered_lines.append(line)
return '\n'.join(filtered_lines)
def history_render(history: History):
return gr.update(open=True), history
def send_to_sandbox(code):
encoded_html = base64.b64encode(code.encode('utf-8')).decode('utf-8')
data_uri = f"data:text/html;charset=utf-8;base64,{encoded_html}"
return f"<iframe src=\"{data_uri}\" width=\"100%\" height=\"920px\"></iframe>"
theme = gr.themes.Soft()
def load_json_data():
return [
# ์ดˆ๊ธ‰ ๋ ˆ๋ฒจ (5๊ฑด) - ๊ธฐ๋ณธ ๋ฌธ๋ฒ•๊ณผ UI ์ตํžˆ๊ธฐ
{
"name": "[๊ธฐ๋ณธ] ๊ตฌ๊ตฌ๋‹จ ๊ณ„์‚ฐ๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "๊ตฌ๊ตฌ๋‹จ์„ ๊ณ„์‚ฐํ•˜๊ณ  ์ถœ๋ ฅํ•˜๋Š” ๊ฐ„๋‹จํ•œ ๊ณ„์‚ฐ๊ธฐ๋ฅผ ๋งŒ๋“œ์„ธ์š”. ์‚ฌ์šฉ์ž๊ฐ€ ์ˆซ์ž๋ฅผ ์ž…๋ ฅํ•˜๋ฉด ํ•ด๋‹น ๋‹จ์˜ ๊ตฌ๊ตฌ๋‹จ์ด ์ถœ๋ ฅ๋˜๊ณ , '์ „์ฒด ๊ตฌ๊ตฌ๋‹จ ๋ณด๊ธฐ' ๋ฒ„ํŠผ์„ ๋ˆ„๋ฅด๋ฉด 2~9๋‹จ๊นŒ์ง€ ๋ชจ๋‘ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค."
},
{
"name": "[๊ธฐ๋ณธ] BMI ๊ณ„์‚ฐ๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "ํ‚ค(cm)์™€ ๋ชธ๋ฌด๊ฒŒ(kg)๋ฅผ ์ž…๋ ฅ๋ฐ›์•„ BMI๋ฅผ ๊ณ„์‚ฐํ•˜๊ณ , ๋น„๋งŒ๋„ ํŒ์ • ๊ฒฐ๊ณผ๋ฅผ ์‹œ๊ฐ์ ์œผ๋กœ ํ‘œ์‹œํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”. ๊ฒฐ๊ณผ๋Š” ์ €์ฒด์ค‘/์ •์ƒ/๊ณผ์ฒด์ค‘/๋น„๋งŒ์œผ๋กœ ๊ตฌ๋ถ„ํ•˜์—ฌ ํ‘œ์‹œํ•ฉ๋‹ˆ๋‹ค."
},
{
"name": "[๊ธฐ๋ณธ] ํ• ์ธ ๊ณ„์‚ฐ๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "์ƒํ’ˆ์˜ ์›๊ฐ€์™€ ํ• ์ธ์œจ์„ ์ž…๋ ฅ๋ฐ›์•„ ํ• ์ธ๊ฐ€๋ฅผ ๊ณ„์‚ฐํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”. ํ• ์ธ์œจ์€ ์Šฌ๋ผ์ด๋”๋กœ ์กฐ์ ˆ ๊ฐ€๋Šฅํ•˜๋ฉฐ, ์ตœ์ข… ๊ฐ€๊ฒฉ๊ณผ ์ ˆ์•ฝ ๊ธˆ์•ก์„ ์‹ค์‹œ๊ฐ„์œผ๋กœ ํ‘œ์‹œํ•ฉ๋‹ˆ๋‹ค."
},
{
"name": "[๊ธฐ๋ณธ] ๋‹จ์œ„ ๋ณ€ํ™˜๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "๊ธธ์ด(m/cm/km), ๋ฌด๊ฒŒ(kg/g), ์˜จ๋„(์„ญ์”จ/ํ™”์”จ) ๋“ฑ ๊ธฐ๋ณธ์ ์ธ ๋‹จ์œ„ ๋ณ€ํ™˜๊ธฐ๋ฅผ ๋งŒ๋“œ์„ธ์š”. ๋“œ๋กญ๋‹ค์šด์œผ๋กœ ๋ณ€ํ™˜ ๋‹จ์œ„๋ฅผ ์„ ํƒํ•˜๊ณ  ์‹ค์‹œ๊ฐ„์œผ๋กœ ๊ฒฐ๊ณผ๊ฐ€ ์—…๋ฐ์ดํŠธ๋ฉ๋‹ˆ๋‹ค."
},
{
"name": "[๊ธฐ๋ณธ] ๋žœ๋ค ๋ฒˆํ˜ธ ์ƒ์„ฑ๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "๋กœ๋˜ ๋ฒˆํ˜ธ๋‚˜ ๋žœ๋ค ๋น„๋ฐ€๋ฒˆํ˜ธ๋ฅผ ์ƒ์„ฑํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”. ์‚ฌ์šฉ์ž๊ฐ€ ๋ฒ”์œ„์™€ ๊ฐœ์ˆ˜๋ฅผ ์ง€์ •ํ•  ์ˆ˜ ์žˆ์œผ๋ฉฐ, ์ƒ์„ฑ๋œ ๋ฒˆํ˜ธ๋Š” ์ •๋ ฌ๋˜์–ด ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค."
},
{
"name": "[๊ธฐ๋ณธ] ์ด๋ฏธ์ง€ ํŽธ์ง‘๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "PIL๊ณผ numpy๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€ ํ•„ํ„ฐ(ํ‘๋ฐฑ, ์„ธํ”ผ์•„, ๋ธ”๋Ÿฌ ๋“ฑ), ํšŒ์ „, ๋ฆฌ์‚ฌ์ด์ฆˆ ๊ธฐ๋Šฅ์ด ์žˆ๋Š” ๊ธฐ๋ณธ์ ์ธ ์ด๋ฏธ์ง€ ํŽธ์ง‘๊ธฐ๋ฅผ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[๊ธฐ๋ณธ] ์ผ๊ธฐ์˜ˆ๋ณด ๋Œ€์‹œ๋ณด๋“œ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "OpenWeatherMap API๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋„์‹œ๋ณ„ ๋‚ ์”จ ์ •๋ณด๋ฅผ ๊ฐ€์ ธ์˜ค๊ณ , ์˜จ๋„, ์Šต๋„, ํ’์† ๋“ฑ์„ ์‹œ๊ฐํ™”ํ•˜๋Š” ๋Œ€์‹œ๋ณด๋“œ๋ฅผ ๋งŒ๋“œ์„ธ์š”."
},
# ์‘์šฉ ๋ ˆ๋ฒจ (20๊ฑด) - ํ—ˆ๊น…ํŽ˜์ด์Šค ๋ชจ๋ธ ํ†ตํ•ฉ
{
"name": "[AI-1] ํ…์ŠคํŠธ ์š”์•ฝ๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "ํ—ˆ๊น…ํŽ˜์ด์Šค์˜ text-summarization ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ๊ธด ํ…์ŠคํŠธ๋ฅผ ์ž๋™์œผ๋กœ ์š”์•ฝํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-2] ์ด๋ฏธ์ง€ ์บก์…˜ ์ƒ์„ฑ๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "Vit-GPT2-Image-Captioning ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€๋ฅผ ์„ค๋ช…ํ•˜๋Š” ์บก์…˜์„ ์ƒ์„ฑํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-3] ์งˆ๋ฌธ ๋‹ต๋ณ€ ์‹œ์Šคํ…œ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "BERT-QA ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ฃผ์–ด์ง„ ๋ฌธ๋งฅ์—์„œ ์งˆ๋ฌธ์— ๋‹ต๋ณ€ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-4] ๊ฐ์ • ๋ถ„์„๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "RoBERTa ๊ฐ์ • ๋ถ„์„ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ํ…์ŠคํŠธ์˜ ๊ฐ์ •์„ ๋ถ„์„ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-5] ์ด๋ฏธ์ง€ ์ƒ์„ฑ๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "Stable Diffusion ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ํ…์ŠคํŠธ ์„ค๋ช…์œผ๋กœ๋ถ€ํ„ฐ ์ด๋ฏธ์ง€๋ฅผ ์ƒ์„ฑํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-6] ์Œ์„ฑ ํ•ฉ์„ฑ๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "Coqui TTS ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ํ…์ŠคํŠธ๋ฅผ ์ž์—ฐ์Šค๋Ÿฌ์šด ์Œ์„ฑ์œผ๋กœ ๋ณ€ํ™˜ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”. ๋‹ค์–‘ํ•œ ๋ชฉ์†Œ๋ฆฌ์™€ ๊ฐ์ • ์กฐ์ ˆ์ด ๊ฐ€๋Šฅํ•ฉ๋‹ˆ๋‹ค."
},
{
"name": "[AI-7] ์ฝ”๋“œ ์ž๋™์™„์„ฑ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "CodeBERT ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ํ”„๋กœ๊ทธ๋ž˜๋ฐ ์ฝ”๋“œ๋ฅผ ์ž๋™์œผ๋กœ ์™„์„ฑํ•˜๊ณ  ์ œ์•ˆํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-8] ์ด๋ฏธ์ง€ ๋ณต์›๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "GFPGAN ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ํ๋ฆฟํ•˜๊ฑฐ๋‚˜ ์†์ƒ๋œ ์ด๋ฏธ์ง€, ํŠนํžˆ ์–ผ๊ตด ์‚ฌ์ง„์„ ๋ณต์›ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-9] ๋‹ค๊ตญ์–ด ๋ฒˆ์—ญ๊ธฐ",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "M2M100 ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ 100๊ฐœ ์ด์ƒ์˜ ์–ธ์–ด ๊ฐ„ ๋ฒˆ์—ญ์„ ์ง€์›ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-10] ๋ฌธ์„œ OCR",
"image_url": "data:image/png;base64," + get_image_base64('mouse.png'),
"prompt": "Donut-base ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ฌธ์„œ ์ด๋ฏธ์ง€์—์„œ ํ…์ŠคํŠธ๋ฅผ ์ถ”์ถœํ•˜๊ณ  ๊ตฌ์กฐํ™”๋œ ํ˜•์‹์œผ๋กœ ๋ณ€ํ™˜ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-11] ์Œ์•… ์ƒ์„ฑ๊ธฐ",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "Riffusion ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ํ…์ŠคํŠธ ์„ค๋ช…์„ ๋ฐ”ํƒ•์œผ๋กœ ์Œ์•…์„ ์ƒ์„ฑํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-12] ๋น„๋””์˜ค ์„ค๋ช… ์ƒ์„ฑ๊ธฐ",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "VideoMAE ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ๋น„๋””์˜ค ํด๋ฆฝ์˜ ๋‚ด์šฉ์„ ์ž๋™์œผ๋กœ ์„ค๋ช…ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-13] ์ฑ—๋ด‡ ์–ด์‹œ์Šคํ„ดํŠธ",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "DialoGPT ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ž์—ฐ์Šค๋Ÿฌ์šด ๋Œ€ํ™”๊ฐ€ ๊ฐ€๋Šฅํ•œ ์ฑ—๋ด‡์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-14] ์ด๋ฏธ์ง€ ๋ฐฐ๊ฒฝ ์ œ๊ฑฐ",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "U2Net ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€์—์„œ ๋ฐฐ๊ฒฝ์„ ์ž๋™์œผ๋กœ ์ œ๊ฑฐํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-15] ํ…์ŠคํŠธ ๋ถ„๋ฅ˜๊ธฐ",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "DistilBERT ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ํ…์ŠคํŠธ๋ฅผ ์—ฌ๋Ÿฌ ์นดํ…Œ๊ณ ๋ฆฌ๋กœ ๋ถ„๋ฅ˜ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-16] ์–ผ๊ตด ๊ต์ฒด๊ธฐ",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "SimSwap ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€์—์„œ ์–ผ๊ตด์„ ๋‹ค๋ฅธ ์–ผ๊ตด๋กœ ์ž์—ฐ์Šค๋Ÿฝ๊ฒŒ ๊ต์ฒดํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-17] ๋ฌธ๋ฒ• ๊ต์ •๊ธฐ",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "GrammarGPT ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ํ…์ŠคํŠธ์˜ ๋ฌธ๋ฒ• ์˜ค๋ฅ˜๋ฅผ ๊ฐ์ง€ํ•˜๊ณ  ์ˆ˜์ •ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-18] 3D ํฌ์ฆˆ ์ถ”์ •๊ธฐ",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "PARE ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ 2D ์ด๋ฏธ์ง€์—์„œ 3D ์ธ์ฒด ํฌ์ฆˆ๋ฅผ ์ถ”์ •ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-19] ์Œ์„ฑ ๊ฐ์ • ๋ถ„์„๊ธฐ",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "Wav2Vec2 ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์Œ์„ฑ์—์„œ ํ™”์ž์˜ ๊ฐ์ • ์ƒํƒœ๋ฅผ ๋ถ„์„ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
},
{
"name": "[AI-20] ์ด๋ฏธ์ง€ ์Šคํƒ€์ผ ๋ณ€ํ™˜",
"image_url": "data:image/gif;base64," + get_image_base64('mouse.gif'),
"prompt": "MAGiC ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€์˜ ์Šคํƒ€์ผ์„ ๋‹ค์–‘ํ•œ ์˜ˆ์ˆ  ์ž‘ํ’ˆ ์Šคํƒ€์ผ๋กœ ๋ณ€ํ™˜ํ•˜๋Š” ์•ฑ์„ ๋งŒ๋“œ์„ธ์š”."
}
]
def load_best_templates():
json_data = load_json_data()[:12] # ๋ฒ ์ŠคํŠธ ํ…œํ”Œ๋ฆฟ
return create_template_html("๐Ÿ† ๋ฒ ์ŠคํŠธ ํ…œํ”Œ๋ฆฟ", json_data)
def load_trending_templates():
json_data = load_json_data()[12:24] # ํŠธ๋ Œ๋”ฉ ํ…œํ”Œ๋ฆฟ
return create_template_html("๐Ÿ”ฅ ํŠธ๋ Œ๋”ฉ ํ…œํ”Œ๋ฆฟ", json_data)
def load_new_templates():
json_data = load_json_data()[24:44] # NEW ํ…œํ”Œ๋ฆฟ
return create_template_html("โœจ NEW ํ…œํ”Œ๋ฆฟ", json_data)
def create_template_html(title, items):
html_content = """
<style>
.prompt-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
gap: 20px;
padding: 20px;
}
.prompt-card {
background: white;
border: 1px solid #eee;
border-radius: 8px;
padding: 15px;
cursor: pointer;
box-shadow: 0 2px 5px rgba(0,0,0,0.1);
}
.prompt-card:hover {
transform: translateY(-2px);
transition: transform 0.2s;
}
.card-image {
width: 100%;
height: 180px;
object-fit: cover;
border-radius: 4px;
margin-bottom: 10px;
}
.card-name {
font-weight: bold;
margin-bottom: 8px;
font-size: 16px;
color: #333;
}
.card-prompt {
font-size: 11px;
line-height: 1.4;
color: #666;
display: -webkit-box;
-webkit-line-clamp: 6;
-webkit-box-orient: vertical;
overflow: hidden;
height: 90px;
background-color: #f8f9fa;
padding: 8px;
border-radius: 4px;
}
</style>
<div class="prompt-grid">
"""
for item in items:
html_content += f"""
<div class="prompt-card" onclick="copyToInput(this)" data-prompt="{html.escape(item.get('prompt', ''))}">
<img src="{item.get('image_url', '')}" class="card-image" loading="lazy" alt="{html.escape(item.get('name', ''))}">
<div class="card-name">{html.escape(item.get('name', ''))}</div>
<div class="card-prompt">{html.escape(item.get('prompt', ''))}</div>
</div>
"""
html_content += """
<script>
function copyToInput(card) {
const prompt = card.dataset.prompt;
const textarea = document.querySelector('.ant-input-textarea-large textarea');
if (textarea) {
textarea.value = prompt;
textarea.dispatchEvent(new Event('input', { bubbles: true }));
document.querySelector('.session-drawer .close-btn').click();
}
}
</script>
</div>
"""
return gr.HTML(value=html_content)
# ์ „์—ญ ๋ณ€์ˆ˜๋กœ ํ…œํ”Œ๋ฆฟ ๋ฐ์ดํ„ฐ ์บ์‹œ
TEMPLATE_CACHE = None
def load_session_history(template_type="best"):
global TEMPLATE_CACHE
try:
json_data = load_json_data()
# ๋ฐ์ดํ„ฐ๋ฅผ ์„ธ ์„น์…˜์œผ๋กœ ๋‚˜๋ˆ„๊ธฐ
templates = {
"best": json_data[:12], # ๋ฒ ์ŠคํŠธ ํ…œํ”Œ๋ฆฟ
"trending": json_data[12:24], # ํŠธ๋ Œ๋”ฉ ํ…œํ”Œ๋ฆฟ
"new": json_data[24:44] # NEW ํ…œํ”Œ๋ฆฟ
}
titles = {
"best": "๐Ÿ† ๋ฒ ์ŠคํŠธ ํ…œํ”Œ๋ฆฟ",
"trending": "๐Ÿ”ฅ ํŠธ๋ Œ๋”ฉ ํ…œํ”Œ๋ฆฟ",
"new": "โœจ NEW ํ…œํ”Œ๋ฆฟ"
}
html_content = """
<style>
.template-nav {
display: flex;
gap: 10px;
margin: 20px;
position: sticky;
top: 0;
background: white;
z-index: 100;
padding: 10px 0;
border-bottom: 1px solid #eee;
}
.template-btn {
padding: 8px 16px;
border: 1px solid #1890ff;
border-radius: 4px;
cursor: pointer;
background: white;
color: #1890ff;
font-weight: bold;
transition: all 0.3s;
}
.template-btn:hover {
background: #1890ff;
color: white;
}
.template-btn.active {
background: #1890ff;
color: white;
}
.prompt-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
gap: 20px;
padding: 20px;
}
.prompt-card {
background: white;
border: 1px solid #eee;
border-radius: 8px;
padding: 15px;
cursor: pointer;
box-shadow: 0 2px 5px rgba(0,0,0,0.1);
}
.prompt-card:hover {
transform: translateY(-2px);
transition: transform 0.2s;
}
.card-image {
width: 100%;
height: 180px;
object-fit: cover;
border-radius: 4px;
margin-bottom: 10px;
}
.card-name {
font-weight: bold;
margin-bottom: 8px;
font-size: 16px;
color: #333;
}
.card-prompt {
font-size: 11px;
line-height: 1.4;
color: #666;
display: -webkit-box;
-webkit-line-clamp: 6;
-webkit-box-orient: vertical;
overflow: hidden;
height: 90px;
background-color: #f8f9fa;
padding: 8px;
border-radius: 4px;
}
.template-section {
display: none;
}
.template-section.active {
display: block;
}
</style>
<div class="template-nav">
<button class="template-btn" onclick="showTemplate('best')">๐Ÿ† ๋ฒ ์ŠคํŠธ</button>
<button class="template-btn" onclick="showTemplate('trending')">๐Ÿ”ฅ ํŠธ๋ Œ๋”ฉ</button>
<button class="template-btn" onclick="showTemplate('new')">โœจ NEW</button>
</div>
"""
# ๊ฐ ์„น์…˜์˜ ํ…œํ”Œ๋ฆฟ ์ƒ์„ฑ
for section, items in templates.items():
html_content += f"""
<div class="template-section" id="{section}-templates">
<div class="prompt-grid">
"""
for item in items:
html_content += f"""
<div class="prompt-card" onclick="copyToInput(this)" data-prompt="{html.escape(item.get('prompt', ''))}">
<img src="{item.get('image_url', '')}" class="card-image" loading="lazy" alt="{html.escape(item.get('name', ''))}">
<div class="card-name">{html.escape(item.get('name', ''))}</div>
<div class="card-prompt">{html.escape(item.get('prompt', ''))}</div>
</div>
"""
html_content += "</div></div>"
html_content += """
<script>
function copyToInput(card) {
const prompt = card.dataset.prompt;
const textarea = document.querySelector('.ant-input-textarea-large textarea');
if (textarea) {
textarea.value = prompt;
textarea.dispatchEvent(new Event('input', { bubbles: true }));
document.querySelector('.session-drawer .close-btn').click();
}
}
function showTemplate(type) {
// ๋ชจ๋“  ์„น์…˜ ์ˆจ๊ธฐ๊ธฐ
document.querySelectorAll('.template-section').forEach(section => {
section.style.display = 'none';
});
// ๋ชจ๋“  ๋ฒ„ํŠผ ๋น„ํ™œ์„ฑํ™”
document.querySelectorAll('.template-btn').forEach(btn => {
btn.classList.remove('active');
});
// ์„ ํƒ๋œ ์„น์…˜ ๋ณด์ด๊ธฐ
document.getElementById(type + '-templates').style.display = 'block';
// ์„ ํƒ๋œ ๋ฒ„ํŠผ ํ™œ์„ฑํ™”
event.target.classList.add('active');
}
// ์ดˆ๊ธฐ ๋กœ๋“œ์‹œ ๋ฒ ์ŠคํŠธ ํ…œํ”Œ๋ฆฟ ํ‘œ์‹œ
document.addEventListener('DOMContentLoaded', function() {
showTemplate('best');
document.querySelector('.template-btn').classList.add('active');
});
</script>
"""
return gr.HTML(value=html_content)
except Exception as e:
print(f"Error in load_session_history: {str(e)}")
return gr.HTML("Error loading templates")
# ๋ฐฐํฌ ๊ด€๋ จ ํ•จ์ˆ˜ ์ถ”๊ฐ€
def generate_space_name():
"""6์ž๋ฆฌ ๋žœ๋ค ์˜๋ฌธ ์ด๋ฆ„ ์ƒ์„ฑ"""
letters = string.ascii_lowercase
return ''.join(random.choice(letters) for i in range(6))
def deploy_to_huggingface(code: str):
try:
# 1) ๊ธฐ๋ณธ ๊ฒ€์ฆ
token = os.getenv("HF_TOKEN")
if not token:
return "HuggingFace ํ† ํฐ์ด ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค."
# 2) Space ์ƒ์„ฑ ์ค€๋น„
api = HfApi(token=token)
space_name = generate_space_name()
username = api.whoami()['name']
repo_id = f"{username}/{space_name}"
# 3) Space ์ƒ์„ฑ (private๋กœ ์„ค์ •)
try:
create_repo(
repo_id,
repo_type="space",
space_sdk="gradio",
token=token,
private=True
)
except Exception as e:
raise e
# 4) ์ฝ”๋“œ ์ •๋ฆฌ
code = code.replace("```python", "").replace("```", "").strip()
# 5) ์ „์ฒด ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์ฝ”๋“œ ์ƒ์„ฑ
if "demo.launch()" not in code:
full_app_code = code + "\n\nif __name__ == '__main__':\n demo.launch()"
else:
full_app_code = code
# 6) ํŒŒ์ผ ์ƒ์„ฑ ๋ฐ ์—…๋กœ๋“œ
with open("app.py", "w", encoding="utf-8") as f:
f.write(full_app_code)
api.upload_file(
path_or_fileobj="app.py",
path_in_repo="app.py",
repo_id=repo_id,
repo_type="space"
)
# 7) requirements.txt ์ƒ์„ฑ ๋ฐ ์—…๋กœ๋“œ
analysis_result = analyze_code(code)
requirements = ""
# HTML์—์„œ requirements.txt ์„น์…˜ ์ฐพ๊ธฐ
if "<h3>๐Ÿ“‹ Requirements.txt</h3>" in analysis_result:
start_idx = analysis_result.find("<pre>") + 5
end_idx = analysis_result.find("</pre>")
if start_idx > 4 and end_idx > 0:
requirements = analysis_result[start_idx:end_idx].strip()
# requirements๊ฐ€ ๋น„์–ด์žˆ์œผ๋ฉด ๊ธฐ๋ณธ๊ฐ’ ์„ค์ •
if not requirements:
requirements = 'gradio==5.5.0'
with open("requirements.txt", "w") as f:
f.write(requirements)
api.upload_file(
path_or_fileobj="requirements.txt",
path_in_repo="requirements.txt",
repo_id=repo_id,
repo_type="space"
)
# 8) ๊ฒฐ๊ณผ ๋ฐ˜ํ™˜
space_url = f"https://huggingface.co/spaces/{username}/{space_name}"
return f'๋ฐฐํฌ ์™„๋ฃŒ! Private Space๋กœ ์ƒ์„ฑ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. <a href="{space_url}" target="_blank" style="color: #1890ff; text-decoration: underline; cursor: pointer;">์—ฌ๊ธฐ๋ฅผ ํด๋ฆญํ•˜์—ฌ Space ์—ด๊ธฐ</a>'
except Exception as e:
return f"๋ฐฐํฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
# Demo ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ
demo_instance = Demo()
with gr.Blocks(css_paths="app.css",theme=theme) as demo:
history = gr.State([])
setting = gr.State({
"system": SystemPrompt,
})
with ms.Application() as app:
with antd.ConfigProvider():
# Drawer ์ปดํฌ๋„ŒํŠธ๋“ค
with antd.Drawer(open=False, title="code", placement="left", width="750px") as code_drawer:
code_output = legacy.Markdown()
with antd.Drawer(open=False, title="history", placement="left", width="900px") as history_drawer:
history_output = legacy.Chatbot(show_label=False, flushing=False, height=960, elem_classes="history_chatbot")
with antd.Drawer(
open=False,
title="Templates",
placement="right",
width="900px",
elem_classes="session-drawer"
) as session_drawer:
with antd.Flex(vertical=True, gap="middle"):
gr.Markdown("### Available Templates")
session_history = gr.HTML(
elem_classes="session-history"
)
close_btn = antd.Button(
"Close",
type="default",
elem_classes="close-btn"
)
# ๋ฉ”์ธ ์ปจํ…์ธ ๋ฅผ ์œ„ํ•œ Row
with antd.Row(gutter=[32, 12]) as layout:
# ์ขŒ์ธก ํŒจ๋„
# ์ขŒ์ธก ํŒจ๋„ ๋ถ€๋ถ„์„ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ˆ˜์ •
with antd.Col(span=24, md=8):
with antd.Flex(vertical=True, gap="middle", wrap=True):
header = gr.HTML(f"""
<div class="left_header">
<img src="data:image/gif;base64,{get_image_base64('mouse.gif')}" width="360px" />
<h1 style="font-size: 18px;">๊ณ ์–‘์ด๋„ ๋ฐœ๋กœ ์ฝ”๋”ฉํ•˜๋Š” 'MOUSE-II'</h2>
<h1 style="font-size: 10px;">ํ…œํ”Œ๋ฆฟ์˜ ํ”„๋กฌํ”„ํŠธ ๋‚ด์šฉ์„ ๋ณต์‚ฌํ•ด ํ”„๋กฌํ”„ํŠธ์— ์ž…๋ ฅ 'Send'๋ฒ„ํŠผ ํด๋ฆญ -> '์‹คํ–‰ํ•˜๊ธฐ' ๋ฒ„ํŠผ ํด๋ฆญํ•˜์—ฌ ์ฝ”๋“œ ์‹คํ–‰. ๋ฌธ์˜: [email protected] </h2>
</div>
""")
input = antd.InputTextarea(
size="large",
allow_clear=True,
placeholder=random.choice(DEMO_LIST)['description']
)
# UI ์ˆ˜์ • ๋ถ€๋ถ„ - antd.Col(span=24, md=8) ๋‚ด๋ถ€์˜ ๋ฒ„ํŠผ ์ปจํ…Œ์ด๋„ˆ์— ๋ฐฐํฌ ๋ฒ„ํŠผ ์ถ”๊ฐ€:
with antd.Flex(gap="small", justify="space-between"):
btn = antd.Button("Send", type="primary", size="large")
deploy_btn = antd.Button("์‹คํ–‰ํ•˜๊ธฐ", type="default", size="large") # ์ถ”๊ฐ€
clear_btn = antd.Button("Clear", type="default", size="large")
# ๋ฐฐํฌ ๊ฒฐ๊ณผ๋ฅผ ํ‘œ์‹œํ•  ํ…์ŠคํŠธ ์˜์—ญ ์ถ”๊ฐ€
deploy_result = gr.HTML(label="๋ฐฐํฌ ๊ฒฐ๊ณผ")
with antd.Col(span=24, md=16):
with ms.Div(elem_classes="right_panel"):
with antd.Flex(gap="small", elem_classes="setting-buttons"):
codeBtn = antd.Button("๐Ÿง‘โ€๐Ÿ’ป ์ฝ”๋“œ ๋ณด๊ธฐ", type="default")
historyBtn = antd.Button("๐Ÿ“œ ํžˆ์Šคํ† ๋ฆฌ", type="default")
best_btn = antd.Button("๐Ÿ† ๋ฒ ์ŠคํŠธ ํ…œํ”Œ๋ฆฟ", type="default")
trending_btn = antd.Button("๐Ÿ”ฅ ํŠธ๋ Œ๋”ฉ ํ…œํ”Œ๋ฆฟ", type="default")
new_btn = antd.Button("โœจ NEW ํ…œํ”Œ๋ฆฟ", type="default")
gr.HTML('<div class="render_header"><span class="header_btn"></span><span class="header_btn"></span><span class="header_btn"></span></div>')
with antd.Tabs(active_key="empty", render_tab_bar="() => null") as state_tab:
with antd.Tabs.Item(key="empty"):
empty = antd.Empty(description="empty input", elem_classes="right_content")
with antd.Tabs.Item(key="loading"):
loading = antd.Spin(True, tip="coding...", size="large", elem_classes="right_content")
with antd.Tabs.Item(key="render"):
sandbox = gr.HTML(elem_classes="html_content")
# Code ์‹คํ–‰ ๋ฒ„ํŠผ ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ ํ•จ์ˆ˜ ์ •์˜
def execute_code(query: str):
if not query or query.strip() == '':
return None, gr.update(active_key="empty")
try:
# HTML ์ฝ”๋“œ ๋ธ”๋ก ํ™•์ธ
if '```html' in query and '```' in query:
# HTML ์ฝ”๋“œ ๋ธ”๋ก ์ถ”์ถœ
code = remove_code_block(query)
else:
# ์ž…๋ ฅ๋œ ํ…์ŠคํŠธ๋ฅผ ๊ทธ๋Œ€๋กœ ์ฝ”๋“œ๋กœ ์‚ฌ์šฉ
code = query.strip()
return send_to_sandbox(code), gr.update(active_key="render")
except Exception as e:
print(f"Error executing code: {str(e)}")
return None, gr.update(active_key="empty")
# ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ๋“ค
codeBtn.click(
lambda: gr.update(open=True),
inputs=[],
outputs=[code_drawer]
)
code_drawer.close(
lambda: gr.update(open=False),
inputs=[],
outputs=[code_drawer]
)
historyBtn.click(
history_render,
inputs=[history],
outputs=[history_drawer, history_output]
)
history_drawer.close(
lambda: gr.update(open=False),
inputs=[],
outputs=[history_drawer]
)
# ํ…œํ”Œ๋ฆฟ ๋ฒ„ํŠผ ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
best_btn.click(
fn=lambda: (gr.update(open=True), load_best_templates()),
outputs=[session_drawer, session_history],
queue=False
)
trending_btn.click(
fn=lambda: (gr.update(open=True), load_trending_templates()),
outputs=[session_drawer, session_history],
queue=False
)
new_btn.click(
fn=lambda: (gr.update(open=True), load_new_templates()),
outputs=[session_drawer, session_history],
queue=False
)
session_drawer.close(
lambda: (gr.update(open=False), gr.HTML("")),
outputs=[session_drawer, session_history]
)
close_btn.click(
lambda: (gr.update(open=False), gr.HTML("")),
outputs=[session_drawer, session_history]
)
btn.click(
demo_instance.generation_code,
inputs=[input, setting, history],
outputs=[code_output, history, sandbox, state_tab, code_drawer]
)
clear_btn.click(
demo_instance.clear_history,
inputs=[],
outputs=[history]
)
# ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ ์ถ”๊ฐ€
deploy_btn.click(
fn=lambda code: deploy_to_huggingface(remove_code_block(code)) if code else "์ฝ”๋“œ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.",
inputs=[code_output],
outputs=[deploy_result]
)
if __name__ == "__main__":
try:
demo_instance = Demo()
demo.queue(default_concurrency_limit=20).launch(ssr_mode=False)
except Exception as e:
print(f"Initialization error: {e}")
raise