import gradio as gr


# Placeholder "models": each loader returns a function that simply echoes the
# input text back, so the UI can be exercised without any real inference.
def load_model(model_name):
    return lambda input_text: f"Response from {model_name}: {input_text}"


deepseek_r1_distill = load_model("DeepSeek-R1-Distill-Qwen-32B")
deepseek_r1 = load_model("DeepSeek-R1")
deepseek_r1_zero = load_model("DeepSeek-R1-Zero")
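# A minimal sketch of real model integration, kept as a comment so the demo
# stays dependency-free. It assumes `huggingface_hub` is installed, the
# DeepSeek models are reachable through the Inference API under the
# `deepseek-ai/` namespace, and a valid API token is configured for the Space;
# adjust model names and defaults to match your deployment.
#
# from huggingface_hub import InferenceClient
#
# def load_model(model_name):
#     client = InferenceClient(model=f"deepseek-ai/{model_name}")
#
#     def generate(input_text, max_new_tokens=200, temperature=0.7, top_p=0.9):
#         # Plain text-generation call; returns the generated string.
#         return client.text_generation(
#             input_text,
#             max_new_tokens=max_new_tokens,
#             temperature=temperature,
#             top_p=top_p,
#         )
#
#     return generate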

def create_optional_parameters():
    with gr.Accordion("Optional Parameters", open=False):
        system_message = gr.Textbox(
            label="System Message",
            value="You are a friendly Chatbot created by ruslanmv.com",
            lines=2,
        )
        max_new_tokens = gr.Slider(minimum=1, maximum=4000, value=200, label="Max New Tokens")
        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, label="Temperature")
        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.9, label="Top-p (nucleus sampling)")
    return system_message, max_new_tokens, temperature, top_p

def chat_interface(user_input, system_message, max_new_tokens, temperature, top_p):
    # Format the inputs as Markdown; no model is actually called yet.
    response = f"""**System Message**: {system_message}

**Your Input**: {user_input}

**Parameters Used**:
- Max New Tokens: {max_new_tokens}
- Temperature: {temperature}
- Top-p: {top_p}

*Note: Actual model integration required for real responses*"""
    return response

with gr.Blocks(css="""
    .chat-container { max-width: 700px; margin: auto; }
    .chat-input { margin-top: 20px; }
    .chat-output { margin-top: 10px; padding: 10px; border: 1px solid #ccc; border-radius: 10px; background-color: #f9f9f9; }
""") as demo:
    # Header panel
    with gr.Row(variant="panel"):
        gr.Markdown(
            """# DeepSeek Chatbot
            Created by [ruslanmv.com](https://ruslanmv.com/)

            A friendly chatbot interface. Start a conversation below!
            """,
            elem_id="header",
        )

    # Two-column layout: message box and optional parameters on the left,
    # rendered response on the right.
    with gr.Row(elem_classes="chat-container"):
        with gr.Column():
            user_input = gr.Textbox(
                label="Your Message",
                placeholder="Type your message here...",
                lines=3,
                elem_classes="chat-input",
            )
            submit_button = gr.Button("Submit", variant="primary")
            system_message, max_new_tokens, temperature, top_p = create_optional_parameters()
        with gr.Column():
            output = gr.Markdown(
                label="Chatbot Response",
                elem_classes="chat-output",
            )

    # Wire the button to the handler.
    submit_button.click(
        chat_interface,
        inputs=[user_input, system_message, max_new_tokens, temperature, top_p],
        outputs=output,
    )

if __name__ == "__main__":
    demo.launch()