ruslanmv committed (verified)
Commit a26f5ee · 1 Parent(s): 2b3ca21

Update app.py

Files changed (1)
  1. app.py +29 -54
app.py CHANGED
@@ -1,94 +1,69 @@
 import gradio as gr
 
-# Placeholder for model loading (adjust as needed for your specific models)
 def load_model(model_name):
-    # Replace this function with actual model loading code if needed
     return lambda input_text: f"Response from {model_name}: {input_text}"
 
-# Load the models (placeholder functions here)
 deepseek_r1_distill = load_model("DeepSeek-R1-Distill-Qwen-32B")
 deepseek_r1 = load_model("DeepSeek-R1")
 deepseek_r1_zero = load_model("DeepSeek-R1-Zero")
 
-# Define the optional parameters section
 def create_optional_parameters():
-    with gr.Accordion("Optional Parameters (Click to Expand)", open=False):
+    with gr.Accordion("Optional Parameters", open=False):
         system_message = gr.Textbox(
             label="System Message",
             value="You are a friendly Chatbot created by ruslanmv.com",
-            lines=2,
-            interactive=True
+            lines=2
         )
-        max_new_tokens = gr.Slider(minimum=1, maximum=4000, value=200, label="Max New Tokens", interactive=True)
-        temperature = gr.Slider(minimum=0.10, maximum=4.00, value=0.70, label="Temperature", interactive=True)
-        top_p = gr.Slider(minimum=0.10, maximum=1.00, value=0.90, label="Top-p (nucleus sampling)", interactive=True)
+        max_new_tokens = gr.Slider(minimum=1, maximum=4000, value=200, label="Max New Tokens")
+        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, label="Temperature")
+        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.9, label="Top-p (nucleus sampling)")
     return system_message, max_new_tokens, temperature, top_p
 
-# Define the main interface
 def chat_interface(user_input, system_message, max_new_tokens, temperature, top_p):
-    # Placeholder response - integrate with actual model here
     response = f"""**System Message**: {system_message}
 **Your Input**: {user_input}
 **Parameters Used**:
 - Max New Tokens: {max_new_tokens}
 - Temperature: {temperature}
 - Top-p: {top_p}
-
 *Note: Actual model integration required for real responses*"""
     return response
 
-# Create the Gradio interface
 with gr.Blocks(css="""
-.gradio-container {
-    font-family: Arial, sans-serif;
-    background-color: #f9f9f9;
-    color: #333;
-    padding: 20px;
-}
-.gr-button.primary {
-    background-color: #4caf50;
-    color: white;
-    border: none;
-    padding: 10px 20px;
-    font-size: 16px;
-    border-radius: 5px;
-    cursor: pointer;
-}
-.gr-button.primary:hover {
-    background-color: #45a049;
-}
-.gr-textbox textarea {
-    font-size: 16px;
-}
+.chat-container { max-width: 700px; margin: auto; }
+.chat-input { margin-top: 20px; }
+.chat-output { margin-top: 10px; padding: 10px; border: 1px solid #ccc; border-radius: 10px; background-color: #f9f9f9; }
 """) as demo:
-    gr.Markdown("""
-    # DeepSeek Chatbot
-    Welcome to the **DeepSeek Chatbot**! This AI-powered chatbot is designed to provide insightful responses.
-    Created by [ruslanmv.com](https://ruslanmv.com/).
-    """)
-
-    with gr.Row():
-        with gr.Column(scale=3):
+    with gr.Row(variant="panel"):
+        gr.Markdown(
+            """# DeepSeek Chatbot
+            Created by [ruslanmv.com](https://ruslanmv.com/)
+            A friendly chatbot interface. Start a conversation below!
+            """,
+            elem_id="header"
+        )
+
+    with gr.Row(elem_classes="chat-container"):
+        with gr.Column():
             user_input = gr.Textbox(
                 label="Your Message",
                 placeholder="Type your message here...",
-                lines=4,
-                interactive=True
+                lines=3,
+                elem_classes="chat-input"
            )
-            submit_button = gr.Button("Send", variant="primary")
-        with gr.Column(scale=5):
-            output = gr.Markdown(label="Chatbot Response")
-
-    # Add the optional parameters section
-    system_message, max_new_tokens, temperature, top_p = create_optional_parameters()
-
-    # Link the submit button to the chat interface
+            submit_button = gr.Button("Submit", variant="primary")
+            system_message, max_new_tokens, temperature, top_p = create_optional_parameters()
+        with gr.Column():
+            output = gr.Markdown(
+                label="Chatbot Response",
+                elem_classes="chat-output"
+            )
+
     submit_button.click(
         chat_interface,
         inputs=[user_input, system_message, max_new_tokens, temperature, top_p],
         outputs=output
     )
 
-# Launch the demo
 if __name__ == "__main__":
     demo.launch()
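Note on integration: both the old and the new version of app.py keep load_model() as a placeholder that only echoes its input, and chat_interface() still returns a formatted string rather than a model response. Below is a minimal sketch of what real wiring could look like, assuming the huggingface_hub InferenceClient and Hub repo ids of the form deepseek-ai/<model_name>; the helper generate() and the prompt format are illustrative and not part of this commit.

# Hypothetical replacement for the placeholder load_model() -- not part of this commit.
# Assumes huggingface_hub is installed and an inference endpoint/token is available for the model.
from huggingface_hub import InferenceClient

def load_model(model_name):
    client = InferenceClient(model=f"deepseek-ai/{model_name}")  # assumed repo id layout

    def generate(input_text, system_message="", max_new_tokens=200, temperature=0.7, top_p=0.9):
        # Build a plain prompt and request a completion;
        # text_generation() returns the generated text as a string.
        prompt = f"{system_message}\n\nUser: {input_text}\nAssistant:"
        return client.text_generation(
            prompt,
            max_new_tokens=max_new_tokens,
            temperature=temperature,
            top_p=top_p,
        )

    return generate

With this in place, chat_interface() could call, for example, deepseek_r1(user_input, system_message, max_new_tokens, temperature, top_p) and return the result instead of the placeholder string.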