ruslanmv committed
Commit 94576e1 · verified · 1 Parent(s): f99b534

Update app.py

Files changed (1): app.py +10 -21
app.py CHANGED
@@ -1,6 +1,9 @@
 # app.py
 import streamlit as st
-from models import demo_distill, demo_r1, demo_zero
+from models import load_model
+
+# Load the model once
+demo = load_model()
 
 # Page configuration
 st.set_page_config(
@@ -13,23 +16,10 @@ st.set_page_config(
 if "messages" not in st.session_state:
     st.session_state.messages = []
 
-# Sidebar for model selection and parameters
+# Sidebar for model parameters
 with st.sidebar:
     st.header("Model Configuration")
 
-    # Model selection
-    model_mapping = {
-        "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B": demo_distill,
-        "deepseek-ai/DeepSeek-R1": demo_r1,
-        "deepseek-ai/DeepSeek-R1-Zero": demo_zero
-    }
-    selected_model_name = st.selectbox(
-        "Choose Model",
-        list(model_mapping.keys()),
-        index=0
-    )
-    selected_demo = model_mapping[selected_model_name]
-
     # System message
     system_message = st.text_area(
         "System Message",
@@ -64,7 +54,7 @@ with st.sidebar:
 
 # Main chat interface
 st.title("🤖 DeepSeek Chatbot")
-st.caption("Powered by ruslanmv.com - Choose your model and parameters in the sidebar")
+st.caption("Powered by ruslanmv.com - Configure parameters in the sidebar")
 
 # Display chat messages
 for message in st.session_state.messages:
@@ -81,12 +71,11 @@ if prompt := st.chat_input("Type your message..."):
         st.markdown(prompt)
 
     try:
-        # Generate response using selected model
+        # Generate response using the model
         with st.spinner("Generating response..."):
-            # The model expects parameters as separate arguments
-            response = selected_demo.fn(
-                prompt=f"{system_message}\n\nUser: {prompt}\nAssistant:",
-                max_length=max_tokens,  # Updated from max_new_tokens to max_length
+            response = demo(
+                inputs=f"{system_message}\n\nUser: {prompt}\nAssistant:",
+                max_length=max_tokens,  # Matches Gradio's expected parameter name
                 temperature=temperature,
                 top_p=top_p
            )
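
Note: the models module itself is not part of this commit, so the shape of load_model() is not shown above. A minimal sketch of what it might look like, assuming the Space wraps one of the hosted DeepSeek repos with Gradio's hub loader (gr.load); the repo id and the function body are assumptions for illustration, not the author's code:

# models.py (hypothetical sketch; not part of this commit)
import gradio as gr

def load_model():
    # Assumption: build a demo backed by the Hugging Face Inference API
    # for one of the DeepSeek repos referenced in the removed mapping.
    return gr.load("models/deepseek-ai/DeepSeek-R1")

With a wrapper along these lines, demo(...) in app.py calls whatever object load_model() returns; whether that object accepts inputs, max_length, temperature, and top_p as keyword arguments depends entirely on the real models module.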