ruslanmv committed on
Commit
d9748a4
·
verified ·
1 Parent(s): e10cc1c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -59
app.py CHANGED
@@ -1,29 +1,26 @@
1
  # app.py
2
  import streamlit as st
3
- from huggingface_hub import InferenceClient
4
- from datetime import datetime
5
 
6
- # Configure page
7
  st.set_page_config(
8
  page_title="DeepSeek Chatbot - ruslanmv.com",
9
  page_icon="πŸ€–",
10
- layout="centered",
11
- initial_sidebar_state="expanded"
12
  )
13
 
14
- # Initialize session state
15
  if "messages" not in st.session_state:
16
  st.session_state.messages = []
17
 
18
- # Sidebar controls
19
  with st.sidebar:
20
- st.title("πŸ€– Chatbot Settings")
21
- st.markdown("Created by [ruslanmv.com](https://ruslanmv.com/)")
22
 
23
  # Model selection
24
  selected_model = st.selectbox(
25
  "Choose Model",
26
- options=[
27
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
28
  "deepseek-ai/DeepSeek-R1",
29
  "deepseek-ai/DeepSeek-R1-Zero"
@@ -39,19 +36,19 @@ with st.sidebar:
39
  )
40
 
41
  # Generation parameters
42
- max_new_tokens = st.slider(
43
- "Max new tokens",
44
  min_value=1,
45
  max_value=4000,
46
  value=512,
47
- step=50
48
  )
49
 
50
  temperature = st.slider(
51
  "Temperature",
52
  min_value=0.1,
53
  max_value=4.0,
54
- value=1.0,
55
  step=0.1
56
  )
57
 
@@ -62,66 +59,44 @@ with st.sidebar:
62
  value=0.9,
63
  step=0.1
64
  )
65
-
66
- # Optional HF Token
67
- hf_token = st.text_input(
68
- "HuggingFace Token (optional)",
69
- type="password",
70
- help="Enter your HuggingFace token if required for model access"
71
- )
72
 
73
  # Main chat interface
74
- st.title("πŸ’¬ DeepSeek Chatbot")
75
- st.caption("πŸš€ A conversational AI powered by DeepSeek models")
76
 
77
  # Display chat messages
78
  for message in st.session_state.messages:
79
  with st.chat_message(message["role"]):
80
  st.markdown(message["content"])
81
- if "timestamp" in message:
82
- st.caption(f"_{message['timestamp']}_")
83
 
84
- # Chat input and processing
85
  if prompt := st.chat_input("Type your message..."):
86
- # Add user message to history
87
- st.session_state.messages.append({
88
- "role": "user",
89
- "content": prompt,
90
- "timestamp": datetime.now().strftime("%H:%M:%S")
91
- })
92
 
93
  # Display user message
94
  with st.chat_message("user"):
95
  st.markdown(prompt)
96
- st.caption(f"_{st.session_state.messages[-1]['timestamp']}_")
97
-
98
- # Create full prompt with system message
99
- full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"
100
 
101
- # Create client and generate response
102
- client = InferenceClient(model=selected_model, token=hf_token)
103
 
104
- # Display assistant response
105
- with st.chat_message("assistant"):
106
- response = st.write_stream(
107
- client.text_generation(
108
  full_prompt,
109
- max_new_tokens=max_new_tokens,
110
  temperature=temperature,
111
- top_p=top_p,
112
- stream=True
113
  )
114
- )
115
- timestamp = datetime.now().strftime("%H:%M:%S")
116
- st.caption(f"_{timestamp}_")
117
-
118
- # Add assistant response to history
119
- st.session_state.messages.append({
120
- "role": "assistant",
121
- "content": response,
122
- "timestamp": timestamp
123
- })
124
-
125
- # Optional debug information
126
- # st.sidebar.markdown("---")
127
- # st.sidebar.json(st.session_state.messages)
 
1
  # app.py
2
  import streamlit as st
3
+ from models import demo
 
4
 
5
# Page configuration: browser tab title/icon and a centered layout.
st.set_page_config(
    page_title="DeepSeek Chatbot - ruslanmv.com",
    page_icon="🤖",  # repaired from mojibake "πŸ€–" (UTF-8 bytes mis-decoded as Latin-1)
    layout="centered"
)
11
 
12
# Streamlit reruns this script on every interaction, so create the chat
# history exactly once and leave any existing messages untouched.
st.session_state.setdefault("messages", [])
15
 
16
+ # Sidebar for model selection and parameters
17
  with st.sidebar:
18
+ st.header("Model Configuration")
 
19
 
20
  # Model selection
21
  selected_model = st.selectbox(
22
  "Choose Model",
23
+ [
24
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
25
  "deepseek-ai/DeepSeek-R1",
26
  "deepseek-ai/DeepSeek-R1-Zero"
 
36
  )
37
 
38
  # Generation parameters
39
+ max_tokens = st.slider(
40
+ "Max New Tokens",
41
  min_value=1,
42
  max_value=4000,
43
  value=512,
44
+ step=10
45
  )
46
 
47
  temperature = st.slider(
48
  "Temperature",
49
  min_value=0.1,
50
  max_value=4.0,
51
+ value=0.7,
52
  step=0.1
53
  )
54
 
 
59
  value=0.9,
60
  step=0.1
61
  )
 
 
 
 
 
 
 
62
 
63
# Main chat interface
st.title("🤖 DeepSeek Chatbot")  # emoji repaired from mojibake "πŸ€–"
st.caption("Powered by ruslanmv.com - Choose your model and parameters in the sidebar")

# Replay the stored conversation. Streamlit reruns the whole script on every
# interaction, so the transcript must be redrawn from session state each time.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
 
 
71
 
72
# Chat input: runs once per user submission.
user_text = st.chat_input("Type your message...")
if user_text:
    # Record the user turn so it survives the next script rerun.
    st.session_state.messages.append({"role": "user", "content": user_text})

    # Echo the user turn immediately.
    with st.chat_message("user"):
        st.markdown(user_text)

    # Build the model prompt: system message, then the user turn, then an
    # "Assistant:" cue for the completion.
    # NOTE(review): `system_message`, `top_p`, and `demo` are defined earlier
    # in this file / in models.py (outside this section).
    model_prompt = f"{system_message}\n\nUser: {user_text}\nAssistant:"

    try:
        # Generate the reply via the wrapped model function.
        with st.spinner("Generating response..."):
            reply = demo.fn(
                model_prompt,
                max_new_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p
            )

        # Show the assistant turn and persist it in the history.
        with st.chat_message("assistant"):
            st.markdown(reply)
        st.session_state.messages.append({"role": "assistant", "content": reply})

    except Exception as e:
        # Surface generation failures in the UI instead of crashing the app.
        st.error(f"Error generating response: {str(e)}")