ruslanmv committed (verified)
Commit be8fcde · 1 Parent(s): f6905f3

Create app-v3-working-dup.py
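The new file is a single-page Streamlit chat app that forwards each prompt, together with a configurable system message and sampling parameters, to the Hugging Face Inference API for the selected DeepSeek model. It reads the API token from st.secrets["HF_TOKEN"], so a Streamlit secrets entry (for example in .streamlit/secrets.toml) has to be configured before starting the app with streamlit run app-v3-working-dup.py.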

Files changed (1)
  1. app-v3-working-dup.py +121 -0
app-v3-working-dup.py ADDED
@@ -0,0 +1,121 @@
+ import streamlit as st
+ import requests
+ import logging
+
+ # Configure logging
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+ # Page configuration
+ st.set_page_config(
+     page_title="DeepSeek Chatbot - ruslanmv.com",
+     page_icon="🤖",
+     layout="centered"
+ )
+
+ # Initialize session state for chat history
+ if "messages" not in st.session_state:
+     st.session_state.messages = []
+
+ # Sidebar configuration
+ with st.sidebar:
+     st.header("Model Configuration")
+     st.markdown("[Get HuggingFace Token](https://huggingface.co/settings/tokens)")
+
+     # Dropdown to select model
+     model_options = [
+         "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
+     ]
+     selected_model = st.selectbox("Select Model", model_options, index=0)
+
+     system_message = st.text_area(
+         "System Message",
+         value="You are a friendly chatbot created by ruslanmv.com. Provide clear, accurate, and brief answers. Keep responses polite, engaging, and to the point. If unsure, politely suggest alternatives.",
+         height=100
+     )
+
+     max_tokens = st.slider(
+         "Max Tokens",
+         10, 4000, 100
+     )
+
+     temperature = st.slider(
+         "Temperature",
+         0.1, 4.0, 0.3
+     )
+
+     top_p = st.slider(
+         "Top-p",
+         0.1, 1.0, 0.6
+     )
+
+ # Function to query the Hugging Face API
+ def query(payload, api_url):
+     headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}
+     logger.info(f"Sending request to {api_url} with payload: {payload}")
+     response = requests.post(api_url, headers=headers, json=payload)
+     logger.info(f"Received response: {response.status_code}, {response.text}")
+     try:
+         return response.json()
+     except requests.exceptions.JSONDecodeError:
+         logger.error(f"Failed to decode JSON response: {response.text}")
+         return None
+
+ # Chat interface
+ st.title("🤖 DeepSeek Chatbot")
+ st.caption("Powered by Hugging Face Inference API - Configure in sidebar")
+
+ # Display chat history
+ for message in st.session_state.messages:
+     with st.chat_message(message["role"]):
+         st.markdown(message["content"])
+
+ # Handle input
+ if prompt := st.chat_input("Type your message..."):
+     st.session_state.messages.append({"role": "user", "content": prompt})
+
+     with st.chat_message("user"):
+         st.markdown(prompt)
+
+     try:
+         with st.spinner("Generating response..."):
+             # Prepare the payload for the API
+             # Combine system message and user input into a single prompt
+             full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"
+             payload = {
+                 "inputs": full_prompt,
+                 "parameters": {
+                     "max_new_tokens": max_tokens,
+                     "temperature": temperature,
+                     "top_p": top_p,
+                     "return_full_text": False
+                 }
+             }
+
+             # Dynamically construct the API URL based on the selected model
+             api_url = f"https://api-inference.huggingface.co/models/{selected_model}"
+             logger.info(f"Selected model: {selected_model}, API URL: {api_url}")
+             print("payload", payload)
+             # Query the Hugging Face API using the selected model
+             output = query(payload, api_url)
+
+             # Handle API response
+             if output is not None and isinstance(output, list) and len(output) > 0:
+                 if 'generated_text' in output[0]:
+                     assistant_response = output[0]['generated_text']
+                     logger.info(f"Generated response: {assistant_response}")
+
+                     with st.chat_message("assistant"):
+                         st.markdown(assistant_response)
+
+                     st.session_state.messages.append({"role": "assistant", "content": assistant_response})
+                 else:
+                     logger.error(f"Unexpected API response structure: {output}")
+                     st.error("Error: Unexpected response from the model. Please try again.")
+             else:
+                 logger.error(f"Empty or invalid API response: {output}")
+                 st.error("Error: Unable to generate a response. Please check the model and try again.")
+
+     except Exception as e:
+         logger.error(f"Application Error: {str(e)}", exc_info=True)
+         st.error(f"Application Error: {str(e)}")
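
For a quick sanity check of the token and endpoint outside Streamlit, the same request can be reproduced with plain requests. Below is a minimal sketch (not part of this commit), assuming the token is exported as an HF_TOKEN environment variable; the app itself reads it from st.secrets instead, and the example prompt is illustrative only.

import os
import requests

# Same endpoint the app builds from the selected model
API_URL = "https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"
# Assumption for this sketch: the token comes from an environment variable
headers = {"Authorization": f"Bearer {os.environ['HF_TOKEN']}"}

payload = {
    "inputs": "You are a friendly chatbot.\n\nUser: Hello!\nAssistant:",
    "parameters": {
        "max_new_tokens": 100,
        "temperature": 0.3,
        "top_p": 0.6,
        "return_full_text": False,
    },
}

response = requests.post(API_URL, headers=headers, json=payload)
print(response.status_code)
# A successful reply is a list such as [{"generated_text": "..."}],
# which is exactly the shape the app checks before rendering the answer.
print(response.json())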