Update app.py
app.py CHANGED
@@ -31,9 +31,9 @@ class ChatBot:
         # Initialize the model
         self.model = Together(
             model="meta-llama/Llama-3.3-70B-Instruct-Turbo",
-            temperature=0.
-            max_tokens=
-            top_k=
+            temperature=0.5,
+            max_tokens=150,
+            top_k=30,
             together_api_key=TOGETHER_API_KEY
         )

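Note: the three tuned sampling parameters above (temperature, max_tokens, top_k) are standard fields on LangChain's Together LLM wrapper. A minimal sketch of the patched initialization, assuming the wrapper is imported from langchain_together (the actual import in app.py is not visible in this hunk):

import os
from langchain_together import Together  # assumed import path

TOGETHER_API_KEY = os.environ["TOGETHER_API_KEY"]

model = Together(
    model="meta-llama/Llama-3.3-70B-Instruct-Turbo",
    temperature=0.5,   # moderate randomness; 0 would be near-deterministic
    max_tokens=150,    # cap the length of each completion
    top_k=30,          # sample only from the 30 most likely next tokens
    together_api_key=TOGETHER_API_KEY,
)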
@@ -45,13 +45,13 @@ class ChatBot:
         )

         # Create the prompt template
-        self.template = """
+        self.template = """Quyidagi kontekst va suhbat tarixiga asoslanib, savolga o'zbek tilida tabiiy tarzda javob bering:

-
+Kontekst: {context}

-
+Suhbat Tarixi: {chat_history}

-
+Savol: {question}"""

         self.prompt = ChatPromptTemplate.from_template(self.template)

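The replacement template is in Uzbek and roughly reads: "Based on the context and conversation history below, answer the question naturally in Uzbek: Context: {context} / Conversation History: {chat_history} / Question: {question}". The old template's Llama-style [INST]/[/INST] markers, still visible in the next hunk's header, are dropped. A small sketch of how the three placeholders get filled through ChatPromptTemplate; the import path and the placeholder input values are assumptions, not code from app.py:

from langchain_core.prompts import ChatPromptTemplate  # assumed import path

template = (
    "Quyidagi kontekst va suhbat tarixiga asoslanib, savolga o'zbek tilida "
    "tabiiy tarzda javob bering:\n\n"
    "Kontekst: {context}\n\n"
    "Suhbat Tarixi: {chat_history}\n\n"
    "Savol: {question}"
)
prompt = ChatPromptTemplate.from_template(template)

# Hypothetical inputs, only to show the three variables the template expects.
messages = prompt.format_messages(
    context="(retrieved documents)",
    chat_history="(previous turns)",
    question="(the user's new question)",
)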
@@ -74,16 +74,26 @@ Question: {question} [/INST]"""

     def process_response(self, response: str) -> str:
         """Clean up the response"""
-
+        unwanted_tags = ["[INST]", "[/INST]", "<s>", "</s>"]
+        for tag in unwanted_tags:
+            response = response.replace(tag, "")
         return response.strip()

     def chat(self, message: str, history: List[Tuple[str, str]]) -> str:
         """Process a single chat message"""
-
-
-
-
-
+        try:
+            self.memory.chat_memory.add_user_message(message)
+            response = self.chain.invoke(message)
+            clean_response = self.process_response(response)
+
+            # Agar javob to'liq bo'lmasa yoki noto'g'ri bo'lsa, qayta urinib ko'rish
+            if not clean_response or len(clean_response.split()) < 3:
+                clean_response = "Kechirasiz, savolingizni tushunolmadim. Iltimos, batafsilroq savol bering."
+
+            self.memory.chat_memory.add_ai_message(clean_response)
+            return clean_response
+        except Exception as e:
+            return f"Xatolik yuz berdi: {str(e)}"

     def reset_chat(self) -> List[Tuple[str, str]]:
         """Reset the chat history"""
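The Uzbek comment and strings in the new chat() body translate roughly as: "If the response is incomplete or incorrect, retry"; "Sorry, I could not understand your question. Please ask a more detailed question."; and "An error occurred: ...". The method relies on self.memory and self.chain, which are built outside the lines shown in this diff; the sketch below is an assumed wiring using standard LangChain pieces (ConversationBufferMemory plus an LCEL pipeline), not the code actually in app.py:

import os

from langchain.memory import ConversationBufferMemory
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_together import Together  # assumed import path

model = Together(
    model="meta-llama/Llama-3.3-70B-Instruct-Turbo",
    temperature=0.5,
    max_tokens=150,
    top_k=30,
    together_api_key=os.environ["TOGETHER_API_KEY"],
)
# Shortened template; the full Uzbek instruction line is shown in the diff above.
prompt = ChatPromptTemplate.from_template(
    "Kontekst: {context}\n\nSuhbat Tarixi: {chat_history}\n\nSavol: {question}"
)
memory = ConversationBufferMemory()

def to_inputs(question: str) -> dict:
    # The context would normally come from a retriever; it is left empty here
    # so the sketch stays self-contained.
    return {"context": "", "chat_history": memory.buffer, "question": question}

chain = to_inputs | prompt | model | StrOutputParser()

# Mirrors the flow in ChatBot.chat(): record the user turn, run the chain,
# then record the model's reply.
memory.chat_memory.add_user_message("Salom!")
answer = chain.invoke("Salom!")
memory.chat_memory.add_ai_message(answer)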
@@ -114,6 +124,11 @@ def create_demo() -> gr.Interface:
         clear = gr.Button("New Chat")

         def respond(message, chat_history):
+            # Foydalanuvchi xabarini tozalash
+            message = message.strip()
+            if not message:
+                return "", chat_history
+
             bot_message = chatbot.chat(message, chat_history)
             chat_history.append((message, bot_message))
             return "", chat_history
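The added Uzbek comment means "clean up the user message"; the guard returns early so empty or whitespace-only submissions never reach the model. How respond() is attached to the Gradio components is outside this diff, so the sketch below assumes conventional names (msg, chatbot_ui) and standard gr.Blocks event wiring; treat it as illustrative only:

import gradio as gr

def fake_bot_reply(message: str, history: list) -> str:
    # Stand-in for chatbot.chat(message, history) from app.py.
    return f"Echo: {message}"

with gr.Blocks() as demo:
    chatbot_ui = gr.Chatbot()   # assumed component name
    msg = gr.Textbox()          # assumed component name
    clear = gr.Button("New Chat")

    def respond(message, chat_history):
        # Clean up the user message ("Foydalanuvchi xabarini tozalash")
        message = message.strip()
        if not message:
            return "", chat_history
        bot_message = fake_bot_reply(message, chat_history)
        chat_history.append((message, bot_message))
        return "", chat_history

    # Submitting the textbox calls respond(); the button clears the visible history.
    msg.submit(respond, [msg, chatbot_ui], [msg, chatbot_ui])
    clear.click(lambda: [], None, chatbot_ui)

if __name__ == "__main__":
    demo.launch()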