OrifjonKenjayev committed on
Commit 6888520 · verified · 1 Parent(s): f7b1a59

Update app.py

Files changed (1)
  1. app.py +156 -111
app.py CHANGED
@@ -1,156 +1,201 @@
- import gradio as gr
  import os
- from langchain_community.vectorstores import FAISS
- from langchain_together import TogetherEmbeddings, Together
- from langchain.prompts import ChatPromptTemplate
- from langchain.schema.runnable import RunnablePassthrough
- from langchain.schema.output_parser import StrOutputParser
- from langchain.memory import ConversationBufferMemory
  from typing import List, Tuple
- import re

- TOGETHER_API_KEY = os.getenv('TOGETHER_API_KEY')


- class ChatBot:
-     def __init__(self):
-         self.embeddings = TogetherEmbeddings(
-             model="togethercomputer/m2-bert-80M-32k-retrieval",
-             together_api_key=TOGETHER_API_KEY
-         )

-         self.vectorstore = FAISS.load_local(
-             ".",
-             embeddings=self.embeddings,
-             allow_dangerous_deserialization=True
-         )
-         self.retriever = self.vectorstore.as_retriever()
-
-         self.model = Together(
-             model="meta-llama/Llama-3.3-70B-Instruct-Turbo",
-             temperature=0.4,
-             max_tokens=256,
-             top_k=30,
-             together_api_key=TOGETHER_API_KEY
-         )

-         self.memory = ConversationBufferMemory(
-             return_messages=True,
-             memory_key="chat_history",
-             output_key="answer"
-         )

-         self.template = """Quyidagi ko'rsatmalarga qat'iy rioya qiling:
-
- 1. Faqat o'zbek tilida javob bering
- 2. Faqat berilgan ma'lumotlar asosida javob bering
- 3. Agar savol tushunarsiz bo'lsa yoki ma'lumot bo'lmasa, "Kechirasiz, bu haqida ma'lumotga ega emasman" deb javob bering
- 4. O'zingizdan savol bermang
- 5. Javobni takrorlamang
- 6. Salomlashish uchun "Assalomu alaykum" yoki "Vaalaykum assalom" dan foydalaning
-
- Kontekst: {context}
- Suhbat Tarixi: {chat_history}
- Savol: {question}
-
- Javob:"""

-         self.prompt = ChatPromptTemplate.from_template(self.template)
-
-         self.chain = (
-             {
-                 "context": self.retriever,
-                 "chat_history": lambda x: self.get_chat_history(),
-                 "question": RunnablePassthrough()
-             }
-             | self.prompt
-             | self.model
-             | StrOutputParser()
          )
-
-     def get_chat_history(self) -> str:
-         messages = self.memory.load_memory_variables({})["chat_history"]
-         return "\n".join([f"{m.type}: {m.content}" for m in messages])
-
-     def process_response(self, response: str) -> str:
-         unwanted_tags = ["[INST]", "[/INST]", "<s>", "</s>"]
-         for tag in unwanted_tags:
-             response = response.replace(tag, "")
-
-         response = re.sub(r"```.*?```", "", response, flags=re.DOTALL)
-         response = re.sub(r"print\(.*?\)", "", response)
-         response = re.sub(r'\s+', ' ', response)
-
-         return response.strip()

      def chat(self, message: str, history: List[Tuple[str, str]]) -> str:
          try:

-             if message == "__init__":
-                 return "Assalomu alaykum. Sizga qanday yordam bera olaman?"
-
-             self.memory.chat_memory.add_user_message(message)
-             response = self.chain.invoke(message)
-             clean_response = self.process_response(response)

-             if not clean_response or len(clean_response.split()) < 3:
-                 clean_response = "Kechirasiz, savolingizni tushunolmadim. Iltimos, batafsilroq savol bering."
-
-             self.memory.chat_memory.add_ai_message(clean_response)
-             return clean_response
          except Exception as e:
-             return f"Xatolik yuz berdi: {str(e)}"
-
-     def reset_chat(self) -> List[Tuple[str, str]]:
-         self.memory.clear()
-         return []

  def create_demo() -> gr.Interface:
-     chatbot = ChatBot()

      with gr.Blocks() as demo:
-         gr.Markdown("""# RAG Chatbot
- Beeline Uzbekistanning jismoniy shaxslar uchun tariflari haqida ma'lumotlar beruvchi bot""")
-

          chatbot_interface = gr.Chatbot(
-             height=600,
              show_copy_button=True,
          )

          with gr.Row():
              msg = gr.Textbox(
-                 show_label=False,
-                 placeholder="Xabaringizni shu yerda yozing",
                  container=False
              )
-             submit = gr.Button("Xabarni yuborish", variant="primary")
-
-         clear = gr.Button("Yangi suhbat")

          def respond(message, chat_history):
-             message = message.strip()
-             if not message:
-                 return "", chat_history
-
              bot_message = chatbot.chat(message, chat_history)
              chat_history.append((message, bot_message))
              return "", chat_history

          def init_chat():
-
-             initial_greeting = chatbot.chat("__init__", [])
              return [("", initial_greeting)]

          submit.click(respond, [msg, chatbot_interface], [msg, chatbot_interface])
          msg.submit(respond, [msg, chatbot_interface], [msg, chatbot_interface])
-         clear.click(init_chat, None, chatbot_interface)
-
-
-         demo.load(init_chat, None, chatbot_interface)

      return demo

  demo = create_demo()
-
  if __name__ == "__main__":
-     demo.launch()

  import os
+ import logging
+ import traceback
  from typing import List, Tuple

+ import gradio as gr
+ from dotenv import load_dotenv

+ from langchain.document_loaders import TextLoader
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ from langchain.embeddings import OpenAIEmbeddings
+ from langchain.vectorstores import FAISS
+ from langchain.chat_models import ChatOpenAI
+ from langchain.chains import RetrievalQA
+ from langchain.prompts import PromptTemplate
+
+ # Configure logging
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+ # Load environment variables
+ load_dotenv()
+
+ class RAGChatbot:
+     def __init__(self, document_path):
+         """
+         Initialize RAG Chatbot with document vectorization
+
+         :param document_path: Path to the input document
+         """
+         self.openai_api_key = os.getenv('OPENAI_API_KEY')
+
+         if not self.openai_api_key:
+             raise ValueError("OpenAI API Key is not set. Please add it to environment variables.")
+
+         self.document_path = document_path
+         self.vectorstore = self._load_or_create_vector_store()
+         self.qa_system = self._create_qa_system()
+
+     def _load_or_create_vector_store(self):
+         """
+         Load existing FAISS index or create a new one
+
+         :return: FAISS vector store
+         """
+         try:
+             embeddings = OpenAIEmbeddings(openai_api_key=self.openai_api_key)
+
+             # Check if index exists
+             if os.path.exists('faiss_index'):
+                 logger.info("Loading existing vector store...")
+                 return FAISS.load_local('faiss_index', embeddings)
+
+             # Create new vector store
+             logger.info("Creating new vector store...")
+             loader = TextLoader(self.document_path, encoding='utf-8')
+             documents = loader.load()
+
+             text_splitter = RecursiveCharacterTextSplitter(
+                 chunk_size=3000,
+                 chunk_overlap=600,
+                 separators=["\n\n\n", "\n\n", "\n", ".", " ", ""]
+             )
+             texts = text_splitter.split_documents(documents)
+
+             vectorstore = FAISS.from_documents(texts, embeddings)
+
+             # Ensure faiss_index directory exists
+             os.makedirs('faiss_index', exist_ok=True)
+             vectorstore.save_local('faiss_index')
+
+             return vectorstore
+
+         except Exception as e:
+             logger.error(f"Vector store creation error: {e}")
+             logger.error(traceback.format_exc())
+             raise

+     def _create_qa_system(self):
+         """
+         Create Question-Answering system with custom prompt
+
+         :return: RetrievalQA chain
+         """
+         custom_prompt = PromptTemplate(
+             input_variables=["context", "question"],
+             template="""You are an expert AI assistant for Beeline Uzbekistan tariffs.
+ Provide clear, precise answers based on the context.
+ Respond in the language of the question.
+
+ Context: {context}
+ Question: {question}
+
+ Comprehensive Answer:"""
+         )
+
+         llm = ChatOpenAI(
+             model_name="gpt-3.5-turbo",
+             openai_api_key=self.openai_api_key,
+             temperature=0.1
+         )
+
+         return RetrievalQA.from_chain_type(
+             llm=llm,
+             chain_type="stuff",
+             retriever=self.vectorstore.as_retriever(
+                 search_type="mmr",
+                 search_kwargs={"k": 4, "fetch_k": 10}
+             ),
+             chain_type_kwargs={"prompt": custom_prompt}
          )

      def chat(self, message: str, history: List[Tuple[str, str]]) -> str:
+         """
+         Main chat method with multilingual support
+
+         :param message: User input message
+         :param history: Chat history
+         :return: Bot response
+         """
+         # Handle initial greeting
+         if message.lower() in ['init', 'start', 'begin']:
+             return "Assalomu alaykum! 📱 Beeline tarifları haqida qanday ma'lumot kerak? (Hello! What Beeline tariff information do you need?)"
+
+         # Multilingual greeting handling
+         greetings = {
+             'uz': ['salom', 'assalomu alaykum', 'hammaga salom'],
+             'ru': ['привет', 'здравствуйте', 'hi', 'hello'],
+             'en': ['hi', 'hello', 'hey']
+         }
+
+         for lang, greeting_list in greetings.items():
+             if message.lower() in greeting_list:
+                 return {
+                     'uz': "Salom! Sizga qanday yordam bera olaman? 🤖",
+                     'ru': "Привет! Чем могу помочь? 🤖",
+                     'en': "Hello! How can I assist you today about Beeline tariffs? 🤖"
+                 }[lang]
+
          try:
+             # Query the document
+             response = self.qa_system.run(message)

+             # Add conversational touch
+             response += "\n\n📞 Yana bir nima so'rashingizni xohlar edingizmi? (Would you like to ask anything else?)"

+             return response
+
          except Exception as e:
+             logger.error(f"Chat processing error: {e}")
+             logger.error(traceback.format_exc())
+             return "Kechirasiz, so'rovingizni qayta ishlashda xatolik yuz berdi. Iltimos, qaytadan urinib ko'ring. (Sorry, there was an error processing your request. Please try again.)"

  def create_demo() -> gr.Interface:
+     """
+     Create Gradio interface for the chatbot
+
+     :return: Gradio demo
+     """
+     # Initialize chatbot with document
+     chatbot = RAGChatbot('12.txt')

      with gr.Blocks() as demo:
+         gr.Markdown("# 📱 Beeline Uzbekistan Tariff Assistant")

          chatbot_interface = gr.Chatbot(
+             height=600,
              show_copy_button=True,
+             avatar_images=["🤔", "🤖"]  # User and Bot avatars
          )

          with gr.Row():
              msg = gr.Textbox(
+                 show_label=False,
+                 placeholder="Beeline tariffları haqida so'rang... (Ask about Beeline tariffs...)",
                  container=False
              )
+             submit = gr.Button("Yuborish (Send)", variant="primary")
+             clear = gr.Button("Yangi suhbat (New Chat)")

          def respond(message, chat_history):
              bot_message = chatbot.chat(message, chat_history)
              chat_history.append((message, bot_message))
              return "", chat_history

          def init_chat():
+             initial_greeting = chatbot.chat("init", [])
              return [("", initial_greeting)]

+         # Event handlers
          submit.click(respond, [msg, chatbot_interface], [msg, chatbot_interface])
          msg.submit(respond, [msg, chatbot_interface], [msg, chatbot_interface])
+         clear.click(fn=init_chat, inputs=None, outputs=chatbot_interface)
+         demo.load(init_chat, inputs=None, outputs=chatbot_interface)

      return demo

+ # Main execution
  demo = create_demo()
  if __name__ == "__main__":
+     demo.launch(debug=True)
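
For reference, a minimal sketch of how the updated RAGChatbot class could be smoke-tested locally outside Gradio. It assumes OPENAI_API_KEY is available (for example via a .env file) and that the 12.txt source document sits next to app.py; the class and method names come from the diff above, while the test script itself is illustrative and not part of the commit.

# Illustrative smoke test, not part of this commit.
# Assumes OPENAI_API_KEY is set (e.g. in a .env file) and 12.txt sits next to app.py.
# Note: importing app runs its module-level code, including demo = create_demo(),
# which already builds or loads the local faiss_index directory.
from app import RAGChatbot

bot = RAGChatbot('12.txt')                   # reuses ./faiss_index when it already exists
print(bot.chat("init", []))                  # canned greeting branch
print(bot.chat("Qanday tariflar bor?", []))  # answered through the RetrievalQA chain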