RakeshUtekar committed on
Commit 4d94499 · verified · 1 Parent(s): 30922ce

Update app.py

Files changed (1)
  1. app.py +38 -34
app.py CHANGED
@@ -5,46 +5,40 @@ from langchain.chains import LLMChain
 from langchain.prompts import ChatPromptTemplate
 from langchain_huggingface import HuggingFaceEndpoint
 
-def create_prompt(name1: str, name2: str, persona_style: str):
-    """Create a prompt that instructs the model to produce all 15 messages at once."""
+def create_conversation_prompt(name1: str, name2: str, persona_style: str):
+    """Create a prompt for generating the entire 15-message conversation."""
     prompt_template_str = f"""
     You are to simulate a conversation of exactly 15 messages total between two people: {name1} and {name2}.
     The conversation should reflect the style: {persona_style}.
-    {name1} speaks first (message 1), {name2} responds (message 2), then {name1} (message 3), and so on, alternating until 15 messages are complete.
+    {name1} speaks first (message 1), {name2} responds (message 2), then {name1} (message 3), and so forth,
+    until 15 messages are complete (the 15th message by {name1}).
+
     Rules:
-    - Each message should be written as:
+    - Each message is formatted as:
     {name1}: <message> or {name2}: <message>
-    - Each message should be 1-2 short sentences, friendly, and natural.
-    - Keep it casual, can ask questions, share opinions.
-    - Use emojis sparingly if it fits the persona (no more than 1-2 per message).
-    - Do not repeat the same line over and over.
-    - The conversation must flow logically and naturally.
-    - After producing exactly 15 messages (the 15th message by {name1}), stop. Do not add anything else.
-    - Do not continue the conversation beyond 15 messages.
-
-    Produce all 15 messages now:
+    - Each message: 1-2 short sentences, friendly, natural.
+    - Use everyday language, can ask questions, show opinions.
+    - Use emojis sparingly if it fits the style.
+    - Do not repeat the same line.
+    - Produce all 15 messages now and do not continue beyond the 15th message.
     """
     return ChatPromptTemplate.from_template(prompt_template_str)
 
-def summarize_conversation(chain: LLMChain, conversation: str, name1: str, name2: str):
-    """Summarize the completed conversation."""
-    st.write("**Summarizing the conversation...**")
-    print("Summarizing the conversation...")
+def create_summary_prompt(name1: str, name2: str, conversation: str):
+    """Create a prompt specifically for generating a title and summary of the conversation."""
+    # Here we explicitly create a new prompt template for the summary.
+    summary_prompt_str = f"""
+    The following is a completed conversation between {name1} and {name2}:
 
-    summary_prompt = f"""
-    Below is a completed conversation between {name1} and {name2}:
    {conversation}
 
-    Use the conversation above and write a short Title and a summary of above conversation. The summary should be in paragraph which highlights what was the conversation about.
-    """
+    Please provide:
+    Title: <A short descriptive title of the conversation>
+    Summary: <A few short sentences highlighting the main points, tone, and conclusion>
 
-    try:
-        response = chain.run(chat_history="", input=summary_prompt)
-        return response.strip()
-    except Exception as e:
-        st.error(f"Error summarizing conversation: {e}")
-        print(f"Error summarizing conversation: {e}")
-        return "Title: No Title\nSummary: No summary available due to error."
+    Do not continue the conversation, just provide title and summary.
+    """
+    return ChatPromptTemplate.from_template(summary_prompt_str)
 
 def main():
     st.title("LLM Conversation Simulation")
@@ -83,26 +77,36 @@ def main():
         print(f"Error initializing HuggingFaceEndpoint: {e}")
         return
 
-    prompt = create_prompt(name1, name2, persona_style)
-    chain = LLMChain(llm=llm, prompt=prompt)
+    # Create a chain for the conversation generation
+    conversation_prompt = create_conversation_prompt(name1, name2, persona_style)
+    conversation_chain = LLMChain(llm=llm, prompt=conversation_prompt)
 
     st.write("**Generating the full 15-message conversation...**")
     print("Generating the full 15-message conversation...")
 
     try:
         # Generate all 15 messages in one go
-        conversation = chain.run(chat_history="", input="Produce the full conversation now.")
+        # Here we send the prompt for the conversation to the LLM
+        conversation = conversation_chain.run(chat_history="", input="Produce the full conversation now.")
         conversation = conversation.strip()
 
-        # Print and display the conversation
         st.subheader("Final Conversation:")
         st.text(conversation)
         print("Conversation Generation Complete.\n")
         print("Full Conversation:\n", conversation)
 
-        # Summarize the conversation
+        # Now we create a separate prompt for the summary
+        summary_prompt = create_summary_prompt(name1, name2, conversation)
+        # Create a new chain for the summary using the summary prompt
+        summary_chain = LLMChain(llm=llm, prompt=summary_prompt)
+
         st.subheader("Summary and Title:")
+        st.write("**Summarizing the conversation...**")
+        print("Summarizing the conversation...")
+
+        # Here we explicitly call the summary chain with the summary prompt
+        # This ensures we are actually sending the summary prompt to the LLM
+        summary = summary_chain.run(chat_history="", input="")
         st.write(summary)
         print("Summary:\n", summary)
 
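
For readers who want to try the same two-chain pattern outside Streamlit, here is a minimal standalone sketch: one LLMChain generates the conversation and a second LLMChain produces the title and summary. The repo_id, temperature, and the use of template variables (instead of the app's pre-filled f-string prompts) are illustrative assumptions, not code from this commit; a HUGGINGFACEHUB_API_TOKEN environment variable is assumed to be set.

# Illustrative sketch only, not part of the committed app.py.
from langchain.chains import LLMChain
from langchain.prompts import ChatPromptTemplate
from langchain_huggingface import HuggingFaceEndpoint

# Assumed model and settings for illustration; the app configures its own LLM.
llm = HuggingFaceEndpoint(
    repo_id="HuggingFaceH4/zephyr-7b-beta",
    temperature=0.7,
)

# Conversation chain: names and style are template variables here rather than
# being baked in with an f-string, so one prompt object can be reused.
conversation_prompt = ChatPromptTemplate.from_template(
    "Simulate a conversation of exactly 15 messages between {name1} and {name2} "
    "in the style: {persona_style}. Alternate speakers, starting with {name1}."
)
conversation_chain = LLMChain(llm=llm, prompt=conversation_prompt)
conversation = conversation_chain.run(
    name1="Alice", name2="Bob", persona_style="casual and friendly"
).strip()

# Summary chain: a second prompt/chain that only sees the finished conversation.
summary_prompt = ChatPromptTemplate.from_template(
    "The following is a completed conversation:\n\n{conversation}\n\n"
    "Provide a short Title and a brief Summary. Do not continue the conversation."
)
summary_chain = LLMChain(llm=llm, prompt=summary_prompt)
summary = summary_chain.run(conversation=conversation).strip()
print(summary)

Keeping the summary in its own chain, as the commit does, means the summary request reaches the model with its own dedicated prompt instead of being appended to the conversation prompt.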