RakeshUtekar committed on
Commit b2c6609 · verified · 1 Parent(s): 59e9cef

Update app.py

Files changed (1)
  1. app.py +22 -19
app.py CHANGED
@@ -6,27 +6,35 @@ from langchain.prompts import ChatPromptTemplate
 from langchain_huggingface import HuggingFaceEndpoint
 
 def create_conversation_prompt(name1: str, name2: str, persona_style: str):
-    """Create a prompt for generating the entire 15-message conversation."""
+    """
+    Create a prompt that instructs the model to produce exactly 15 messages
+    of conversation, alternating between name1 and name2, starting with name1.
+    """
     prompt_template_str = f"""
     You are to simulate a conversation of exactly 15 messages total between two people: {name1} and {name2}.
     The conversation should reflect the style: {persona_style}.
-    {name1} speaks first (message 1), {name2} responds (message 2), then {name1} (message 3), and so forth,
-    until 15 messages are complete (the 15th message by {name1}).
+    {name1} speaks first (message 1), then {name2} (message 2), then {name1} (message 3), and so forth,
+    alternating until all 15 messages are complete (the 15th message is by {name1}).
 
     Rules:
-    - Each message is formatted as:
-      {name1}: <message> or {name2}: <message>
-    - Each message: 1-2 short sentences, friendly, natural.
+    - Output only the 15 messages of the conversation, nothing else.
+    - Each message must be in the format:
+      {name1}: <one or two short sentences>
+      {name2}: <one or two short sentences>
+    - Do not add any headings, such as '# Conversation' or '---'.
+    - Do not repeat the conversation.
+    - Do not produce anything after the 15th message.
     - Use everyday language, can ask questions, show opinions.
-    - Use emojis sparingly if it fits the style.
-    - Do not repeat the same line.
-    - Produce all 15 messages now and do not continue beyond the 15th message.
+    - Use emojis sparingly, if at all (no more than 1-2 in the entire conversation).
+    - No repetition of the same message. Each message is unique and responds logically to the previous one.
+    - The conversation ends exactly after the 15th message by {name1} with no extra lines or content.
+
+    Now, produce all 15 messages strictly following the above instructions:
     """
     return ChatPromptTemplate.from_template(prompt_template_str)
 
 def create_summary_prompt(name1: str, name2: str, conversation: str):
     """Create a prompt specifically for generating a title and summary of the conversation."""
-    # Here we explicitly create a new prompt template for the summary.
     summary_prompt_str = f"""
     The following is a completed conversation between {name1} and {name2}:
 
@@ -36,7 +44,7 @@ def create_summary_prompt(name1: str, name2: str, conversation: str):
     Title: <A short descriptive title of the conversation>
     Summary: <A few short sentences highlighting the main points, tone, and conclusion>
 
-    Do not continue the conversation, just provide title and summary.
+    Do not continue the conversation, do not repeat it, just provide a title and summary.
     """
     return ChatPromptTemplate.from_template(summary_prompt_str)
 
@@ -50,7 +58,6 @@ def main():
         "tiiuae/falcon-180B",
         "EleutherAI/gpt-neox-20b",
         "dice-research/lola_v1"
-
     ]
     selected_model = st.selectbox("Select a model:", model_names)
 
@@ -81,7 +88,6 @@ def main():
         print(f"Error initializing HuggingFaceEndpoint: {e}")
         return
 
-    # Create a chain for the conversation generation
    conversation_prompt = create_conversation_prompt(name1, name2, persona_style)
    conversation_chain = LLMChain(llm=llm, prompt=conversation_prompt)
 
@@ -90,26 +96,22 @@ def main():
 
    try:
        # Generate all 15 messages in one go
-        # Here we send the prompt for the conversation to the LLM
        conversation = conversation_chain.run(chat_history="", input="Produce the full conversation now.")
        conversation = conversation.strip()
-
+
        st.subheader("Final Conversation:")
        st.text(conversation)
        print("Conversation Generation Complete.\n")
        print("Full Conversation:\n", conversation)
 
-        # Now we create a separate prompt for the summary
+        # Now summarize
        summary_prompt = create_summary_prompt(name1, name2, conversation)
-        # Create a new chain for the summary using the summary prompt
        summary_chain = LLMChain(llm=llm, prompt=summary_prompt)
 
        st.subheader("Summary and Title:")
        st.write("**Summarizing the conversation...**")
        print("Summarizing the conversation...")
 
-        # Here we explicitly call the summary chain with the summary prompt
-        # This ensures we are actually sending the summary prompt to the LLM
        summary = summary_chain.run(chat_history="", input="")
        st.write(summary)
        print("Summary:\n", summary)
@@ -120,3 +122,4 @@ def main():
 
 if __name__ == "__main__":
     main()
+
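For readers browsing this commit, here is a minimal sketch (not part of the diff) of how the two prompt builders and chains touched above fit together: the conversation chain is run once to produce the 15 messages, and its stripped output is then passed to create_summary_prompt to build the second prompt before the summary chain runs. The names Alice and Bob, the persona string, the chosen repo_id, and the HF_API_TOKEN environment variable are illustrative assumptions, not values from the commit; the import from app assumes app.py is on the Python path.

import os

from langchain.chains import LLMChain
from langchain_huggingface import HuggingFaceEndpoint

from app import create_conversation_prompt, create_summary_prompt  # assumes app.py is importable

# Assumed endpoint configuration: repo_id is one of the models listed in app.py,
# and HF_API_TOKEN is an assumed environment variable holding a Hugging Face token.
llm = HuggingFaceEndpoint(
    repo_id="tiiuae/falcon-180B",
    huggingfacehub_api_token=os.environ.get("HF_API_TOKEN"),
    temperature=0.7,
    max_new_tokens=512,
)

# Step 1: generate all 15 messages in one call, mirroring the .run(...) call in app.py.
conversation_chain = LLMChain(llm=llm, prompt=create_conversation_prompt("Alice", "Bob", "casual and friendly"))
conversation = conversation_chain.run(chat_history="", input="Produce the full conversation now.").strip()

# Step 2: build the summary prompt around the finished conversation and run the second chain.
summary_chain = LLMChain(llm=llm, prompt=create_summary_prompt("Alice", "Bob", conversation))
summary = summary_chain.run(chat_history="", input="")

print(conversation)
print(summary)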