import os
import gradio as gr
import cohere


# The Cohere API key is read from the COHERE_KEY environment variable.
COHERE_KEY = os.getenv('COHERE_KEY')
co = cohere.Client(COHERE_KEY)

# Example list_history: [["question", "answer"], ["how", "how what..."]]
def convert_history(list_history):
    """
    Applies the system prompt and converts the chat history structure used by
    Gradio (a list of [user_message, bot_message] pairs) into the structure
    expected by Cohere.
    """
    # Read the system prompt and place it at the start of the history.
    with open("prompt.md", "r", encoding="UTF-8") as prompt_file:
        system_prompt = prompt_file.read()
    chat_history = [
        {"role": "SYSTEM", "text": system_prompt}
    ]
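    # Map each [user_message, bot_message] pair to the USER and CHATBOT roles.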
    for user_message, bot_message in list_history:
        chat_history.append({"role": "USER", "text": user_message})
        chat_history.append({"role": "CHATBOT", "text": bot_message})
    return chat_history


def reply(message: str, history: list):
    """
    Takes the user's message and the chat history and streams the chatbot's reply.
    """
    chat_history = convert_history(history)
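    # Stream the response from Cohere's chat endpoint.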
    response = co.chat_stream(
        message=message,
        chat_history=chat_history,
        model="command-nightly",
        temperature=0.25
    )
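    # Accumulate the streamed tokens and yield the growing text so the
    # Gradio ChatInterface can update the reply incrementally.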
    text_so_far = ""
    for event in response:
        if event.event_type == 'text-generation':
            text_so_far += event.text
            yield text_so_far


description = """
Hello!
💬 Use the text box below to ask questions about the Technorama 2024 conference!
<nobr> 🗣️ Talk to me in English, Dutch, or French. </nobr>
<nobr> 🔗 [This chatbot was made by Alfiya Khabibullina](https://www.linkedin.com/in/alfiya-khabibullina-7b13131b8/) </nobr>
"""

# Build and launch the chat interface; reply() streams partial answers into it.
gr.ChatInterface(
    reply,
    title="Technorama Assistant",
    description=description
).launch()