Update app.py
app.py CHANGED
@@ -11,6 +11,8 @@ import torch
 import numpy as np
 import networkx as nx
 from collections import Counter
+import json
+from datetime import datetime
 
 @dataclass
 class ChatMessage:
@@ -93,6 +95,11 @@ class XylariaChat:
             "photosynthesis": "process used by plants to convert light to energy"
         }
 
+        # Initialize chat history file
+        self.chat_history_file = "chat_history.json"
+        self.load_all_chats()
+
+
     def update_internal_state(self, emotion_deltas, cognitive_load_deltas, introspection_delta, engagement_delta):
         for emotion, delta in emotion_deltas.items():
             if emotion in self.internal_state["emotions"]:
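Review note: the new history file is created relative to the process working directory, and on a Space without persistent storage it is lost on every restart or rebuild; also, __init__ discards the return value of self.load_all_chats(), so that call only validates that the file is readable. A minimal, hypothetical sketch (none of these names are in app.py) that prefers the persistent /data mount when one is available:

import os

def history_path(filename="chat_history.json"):
    """Return a path for the chat history file, preferring persistent storage."""
    data_dir = "/data"  # Hugging Face Spaces mounts persistent storage here, when enabled
    if os.path.isdir(data_dir) and os.access(data_dir, os.W_OK):
        return os.path.join(data_dir, filename)
    return filename  # fall back to the working directory, as the diff does

# e.g. in XylariaChat.__init__: self.chat_history_file = history_path()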
@@ -559,6 +566,52 @@ class XylariaChat:
         prompt += "<|assistant|>\n"
         return prompt
 
+    def save_chat(self):
+        chat_data = {
+            "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+            "conversation": self.conversation_history
+        }
+        try:
+            with open(self.chat_history_file, "r") as f:
+                all_chats = json.load(f)
+        except (FileNotFoundError, json.JSONDecodeError):
+            all_chats = []
+
+        all_chats.append(chat_data)
+
+        with open(self.chat_history_file, "w") as f:
+            json.dump(all_chats, f)
+
+    def load_all_chats(self):
+        try:
+            with open(self.chat_history_file, "r") as f:
+                all_chats = json.load(f)
+        except (FileNotFoundError, json.JSONDecodeError):
+            all_chats = []
+        return all_chats
+
+    def load_chat(self, chat_index):
+        all_chats = self.load_all_chats()
+        if 0 <= chat_index < len(all_chats):
+            self.conversation_history = all_chats[chat_index]["conversation"]
+            self.reset_conversation()
+            for msg in self.conversation_history:
+                if msg['role'] == 'user':
+                    self.dynamic_belief_update(msg['content'])
+            return self.conversation_history
+        else:
+            raise ValueError("Invalid chat index")
+
+    def delete_chat(self, chat_index):
+        all_chats = self.load_all_chats()
+        if 0 <= chat_index < len(all_chats):
+            del all_chats[chat_index]
+            with open(self.chat_history_file, "w") as f:
+                json.dump(all_chats, f)
+            return self.load_all_chats()
+        else:
+            raise ValueError("Invalid chat index")
+
     def create_interface(self):
         loading_svg = """<svg width="256" height="256" viewBox="0 0 256 256" xmlns="http://www.w3.org/2000/svg">
         <style>
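Review note: save_chat() and load_all_chats() define the on-disk format as a JSON list of {"timestamp", "conversation"} entries, re-reading and re-writing the whole file on every save. A self-contained sketch of that round trip, handy for exercising the format outside Gradio; the message dicts follow the {'role', 'content'} shape that load_chat() assumes, and nothing here is imported from app.py:

import json
from datetime import datetime

history_file = "chat_history.json"  # same default name the diff uses

def append_chat(conversation, path=history_file):
    """Append one conversation to the JSON history list, creating the file if needed."""
    entry = {
        "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        "conversation": conversation,
    }
    try:
        with open(path, "r") as f:
            all_chats = json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        all_chats = []
    all_chats.append(entry)
    with open(path, "w") as f:
        json.dump(all_chats, f)

if __name__ == "__main__":
    append_chat([{"role": "user", "content": "hello"},
                 {"role": "assistant", "content": "hi there"}])
    with open(history_file) as f:
        print(len(json.load(f)), "saved chat(s)")

Because each save rewrites the full file, very long histories will make saves progressively slower; fine for a demo Space, but worth keeping in mind.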
@@ -580,7 +633,7 @@ class XylariaChat:
         <text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" font-size="24" fill="white" opacity="0.8">
         <tspan>creating your image</tspan>
         <tspan x="50%" dy="1.2em">with xylaria iris</tspan>
-
+        </text>
         </svg>"""
 
         def streaming_response(message, chat_history, image_filepath, math_ocr_image_path):
@@ -628,7 +681,7 @@ class XylariaChat:
 
             if image_filepath:
                 response_stream = self.get_response(message, image_filepath)
-
+            else:
                 response_stream = self.get_response(message)
 
             if isinstance(response_stream, str):
@@ -697,6 +750,26 @@ class XylariaChat:
             if len(self.conversation_history) > 10:
                 self.conversation_history = self.conversation_history[-10:]
 
+            self.save_chat()
+            all_chats = self.load_all_chats()
+            chat_titles = [f"{chat['timestamp']}: {chat['conversation'][0]['content'][:30]}..." if len(chat['conversation']) > 0 and chat['conversation'][0]['content'] else f"{chat['timestamp']}: Empty Chat" for chat in all_chats]
+            yield "", updated_history, None, None, gr.update(choices=chat_titles)
+
+        def load_selected_chat(chat_index, evt: gr.SelectData):
+            if chat_index is not None:
+                loaded_chat = self.load_chat(evt.index)
+                return loaded_chat
+            else:
+                return []
+
+        def delete_selected_chat(chat_index, evt: gr.SelectData):
+            if chat_index is not None:
+                all_chats = self.delete_chat(evt.index)
+                chat_titles = [f"{chat['timestamp']}: {chat['conversation'][0]['content'][:30]}..." if len(chat['conversation']) > 0 and chat['conversation'][0]['content'] else f"{chat['timestamp']}: Empty Chat" for chat in all_chats]
+                return gr.update(choices=chat_titles)
+            else:
+                return gr.update()
+
         custom_css = """
         @import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap');
         body, .gradio-container {
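Review note: the chat-title list comprehension appears three times in this change (here in streaming_response, in delete_selected_chat, and again where the gr.Radio is built below). A small hypothetical helper, not part of the diff, would keep the truncation rule in one place:

def chat_titles_from(all_chats, preview_len=30):
    """Build sidebar labels from saved chats: timestamp plus a short first-message preview."""
    titles = []
    for chat in all_chats:
        conv = chat.get("conversation", [])
        if conv and conv[0].get("content"):
            titles.append(f"{chat['timestamp']}: {conv[0]['content'][:preview_len]}...")
        else:
            titles.append(f"{chat['timestamp']}: Empty Chat")
    return titles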
@@ -771,72 +844,90 @@ class XylariaChat:
         display: flex;
         flex-direction: column-reverse;
         }
+        #chat_list {
+            height: 500px;
+            overflow-y: auto;
+        }
         """
 
         with gr.Blocks(theme='soft', css=custom_css) as demo:
-            with gr.
-            [... 21 further removed lines of the previous layout were not recoverable from this view ...]
+            with gr.Row():
+                with gr.Column(scale=1):
+                    all_chats = self.load_all_chats()
+                    chat_titles = [f"{chat['timestamp']}: {chat['conversation'][0]['content'][:30]}..." if len(chat['conversation']) > 0 and chat['conversation'][0]['content'] else f"{chat['timestamp']}: Empty Chat" for chat in all_chats]
+
+                    chat_list = gr.Radio(label="Chat History", choices=chat_titles, type="index", elem_id="chat_list")
+
+                    load_button = gr.Button("Load Selected Chat")
+                    delete_button = gr.Button("Delete Selected Chat")
+                with gr.Column(scale=4):
+                    chatbot = gr.Chatbot(
+                        label="Xylaria 1.5 Senoa",
+                        height=500,
+                        show_copy_button=True,
+                    )
+
+                    with gr.Accordion("Image Input", open=False, elem_classes="gr-accordion"):
+                        with gr.Row(elem_classes="image-container"):
+                            with gr.Column(elem_classes="image-upload"):
+                                img = gr.Image(
+                                    sources=["upload", "webcam"],
+                                    type="filepath",
+                                    label="Upload Image",
+                                    elem_classes="image-preview"
+                                )
+                            with gr.Column(elem_classes="image-upload"):
+                                math_ocr_img = gr.Image(
+                                    sources=["upload", "webcam"],
+                                    type="filepath",
+                                    label="Upload Image for Math OCR",
+                                    elem_classes="image-preview"
+                                )
+
+                    with gr.Row():
+                        with gr.Column(scale=4):
+                            txt = gr.Textbox(
+                                show_label=False,
+                                placeholder="Type your message...",
+                                container=False
                             )
+                        btn = gr.Button("Send", scale=1)
+
+                    with gr.Row():
+                        clear = gr.Button("Clear Conversation")
+                        clear_memory = gr.Button("Clear Memory")
+
+                    load_button.click(fn=load_selected_chat, inputs=[chat_list], outputs=[chatbot])
+                    delete_button.click(fn=delete_selected_chat, inputs=[chat_list], outputs=[chat_list])
+
+                    btn.click(
+                        fn=streaming_response,
+                        inputs=[txt, chatbot, img, math_ocr_img],
+                        outputs=[txt, chatbot, img, math_ocr_img, chat_list]
+                    )
+                    txt.submit(
+                        fn=streaming_response,
+                        inputs=[txt, chatbot, img, math_ocr_img],
+                        outputs=[txt, chatbot, img, math_ocr_img, chat_list]
+                    )
+
+                    clear.click(
+                        fn=lambda: None,
+                        inputs=None,
+                        outputs=[chatbot],
+                        queue=False
+                    )
+
+                    clear_memory.click(
+                        fn=self.reset_conversation,
+                        inputs=None,
+                        outputs=[chatbot],
+                        queue=False
+                    )
+
+                    chat_list.select(fn=load_selected_chat, inputs=[chat_list], outputs=[chatbot])
 
-
-            with gr.Column(scale=4):
-                txt = gr.Textbox(
-                    show_label=False,
-                    placeholder="Type your message...",
-                    container=False
-                )
-                btn = gr.Button("Send", scale=1)
-
-            with gr.Row():
-                clear = gr.Button("Clear Conversation")
-                clear_memory = gr.Button("Clear Memory")
-
-            btn.click(
-                fn=streaming_response,
-                inputs=[txt, chatbot, img, math_ocr_img],
-                outputs=[txt, chatbot, img, math_ocr_img]
-            )
-            txt.submit(
-                fn=streaming_response,
-                inputs=[txt, chatbot, img, math_ocr_img],
-                outputs=[txt, chatbot, img, math_ocr_img]
-            )
-
-            clear.click(
-                fn=lambda: None,
-                inputs=None,
-                outputs=[chatbot],
-                queue=False
-            )
-
-            clear_memory.click(
-                fn=self.reset_conversation,
-                inputs=None,
-                outputs=[chatbot],
-                queue=False
-            )
-
-            demo.load(self.reset_conversation, None, None)
+            demo.load(self.reset_conversation, None, None)
 
         return demo
 
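Review note: the new sidebar is a gr.Radio with type="index", so its value is an integer position into load_all_chats(), yet the .select handler reads evt.index from gr.SelectData rather than that value. The load_button.click(...) and delete_button.click(...) wiring reuses the same handlers, which also expect a gr.SelectData argument that click events do not obviously provide, so that path is worth double-checking at runtime. A stripped-down, self-contained sketch of just the select-to-chatbot pattern (assumes Gradio 4.x; the data and component names below are illustrative, not taken from app.py):

import gradio as gr

# Stand-in for load_all_chats(): each entry is a list of [user, assistant] pairs.
chats = [
    [["hello", "hi there"]],
    [["2 + 2?", "4"]],
]

def load_selected(evt: gr.SelectData):
    # evt.index is the position of the clicked radio choice
    return chats[evt.index]

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=1):
            chat_list = gr.Radio(
                label="Chat History",
                choices=[f"Chat {i}" for i in range(len(chats))],
                type="index",
            )
        with gr.Column(scale=4):
            chatbot = gr.Chatbot(label="Preview", height=300)
    chat_list.select(fn=load_selected, outputs=[chatbot])

if __name__ == "__main__":
    demo.launch()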