Reality123b committed
Update app.py
app.py
CHANGED
@@ -21,7 +21,7 @@ class ChatMessage:
         return {"role": self.role, "content": self.content}
 
 class XylariaChat:
-    def
+    def __init__(self):
         self.hf_token = os.getenv("HF_TOKEN")
         if not self.hf_token:
             raise ValueError("HuggingFace token not found in environment variables")
@@ -34,8 +34,6 @@ class XylariaChat:
         self.image_api_url = "https://api-inference.huggingface.co/models/Salesforce/blip-image-captioning-large"
         self.image_api_headers = {"Authorization": f"Bearer {self.hf_token}"}
 
-        self.image_gen_client = InferenceClient("black-forest-labs/FLUX.1-schnell", token=self.hf_token)
-
         self.conversation_history = []
         self.persistent_memory = []
         self.memory_embeddings = None
@@ -93,8 +91,6 @@ class XylariaChat:
             "photosynthesis": "process used by plants to convert light to energy"
         }
 
-        # ... (other methods: update_internal_state, update_knowledge_graph, etc.) ...
-
     def update_internal_state(self, emotion_deltas, cognitive_load_deltas, introspection_delta, engagement_delta):
         for emotion, delta in emotion_deltas.items():
             if emotion in self.internal_state["emotions"]:
@@ -425,13 +421,6 @@ class XylariaChat:
 
         except Exception as e:
             return f"Error processing image: {str(e)}"
-
-    def generate_image(self, prompt):
-        try:
-            image = self.image_gen_client.text_to_image(prompt)
-            return image
-        except Exception as e:
-            return f"Error generating image: {e}"
 
     def perform_math_ocr(self, image_path):
         try:
@@ -553,44 +542,12 @@ class XylariaChat:
     def create_interface(self):
         def streaming_response(message, chat_history, image_filepath, math_ocr_image_path):
 
-            if "/image" in message:
-                image_prompt = message.replace("/image", "").strip()
-
-                # Placeholder for image generation
-                placeholder_image = "data:image/svg+xml," + requests.utils.quote(f'''
-                    <svg width="256" height="256" viewBox="0 0 256 256" xmlns="http://www.w3.org/2000/svg">
-                        <rect width="256" height="256" rx="20" fill="#888888" />
-                        <animate attributeName="fill" values="#888888;#000000;#888888" dur="2s" repeatCount="indefinite" />
-                        <text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" font-size="48" fill="white">
-                            <tspan>/</tspan>
-                        </text>
-                    </svg>
-                ''')
-
-                updated_history = chat_history + [[message, gr.Image(value=placeholder_image, type="pil", visible=True)]]
-                yield "", updated_history, None, None, ""
-
-                try:
-                    generated_image = self.generate_image(image_prompt)
-
-                    updated_history[-1][1] = gr.Image(value=generated_image, type="pil", visible=True)
-                    yield "", updated_history, None, None, ""
-
-                    self.conversation_history.append(ChatMessage(role="user", content=message).to_dict())
-                    self.conversation_history.append(ChatMessage(role="assistant", content="Image generated").to_dict())
-
-                    return
-                except Exception as e:
-                    updated_history[-1][1] = f"Error generating image: {e}"
-                    yield "", updated_history, None, None, ""
-                    return
-
             ocr_text = ""
             if math_ocr_image_path:
                 ocr_text = self.perform_math_ocr(math_ocr_image_path)
                 if ocr_text.startswith("Error"):
                     updated_history = chat_history + [[message, ocr_text]]
-                    yield "", updated_history, None, None
+                    yield "", updated_history, None, None
                     return
                 else:
                     message = f"Math OCR Result: {ocr_text}\n\nUser's message: {message}"
@@ -602,7 +559,7 @@ class XylariaChat:
 
             if isinstance(response_stream, str):
                 updated_history = chat_history + [[message, response_stream]]
-                yield "", updated_history, None, None
+                yield "", updated_history, None, None
                 return
 
             full_response = ""
@@ -615,11 +572,11 @@ class XylariaChat:
                         full_response += chunk_content
 
                         updated_history[-1][1] = full_response
-                        yield "", updated_history, None, None
+                        yield "", updated_history, None, None
             except Exception as e:
                 print(f"Streaming error: {e}")
                 updated_history[-1][1] = f"Error during response: {e}"
-                yield "", updated_history, None, None
+                yield "", updated_history, None, None
                 return
 
             full_response = self.adjust_response_based_on_state(full_response)
@@ -667,77 +624,41 @@ class XylariaChat:
             self.conversation_history = self.conversation_history[-10:]
 
         custom_css = """
-        @import url('https://fonts.googleapis.com/css2?family=
-
-
-            background-color: #f5f5f5;
-            font-family: 'Source Sans Pro', sans-serif;
-        }
-
-        .gradio-container {
-            max-width: 900px;
-            margin: 0 auto;
-            border-radius: 10px;
-            box-shadow: 0px 4px 20px rgba(0, 0, 0, 0.1);
+        @import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap');
+        body, .gradio-container {
+            font-family: 'Inter', sans-serif !important;
         }
-
-        .chatbot-container {
-            background-color: #fff;
-            border-radius: 10px;
-            padding: 20px;
-        }
-
         .chatbot-container .message {
-            font-family: '
-            font-size: 16px;
-            line-height: 1.6;
+            font-family: 'Inter', sans-serif !important;
         }
-
         .gradio-container input,
         .gradio-container textarea,
         .gradio-container button {
-            font-family: '
-            font-size: 16px;
-            border-radius: 8px;
+            font-family: 'Inter', sans-serif !important;
         }
-
         .image-container {
             display: flex;
             gap: 10px;
-            margin-bottom:
-            justify-content: center;
+            margin-bottom: 10px;
         }
-
         .image-upload {
-            border:
+            border: 1px solid #ccc;
             border-radius: 8px;
-            padding:
-            background-color: #
-            text-align: center;
-            transition: all 0.3s ease;
-        }
-
-        .image-upload:hover {
-            background-color: #f0f0f0;
-            border-color: #b3b3b3;
+            padding: 10px;
+            background-color: #f8f8f8;
         }
-
         .image-preview {
-            max-width:
-            max-height:
+            max-width: 200px;
+            max-height: 200px;
             border-radius: 8px;
-            box-shadow: 0px 2px 5px rgba(0, 0, 0, 0.1);
         }
-
         .clear-button {
             display: none;
         }
-
         .chatbot-container .message {
             opacity: 0;
             animation: fadeIn 0.5s ease-in-out forwards;
         }
-
         @keyframes fadeIn {
             from {
                 opacity: 0;
@@ -748,109 +669,42 @@ class XylariaChat:
                 transform: translateY(0);
             }
         }
-
         .gr-accordion-button {
             background-color: #f0f0f0 !important;
             border-radius: 8px !important;
-            padding:
+            padding: 10px !important;
             margin-bottom: 10px !important;
             transition: all 0.3s ease !important;
             cursor: pointer !important;
-            border: none !important;
-            box-shadow: 0px 2px 5px rgba(0, 0, 0, 0.05) !important;
         }
-
         .gr-accordion-button:hover {
             background-color: #e0e0e0 !important;
-            box-shadow: 0px 4px
+            box-shadow: 0px 2px 4px rgba(0, 0, 0, 0.1) !important;
         }
-
         .gr-accordion-active .gr-accordion-button {
             background-color: #d0d0d0 !important;
-            box-shadow: 0px 4px
+            box-shadow: 0px 4px 6px rgba(0, 0, 0, 0.1) !important;
         }
-
         .gr-accordion-content {
             transition: max-height 0.3s ease-in-out !important;
             overflow: hidden !important;
             max-height: 0 !important;
         }
-
         .gr-accordion-active .gr-accordion-content {
             max-height: 500px !important;
         }
-
         .gr-accordion {
             display: flex;
             flex-direction: column-reverse;
         }
-
-        .chatbot-icon {
-            width: 40px;
-            height: 40px;
-            border-radius: 50%;
-            margin-right: 10px;
-        }
-
-        .user-message .message-row {
-            background-color: #e8f0fe;
-            border-radius: 10px;
-            padding: 10px;
-            margin-bottom: 10px;
-            border-top-right-radius: 2px;
-        }
-
-        .assistant-message .message-row {
-            background-color: #f0f0f0;
-            border-radius: 10px;
-            padding: 10px;
-            margin-bottom: 10px;
-            border-top-left-radius: 2px;
-        }
-
-        .user-message .message-icon {
-            background: url('https://img.icons8.com/color/48/000000/user.png') no-repeat center center;
-            background-size: contain;
-            width: 30px;
-            height: 30px;
-            margin-right: 10px;
-        }
-
-        .assistant-message .message-icon {
-            background: url('https://i.ibb.co/7b7hLGH/Senoa-Icon-1.png') no-repeat center center;
-            background-size: cover;
-            width: 40px;
-            height: 40px;
-            margin-right: 10px;
-            border-radius: 50%;
-        }
-
-        .message-text {
-            flex-grow: 1;
-        }
-
-        .message-row {
-            display: flex;
-            align-items: center;
-        }
         """
 
-        with gr.Blocks(theme=
-            primary_hue="slate",
-            secondary_hue="gray",
-            neutral_hue="gray",
-            font=["Source Sans Pro", "Arial", "sans-serif"],
-        ), css=custom_css) as demo:
+        with gr.Blocks(theme='soft', css=custom_css) as demo:
             with gr.Column():
                 chatbot = gr.Chatbot(
                     label="Xylaria 1.5 Senoa",
-                    height=
+                    height=500,
                     show_copy_button=True,
-                    elem_classes="chatbot-container",
-                    avatar_images=(
-                        "https://img.icons8.com/color/48/000000/user.png", # User avatar
-                        "https://i.ibb.co/7b7hLGH/Senoa-Icon-1.png" # Bot avatar
-                    )
                 )
 
                 with gr.Accordion("Image Input", open=False, elem_classes="gr-accordion"):
@@ -880,18 +734,18 @@ class XylariaChat:
                     btn = gr.Button("Send", scale=1)
 
                 with gr.Row():
-                    clear = gr.Button("Clear Conversation"
+                    clear = gr.Button("Clear Conversation")
                     clear_memory = gr.Button("Clear Memory")
 
                 btn.click(
                     fn=streaming_response,
                     inputs=[txt, chatbot, img, math_ocr_img],
-                    outputs=[txt, chatbot, img, math_ocr_img
+                    outputs=[txt, chatbot, img, math_ocr_img]
                 )
                 txt.submit(
                     fn=streaming_response,
                     inputs=[txt, chatbot, img, math_ocr_img],
-                    outputs=[txt, chatbot, img, math_ocr_img
+                    outputs=[txt, chatbot, img, math_ocr_img]
                 )
 
                 clear.click(