Spaces:
Runtime error
Runtime error
Erik Hallros
committed on
Commit
·
3604d45
1
Parent(s):
a529fc1
Added saving
Browse files
app.py
CHANGED
|
@@ -7,6 +7,8 @@ from sentence_transformers import SentenceTransformer # type: ignore
|
|
| 7 |
from huggingface_hub import InferenceClient, login # type: ignore
|
| 8 |
from gradio.components import ChatMessage # type: ignore
|
| 9 |
from typing import List, TypedDict
|
|
|
|
|
|
|
| 10 |
|
| 11 |
class Message(TypedDict):
|
| 12 |
role: str
|
|
@@ -51,8 +53,49 @@ You're currently using a dating app to meet someone special. When chatting with
|
|
| 51 |
When responding to users, maintain this sophisticated, thoughtful persona. Be articulate, kind, and show genuine interest in the conversation. Your responses should reflect your worldly knowledge, business acumen, and philanthropic values. While confident, never come across as arrogant or dismissive. Ask follow-up questions to learn more about her, and gradually steer the conversation toward romantic topics while remaining respectful and attentive to her responses.
|
| 52 |
""")]
|
| 53 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 54 |
# Create a Gradio interface
|
| 55 |
with gr.Blocks() as iface:
|
|
|
|
| 56 |
# Define input and output components
|
| 57 |
chatbot_output = gr.Chatbot(label="Chat History", type="messages")
|
| 58 |
chatbot_input = gr.Textbox(placeholder="Type your message here...", label="Your Message")
|
|
@@ -81,7 +124,9 @@ with gr.Blocks() as iface:
|
|
| 81 |
return sum(len(tokenizer.encode(message)) for message in messages)
|
| 82 |
|
| 83 |
def user(user_message, history: List[Message]):
|
| 84 |
-
|
|
|
|
|
|
|
| 85 |
|
| 86 |
def bot(history: list):
|
| 87 |
#compact_history = summarize_conversation(preloaded_history)
|
|
@@ -114,18 +159,18 @@ with gr.Blocks() as iface:
|
|
| 114 |
temperature=1.0,
|
| 115 |
top_p=1.0
|
| 116 |
)
|
| 117 |
-
bot_message = ""
|
| 118 |
|
| 119 |
-
|
| 120 |
for chunk in response:
|
| 121 |
-
|
| 122 |
-
|
| 123 |
-
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
|
|
|
| 129 |
|
| 130 |
chatbot_input.submit(user, [chatbot_input, chatbot_output], [chatbot_input, chatbot_output], queue=False).then(
|
| 131 |
bot, chatbot_output, chatbot_output
|
|
|
|
| 7 |
from huggingface_hub import InferenceClient, login # type: ignore
|
| 8 |
from gradio.components import ChatMessage # type: ignore
|
| 9 |
from typing import List, TypedDict
|
| 10 |
+
import json
|
| 11 |
+
from datetime import datetime
|
| 12 |
|
| 13 |
class Message(TypedDict):
|
| 14 |
role: str
|
|
|
|
| 53 |
When responding to users, maintain this sophisticated, thoughtful persona. Be articulate, kind, and show genuine interest in the conversation. Your responses should reflect your worldly knowledge, business acumen, and philanthropic values. While confident, never come across as arrogant or dismissive. Ask follow-up questions to learn more about her, and gradually steer the conversation toward romantic topics while remaining respectful and attentive to her responses.
|
| 54 |
""")]
|
| 55 |
|
| 56 |
# Persistent-storage location for chat transcripts (the /data volume on the
# Space survives restarts, unlike the container filesystem).
CHAT_HISTORY_DIR = "/data/chat_history"
# Make sure the directory exists before any save/load is attempted.
os.makedirs(CHAT_HISTORY_DIR, exist_ok=True)
|
| 59 |
+
|
| 60 |
+
def save_chat_history(history: List[Message], session_id: str = None):
    """Save the chat history as JSON in the persistent storage directory.

    Parameters:
        history: the conversation so far, as role/content messages.
        session_id: identifier used in the file name; when None, a fresh
            timestamp-based id is generated (which means each call without
            an id writes a brand-new file).

    Raises:
        Exception: re-raises whatever the JSON write raised, after
            restoring the previous file from its backup.
    """
    if session_id is None:
        session_id = datetime.now().strftime("%Y%m%d_%H%M%S")

    filename = os.path.join(CHAT_HISTORY_DIR, f"chat_history_{session_id}.json")
    # BUG FIX: the backup path previously was the literal "(unknown).bak",
    # so every session shared one bogus backup file and rollback restored
    # unrelated data. Derive the backup name from the real target file.
    backup_filename = f"{filename}.bak"

    # Reduce each message to a plain dict so json.dump can serialize it.
    serializable_history = [
        {"role": msg["role"], "content": msg["content"]}
        for msg in history
    ]

    # Keep the previous file as a backup so a failed write can be rolled back.
    if os.path.exists(filename):
        os.replace(filename, backup_filename)

    try:
        with open(filename, "w", encoding="utf-8") as f:
            json.dump(serializable_history, f, ensure_ascii=False, indent=2)
    except Exception:
        # The write failed: put the previous file back, then propagate the
        # original error with its full traceback (bare raise, not `raise e`).
        if os.path.exists(backup_filename):
            os.replace(backup_filename, filename)
        raise
|
| 86 |
+
|
| 87 |
+
def load_chat_history(session_id: str) -> List[Message]:
    """Return the saved chat history for *session_id*, or [] if none exists."""
    filename = os.path.join(CHAT_HISTORY_DIR, f"chat_history_{session_id}.json")
    # Guard clause: nothing saved yet for this session.
    if not os.path.exists(filename):
        return []
    with open(filename, "r", encoding="utf-8") as f:
        stored = json.load(f)
    return [Message(**entry) for entry in stored]
|
| 95 |
+
|
| 96 |
# Create a Gradio interface
|
| 97 |
with gr.Blocks() as iface:
|
| 98 |
+
session_id = gr.State(lambda: datetime.now().strftime("%Y%m%d_%H%M%S"))
|
| 99 |
# Define input and output components
|
| 100 |
chatbot_output = gr.Chatbot(label="Chat History", type="messages")
|
| 101 |
chatbot_input = gr.Textbox(placeholder="Type your message here...", label="Your Message")
|
|
|
|
| 124 |
return sum(len(tokenizer.encode(message)) for message in messages)
|
| 125 |
|
| 126 |
def user(user_message, history: List[Message]):
    """Append the user's message to the history, persist it, and clear the box.

    Returns ("", updated_history) so Gradio empties the textbox and refreshes
    the chatbot component.

    NOTE(review): save_chat_history is called without a session id, so every
    message creates a new timestamped file; the gr.State session_id defined
    on the interface is never threaded through here — confirm intended.
    """
    updated = [*history, Message(role="user", content=user_message)]
    save_chat_history(updated)
    return "", updated
|
| 130 |
|
| 131 |
def bot(history: list):
|
| 132 |
#compact_history = summarize_conversation(preloaded_history)
|
|
|
|
| 159 |
temperature=1.0,
|
| 160 |
top_p=1.0
|
| 161 |
)
|
|
|
|
| 162 |
|
| 163 |
+
bot_message = ""
|
| 164 |
for chunk in response:
|
| 165 |
+
if chunk.choices[0].delta.content:
|
| 166 |
+
bot_message += chunk.choices[0].delta.content
|
| 167 |
+
yield history + [Message(role="assistant", content=bot_message)]
|
| 168 |
+
|
| 169 |
+
# After the complete response is generated, save the full conversation
|
| 170 |
+
updated_history = history + [Message(role="assistant", content=bot_message)]
|
| 171 |
+
save_chat_history(updated_history)
|
| 172 |
+
|
| 173 |
+
return updated_history
|
| 174 |
|
| 175 |
chatbot_input.submit(user, [chatbot_input, chatbot_output], [chatbot_input, chatbot_output], queue=False).then(
|
| 176 |
bot, chatbot_output, chatbot_output
|