-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlaunch.py
More file actions
57 lines (46 loc) · 1.88 KB
/
launch.py
File metadata and controls
57 lines (46 loc) · 1.88 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
#!/usr/bin/env python3
"""
Launch the Tiny Conversational AI Chatbot
Usage: python3 launch.py [model_path]
Default model: chatbot_model.pkl
"""
import sys
import os
from tiny_chatbot import TinyChatbot
def main():
    """Run an interactive REPL chat loop against a trained TinyChatbot model.

    Usage: python3 launch.py [model_path]  (default: 'chatbot_model.pkl').
    Exits with status 1 when the model file does not exist.
    """
    model_path = sys.argv[1] if len(sys.argv) > 1 else 'chatbot_model.pkl'
    # Fail fast with a training hint if the model has not been built yet.
    if not os.path.exists(model_path):
        print(f"Error: Model file '{model_path}' not found!")
        print(f"Train the model first with: python3 train.py {model_path}")
        sys.exit(1)
    print("=" * 50)
    print(f"Loading model from '{model_path}'...")
    model = TinyChatbot.load(model_path)
    print("Model loaded successfully!")
    print("=" * 50)
    print("Chatbot Ready! (type 'quit' to exit)")
    print("=" * 50)
    while True:
        # Exit cleanly on Ctrl-D / Ctrl-C (or exhausted piped stdin)
        # instead of crashing with an EOFError/KeyboardInterrupt traceback.
        try:
            user_input = input("\nYou: ").strip()
        except (EOFError, KeyboardInterrupt):
            print("\nGoodbye!")
            break
        if user_input.lower() in ['quit', 'exit', 'q']:
            print("Goodbye!")
            break
        # Guard clause: skip blank lines rather than nesting the whole body.
        if not user_input:
            continue
        # NOTE(review): the original accumulated a `conversation_history`
        # string but never passed it to generate(), so the bot was never
        # actually context-aware; that dead state is removed here rather
        # than silently wired in — TinyChatbot.generate's context handling
        # is not visible from this file, confirm before adding history.
        bot_response = model.generate(user_input, max_new_tokens=40, temperature=0.8)
        # Strip an echoed copy of the prompt if the model prefixes it.
        if bot_response.lower().startswith(user_input.lower()):
            bot_response = bot_response[len(user_input):].strip()
        if bot_response and bot_response != "I'm still learning!":
            print(f"Bot: {bot_response}")
        else:
            print("Bot: I'm still learning!")
# Standard entry-point guard: start the chat loop only when this file is
# executed as a script, not when it is imported as a module.
if __name__ == "__main__":
    main()