"""Chat basics — start a conversation and add a follow-up.
This example shows:
- One-shot usage via `talu.ask(...)`
- Multi-turn chat sessions via `talu.Chat(...)`
- Saving message history to JSON
Note: chat.send() returns a complete Response (non-streaming).
For token-by-token streaming, see 02_streaming.py.
"""
import json
import os
import sys

import talu
from talu import repository
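
# Model selection: override the default by setting the MODEL_URI environment variable.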
MODEL_URI = os.environ.get("MODEL_URI", "LiquidAI/LFM2-350M")
if not repository.is_cached(MODEL_URI):
    sys.exit(f"Model '{MODEL_URI}' not found. Run: python examples/python/00_fetch_model.py")
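
# One-shot usage: talu.ask() sends a single prompt and returns the reply.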
print(talu.ask(MODEL_URI, "Hello, world!"))
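
# Multi-turn session: a Chat keeps conversation history across send() calls.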
chat = talu.Chat(MODEL_URI)
response = chat.send("Give me a short greeting.")
print(response)
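
# A separate session started in offline mode.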
offline_chat = talu.Chat(MODEL_URI, offline=True)
response = offline_chat.send("Summarize the ocean in five words.")
print(response)
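
# A system prompt steers every reply in this session.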
concise = talu.Chat(MODEL_URI, system="Be concise.")
response = concise.send("Explain gravity in one sentence.")
print(response)
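
# Follow-ups: append() on a Response continues the same conversation.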
response = chat.send("Say hello.")
response = response.append("Make it friendlier.")
print(response)
response = response.append("Add a fun emoji.")
print(response)
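
# Inspect the history the chat has accumulated, then export it as plain dictionaries.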
print(f"Stored items: {len(chat.items)}")
transcript = chat.to_dict()["messages"]
print(f"Last role: {transcript[-1]['role']}")
with open("/tmp/talu_01_chat_transcript.json", "w") as f:
    json.dump(transcript, f, indent=2)
print("Saved transcript to /tmp/talu_01_chat_transcript.json")