Memory & State Management for Agents

Short-term, long-term, and episodic memory in agents. Vector store memory, summarization memory, and state persistence.

# Install dependencies
# !pip install chromadb sentence-transformers

Short-Term Memory (Conversation Buffer)

from dataclasses import dataclass
from datetime import datetime
from typing import Any, Dict, List, Optional

@dataclass
class Message:
    """A single conversation message.

    Attributes:
        role: Speaker role — "user", "assistant", or "system".
        content: The message text.
        timestamp: When the message was created.
        metadata: Optional extra info about the message; None when absent.
    """
    role: str  # user, assistant, system
    content: str
    timestamp: datetime
    # Optional[...] instead of the original bare `Dict = None`: None is not a Dict.
    metadata: Optional[Dict] = None

class ConversationBuffer:
    """Short-term memory: a rolling window over the most recent messages."""

    def __init__(self, max_messages: int = 10):
        """Create a buffer retaining at most *max_messages* messages."""
        self.max_messages = max_messages
        self.messages: List[Message] = []

    def add(self, role: str, content: str, metadata: Optional[Dict] = None):
        """Append a message, evicting the oldest entries if over capacity."""
        message = Message(role, content, datetime.now(), metadata)
        self.messages.append(message)

        # Trim in place so only the newest max_messages remain.
        if len(self.messages) > self.max_messages:
            del self.messages[:-self.max_messages]

    def get_recent(self, n: int = 5) -> List[Message]:
        """Return the n most recent messages, oldest first.

        Guards n <= 0 explicitly: the original `messages[-n:]` with n == 0
        returned the whole buffer instead of nothing.
        """
        if n <= 0:
            return []
        return self.messages[-n:]

    def to_dict_list(self) -> List[Dict]:
        """Convert to the {"role", "content"} dict format used by LLM chat APIs."""
        return [
            {"role": msg.role, "content": msg.content}
            for msg in self.messages
        ]

    def clear(self):
        """Discard all buffered messages."""
        self.messages = []

    def __str__(self):
        return f"ConversationBuffer({len(self.messages)} messages)"

# Exercise the conversation buffer with a short scripted exchange.
buffer = ConversationBuffer(max_messages=5)

exchange = [
    ("user", "Hello!"),
    ("assistant", "Hi! How can I help?"),
    ("user", "What's the weather?"),
    ("assistant", "It's sunny and 72Β°F"),
    ("user", "Thanks!"),
    ("assistant", "You're welcome!"),
]
for speaker, text in exchange:
    buffer.add(speaker, text)

print("Recent conversation:")
for entry in buffer.get_recent(4):
    print(f"  {entry.role}: {entry.content}")

print(f"\n{buffer}")

Long-Term Memory (Key-Value Store)

import json
from collections import defaultdict

class LongTermMemory:
    """Long-term memory: persistent facts, user preferences, and a write history."""

    def __init__(self):
        # `Any` (typing) replaces the original lowercase `any`, which is the builtin function.
        self.facts: Dict[str, Any] = {}
        self.preferences: Dict[str, Any] = {}
        self.history: List[Dict] = []

    def store_fact(self, key: str, value: Any):
        """Store a fact under *key* and log the write to history."""
        self.facts[key] = value
        self.history.append({
            "action": "store_fact",
            "key": key,
            "value": value,
            "timestamp": datetime.now().isoformat()
        })
        print(f"βœ“ Stored: {key} = {value}")

    def recall_fact(self, key: str) -> Optional[Any]:
        """Recall a fact; returns None (and says so) for unknown keys.

        Tests key presence rather than value truthiness, so stored falsy
        values (0, "", False) are correctly reported as recalled — the
        original `if value:` misreported them as missing.
        """
        if key in self.facts:
            value = self.facts[key]
            print(f"πŸ“ Recalled: {key} = {value}")
            return value
        print(f"❓ No memory of: {key}")
        return None

    def store_preference(self, category: str, value: Any):
        """Store a user preference (not logged to history)."""
        self.preferences[category] = value
        print(f"βœ“ Preference: {category} = {value}")

    def get_all_facts(self) -> Dict:
        """Return the full fact dictionary."""
        return self.facts

    def search_history(self, keyword: str) -> List[Dict]:
        """Case-insensitive substring search over stringified history entries."""
        return [
            entry for entry in self.history
            if keyword.lower() in str(entry).lower()
        ]

    def save_to_file(self, filepath: str):
        """Persist memory to disk as JSON."""
        data = {
            "facts": self.facts,
            "preferences": self.preferences,
            "history": self.history
        }
        # Explicit utf-8 keeps the file portable regardless of locale defaults.
        with open(filepath, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=2)
        print(f"πŸ’Ύ Saved memory to {filepath}")

    def load_from_file(self, filepath: str):
        """Load memory from disk; leaves state untouched if the file is absent."""
        try:
            with open(filepath, 'r', encoding='utf-8') as f:
                data = json.load(f)
        except FileNotFoundError:
            print(f"❌ No memory file found at {filepath}")
            return
        self.facts = data.get("facts", {})
        self.preferences = data.get("preferences", {})
        self.history = data.get("history", [])
        print(f"πŸ“‚ Loaded memory from {filepath}")

# Exercise long-term memory: store a few facts, then recall them.
ltm = LongTermMemory()

for fact_key, fact_value in [
    ("user_name", "Alice"),
    ("user_location", "San Francisco"),
    ("favorite_color", "blue"),
]:
    ltm.store_fact(fact_key, fact_value)
ltm.store_preference("communication_style", "concise")

print("\nRecalling facts:")
for fact_key in ("user_name", "user_location", "unknown_key"):
    ltm.recall_fact(fact_key)

print("\nAll facts:")
print(json.dumps(ltm.get_all_facts(), indent=2))

Episodic Memory (Event-Based)

from datetime import datetime, timedelta

@dataclass
class Episode:
    """A specific event/interaction recorded by episodic memory."""
    timestamp: datetime  # when the event occurred
    event_type: str  # category label, e.g. "conversation", "task_completion", "error"
    description: str  # human-readable summary of the event
    context: Dict  # free-form structured details about the event
    importance: int = 5  # 1-10 scale

class EpisodicMemory:
    """Memory of specific events, queryable by recency, type, importance, or time."""

    def __init__(self):
        self.episodes: List[Episode] = []

    def record(self, event_type: str, description: str,
               context: Optional[Dict] = None, importance: int = 5):
        """Record an episode timestamped now.

        Args:
            event_type: Category label, e.g. "conversation" or "error".
            description: Human-readable summary of the event.
            context: Optional structured details; stored as {} when None.
                (Annotation fixed to Optional[Dict] — None is not a Dict.)
            importance: 1-10 scale; higher means more important.
        """
        episode = Episode(
            timestamp=datetime.now(),
            event_type=event_type,
            description=description,
            context=context or {},
            importance=importance
        )
        self.episodes.append(episode)
        print(f"πŸ“Œ Recorded: {event_type} - {description}")

    def recall_recent(self, n: int = 5) -> List[Episode]:
        """Return the n most recent episodes, newest first."""
        return sorted(self.episodes, key=lambda e: e.timestamp, reverse=True)[:n]

    def recall_by_type(self, event_type: str) -> List[Episode]:
        """Return all episodes with the given event type, in insertion order."""
        return [e for e in self.episodes if e.event_type == event_type]

    def recall_important(self, min_importance: int = 7) -> List[Episode]:
        """Return episodes at or above the importance threshold."""
        return [e for e in self.episodes if e.importance >= min_importance]

    def recall_timeframe(self, hours_ago: int) -> List[Episode]:
        """Return episodes recorded within the last *hours_ago* hours."""
        cutoff = datetime.now() - timedelta(hours=hours_ago)
        return [e for e in self.episodes if e.timestamp >= cutoff]

# Exercise episodic memory: record sample events, then query them.
episodic = EpisodicMemory()

sample_events = [
    ("conversation", "Discussed AI agents with user",
     {"topic": "multi-agent systems"}, 8),
    ("task_completion", "Generated code for web scraper",
     {"language": "python", "success": True}, 7),
    ("error", "API rate limit exceeded",
     {"api": "openai", "retry_after": 60}, 9),
]
for kind, summary, ctx, score in sample_events:
    episodic.record(kind, summary, context=ctx, importance=score)

print("\nRecent episodes:")
for ep in episodic.recall_recent(3):
    print(f"  [{ep.event_type}] {ep.description} (importance: {ep.importance})")

print("\nImportant episodes:")
for ep in episodic.recall_important(8):
    print(f"  {ep.description}")

Complete Agent with Memory

class MemoryAgent:
    """Agent that combines short-term, long-term, and episodic memory."""

    def __init__(self, name: str):
        self.name = name
        self.short_term = ConversationBuffer(max_messages=10)
        self.long_term = LongTermMemory()
        self.episodic = EpisodicMemory()

    def chat(self, user_input: str) -> str:
        """Process user input, updating every memory store, and return a reply."""
        print(f"\n{'='*60}")
        print(f"User: {user_input}")

        # Store in short-term memory
        self.short_term.add("user", user_input)

        # Record episode
        self.episodic.record(
            "user_message",
            user_input,
            context={"length": len(user_input)}
        )

        # Process input (simplified rule-based logic)
        response = self._generate_response(user_input)

        # Store response
        self.short_term.add("assistant", response)

        print(f"{self.name}: {response}")
        print(f"{'='*60}")

        return response

    @staticmethod
    def _text_after(text: str, marker: str) -> str:
        """Return the stripped part of *text* after the first case-insensitive
        occurrence of *marker*, or "" when the marker is absent."""
        idx = text.lower().find(marker)
        return text[idx + len(marker):].strip() if idx != -1 else ""

    def _generate_response(self, user_input: str) -> str:
        """Generate a reply using memory.

        BUG FIX: the original split the raw input on a lowercase marker after
        checking the lowercased input, so any normally-capitalized sentence
        ("My name is Alice" — this file's own demo input) raised IndexError.
        Markers are now located case-insensitively via _text_after.
        """
        lower_input = user_input.lower()

        # User is sharing information -> store it in long-term memory.
        if "my name is" in lower_input:
            name = self._text_after(user_input, "my name is")
            self.long_term.store_fact("user_name", name)
            return f"Nice to meet you, {name}! I'll remember that."

        elif "i live in" in lower_input:
            location = self._text_after(user_input, "i live in")
            self.long_term.store_fact("user_location", location)
            return f"Got it! You live in {location}."

        elif "i like" in lower_input or "i love" in lower_input:
            marker = " like " if " like " in lower_input else " love "
            preference = self._text_after(user_input, marker)
            self.long_term.store_preference("likes", preference)
            return f"I've noted that you like {preference}!"

        # User is asking about previously stored info -> recall it.
        elif "what's my name" in lower_input or "my name" in lower_input:
            name = self.long_term.recall_fact("user_name")
            return f"Your name is {name}!" if name else "I don't know your name yet."

        elif "where do i live" in lower_input:
            location = self.long_term.recall_fact("user_location")
            return f"You live in {location}!" if location else "I don't know where you live."

        elif "what have we talked about" in lower_input:
            recent = self.episodic.recall_recent(3)
            topics = [ep.description for ep in recent]
            return f"Recently we've discussed: {', '.join(topics)}"

        else:
            return "I understand. What else would you like to talk about?"

    def summarize_memory(self):
        """Print per-store counts plus every stored long-term fact."""
        print(f"\n{'='*60}")
        print(f"MEMORY SUMMARY FOR {self.name}")
        print(f"{'='*60}")
        print(f"Short-term: {len(self.short_term.messages)} messages")
        print(f"Long-term: {len(self.long_term.facts)} facts, {len(self.long_term.preferences)} preferences")
        print(f"Episodic: {len(self.episodic.episodes)} episodes")

        if self.long_term.facts:
            print("\nStored Facts:")
            for key, value in self.long_term.facts.items():
                print(f"  {key}: {value}")

        print(f"{'='*60}\n")

# Drive the memory agent through a scripted conversation.
agent = MemoryAgent("MemBot")

script = [
    "Hi there!",
    "My name is Alice",
    "I live in Paris",
    "I love machine learning",
    "What's my name?",
    "Where do I live?",
    "What have we talked about?",
]
for utterance in script:
    agent.chat(utterance)

# Show memory summary
agent.summarize_memory()

Vector-Based Semantic Memory

For production agents, use vector databases for semantic search:

# Example with ChromaDB (requires installation)
# NOTE: the triple-quoted block below is a bare string literal, never executed —
# it documents how a semantic memory could be built with chromadb and
# sentence-transformers (see the commented pip install at the top of the file).
'''
import chromadb
from sentence_transformers import SentenceTransformer

class SemanticMemory:
    def __init__(self):
        self.client = chromadb.Client()
        self.collection = self.client.create_collection("agent_memory")
        self.encoder = SentenceTransformer('all-MiniLM-L6-v2')
    
    def store(self, text: str, metadata: Dict = None):
        """Store with semantic embedding"""
        embedding = self.encoder.encode(text).tolist()
        self.collection.add(
            embeddings=[embedding],
            documents=[text],
            metadatas=[metadata or {}],
            ids=[f"mem_{len(self.collection.get()['ids'])}"]
        )
    
    def recall(self, query: str, n: int = 5) -> List[str]:
        """Semantic search"""
        query_embedding = self.encoder.encode(query).tolist()
        results = self.collection.query(
            query_embeddings=[query_embedding],
            n_results=n
        )
        return results['documents'][0]

# Usage:
memory = SemanticMemory()
memory.store("User likes Python programming")
memory.store("User is interested in AI agents")
results = memory.recall("What does user like?")  # Semantic search!
'''

# Summarize why semantic (vector) memory matters for production agents.
print("Semantic memory example (commented - requires ChromaDB)")
print("Benefits:")
print("  - Find similar memories even with different wording")
print("  - Scale to millions of memories")
print("  - Efficient retrieval")

Best Practices

1. Memory Types

  • Short-term: Last 5-10 messages, cleared per session

  • Long-term: Important facts, persisted to disk/DB

  • Episodic: Specific events with timestamps

  • Semantic: Vector embeddings for fuzzy matching

2. Storage Strategy

  • Use conversation buffers for chat context

  • Use vector DBs for large knowledge bases

  • Use traditional DBs for structured data

  • Implement memory consolidation (forget old, unimportant memories)

3. Retrieval

  • Retrieve only relevant memories (not everything)

  • Use hybrid search (keyword + semantic)

  • Weight by recency and importance

  • Cache frequently accessed memories

4. Privacy & Security

  • Encrypt sensitive memories

  • Implement memory deletion

  • Separate memories by user

  • Allow users to view/edit their data

Key Takeaways

✅ Agents need memory for context and personalization

✅ Use different memory types for different purposes

✅ Persist important memories across sessions

✅ Vector databases enable semantic memory retrieval

✅ Implement memory management (storage, retrieval, deletion)