# app/tasks/memory_consolidation.py
"""
Memory consolidation Celery tasks.

Handles scheduled and on-demand memory consolidation:
- Session consolidation (on session end)
- Nightly consolidation (scheduled)
- On-demand project consolidation
"""

import logging
from typing import Any

from app.celery_app import celery_app

logger = logging.getLogger(__name__)
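

# A minimal sketch of the sync-to-async bridge these tasks will need: Celery
# task bodies are synchronous, while the consolidation service and database
# layer are async (hence the "database session from async context" TODOs
# below). Standard library only; the real wiring may differ, and these
# imports would normally sit at the top of the module.
import asyncio
from collections.abc import Coroutine


def _run_async(coro: Coroutine[Any, Any, Any]) -> Any:
    """Drive an async coroutine to completion from a synchronous Celery task."""
    return asyncio.run(coro)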


@celery_app.task(
    bind=True,
    name="app.tasks.memory_consolidation.consolidate_session",
    autoretry_for=(Exception,),
    retry_backoff=True,
    retry_kwargs={"max_retries": 3},
)
def consolidate_session(
    self,
    project_id: str,
    session_id: str,
    task_type: str = "session_task",
    agent_instance_id: str | None = None,
    agent_type_id: str | None = None,
) -> dict[str, Any]:
    """
    Consolidate a session's working memory to episodic memory.

    This task is triggered when an agent session ends to transfer
    relevant session data into persistent episodic memory.

    Args:
        project_id: UUID of the project
        session_id: Session identifier
        task_type: Type of task performed
        agent_instance_id: Optional agent instance UUID
        agent_type_id: Optional agent type UUID

    Returns:
        dict with consolidation results
    """
    logger.info(f"Consolidating session {session_id} for project {project_id}")

    # TODO: Implement actual consolidation
    # This will involve:
    # 1. Getting database session from async context
    # 2. Loading working memory for session
    # 3. Calling consolidation service
    # 4. Returning results
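
    # A hedged sketch of those steps, using the _run_async bridge above. The
    # MemoryConsolidationService constructor, method name, and session
    # factory are assumptions for illustration, not the actual API:
    #
    #     async def _consolidate() -> bool:
    #         async with async_session_factory() as db:  # hypothetical factory
    #             service = MemoryConsolidationService(db)  # ctor shape assumed
    #             episode = await service.consolidate_session(  # method assumed
    #                 project_id=project_id,
    #                 session_id=session_id,
    #                 task_type=task_type,
    #                 agent_instance_id=agent_instance_id,
    #                 agent_type_id=agent_type_id,
    #             )
    #             return episode is not None
    #
    #     episode_created = _run_async(_consolidate())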

    # Placeholder implementation
    return {
        "status": "pending",
        "project_id": project_id,
        "session_id": session_id,
        "episode_created": False,
    }


@celery_app.task(
    bind=True,
    name="app.tasks.memory_consolidation.run_nightly_consolidation",
    autoretry_for=(Exception,),
    retry_backoff=True,
    retry_kwargs={"max_retries": 3},
)
def run_nightly_consolidation(
    self,
    project_id: str,
    agent_type_id: str | None = None,
) -> dict[str, Any]:
    """
    Run nightly memory consolidation for a project.

    This task performs the full consolidation workflow:
    1. Extract facts from recent episodes to semantic memory
    2. Learn procedures from successful episode patterns
    3. Prune old, low-value memories

    Args:
        project_id: UUID of the project to consolidate
        agent_type_id: Optional agent type to filter by

    Returns:
        dict with consolidation results
    """
    logger.info(f"Running nightly consolidation for project {project_id}")

    # TODO: Implement actual consolidation
    # This will involve:
    # 1. Getting database session from async context
    # 2. Creating consolidation service instance
    # 3. Calling the service's run_nightly_consolidation
    # 4. Returning results
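
    # Since the three workflow steps map onto sub-tasks defined later in this
    # module, one possible (hedged) implementation is to chain them with
    # Celery's canvas primitives instead of calling the service directly:
    #
    #     from celery import chain
    #
    #     chain(
    #         consolidate_episodes_to_facts.si(project_id=project_id),
    #         consolidate_episodes_to_procedures.si(
    #             project_id=project_id, agent_type_id=agent_type_id
    #         ),
    #         prune_old_memories.si(project_id=project_id),
    #     ).apply_async()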

    # Placeholder implementation
    return {
        "status": "pending",
        "project_id": project_id,
        "total_facts_created": 0,
        "total_procedures_created": 0,
        "total_pruned": 0,
    }


@celery_app.task(
    bind=True,
    name="app.tasks.memory_consolidation.consolidate_episodes_to_facts",
    autoretry_for=(Exception,),
    retry_backoff=True,
    retry_kwargs={"max_retries": 3},
)
def consolidate_episodes_to_facts(
    self,
    project_id: str,
    since_hours: int = 24,
    limit: int | None = None,
) -> dict[str, Any]:
    """
    Extract facts from episodic memories.

    Args:
        project_id: UUID of the project
        since_hours: Process episodes from last N hours
        limit: Maximum episodes to process

    Returns:
        dict with extraction results
    """
    logger.info(f"Consolidating episodes to facts for project {project_id}")

    # TODO: Implement actual consolidation
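
    # A sketch of the time-window handling, assuming episodes carry a UTC
    # `created_at` timestamp (field name assumed); episode selection and fact
    # extraction themselves would go through the consolidation service:
    #
    #     from datetime import datetime, timedelta, timezone
    #
    #     cutoff = datetime.now(timezone.utc) - timedelta(hours=since_hours)
    #     # select episodes with created_at >= cutoff, newest first,
    #     # capped at `limit`, then extract facts from each one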

    # Placeholder implementation
    return {
        "status": "pending",
        "project_id": project_id,
        "items_processed": 0,
        "items_created": 0,
    }


@celery_app.task(
    bind=True,
    name="app.tasks.memory_consolidation.consolidate_episodes_to_procedures",
    autoretry_for=(Exception,),
    retry_backoff=True,
    retry_kwargs={"max_retries": 3},
)
def consolidate_episodes_to_procedures(
    self,
    project_id: str,
    agent_type_id: str | None = None,
    since_days: int = 7,
) -> dict[str, Any]:
    """
    Learn procedures from episodic patterns.

    Args:
        project_id: UUID of the project
        agent_type_id: Optional agent type filter
        since_days: Process episodes from last N days

    Returns:
        dict with procedure learning results
    """
    logger.info(f"Consolidating episodes to procedures for project {project_id}")

    # TODO: Implement actual consolidation
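
    # A hedged sketch of the candidate filter: the nightly workflow learns
    # procedures from *successful* episode patterns, so only episodes from
    # the last `since_days` days with a successful outcome would be mined.
    # Field names (`created_at`, `outcome`) are assumptions for illustration:
    #
    #     cutoff = datetime.now(timezone.utc) - timedelta(days=since_days)
    #     candidates = [
    #         e for e in episodes
    #         if e.created_at >= cutoff and e.outcome == "success"
    #     ]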

    # Placeholder implementation
    return {
        "status": "pending",
        "project_id": project_id,
        "items_processed": 0,
        "items_created": 0,
    }


@celery_app.task(
    bind=True,
    name="app.tasks.memory_consolidation.prune_old_memories",
    autoretry_for=(Exception,),
    retry_backoff=True,
    retry_kwargs={"max_retries": 3},
)
def prune_old_memories(
    self,
    project_id: str,
    max_age_days: int = 90,
    min_importance: float = 0.2,
) -> dict[str, Any]:
    """
    Prune old, low-value memories.

    Args:
        project_id: UUID of the project
        max_age_days: Age in days past which a memory becomes a pruning candidate
        min_importance: Importance score below which an old memory is pruned

    Returns:
        dict with pruning results
    """
    logger.info(f"Pruning old memories for project {project_id}")

    # TODO: Implement actual pruning
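
    # The retention rule implied by the parameters, as a hedged sketch: a
    # memory is pruned only when it is both older than `max_age_days` and
    # scored below `min_importance`. Field names are assumptions:
    #
    #     cutoff = datetime.now(timezone.utc) - timedelta(days=max_age_days)
    #
    #     def _should_prune(memory) -> bool:
    #         return memory.created_at < cutoff and memory.importance < min_importance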

    # Placeholder implementation
    return {
        "status": "pending",
        "project_id": project_id,
        "items_pruned": 0,
    }


# =========================================================================
# Celery Beat Schedule Configuration
# =========================================================================

# This would typically be configured in celery_app.py or a separate config
# file. Example schedule for nightly consolidation. Note that `crontab`
# comes from celery.schedules, and that run_nightly_consolidation takes a
# single project_id, so a beat entry would either target one project or
# point at a dispatcher task that fans out over all projects:
#
# from celery.schedules import crontab
#
# app.conf.beat_schedule = {
#     'nightly-memory-consolidation': {
#         'task': 'app.tasks.memory_consolidation.run_nightly_consolidation',
#         'schedule': crontab(hour=2, minute=0),  # 2 AM daily
#         'args': ('<project-uuid>',),
#     },
# }