forked from cardosofelipe/fast-next-template
feat(backend): Add Syndarix domain models with CRUD operations
- Add Project model with slug, description, autonomy level, and settings
- Add AgentType model for agent templates with model config and failover
- Add AgentInstance model for running agents with status and memory
- Add Issue model with external tracker sync (Gitea/GitHub/GitLab)
- Add Sprint model with velocity tracking and lifecycle management
- Add comprehensive Pydantic schemas with validation
- Add full CRUD operations for all models with filtering/sorting
- Add 280+ tests for models, schemas, and CRUD operations

Implements #23, #24, #25, #26, #27

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
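Usage note: the CRUD objects added below are module-level singletons that are awaited with an AsyncSession. A minimal wiring sketch for one of them follows; the router, the `get_async_session` dependency import, and the 409 mapping are illustrative assumptions, not code from this commit.

# Illustrative sketch only; dependency and router names are assumptions.
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession

from app.api.deps import get_async_session  # assumed location of the session dependency
from app.crud.syndarix import project as project_crud
from app.schemas.syndarix import ProjectCreate

router = APIRouter()


@router.post("/projects")
async def create_project(
    payload: ProjectCreate,
    db: AsyncSession = Depends(get_async_session),
):
    try:
        return await project_crud.create(db, obj_in=payload)
    except ValueError as exc:  # duplicate slug and other integrity errors surface as ValueError
        raise HTTPException(status_code=409, detail=str(exc))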
20 backend/app/crud/syndarix/__init__.py Normal file
@@ -0,0 +1,20 @@
# app/crud/syndarix/__init__.py
"""
Syndarix CRUD operations.

This package contains CRUD operations for all Syndarix domain entities.
"""

from .agent_instance import agent_instance
from .agent_type import agent_type
from .issue import issue
from .project import project
from .sprint import sprint

__all__ = [
    "agent_instance",
    "agent_type",
    "issue",
    "project",
    "sprint",
]
346 backend/app/crud/syndarix/agent_instance.py Normal file
@@ -0,0 +1,346 @@
# app/crud/syndarix/agent_instance.py
"""Async CRUD operations for AgentInstance model using SQLAlchemy 2.0 patterns."""

import logging
from datetime import UTC, datetime
from decimal import Decimal
from typing import Any
from uuid import UUID

from sqlalchemy import func, select, update
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import joinedload

from app.crud.base import CRUDBase
from app.models.syndarix import AgentInstance, Issue
from app.models.syndarix.enums import AgentStatus
from app.schemas.syndarix import AgentInstanceCreate, AgentInstanceUpdate

logger = logging.getLogger(__name__)


class CRUDAgentInstance(CRUDBase[AgentInstance, AgentInstanceCreate, AgentInstanceUpdate]):
    """Async CRUD operations for AgentInstance model."""

    async def create(
        self, db: AsyncSession, *, obj_in: AgentInstanceCreate
    ) -> AgentInstance:
        """Create a new agent instance with error handling."""
        try:
            db_obj = AgentInstance(
                agent_type_id=obj_in.agent_type_id,
                project_id=obj_in.project_id,
                status=obj_in.status,
                current_task=obj_in.current_task,
                short_term_memory=obj_in.short_term_memory,
                long_term_memory_ref=obj_in.long_term_memory_ref,
                session_id=obj_in.session_id,
            )
            db.add(db_obj)
            await db.commit()
            await db.refresh(db_obj)
            return db_obj
        except IntegrityError as e:
            await db.rollback()
            error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
            logger.error(f"Integrity error creating agent instance: {error_msg}")
            raise ValueError(f"Database integrity error: {error_msg}")
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Unexpected error creating agent instance: {e!s}", exc_info=True
            )
            raise

    async def get_with_details(
        self,
        db: AsyncSession,
        *,
        instance_id: UUID,
    ) -> dict[str, Any] | None:
        """
        Get an agent instance with full details including related entities.

        Returns:
            Dictionary with instance and related entity details
        """
        try:
            # Get instance with joined relationships
            result = await db.execute(
                select(AgentInstance)
                .options(
                    joinedload(AgentInstance.agent_type),
                    joinedload(AgentInstance.project),
                )
                .where(AgentInstance.id == instance_id)
            )
            instance = result.scalar_one_or_none()

            if not instance:
                return None

            # Get assigned issues count
            issues_count_result = await db.execute(
                select(func.count(Issue.id)).where(
                    Issue.assigned_agent_id == instance_id
                )
            )
            assigned_issues_count = issues_count_result.scalar_one()

            return {
                "instance": instance,
                "agent_type_name": instance.agent_type.name if instance.agent_type else None,
                "agent_type_slug": instance.agent_type.slug if instance.agent_type else None,
                "project_name": instance.project.name if instance.project else None,
                "project_slug": instance.project.slug if instance.project else None,
                "assigned_issues_count": assigned_issues_count,
            }
        except Exception as e:
            logger.error(
                f"Error getting agent instance with details {instance_id}: {e!s}",
                exc_info=True,
            )
            raise

    async def get_by_project(
        self,
        db: AsyncSession,
        *,
        project_id: UUID,
        status: AgentStatus | None = None,
        skip: int = 0,
        limit: int = 100,
    ) -> tuple[list[AgentInstance], int]:
        """Get agent instances for a specific project."""
        try:
            query = select(AgentInstance).where(
                AgentInstance.project_id == project_id
            )

            if status is not None:
                query = query.where(AgentInstance.status == status)

            # Get total count
            count_query = select(func.count()).select_from(query.alias())
            count_result = await db.execute(count_query)
            total = count_result.scalar_one()

            # Apply pagination
            query = query.order_by(AgentInstance.created_at.desc())
            query = query.offset(skip).limit(limit)
            result = await db.execute(query)
            instances = list(result.scalars().all())

            return instances, total
        except Exception as e:
            logger.error(
                f"Error getting instances by project {project_id}: {e!s}",
                exc_info=True,
            )
            raise

    async def get_by_agent_type(
        self,
        db: AsyncSession,
        *,
        agent_type_id: UUID,
        status: AgentStatus | None = None,
    ) -> list[AgentInstance]:
        """Get all instances of a specific agent type."""
        try:
            query = select(AgentInstance).where(
                AgentInstance.agent_type_id == agent_type_id
            )

            if status is not None:
                query = query.where(AgentInstance.status == status)

            query = query.order_by(AgentInstance.created_at.desc())
            result = await db.execute(query)
            return list(result.scalars().all())
        except Exception as e:
            logger.error(
                f"Error getting instances by agent type {agent_type_id}: {e!s}",
                exc_info=True,
            )
            raise

    async def update_status(
        self,
        db: AsyncSession,
        *,
        instance_id: UUID,
        status: AgentStatus,
        current_task: str | None = None,
    ) -> AgentInstance | None:
        """Update the status of an agent instance."""
        try:
            result = await db.execute(
                select(AgentInstance).where(AgentInstance.id == instance_id)
            )
            instance = result.scalar_one_or_none()

            if not instance:
                return None

            instance.status = status
            instance.last_activity_at = datetime.now(UTC)
            if current_task is not None:
                instance.current_task = current_task

            await db.commit()
            await db.refresh(instance)
            return instance
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Error updating instance status {instance_id}: {e!s}", exc_info=True
            )
            raise

    async def terminate(
        self,
        db: AsyncSession,
        *,
        instance_id: UUID,
    ) -> AgentInstance | None:
        """Terminate an agent instance."""
        try:
            result = await db.execute(
                select(AgentInstance).where(AgentInstance.id == instance_id)
            )
            instance = result.scalar_one_or_none()

            if not instance:
                return None

            instance.status = AgentStatus.TERMINATED
            instance.terminated_at = datetime.now(UTC)
            instance.current_task = None
            instance.session_id = None

            await db.commit()
            await db.refresh(instance)
            return instance
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Error terminating instance {instance_id}: {e!s}", exc_info=True
            )
            raise

    async def record_task_completion(
        self,
        db: AsyncSession,
        *,
        instance_id: UUID,
        tokens_used: int,
        cost_incurred: Decimal,
    ) -> AgentInstance | None:
        """Record a completed task and update metrics."""
        try:
            result = await db.execute(
                select(AgentInstance).where(AgentInstance.id == instance_id)
            )
            instance = result.scalar_one_or_none()

            if not instance:
                return None

            instance.tasks_completed += 1
            instance.tokens_used += tokens_used
            instance.cost_incurred += cost_incurred
            instance.last_activity_at = datetime.now(UTC)

            await db.commit()
            await db.refresh(instance)
            return instance
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Error recording task completion {instance_id}: {e!s}", exc_info=True
            )
            raise

    async def get_project_metrics(
        self,
        db: AsyncSession,
        *,
        project_id: UUID,
    ) -> dict[str, Any]:
        """Get aggregated metrics for all agents in a project."""
        try:
            result = await db.execute(
                select(
                    func.count(AgentInstance.id).label("total_instances"),
                    func.count(AgentInstance.id)
                    .filter(AgentInstance.status == AgentStatus.WORKING)
                    .label("active_instances"),
                    func.count(AgentInstance.id)
                    .filter(AgentInstance.status == AgentStatus.IDLE)
                    .label("idle_instances"),
                    func.sum(AgentInstance.tasks_completed).label("total_tasks"),
                    func.sum(AgentInstance.tokens_used).label("total_tokens"),
                    func.sum(AgentInstance.cost_incurred).label("total_cost"),
                ).where(AgentInstance.project_id == project_id)
            )
            row = result.one()

            return {
                "total_instances": row.total_instances or 0,
                "active_instances": row.active_instances or 0,
                "idle_instances": row.idle_instances or 0,
                "total_tasks_completed": row.total_tasks or 0,
                "total_tokens_used": row.total_tokens or 0,
                "total_cost_incurred": row.total_cost or Decimal("0.0000"),
            }
        except Exception as e:
            logger.error(
                f"Error getting project metrics {project_id}: {e!s}", exc_info=True
            )
            raise

    async def bulk_terminate_by_project(
        self,
        db: AsyncSession,
        *,
        project_id: UUID,
    ) -> int:
        """Terminate all active instances in a project."""
        try:
            now = datetime.now(UTC)
            stmt = (
                update(AgentInstance)
                .where(
                    AgentInstance.project_id == project_id,
                    AgentInstance.status != AgentStatus.TERMINATED,
                )
                .values(
                    status=AgentStatus.TERMINATED,
                    terminated_at=now,
                    current_task=None,
                    session_id=None,
                    updated_at=now,
                )
            )

            result = await db.execute(stmt)
            await db.commit()

            terminated_count = result.rowcount
            logger.info(
                f"Bulk terminated {terminated_count} instances in project {project_id}"
            )
            return terminated_count
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Error bulk terminating instances for project {project_id}: {e!s}",
                exc_info=True,
            )
            raise


# Create a singleton instance for use across the application
agent_instance = CRUDAgentInstance(AgentInstance)
275 backend/app/crud/syndarix/agent_type.py Normal file
@@ -0,0 +1,275 @@
# app/crud/syndarix/agent_type.py
"""Async CRUD operations for AgentType model using SQLAlchemy 2.0 patterns."""

import logging
from typing import Any
from uuid import UUID

from sqlalchemy import func, or_, select
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession

from app.crud.base import CRUDBase
from app.models.syndarix import AgentInstance, AgentType
from app.schemas.syndarix import AgentTypeCreate, AgentTypeUpdate

logger = logging.getLogger(__name__)


class CRUDAgentType(CRUDBase[AgentType, AgentTypeCreate, AgentTypeUpdate]):
    """Async CRUD operations for AgentType model."""

    async def get_by_slug(self, db: AsyncSession, *, slug: str) -> AgentType | None:
        """Get agent type by slug."""
        try:
            result = await db.execute(
                select(AgentType).where(AgentType.slug == slug)
            )
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Error getting agent type by slug {slug}: {e!s}")
            raise

    async def create(
        self, db: AsyncSession, *, obj_in: AgentTypeCreate
    ) -> AgentType:
        """Create a new agent type with error handling."""
        try:
            db_obj = AgentType(
                name=obj_in.name,
                slug=obj_in.slug,
                description=obj_in.description,
                expertise=obj_in.expertise,
                personality_prompt=obj_in.personality_prompt,
                primary_model=obj_in.primary_model,
                fallback_models=obj_in.fallback_models,
                model_params=obj_in.model_params,
                mcp_servers=obj_in.mcp_servers,
                tool_permissions=obj_in.tool_permissions,
                is_active=obj_in.is_active,
            )
            db.add(db_obj)
            await db.commit()
            await db.refresh(db_obj)
            return db_obj
        except IntegrityError as e:
            await db.rollback()
            error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
            if "slug" in error_msg.lower():
                logger.warning(f"Duplicate slug attempted: {obj_in.slug}")
                raise ValueError(
                    f"Agent type with slug '{obj_in.slug}' already exists"
                )
            logger.error(f"Integrity error creating agent type: {error_msg}")
            raise ValueError(f"Database integrity error: {error_msg}")
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Unexpected error creating agent type: {e!s}", exc_info=True
            )
            raise

    async def get_multi_with_filters(
        self,
        db: AsyncSession,
        *,
        skip: int = 0,
        limit: int = 100,
        is_active: bool | None = None,
        search: str | None = None,
        sort_by: str = "created_at",
        sort_order: str = "desc",
    ) -> tuple[list[AgentType], int]:
        """
        Get multiple agent types with filtering, searching, and sorting.

        Returns:
            Tuple of (agent types list, total count)
        """
        try:
            query = select(AgentType)

            # Apply filters
            if is_active is not None:
                query = query.where(AgentType.is_active == is_active)

            if search:
                search_filter = or_(
                    AgentType.name.ilike(f"%{search}%"),
                    AgentType.slug.ilike(f"%{search}%"),
                    AgentType.description.ilike(f"%{search}%"),
                )
                query = query.where(search_filter)

            # Get total count before pagination
            count_query = select(func.count()).select_from(query.alias())
            count_result = await db.execute(count_query)
            total = count_result.scalar_one()

            # Apply sorting
            sort_column = getattr(AgentType, sort_by, AgentType.created_at)
            if sort_order == "desc":
                query = query.order_by(sort_column.desc())
            else:
                query = query.order_by(sort_column.asc())

            # Apply pagination
            query = query.offset(skip).limit(limit)
            result = await db.execute(query)
            agent_types = list(result.scalars().all())

            return agent_types, total
        except Exception as e:
            logger.error(f"Error getting agent types with filters: {e!s}")
            raise

    async def get_with_instance_count(
        self,
        db: AsyncSession,
        *,
        agent_type_id: UUID,
    ) -> dict[str, Any] | None:
        """
        Get a single agent type with its instance count.

        Returns:
            Dictionary with agent_type and instance_count
        """
        try:
            result = await db.execute(
                select(AgentType).where(AgentType.id == agent_type_id)
            )
            agent_type = result.scalar_one_or_none()

            if not agent_type:
                return None

            # Get instance count
            count_result = await db.execute(
                select(func.count(AgentInstance.id)).where(
                    AgentInstance.agent_type_id == agent_type_id
                )
            )
            instance_count = count_result.scalar_one()

            return {
                "agent_type": agent_type,
                "instance_count": instance_count,
            }
        except Exception as e:
            logger.error(
                f"Error getting agent type with count {agent_type_id}: {e!s}",
                exc_info=True,
            )
            raise

    async def get_multi_with_instance_counts(
        self,
        db: AsyncSession,
        *,
        skip: int = 0,
        limit: int = 100,
        is_active: bool | None = None,
        search: str | None = None,
    ) -> tuple[list[dict[str, Any]], int]:
        """
        Get agent types with instance counts in optimized queries.

        Returns:
            Tuple of (list of dicts with agent_type and instance_count, total count)
        """
        try:
            # Get filtered agent types
            agent_types, total = await self.get_multi_with_filters(
                db,
                skip=skip,
                limit=limit,
                is_active=is_active,
                search=search,
            )

            if not agent_types:
                return [], 0

            agent_type_ids = [at.id for at in agent_types]

            # Get instance counts in bulk
            counts_result = await db.execute(
                select(
                    AgentInstance.agent_type_id,
                    func.count(AgentInstance.id).label("count"),
                )
                .where(AgentInstance.agent_type_id.in_(agent_type_ids))
                .group_by(AgentInstance.agent_type_id)
            )
            counts = {row.agent_type_id: row.count for row in counts_result}

            # Combine results
            results = [
                {
                    "agent_type": agent_type,
                    "instance_count": counts.get(agent_type.id, 0),
                }
                for agent_type in agent_types
            ]

            return results, total
        except Exception as e:
            logger.error(
                f"Error getting agent types with counts: {e!s}", exc_info=True
            )
            raise

    async def get_by_expertise(
        self,
        db: AsyncSession,
        *,
        expertise: str,
        is_active: bool = True,
    ) -> list[AgentType]:
        """Get agent types that have a specific expertise."""
        try:
            # Use PostgreSQL JSONB contains operator
            query = select(AgentType).where(
                AgentType.expertise.contains([expertise.lower()]),
                AgentType.is_active == is_active,
            )
            result = await db.execute(query)
            return list(result.scalars().all())
        except Exception as e:
            logger.error(
                f"Error getting agent types by expertise {expertise}: {e!s}",
                exc_info=True,
            )
            raise

    async def deactivate(
        self,
        db: AsyncSession,
        *,
        agent_type_id: UUID,
    ) -> AgentType | None:
        """Deactivate an agent type (soft delete)."""
        try:
            result = await db.execute(
                select(AgentType).where(AgentType.id == agent_type_id)
            )
            agent_type = result.scalar_one_or_none()

            if not agent_type:
                return None

            agent_type.is_active = False
            await db.commit()
            await db.refresh(agent_type)
            return agent_type
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Error deactivating agent type {agent_type_id}: {e!s}", exc_info=True
            )
            raise


# Create a singleton instance for use across the application
agent_type = CRUDAgentType(AgentType)
437 backend/app/crud/syndarix/issue.py Normal file
@@ -0,0 +1,437 @@
# app/crud/syndarix/issue.py
"""Async CRUD operations for Issue model using SQLAlchemy 2.0 patterns."""

import logging
from datetime import UTC, datetime
from typing import Any
from uuid import UUID

from sqlalchemy import func, or_, select
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import joinedload

from app.crud.base import CRUDBase
from app.models.syndarix import AgentInstance, Issue
from app.models.syndarix.enums import IssuePriority, IssueStatus, SyncStatus
from app.schemas.syndarix import IssueCreate, IssueUpdate

logger = logging.getLogger(__name__)


class CRUDIssue(CRUDBase[Issue, IssueCreate, IssueUpdate]):
    """Async CRUD operations for Issue model."""

    async def create(self, db: AsyncSession, *, obj_in: IssueCreate) -> Issue:
        """Create a new issue with error handling."""
        try:
            db_obj = Issue(
                project_id=obj_in.project_id,
                title=obj_in.title,
                body=obj_in.body,
                status=obj_in.status,
                priority=obj_in.priority,
                labels=obj_in.labels,
                assigned_agent_id=obj_in.assigned_agent_id,
                human_assignee=obj_in.human_assignee,
                sprint_id=obj_in.sprint_id,
                story_points=obj_in.story_points,
                external_tracker=obj_in.external_tracker,
                external_id=obj_in.external_id,
                external_url=obj_in.external_url,
                external_number=obj_in.external_number,
                sync_status=SyncStatus.SYNCED,
            )
            db.add(db_obj)
            await db.commit()
            await db.refresh(db_obj)
            return db_obj
        except IntegrityError as e:
            await db.rollback()
            error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
            logger.error(f"Integrity error creating issue: {error_msg}")
            raise ValueError(f"Database integrity error: {error_msg}")
        except Exception as e:
            await db.rollback()
            logger.error(f"Unexpected error creating issue: {e!s}", exc_info=True)
            raise

    async def get_with_details(
        self,
        db: AsyncSession,
        *,
        issue_id: UUID,
    ) -> dict[str, Any] | None:
        """
        Get an issue with full details including related entity names.

        Returns:
            Dictionary with issue and related entity details
        """
        try:
            # Get issue with joined relationships
            result = await db.execute(
                select(Issue)
                .options(
                    joinedload(Issue.project),
                    joinedload(Issue.sprint),
                    joinedload(Issue.assigned_agent).joinedload(AgentInstance.agent_type),
                )
                .where(Issue.id == issue_id)
            )
            issue = result.scalar_one_or_none()

            if not issue:
                return None

            return {
                "issue": issue,
                "project_name": issue.project.name if issue.project else None,
                "project_slug": issue.project.slug if issue.project else None,
                "sprint_name": issue.sprint.name if issue.sprint else None,
                "assigned_agent_type_name": (
                    issue.assigned_agent.agent_type.name
                    if issue.assigned_agent and issue.assigned_agent.agent_type
                    else None
                ),
            }
        except Exception as e:
            logger.error(
                f"Error getting issue with details {issue_id}: {e!s}", exc_info=True
            )
            raise

    async def get_by_project(
        self,
        db: AsyncSession,
        *,
        project_id: UUID,
        status: IssueStatus | None = None,
        priority: IssuePriority | None = None,
        sprint_id: UUID | None = None,
        assigned_agent_id: UUID | None = None,
        labels: list[str] | None = None,
        search: str | None = None,
        skip: int = 0,
        limit: int = 100,
        sort_by: str = "created_at",
        sort_order: str = "desc",
    ) -> tuple[list[Issue], int]:
        """Get issues for a specific project with filters."""
        try:
            query = select(Issue).where(Issue.project_id == project_id)

            # Apply filters
            if status is not None:
                query = query.where(Issue.status == status)

            if priority is not None:
                query = query.where(Issue.priority == priority)

            if sprint_id is not None:
                query = query.where(Issue.sprint_id == sprint_id)

            if assigned_agent_id is not None:
                query = query.where(Issue.assigned_agent_id == assigned_agent_id)

            if labels:
                # Match any of the provided labels
                for label in labels:
                    query = query.where(Issue.labels.contains([label.lower()]))

            if search:
                search_filter = or_(
                    Issue.title.ilike(f"%{search}%"),
                    Issue.body.ilike(f"%{search}%"),
                )
                query = query.where(search_filter)

            # Get total count
            count_query = select(func.count()).select_from(query.alias())
            count_result = await db.execute(count_query)
            total = count_result.scalar_one()

            # Apply sorting
            sort_column = getattr(Issue, sort_by, Issue.created_at)
            if sort_order == "desc":
                query = query.order_by(sort_column.desc())
            else:
                query = query.order_by(sort_column.asc())

            # Apply pagination
            query = query.offset(skip).limit(limit)
            result = await db.execute(query)
            issues = list(result.scalars().all())

            return issues, total
        except Exception as e:
            logger.error(
                f"Error getting issues by project {project_id}: {e!s}", exc_info=True
            )
            raise

    async def get_by_sprint(
        self,
        db: AsyncSession,
        *,
        sprint_id: UUID,
        status: IssueStatus | None = None,
    ) -> list[Issue]:
        """Get all issues in a sprint."""
        try:
            query = select(Issue).where(Issue.sprint_id == sprint_id)

            if status is not None:
                query = query.where(Issue.status == status)

            query = query.order_by(Issue.priority.desc(), Issue.created_at.asc())
            result = await db.execute(query)
            return list(result.scalars().all())
        except Exception as e:
            logger.error(
                f"Error getting issues by sprint {sprint_id}: {e!s}", exc_info=True
            )
            raise

    async def assign_to_agent(
        self,
        db: AsyncSession,
        *,
        issue_id: UUID,
        agent_id: UUID | None,
    ) -> Issue | None:
        """Assign an issue to an agent (or unassign if agent_id is None)."""
        try:
            result = await db.execute(select(Issue).where(Issue.id == issue_id))
            issue = result.scalar_one_or_none()

            if not issue:
                return None

            issue.assigned_agent_id = agent_id
            issue.human_assignee = None  # Clear human assignee when assigning to agent
            await db.commit()
            await db.refresh(issue)
            return issue
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Error assigning issue {issue_id} to agent {agent_id}: {e!s}",
                exc_info=True,
            )
            raise

    async def assign_to_human(
        self,
        db: AsyncSession,
        *,
        issue_id: UUID,
        human_assignee: str | None,
    ) -> Issue | None:
        """Assign an issue to a human (or unassign if human_assignee is None)."""
        try:
            result = await db.execute(select(Issue).where(Issue.id == issue_id))
            issue = result.scalar_one_or_none()

            if not issue:
                return None

            issue.human_assignee = human_assignee
            issue.assigned_agent_id = None  # Clear agent when assigning to human
            await db.commit()
            await db.refresh(issue)
            return issue
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Error assigning issue {issue_id} to human {human_assignee}: {e!s}",
                exc_info=True,
            )
            raise

    async def close_issue(
        self,
        db: AsyncSession,
        *,
        issue_id: UUID,
    ) -> Issue | None:
        """Close an issue by setting status and closed_at timestamp."""
        try:
            result = await db.execute(select(Issue).where(Issue.id == issue_id))
            issue = result.scalar_one_or_none()

            if not issue:
                return None

            issue.status = IssueStatus.CLOSED
            issue.closed_at = datetime.now(UTC)
            await db.commit()
            await db.refresh(issue)
            return issue
        except Exception as e:
            await db.rollback()
            logger.error(f"Error closing issue {issue_id}: {e!s}", exc_info=True)
            raise

    async def reopen_issue(
        self,
        db: AsyncSession,
        *,
        issue_id: UUID,
    ) -> Issue | None:
        """Reopen a closed issue."""
        try:
            result = await db.execute(select(Issue).where(Issue.id == issue_id))
            issue = result.scalar_one_or_none()

            if not issue:
                return None

            issue.status = IssueStatus.OPEN
            issue.closed_at = None
            await db.commit()
            await db.refresh(issue)
            return issue
        except Exception as e:
            await db.rollback()
            logger.error(f"Error reopening issue {issue_id}: {e!s}", exc_info=True)
            raise

    async def update_sync_status(
        self,
        db: AsyncSession,
        *,
        issue_id: UUID,
        sync_status: SyncStatus,
        last_synced_at: datetime | None = None,
        external_updated_at: datetime | None = None,
    ) -> Issue | None:
        """Update the sync status of an issue."""
        try:
            result = await db.execute(select(Issue).where(Issue.id == issue_id))
            issue = result.scalar_one_or_none()

            if not issue:
                return None

            issue.sync_status = sync_status
            if last_synced_at:
                issue.last_synced_at = last_synced_at
            if external_updated_at:
                issue.external_updated_at = external_updated_at

            await db.commit()
            await db.refresh(issue)
            return issue
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Error updating sync status for issue {issue_id}: {e!s}", exc_info=True
            )
            raise

    async def get_project_stats(
        self,
        db: AsyncSession,
        *,
        project_id: UUID,
    ) -> dict[str, Any]:
        """Get issue statistics for a project."""
        try:
            # Get counts by status
            status_counts = await db.execute(
                select(Issue.status, func.count(Issue.id).label("count"))
                .where(Issue.project_id == project_id)
                .group_by(Issue.status)
            )
            by_status = {row.status.value: row.count for row in status_counts}

            # Get counts by priority
            priority_counts = await db.execute(
                select(Issue.priority, func.count(Issue.id).label("count"))
                .where(Issue.project_id == project_id)
                .group_by(Issue.priority)
            )
            by_priority = {row.priority.value: row.count for row in priority_counts}

            # Get story points
            points_result = await db.execute(
                select(
                    func.sum(Issue.story_points).label("total"),
                    func.sum(Issue.story_points)
                    .filter(Issue.status == IssueStatus.CLOSED)
                    .label("completed"),
                ).where(Issue.project_id == project_id)
            )
            points_row = points_result.one()

            total_issues = sum(by_status.values())

            return {
                "total": total_issues,
                "open": by_status.get("open", 0),
                "in_progress": by_status.get("in_progress", 0),
                "in_review": by_status.get("in_review", 0),
                "blocked": by_status.get("blocked", 0),
                "closed": by_status.get("closed", 0),
                "by_priority": by_priority,
                "total_story_points": points_row.total,
                "completed_story_points": points_row.completed,
            }
        except Exception as e:
            logger.error(
                f"Error getting issue stats for project {project_id}: {e!s}",
                exc_info=True,
            )
            raise

    async def get_by_external_id(
        self,
        db: AsyncSession,
        *,
        external_tracker: str,
        external_id: str,
    ) -> Issue | None:
        """Get an issue by its external tracker ID."""
        try:
            result = await db.execute(
                select(Issue).where(
                    Issue.external_tracker == external_tracker,
                    Issue.external_id == external_id,
                )
            )
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(
                f"Error getting issue by external ID {external_tracker}:{external_id}: {e!s}",
                exc_info=True,
            )
            raise

    async def get_pending_sync(
        self,
        db: AsyncSession,
        *,
        project_id: UUID | None = None,
        limit: int = 100,
    ) -> list[Issue]:
        """Get issues that need to be synced with external tracker."""
        try:
            query = select(Issue).where(
                Issue.external_tracker.isnot(None),
                Issue.sync_status.in_([SyncStatus.PENDING, SyncStatus.ERROR]),
            )

            if project_id:
                query = query.where(Issue.project_id == project_id)

            query = query.order_by(Issue.updated_at.asc()).limit(limit)
            result = await db.execute(query)
            return list(result.scalars().all())
        except Exception as e:
            logger.error(f"Error getting pending sync issues: {e!s}", exc_info=True)
            raise


# Create a singleton instance for use across the application
issue = CRUDIssue(Issue)
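The external-tracker methods above (get_pending_sync, update_sync_status, get_by_external_id) are designed to chain into a periodic sync pass. A hedged sketch follows; push_to_tracker stands in for a hypothetical Gitea/GitHub/GitLab client that is not part of this commit.

# Sketch of a sync pass built on the CRUD above; push_to_tracker() is hypothetical.
from datetime import UTC, datetime

from sqlalchemy.ext.asyncio import AsyncSession

from app.crud.syndarix import issue as issue_crud
from app.models.syndarix.enums import SyncStatus


async def sync_pending_issues(db: AsyncSession, push_to_tracker) -> int:
    """Push locally changed issues out and record the outcome per issue."""
    pending = await issue_crud.get_pending_sync(db, limit=50)
    synced = 0
    for item in pending:
        try:
            await push_to_tracker(item)  # hypothetical external client call
            await issue_crud.update_sync_status(
                db,
                issue_id=item.id,
                sync_status=SyncStatus.SYNCED,
                last_synced_at=datetime.now(UTC),
            )
            synced += 1
        except Exception:
            await issue_crud.update_sync_status(
                db, issue_id=item.id, sync_status=SyncStatus.ERROR
            )
    return synced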
309 backend/app/crud/syndarix/project.py Normal file
@@ -0,0 +1,309 @@
# app/crud/syndarix/project.py
"""Async CRUD operations for Project model using SQLAlchemy 2.0 patterns."""

import logging
from typing import Any
from uuid import UUID

from sqlalchemy import func, or_, select
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession

from app.crud.base import CRUDBase
from app.models.syndarix import AgentInstance, Issue, Project, Sprint
from app.models.syndarix.enums import ProjectStatus, SprintStatus
from app.schemas.syndarix import ProjectCreate, ProjectUpdate

logger = logging.getLogger(__name__)


class CRUDProject(CRUDBase[Project, ProjectCreate, ProjectUpdate]):
    """Async CRUD operations for Project model."""

    async def get_by_slug(self, db: AsyncSession, *, slug: str) -> Project | None:
        """Get project by slug."""
        try:
            result = await db.execute(select(Project).where(Project.slug == slug))
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Error getting project by slug {slug}: {e!s}")
            raise

    async def create(self, db: AsyncSession, *, obj_in: ProjectCreate) -> Project:
        """Create a new project with error handling."""
        try:
            db_obj = Project(
                name=obj_in.name,
                slug=obj_in.slug,
                description=obj_in.description,
                autonomy_level=obj_in.autonomy_level,
                status=obj_in.status,
                settings=obj_in.settings or {},
                owner_id=obj_in.owner_id,
            )
            db.add(db_obj)
            await db.commit()
            await db.refresh(db_obj)
            return db_obj
        except IntegrityError as e:
            await db.rollback()
            error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
            if "slug" in error_msg.lower():
                logger.warning(f"Duplicate slug attempted: {obj_in.slug}")
                raise ValueError(f"Project with slug '{obj_in.slug}' already exists")
            logger.error(f"Integrity error creating project: {error_msg}")
            raise ValueError(f"Database integrity error: {error_msg}")
        except Exception as e:
            await db.rollback()
            logger.error(f"Unexpected error creating project: {e!s}", exc_info=True)
            raise

    async def get_multi_with_filters(
        self,
        db: AsyncSession,
        *,
        skip: int = 0,
        limit: int = 100,
        status: ProjectStatus | None = None,
        owner_id: UUID | None = None,
        search: str | None = None,
        sort_by: str = "created_at",
        sort_order: str = "desc",
    ) -> tuple[list[Project], int]:
        """
        Get multiple projects with filtering, searching, and sorting.

        Returns:
            Tuple of (projects list, total count)
        """
        try:
            query = select(Project)

            # Apply filters
            if status is not None:
                query = query.where(Project.status == status)

            if owner_id is not None:
                query = query.where(Project.owner_id == owner_id)

            if search:
                search_filter = or_(
                    Project.name.ilike(f"%{search}%"),
                    Project.slug.ilike(f"%{search}%"),
                    Project.description.ilike(f"%{search}%"),
                )
                query = query.where(search_filter)

            # Get total count before pagination
            count_query = select(func.count()).select_from(query.alias())
            count_result = await db.execute(count_query)
            total = count_result.scalar_one()

            # Apply sorting
            sort_column = getattr(Project, sort_by, Project.created_at)
            if sort_order == "desc":
                query = query.order_by(sort_column.desc())
            else:
                query = query.order_by(sort_column.asc())

            # Apply pagination
            query = query.offset(skip).limit(limit)
            result = await db.execute(query)
            projects = list(result.scalars().all())

            return projects, total
        except Exception as e:
            logger.error(f"Error getting projects with filters: {e!s}")
            raise

    async def get_with_counts(
        self,
        db: AsyncSession,
        *,
        project_id: UUID,
    ) -> dict[str, Any] | None:
        """
        Get a single project with agent and issue counts.

        Returns:
            Dictionary with project, agent_count, issue_count, active_sprint_name
        """
        try:
            # Get project
            result = await db.execute(select(Project).where(Project.id == project_id))
            project = result.scalar_one_or_none()

            if not project:
                return None

            # Get agent count
            agent_count_result = await db.execute(
                select(func.count(AgentInstance.id)).where(
                    AgentInstance.project_id == project_id
                )
            )
            agent_count = agent_count_result.scalar_one()

            # Get issue count
            issue_count_result = await db.execute(
                select(func.count(Issue.id)).where(Issue.project_id == project_id)
            )
            issue_count = issue_count_result.scalar_one()

            # Get active sprint name
            active_sprint_result = await db.execute(
                select(Sprint.name).where(
                    Sprint.project_id == project_id,
                    Sprint.status == SprintStatus.ACTIVE,
                )
            )
            active_sprint_name = active_sprint_result.scalar_one_or_none()

            return {
                "project": project,
                "agent_count": agent_count,
                "issue_count": issue_count,
                "active_sprint_name": active_sprint_name,
            }
        except Exception as e:
            logger.error(
                f"Error getting project with counts {project_id}: {e!s}", exc_info=True
            )
            raise

    async def get_multi_with_counts(
        self,
        db: AsyncSession,
        *,
        skip: int = 0,
        limit: int = 100,
        status: ProjectStatus | None = None,
        owner_id: UUID | None = None,
        search: str | None = None,
    ) -> tuple[list[dict[str, Any]], int]:
        """
        Get projects with agent/issue counts in optimized queries.

        Returns:
            Tuple of (list of dicts with project and counts, total count)
        """
        try:
            # Get filtered projects
            projects, total = await self.get_multi_with_filters(
                db,
                skip=skip,
                limit=limit,
                status=status,
                owner_id=owner_id,
                search=search,
            )

            if not projects:
                return [], 0

            project_ids = [p.id for p in projects]

            # Get agent counts in bulk
            agent_counts_result = await db.execute(
                select(
                    AgentInstance.project_id,
                    func.count(AgentInstance.id).label("count"),
                )
                .where(AgentInstance.project_id.in_(project_ids))
                .group_by(AgentInstance.project_id)
            )
            agent_counts = {row.project_id: row.count for row in agent_counts_result}

            # Get issue counts in bulk
            issue_counts_result = await db.execute(
                select(
                    Issue.project_id,
                    func.count(Issue.id).label("count"),
                )
                .where(Issue.project_id.in_(project_ids))
                .group_by(Issue.project_id)
            )
            issue_counts = {row.project_id: row.count for row in issue_counts_result}

            # Get active sprint names
            active_sprints_result = await db.execute(
                select(Sprint.project_id, Sprint.name).where(
                    Sprint.project_id.in_(project_ids),
                    Sprint.status == SprintStatus.ACTIVE,
                )
            )
            active_sprints = {
                row.project_id: row.name for row in active_sprints_result
            }

            # Combine results
            results = [
                {
                    "project": project,
                    "agent_count": agent_counts.get(project.id, 0),
                    "issue_count": issue_counts.get(project.id, 0),
                    "active_sprint_name": active_sprints.get(project.id),
                }
                for project in projects
            ]

            return results, total
        except Exception as e:
            logger.error(
                f"Error getting projects with counts: {e!s}", exc_info=True
            )
            raise

    async def get_projects_by_owner(
        self,
        db: AsyncSession,
        *,
        owner_id: UUID,
        status: ProjectStatus | None = None,
    ) -> list[Project]:
        """Get all projects owned by a specific user."""
        try:
            query = select(Project).where(Project.owner_id == owner_id)

            if status is not None:
                query = query.where(Project.status == status)

            query = query.order_by(Project.created_at.desc())
            result = await db.execute(query)
            return list(result.scalars().all())
        except Exception as e:
            logger.error(
                f"Error getting projects by owner {owner_id}: {e!s}", exc_info=True
            )
            raise

    async def archive_project(
        self,
        db: AsyncSession,
        *,
        project_id: UUID,
    ) -> Project | None:
        """Archive a project by setting status to ARCHIVED."""
        try:
            result = await db.execute(
                select(Project).where(Project.id == project_id)
            )
            project = result.scalar_one_or_none()

            if not project:
                return None

            project.status = ProjectStatus.ARCHIVED
            await db.commit()
            await db.refresh(project)
            return project
        except Exception as e:
            await db.rollback()
            logger.error(
                f"Error archiving project {project_id}: {e!s}", exc_info=True
            )
            raise


# Create a singleton instance for use across the application
project = CRUDProject(Project)
406 backend/app/crud/syndarix/sprint.py Normal file
@@ -0,0 +1,406 @@
|
||||
# app/crud/syndarix/sprint.py
|
||||
"""Async CRUD operations for Sprint model using SQLAlchemy 2.0 patterns."""
|
||||
|
||||
import logging
|
||||
from datetime import date
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
from app.crud.base import CRUDBase
|
||||
from app.models.syndarix import Issue, Sprint
|
||||
from app.models.syndarix.enums import IssueStatus, SprintStatus
|
||||
from app.schemas.syndarix import SprintCreate, SprintUpdate
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CRUDSprint(CRUDBase[Sprint, SprintCreate, SprintUpdate]):
|
||||
"""Async CRUD operations for Sprint model."""
|
||||
|
||||
async def create(self, db: AsyncSession, *, obj_in: SprintCreate) -> Sprint:
|
||||
"""Create a new sprint with error handling."""
|
||||
try:
|
||||
db_obj = Sprint(
|
||||
project_id=obj_in.project_id,
|
||||
name=obj_in.name,
|
||||
number=obj_in.number,
|
||||
goal=obj_in.goal,
|
||||
start_date=obj_in.start_date,
|
||||
end_date=obj_in.end_date,
|
||||
status=obj_in.status,
|
||||
planned_points=obj_in.planned_points,
|
||||
completed_points=obj_in.completed_points,
|
||||
)
|
||||
db.add(db_obj)
|
||||
await db.commit()
|
||||
await db.refresh(db_obj)
|
||||
return db_obj
|
||||
except IntegrityError as e:
|
||||
await db.rollback()
|
||||
error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
|
||||
logger.error(f"Integrity error creating sprint: {error_msg}")
|
||||
raise ValueError(f"Database integrity error: {error_msg}")
|
||||
except Exception as e:
|
||||
await db.rollback()
|
||||
logger.error(f"Unexpected error creating sprint: {e!s}", exc_info=True)
|
||||
raise
|
||||
|
||||
async def get_with_details(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
*,
|
||||
sprint_id: UUID,
|
||||
) -> dict[str, Any] | None:
|
||||
"""
|
||||
Get a sprint with full details including issue counts.
|
||||
|
||||
Returns:
|
||||
Dictionary with sprint and related details
|
||||
"""
|
||||
try:
|
||||
# Get sprint with joined project
|
||||
result = await db.execute(
|
||||
select(Sprint)
|
||||
.options(joinedload(Sprint.project))
|
||||
.where(Sprint.id == sprint_id)
|
||||
)
|
||||
sprint = result.scalar_one_or_none()
|
||||
|
||||
if not sprint:
|
||||
return None
|
||||
|
||||
# Get issue counts
|
||||
issue_counts = await db.execute(
|
||||
select(
|
||||
func.count(Issue.id).label("total"),
|
||||
func.count(Issue.id)
|
||||
.filter(Issue.status == IssueStatus.OPEN)
|
||||
.label("open"),
|
||||
func.count(Issue.id)
|
||||
.filter(Issue.status == IssueStatus.CLOSED)
|
||||
.label("completed"),
|
||||
).where(Issue.sprint_id == sprint_id)
|
||||
)
|
||||
counts = issue_counts.one()
|
||||
|
||||
return {
|
||||
"sprint": sprint,
|
||||
"project_name": sprint.project.name if sprint.project else None,
|
||||
"project_slug": sprint.project.slug if sprint.project else None,
|
||||
"issue_count": counts.total,
|
||||
"open_issues": counts.open,
|
||||
"completed_issues": counts.completed,
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error getting sprint with details {sprint_id}: {e!s}", exc_info=True
|
||||
)
|
||||
raise
|
||||
|
||||
async def get_by_project(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
*,
|
||||
project_id: UUID,
|
||||
status: SprintStatus | None = None,
|
||||
skip: int = 0,
|
||||
limit: int = 100,
|
||||
) -> tuple[list[Sprint], int]:
|
||||
"""Get sprints for a specific project."""
|
||||
try:
|
||||
query = select(Sprint).where(Sprint.project_id == project_id)
|
||||
|
||||
if status is not None:
|
||||
query = query.where(Sprint.status == status)
|
||||
|
||||
# Get total count
|
||||
count_query = select(func.count()).select_from(query.alias())
|
||||
count_result = await db.execute(count_query)
|
||||
total = count_result.scalar_one()
|
||||
|
||||
# Apply sorting (by number descending - newest first)
|
||||
query = query.order_by(Sprint.number.desc())
|
||||
query = query.offset(skip).limit(limit)
|
||||
result = await db.execute(query)
|
||||
sprints = list(result.scalars().all())
|
||||
|
||||
return sprints, total
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error getting sprints by project {project_id}: {e!s}", exc_info=True
|
||||
)
|
||||
raise
|
||||
|
||||
async def get_active_sprint(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
*,
|
||||
project_id: UUID,
|
||||
) -> Sprint | None:
|
||||
"""Get the currently active sprint for a project."""
|
||||
try:
|
||||
result = await db.execute(
|
||||
select(Sprint).where(
|
||||
Sprint.project_id == project_id,
|
||||
Sprint.status == SprintStatus.ACTIVE,
|
||||
)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error getting active sprint for project {project_id}: {e!s}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise
|
||||
|
||||
async def get_next_sprint_number(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
*,
|
||||
project_id: UUID,
|
||||
) -> int:
|
||||
"""Get the next sprint number for a project."""
|
||||
try:
|
||||
result = await db.execute(
|
||||
select(func.max(Sprint.number)).where(Sprint.project_id == project_id)
|
||||
)
|
||||
max_number = result.scalar_one_or_none()
|
||||
return (max_number or 0) + 1
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error getting next sprint number for project {project_id}: {e!s}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise
|
||||
|
||||
async def start_sprint(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
*,
|
||||
sprint_id: UUID,
|
||||
start_date: date | None = None,
|
||||
) -> Sprint | None:
|
||||
"""Start a planned sprint."""
|
||||
try:
|
||||
result = await db.execute(select(Sprint).where(Sprint.id == sprint_id))
|
||||
sprint = result.scalar_one_or_none()
|
||||
|
||||
if not sprint:
|
||||
return None
|
||||
|
||||
if sprint.status != SprintStatus.PLANNED:
|
||||
raise ValueError(
|
||||
f"Cannot start sprint with status {sprint.status.value}"
|
||||
)
|
||||
|
||||
# Check for existing active sprint in project
|
||||
active_sprint = await self.get_active_sprint(db, project_id=sprint.project_id)
|
||||
if active_sprint:
|
||||
raise ValueError(
|
||||
f"Project already has an active sprint: {active_sprint.name}"
|
||||
)
|
||||
|
||||
sprint.status = SprintStatus.ACTIVE
|
||||
if start_date:
|
||||
sprint.start_date = start_date
|
||||
|
||||
# Calculate planned points from issues
|
||||
points_result = await db.execute(
|
||||
select(func.sum(Issue.story_points)).where(Issue.sprint_id == sprint_id)
|
||||
)
|
||||
sprint.planned_points = points_result.scalar_one_or_none() or 0
|
||||
|
||||
await db.commit()
|
||||
await db.refresh(sprint)
|
||||
return sprint
|
||||
except ValueError:
|
||||
raise
|
||||
except Exception as e:
|
||||
await db.rollback()
|
||||
logger.error(f"Error starting sprint {sprint_id}: {e!s}", exc_info=True)
|
||||
raise
|
||||
|
||||
async def complete_sprint(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
*,
|
||||
sprint_id: UUID,
|
||||
) -> Sprint | None:
|
||||
"""Complete an active sprint and calculate completed points."""
|
||||
try:
|
||||
result = await db.execute(select(Sprint).where(Sprint.id == sprint_id))
|
||||
sprint = result.scalar_one_or_none()
|
||||
|
||||
if not sprint:
|
||||
return None
|
||||
|
||||
if sprint.status != SprintStatus.ACTIVE:
|
||||
raise ValueError(
|
||||
f"Cannot complete sprint with status {sprint.status.value}"
|
||||
)
|
||||
|
||||
sprint.status = SprintStatus.COMPLETED
|
||||
|
||||
# Calculate completed points from closed issues
|
||||
points_result = await db.execute(
|
||||
select(func.sum(Issue.story_points)).where(
|
||||
Issue.sprint_id == sprint_id,
|
||||
Issue.status == IssueStatus.CLOSED,
|
||||
)
|
||||
)
|
||||
sprint.completed_points = points_result.scalar_one_or_none() or 0
|
||||
|
||||
await db.commit()
|
||||
await db.refresh(sprint)
|
||||
return sprint
|
||||
except ValueError:
|
||||
raise
|
||||
except Exception as e:
|
||||
await db.rollback()
|
||||
logger.error(f"Error completing sprint {sprint_id}: {e!s}", exc_info=True)
|
||||
raise
|
||||
|
||||
async def cancel_sprint(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
*,
|
||||
sprint_id: UUID,
|
||||
) -> Sprint | None:
|
||||
"""Cancel a sprint (only PLANNED or ACTIVE sprints can be cancelled)."""
|
||||
try:
|
||||
result = await db.execute(select(Sprint).where(Sprint.id == sprint_id))
|
||||
            sprint = result.scalar_one_or_none()

            if not sprint:
                return None

            if sprint.status not in [SprintStatus.PLANNED, SprintStatus.ACTIVE]:
                raise ValueError(
                    f"Cannot cancel sprint with status {sprint.status.value}"
                )

            sprint.status = SprintStatus.CANCELLED
            await db.commit()
            await db.refresh(sprint)
            return sprint
        except ValueError:
            raise
        except Exception as e:
            await db.rollback()
            logger.error(f"Error cancelling sprint {sprint_id}: {e!s}", exc_info=True)
            raise

    async def get_velocity(
        self,
        db: AsyncSession,
        *,
        project_id: UUID,
        limit: int = 5,
    ) -> list[dict[str, Any]]:
        """Get velocity data for completed sprints."""
        try:
            result = await db.execute(
                select(Sprint)
                .where(
                    Sprint.project_id == project_id,
                    Sprint.status == SprintStatus.COMPLETED,
                )
                .order_by(Sprint.number.desc())
                .limit(limit)
            )
            sprints = list(result.scalars().all())

            velocity_data = []
            for sprint in reversed(sprints):  # Return in chronological order
                velocity = None
                if sprint.planned_points and sprint.planned_points > 0:
                    velocity = (sprint.completed_points or 0) / sprint.planned_points
                velocity_data.append(
                    {
                        "sprint_number": sprint.number,
                        "sprint_name": sprint.name,
                        "planned_points": sprint.planned_points,
                        "completed_points": sprint.completed_points,
                        "velocity": velocity,
                    }
                )

            return velocity_data
        except Exception as e:
            logger.error(
                f"Error getting velocity for project {project_id}: {e!s}",
                exc_info=True,
            )
            raise

    async def get_sprints_with_issue_counts(
        self,
        db: AsyncSession,
        *,
        project_id: UUID,
        skip: int = 0,
        limit: int = 100,
    ) -> tuple[list[dict[str, Any]], int]:
        """Get sprints with per-sprint issue counts using bulk aggregate queries."""
        try:
            # Get sprints
            sprints, total = await self.get_by_project(
                db, project_id=project_id, skip=skip, limit=limit
            )

            if not sprints:
                return [], 0

            sprint_ids = [s.id for s in sprints]

            # Get issue counts in bulk: COUNT(...) FILTER (WHERE ...) returns
            # total/open/completed per sprint from a single grouped query.
            issue_counts = await db.execute(
                select(
                    Issue.sprint_id,
                    func.count(Issue.id).label("total"),
                    func.count(Issue.id)
                    .filter(Issue.status == IssueStatus.OPEN)
                    .label("open"),
                    func.count(Issue.id)
                    .filter(Issue.status == IssueStatus.CLOSED)
                    .label("completed"),
                )
                .where(Issue.sprint_id.in_(sprint_ids))
                .group_by(Issue.sprint_id)
            )
            counts_map = {
                row.sprint_id: {
                    "issue_count": row.total,
                    "open_issues": row.open,
                    "completed_issues": row.completed,
                }
                for row in issue_counts
            }

            # Combine results
            results = [
                {
                    "sprint": sprint,
                    **counts_map.get(
                        sprint.id,
                        {"issue_count": 0, "open_issues": 0, "completed_issues": 0},
                    ),
                }
                for sprint in sprints
            ]

            return results, total
        except Exception as e:
            logger.error(
                f"Error getting sprints with counts for project {project_id}: {e!s}",
                exc_info=True,
            )
            raise


# Create a singleton instance for use across the application
sprint = CRUDSprint(Sprint)
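As a quick orientation, a minimal sketch of how the singleton above could be called from application code; the `AsyncSessionLocal` import path is an assumption and is not defined by this change.

```python
from uuid import UUID

from app.crud.syndarix import sprint as sprint_crud
from app.db.session import AsyncSessionLocal  # assumed location of the async session factory


async def report_velocity(project_id: UUID) -> None:
    """Print the last five completed sprints and their completed/planned ratio."""
    async with AsyncSessionLocal() as db:
        history = await sprint_crud.get_velocity(db, project_id=project_id, limit=5)
        for point in history:
            ratio = point["velocity"]
            label = f"{ratio:.2f}" if ratio is not None else "n/a"
            print(
                f"Sprint {point['sprint_number']} ({point['sprint_name']}): "
                f"{point['completed_points'] or 0}/{point['planned_points'] or 0} points, "
                f"velocity={label}"
            )
```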
|
||||
@@ -23,6 +23,15 @@ from .user import User
from .user_organization import OrganizationRole, UserOrganization
from .user_session import UserSession

# Syndarix domain models
from .syndarix import (
    AgentInstance,
    AgentType,
    Issue,
    Project,
    Sprint,
)

__all__ = [
    "Base",
    "OAuthAccount",
@@ -38,4 +47,10 @@ __all__ = [
    "User",
    "UserOrganization",
    "UserSession",
    # Syndarix models
    "AgentInstance",
    "AgentType",
    "Issue",
    "Project",
    "Sprint",
]
|
||||
|
||||
41
backend/app/models/syndarix/__init__.py
Normal file
@@ -0,0 +1,41 @@
|
||||
# app/models/syndarix/__init__.py
|
||||
"""
|
||||
Syndarix domain models.
|
||||
|
||||
This package contains all the core entities for the Syndarix AI consulting platform:
|
||||
- Project: Client engagements with autonomy settings
|
||||
- AgentType: Templates for AI agent capabilities
|
||||
- AgentInstance: Spawned agents working on projects
|
||||
- Issue: Units of work with external tracker sync
|
||||
- Sprint: Time-boxed iterations for organizing work
|
||||
"""
|
||||
|
||||
from .agent_instance import AgentInstance
|
||||
from .agent_type import AgentType
|
||||
from .enums import (
|
||||
AgentStatus,
|
||||
AutonomyLevel,
|
||||
IssuePriority,
|
||||
IssueStatus,
|
||||
ProjectStatus,
|
||||
SprintStatus,
|
||||
SyncStatus,
|
||||
)
|
||||
from .issue import Issue
|
||||
from .project import Project
|
||||
from .sprint import Sprint
|
||||
|
||||
__all__ = [
|
||||
"AgentInstance",
|
||||
"AgentStatus",
|
||||
"AgentType",
|
||||
"AutonomyLevel",
|
||||
"Issue",
|
||||
"IssuePriority",
|
||||
"IssueStatus",
|
||||
"Project",
|
||||
"ProjectStatus",
|
||||
"Sprint",
|
||||
"SprintStatus",
|
||||
"SyncStatus",
|
||||
]
|
||||
108
backend/app/models/syndarix/agent_instance.py
Normal file
@@ -0,0 +1,108 @@
|
||||
# app/models/syndarix/agent_instance.py
|
||||
"""
|
||||
AgentInstance model for Syndarix AI consulting platform.
|
||||
|
||||
An AgentInstance is a spawned instance of an AgentType, assigned to a
|
||||
specific project to perform work.
|
||||
"""
|
||||
|
||||
from sqlalchemy import (
|
||||
BigInteger,
|
||||
Column,
|
||||
DateTime,
|
||||
Enum,
|
||||
ForeignKey,
|
||||
Index,
|
||||
Integer,
|
||||
Numeric,
|
||||
String,
|
||||
Text,
|
||||
)
|
||||
from sqlalchemy.dialects.postgresql import (
|
||||
JSONB,
|
||||
UUID as PGUUID,
|
||||
)
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from app.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
from .enums import AgentStatus
|
||||
|
||||
|
||||
class AgentInstance(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
AgentInstance model representing a spawned agent working on a project.
|
||||
|
||||
Tracks:
|
||||
- Current status and task
|
||||
- Memory (short-term in DB, long-term reference to vector store)
|
||||
- Session information for MCP connections
|
||||
- Usage metrics (tasks completed, tokens, cost)
|
||||
"""
|
||||
|
||||
__tablename__ = "agent_instances"
|
||||
|
||||
# Foreign keys
|
||||
agent_type_id = Column(
|
||||
PGUUID(as_uuid=True),
|
||||
ForeignKey("agent_types.id", ondelete="RESTRICT"),
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
project_id = Column(
|
||||
PGUUID(as_uuid=True),
|
||||
ForeignKey("projects.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# Status tracking
|
||||
status: Column[AgentStatus] = Column(
|
||||
Enum(AgentStatus),
|
||||
default=AgentStatus.IDLE,
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# Current task description (brief summary of what agent is doing)
|
||||
current_task = Column(Text, nullable=True)
|
||||
|
||||
# Short-term memory stored in database (conversation context, recent decisions)
|
||||
short_term_memory = Column(JSONB, default=dict, nullable=False)
|
||||
|
||||
# Reference to long-term memory in vector store (e.g., "project-123/agent-456")
|
||||
long_term_memory_ref = Column(String(500), nullable=True)
|
||||
|
||||
# Session ID for active MCP connections
|
||||
session_id = Column(String(255), nullable=True, index=True)
|
||||
|
||||
# Activity tracking
|
||||
last_activity_at = Column(DateTime(timezone=True), nullable=True, index=True)
|
||||
terminated_at = Column(DateTime(timezone=True), nullable=True, index=True)
|
||||
|
||||
# Usage metrics
|
||||
tasks_completed = Column(Integer, default=0, nullable=False)
|
||||
tokens_used = Column(BigInteger, default=0, nullable=False)
|
||||
cost_incurred = Column(Numeric(precision=10, scale=4), default=0, nullable=False)
|
||||
|
||||
# Relationships
|
||||
agent_type = relationship("AgentType", back_populates="instances")
|
||||
project = relationship("Project", back_populates="agent_instances")
|
||||
assigned_issues = relationship(
|
||||
"Issue",
|
||||
back_populates="assigned_agent",
|
||||
foreign_keys="Issue.assigned_agent_id",
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_agent_instances_project_status", "project_id", "status"),
|
||||
Index("ix_agent_instances_type_status", "agent_type_id", "status"),
|
||||
Index("ix_agent_instances_project_type", "project_id", "agent_type_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
f"<AgentInstance {self.id} type={self.agent_type_id} "
|
||||
f"project={self.project_id} status={self.status.value}>"
|
||||
)
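A sketch of the kind of lookup the `ix_agent_instances_project_status` composite index is meant to serve; the function name and session handling are illustrative only, not part of this diff.

```python
from uuid import UUID

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.syndarix import AgentInstance, AgentStatus


async def idle_agents_for_project(db: AsyncSession, project_id: UUID) -> list[AgentInstance]:
    # Filters on (project_id, status), which matches ix_agent_instances_project_status.
    result = await db.execute(
        select(AgentInstance).where(
            AgentInstance.project_id == project_id,
            AgentInstance.status == AgentStatus.IDLE,
        )
    )
    return list(result.scalars().all())
```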
|
||||
72
backend/app/models/syndarix/agent_type.py
Normal file
@@ -0,0 +1,72 @@
|
||||
# app/models/syndarix/agent_type.py
|
||||
"""
|
||||
AgentType model for Syndarix AI consulting platform.
|
||||
|
||||
An AgentType is a template that defines the capabilities, personality,
|
||||
and model configuration for agent instances.
|
||||
"""
|
||||
|
||||
from sqlalchemy import Boolean, Column, Index, String, Text
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from app.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class AgentType(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
AgentType model representing a template for agent instances.
|
||||
|
||||
Each agent type defines:
|
||||
- Expertise areas and personality prompt
|
||||
- Model configuration (primary, fallback, parameters)
|
||||
- MCP server access and tool permissions
|
||||
|
||||
Examples: ProductOwner, Architect, BackendEngineer, QAEngineer
|
||||
"""
|
||||
|
||||
__tablename__ = "agent_types"
|
||||
|
||||
name = Column(String(255), nullable=False, index=True)
|
||||
slug = Column(String(255), unique=True, nullable=False, index=True)
|
||||
description = Column(Text, nullable=True)
|
||||
|
||||
# Areas of expertise for this agent type (e.g., ["python", "fastapi", "databases"])
|
||||
expertise = Column(JSONB, default=list, nullable=False)
|
||||
|
||||
# System prompt defining the agent's personality and behavior
|
||||
personality_prompt = Column(Text, nullable=False)
|
||||
|
||||
# Primary LLM model to use (e.g., "claude-opus-4-5-20251101")
|
||||
primary_model = Column(String(100), nullable=False)
|
||||
|
||||
# Fallback models in order of preference
|
||||
fallback_models = Column(JSONB, default=list, nullable=False)
|
||||
|
||||
# Model parameters (temperature, max_tokens, etc.)
|
||||
model_params = Column(JSONB, default=dict, nullable=False)
|
||||
|
||||
# List of MCP servers this agent can connect to
|
||||
mcp_servers = Column(JSONB, default=list, nullable=False)
|
||||
|
||||
# Tool permissions configuration
|
||||
# Structure: {"allowed": ["*"], "denied": [], "require_approval": ["gitea:create_pr"]}
|
||||
tool_permissions = Column(JSONB, default=dict, nullable=False)
|
||||
|
||||
# Whether this agent type is available for new instances
|
||||
is_active = Column(Boolean, default=True, nullable=False, index=True)
|
||||
|
||||
# Relationships
|
||||
instances = relationship(
|
||||
"AgentInstance",
|
||||
back_populates="agent_type",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_agent_types_slug_active", "slug", "is_active"),
|
||||
Index("ix_agent_types_name_active", "name", "is_active"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<AgentType {self.name} ({self.slug}) active={self.is_active}>"
|
||||
123
backend/app/models/syndarix/enums.py
Normal file
@@ -0,0 +1,123 @@
|
||||
# app/models/syndarix/enums.py
|
||||
"""
|
||||
Enums for Syndarix domain models.
|
||||
|
||||
These enums represent the core state machines and categorizations
|
||||
used throughout the Syndarix AI consulting platform.
|
||||
"""
|
||||
|
||||
from enum import Enum as PyEnum
|
||||
|
||||
|
||||
class AutonomyLevel(str, PyEnum):
|
||||
"""
|
||||
Defines how much control the human has over agent actions.
|
||||
|
||||
FULL_CONTROL: Human must approve every agent action
|
||||
MILESTONE: Human approves at sprint boundaries and major decisions
|
||||
AUTONOMOUS: Agents work independently, only escalating critical issues
|
||||
"""
|
||||
|
||||
FULL_CONTROL = "full_control"
|
||||
MILESTONE = "milestone"
|
||||
AUTONOMOUS = "autonomous"
|
||||
|
||||
|
||||
class ProjectStatus(str, PyEnum):
|
||||
"""
|
||||
Project lifecycle status.
|
||||
|
||||
ACTIVE: Project is actively being worked on
|
||||
PAUSED: Project is temporarily on hold
|
||||
COMPLETED: Project has been delivered successfully
|
||||
ARCHIVED: Project is no longer accessible for work
|
||||
"""
|
||||
|
||||
ACTIVE = "active"
|
||||
PAUSED = "paused"
|
||||
COMPLETED = "completed"
|
||||
ARCHIVED = "archived"
|
||||
|
||||
|
||||
class AgentStatus(str, PyEnum):
|
||||
"""
|
||||
Current operational status of an agent instance.
|
||||
|
||||
IDLE: Agent is available but not currently working
|
||||
WORKING: Agent is actively processing a task
|
||||
WAITING: Agent is waiting for external input or approval
|
||||
PAUSED: Agent has been manually paused
|
||||
TERMINATED: Agent instance has been shut down
|
||||
"""
|
||||
|
||||
IDLE = "idle"
|
||||
WORKING = "working"
|
||||
WAITING = "waiting"
|
||||
PAUSED = "paused"
|
||||
TERMINATED = "terminated"
|
||||
|
||||
|
||||
class IssueStatus(str, PyEnum):
|
||||
"""
|
||||
Issue workflow status.
|
||||
|
||||
OPEN: Issue is ready to be worked on
|
||||
IN_PROGRESS: Agent or human is actively working on the issue
|
||||
IN_REVIEW: Work is complete, awaiting review
|
||||
BLOCKED: Issue cannot proceed due to dependencies or blockers
|
||||
CLOSED: Issue has been completed or cancelled
|
||||
"""
|
||||
|
||||
OPEN = "open"
|
||||
IN_PROGRESS = "in_progress"
|
||||
IN_REVIEW = "in_review"
|
||||
BLOCKED = "blocked"
|
||||
CLOSED = "closed"
|
||||
|
||||
|
||||
class IssuePriority(str, PyEnum):
|
||||
"""
|
||||
Issue priority levels.
|
||||
|
||||
LOW: Nice to have, can be deferred
|
||||
MEDIUM: Standard priority, should be done
|
||||
HIGH: Important, should be prioritized
|
||||
CRITICAL: Must be done immediately, blocking other work
|
||||
"""
|
||||
|
||||
LOW = "low"
|
||||
MEDIUM = "medium"
|
||||
HIGH = "high"
|
||||
CRITICAL = "critical"
|
||||
|
||||
|
||||
class SyncStatus(str, PyEnum):
|
||||
"""
|
||||
External issue tracker synchronization status.
|
||||
|
||||
SYNCED: Local and remote are in sync
|
||||
PENDING: Local changes waiting to be pushed
|
||||
CONFLICT: Merge conflict between local and remote
|
||||
ERROR: Synchronization failed due to an error
|
||||
"""
|
||||
|
||||
SYNCED = "synced"
|
||||
PENDING = "pending"
|
||||
CONFLICT = "conflict"
|
||||
ERROR = "error"
|
||||
|
||||
|
||||
class SprintStatus(str, PyEnum):
|
||||
"""
|
||||
Sprint lifecycle status.
|
||||
|
||||
PLANNED: Sprint has been created but not started
|
||||
ACTIVE: Sprint is currently in progress
|
||||
COMPLETED: Sprint has been finished successfully
|
||||
CANCELLED: Sprint was cancelled before completion
|
||||
"""
|
||||
|
||||
PLANNED = "planned"
|
||||
ACTIVE = "active"
|
||||
COMPLETED = "completed"
|
||||
CANCELLED = "cancelled"
|
||||
133
backend/app/models/syndarix/issue.py
Normal file
@@ -0,0 +1,133 @@
|
||||
# app/models/syndarix/issue.py
|
||||
"""
|
||||
Issue model for Syndarix AI consulting platform.
|
||||
|
||||
An Issue represents a unit of work that can be assigned to agents or humans,
|
||||
with optional synchronization to external issue trackers (Gitea, GitHub, GitLab).
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, DateTime, Enum, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.dialects.postgresql import (
|
||||
JSONB,
|
||||
UUID as PGUUID,
|
||||
)
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from app.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
from .enums import IssuePriority, IssueStatus, SyncStatus
|
||||
|
||||
|
||||
class Issue(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Issue model representing a unit of work in a project.
|
||||
|
||||
Features:
|
||||
- Standard issue fields (title, body, status, priority)
|
||||
- Assignment to agent instances or human assignees
|
||||
- Sprint association for backlog management
|
||||
- External tracker synchronization (Gitea, GitHub, GitLab)
|
||||
"""
|
||||
|
||||
__tablename__ = "issues"
|
||||
|
||||
# Foreign key to project
|
||||
project_id = Column(
|
||||
PGUUID(as_uuid=True),
|
||||
ForeignKey("projects.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# Issue content
|
||||
title = Column(String(500), nullable=False)
|
||||
body = Column(Text, nullable=False, default="")
|
||||
|
||||
# Status and priority
|
||||
status: Column[IssueStatus] = Column(
|
||||
Enum(IssueStatus),
|
||||
default=IssueStatus.OPEN,
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
priority: Column[IssuePriority] = Column(
|
||||
Enum(IssuePriority),
|
||||
default=IssuePriority.MEDIUM,
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# Labels for categorization (e.g., ["bug", "frontend", "urgent"])
|
||||
labels = Column(JSONB, default=list, nullable=False)
|
||||
|
||||
# Assignment - either to an agent or a human (mutually exclusive)
|
||||
assigned_agent_id = Column(
|
||||
PGUUID(as_uuid=True),
|
||||
ForeignKey("agent_instances.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# Human assignee (username or email, not a FK to allow external users)
|
||||
human_assignee = Column(String(255), nullable=True, index=True)
|
||||
|
||||
# Sprint association
|
||||
sprint_id = Column(
|
||||
PGUUID(as_uuid=True),
|
||||
ForeignKey("sprints.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# Story points for estimation
|
||||
story_points = Column(Integer, nullable=True)
|
||||
|
||||
# External tracker integration
|
||||
external_tracker = Column(
|
||||
String(50),
|
||||
nullable=True,
|
||||
index=True,
|
||||
) # 'gitea', 'github', 'gitlab'
|
||||
|
||||
external_id = Column(String(255), nullable=True) # External system's ID
|
||||
external_url = Column(String(1000), nullable=True) # Link to external issue
|
||||
external_number = Column(Integer, nullable=True) # Issue number (e.g., #123)
|
||||
|
||||
# Sync status with external tracker
|
||||
sync_status: Column[SyncStatus] = Column(
|
||||
Enum(SyncStatus),
|
||||
default=SyncStatus.SYNCED,
|
||||
nullable=False,
|
||||
# Note: Index defined in __table_args__ as ix_issues_sync_status
|
||||
)
|
||||
|
||||
last_synced_at = Column(DateTime(timezone=True), nullable=True)
|
||||
external_updated_at = Column(DateTime(timezone=True), nullable=True)
|
||||
|
||||
# Lifecycle timestamp
|
||||
closed_at = Column(DateTime(timezone=True), nullable=True, index=True)
|
||||
|
||||
# Relationships
|
||||
project = relationship("Project", back_populates="issues")
|
||||
assigned_agent = relationship(
|
||||
"AgentInstance",
|
||||
back_populates="assigned_issues",
|
||||
foreign_keys=[assigned_agent_id],
|
||||
)
|
||||
sprint = relationship("Sprint", back_populates="issues")
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_issues_project_status", "project_id", "status"),
|
||||
Index("ix_issues_project_priority", "project_id", "priority"),
|
||||
Index("ix_issues_project_sprint", "project_id", "sprint_id"),
|
||||
Index("ix_issues_external_tracker_id", "external_tracker", "external_id"),
|
||||
Index("ix_issues_sync_status", "sync_status"),
|
||||
Index("ix_issues_project_agent", "project_id", "assigned_agent_id"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
f"<Issue {self.id} title='{self.title[:30]}...' "
|
||||
f"status={self.status.value} priority={self.priority.value}>"
|
||||
)
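A sketch of how the external-tracker columns might be used when a webhook payload arrives; the helper is hypothetical and simply exercises the `(external_tracker, external_id)` pair behind `ix_issues_external_tracker_id`.

```python
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.syndarix import Issue


async def find_synced_issue(db: AsyncSession, tracker: str, external_id: str) -> Issue | None:
    # Lookup by (external_tracker, external_id), served by ix_issues_external_tracker_id.
    result = await db.execute(
        select(Issue).where(
            Issue.external_tracker == tracker,
            Issue.external_id == external_id,
        )
    )
    return result.scalar_one_or_none()
```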
|
||||
88
backend/app/models/syndarix/project.py
Normal file
@@ -0,0 +1,88 @@
|
||||
# app/models/syndarix/project.py
|
||||
"""
|
||||
Project model for Syndarix AI consulting platform.
|
||||
|
||||
A Project represents a client engagement where AI agents collaborate
|
||||
to deliver software solutions.
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, Enum, ForeignKey, Index, String, Text
|
||||
from sqlalchemy.dialects.postgresql import (
|
||||
JSONB,
|
||||
UUID as PGUUID,
|
||||
)
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from app.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
from .enums import AutonomyLevel, ProjectStatus
|
||||
|
||||
|
||||
class Project(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Project model representing a client engagement.
|
||||
|
||||
A project contains:
|
||||
- Configuration for how autonomous agents should operate
|
||||
- Settings for MCP server integrations
|
||||
- Relationship to assigned agents, issues, and sprints
|
||||
"""
|
||||
|
||||
__tablename__ = "projects"
|
||||
|
||||
name = Column(String(255), nullable=False, index=True)
|
||||
slug = Column(String(255), unique=True, nullable=False, index=True)
|
||||
description = Column(Text, nullable=True)
|
||||
|
||||
autonomy_level: Column[AutonomyLevel] = Column(
|
||||
Enum(AutonomyLevel),
|
||||
default=AutonomyLevel.MILESTONE,
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
status: Column[ProjectStatus] = Column(
|
||||
Enum(ProjectStatus),
|
||||
default=ProjectStatus.ACTIVE,
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# JSON field for flexible project configuration
|
||||
# Can include: mcp_servers, webhook_urls, notification_settings, etc.
|
||||
settings = Column(JSONB, default=dict, nullable=False)
|
||||
|
||||
# Foreign key to the User who owns this project
|
||||
owner_id = Column(
|
||||
PGUUID(as_uuid=True),
|
||||
ForeignKey("users.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# Relationships
|
||||
owner = relationship("User", foreign_keys=[owner_id])
|
||||
agent_instances = relationship(
|
||||
"AgentInstance",
|
||||
back_populates="project",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
issues = relationship(
|
||||
"Issue",
|
||||
back_populates="project",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
sprints = relationship(
|
||||
"Sprint",
|
||||
back_populates="project",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_projects_slug_status", "slug", "status"),
|
||||
Index("ix_projects_owner_status", "owner_id", "status"),
|
||||
Index("ix_projects_autonomy_status", "autonomy_level", "status"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<Project {self.name} ({self.slug}) status={self.status.value}>"
|
||||
74
backend/app/models/syndarix/sprint.py
Normal file
@@ -0,0 +1,74 @@
|
||||
# app/models/syndarix/sprint.py
|
||||
"""
|
||||
Sprint model for Syndarix AI consulting platform.
|
||||
|
||||
A Sprint represents a time-boxed iteration for organizing and delivering work.
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, Date, Enum, ForeignKey, Index, Integer, String, Text
|
||||
from sqlalchemy.dialects.postgresql import UUID as PGUUID
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from app.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
from .enums import SprintStatus
|
||||
|
||||
|
||||
class Sprint(Base, UUIDMixin, TimestampMixin):
|
||||
"""
|
||||
Sprint model representing a time-boxed iteration.
|
||||
|
||||
Tracks:
|
||||
- Sprint metadata (name, number, goal)
|
||||
- Date range (start/end)
|
||||
- Progress metrics (planned vs completed points)
|
||||
"""
|
||||
|
||||
__tablename__ = "sprints"
|
||||
|
||||
# Foreign key to project
|
||||
project_id = Column(
|
||||
PGUUID(as_uuid=True),
|
||||
ForeignKey("projects.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# Sprint identification
|
||||
name = Column(String(255), nullable=False)
|
||||
number = Column(Integer, nullable=False) # Sprint number within project
|
||||
|
||||
# Sprint goal (what we aim to achieve)
|
||||
goal = Column(Text, nullable=True)
|
||||
|
||||
# Date range
|
||||
start_date = Column(Date, nullable=False, index=True)
|
||||
end_date = Column(Date, nullable=False, index=True)
|
||||
|
||||
# Status
|
||||
status: Column[SprintStatus] = Column(
|
||||
Enum(SprintStatus),
|
||||
default=SprintStatus.PLANNED,
|
||||
nullable=False,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# Progress metrics
|
||||
planned_points = Column(Integer, nullable=True) # Sum of story points at start
|
||||
completed_points = Column(Integer, nullable=True) # Sum of completed story points
|
||||
|
||||
# Relationships
|
||||
project = relationship("Project", back_populates="sprints")
|
||||
issues = relationship("Issue", back_populates="sprint")
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_sprints_project_status", "project_id", "status"),
|
||||
Index("ix_sprints_project_number", "project_id", "number"),
|
||||
Index("ix_sprints_date_range", "start_date", "end_date"),
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
f"<Sprint {self.name} (#{self.number}) "
|
||||
f"project={self.project_id} status={self.status.value}>"
|
||||
)
|
||||
113
backend/app/schemas/syndarix/__init__.py
Normal file
@@ -0,0 +1,113 @@
|
||||
# app/schemas/syndarix/__init__.py
|
||||
"""
|
||||
Syndarix domain schemas.
|
||||
|
||||
This package contains Pydantic schemas for validating and serializing
|
||||
Syndarix domain entities.
|
||||
"""
|
||||
|
||||
from .agent_instance import (
|
||||
AgentInstanceCreate,
|
||||
AgentInstanceInDB,
|
||||
AgentInstanceListResponse,
|
||||
AgentInstanceMetrics,
|
||||
AgentInstanceResponse,
|
||||
AgentInstanceTerminate,
|
||||
AgentInstanceUpdate,
|
||||
)
|
||||
from .agent_type import (
|
||||
AgentTypeCreate,
|
||||
AgentTypeInDB,
|
||||
AgentTypeListResponse,
|
||||
AgentTypeResponse,
|
||||
AgentTypeUpdate,
|
||||
)
|
||||
from .enums import (
|
||||
AgentStatus,
|
||||
AutonomyLevel,
|
||||
IssuePriority,
|
||||
IssueStatus,
|
||||
ProjectStatus,
|
||||
SprintStatus,
|
||||
SyncStatus,
|
||||
)
|
||||
from .issue import (
|
||||
IssueAssign,
|
||||
IssueClose,
|
||||
IssueCreate,
|
||||
IssueInDB,
|
||||
IssueListResponse,
|
||||
IssueResponse,
|
||||
IssueStats,
|
||||
IssueSyncUpdate,
|
||||
IssueUpdate,
|
||||
)
|
||||
from .project import (
|
||||
ProjectCreate,
|
||||
ProjectInDB,
|
||||
ProjectListResponse,
|
||||
ProjectResponse,
|
||||
ProjectUpdate,
|
||||
)
|
||||
from .sprint import (
|
||||
SprintBurndown,
|
||||
SprintComplete,
|
||||
SprintCreate,
|
||||
SprintInDB,
|
||||
SprintListResponse,
|
||||
SprintResponse,
|
||||
SprintStart,
|
||||
SprintUpdate,
|
||||
SprintVelocity,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# AgentInstance schemas
|
||||
"AgentInstanceCreate",
|
||||
"AgentInstanceInDB",
|
||||
"AgentInstanceListResponse",
|
||||
"AgentInstanceMetrics",
|
||||
"AgentInstanceResponse",
|
||||
"AgentInstanceTerminate",
|
||||
"AgentInstanceUpdate",
|
||||
# Enums
|
||||
"AgentStatus",
|
||||
# AgentType schemas
|
||||
"AgentTypeCreate",
|
||||
"AgentTypeInDB",
|
||||
"AgentTypeListResponse",
|
||||
"AgentTypeResponse",
|
||||
"AgentTypeUpdate",
|
||||
"AutonomyLevel",
|
||||
# Issue schemas
|
||||
"IssueAssign",
|
||||
"IssueClose",
|
||||
"IssueCreate",
|
||||
"IssueInDB",
|
||||
"IssueListResponse",
|
||||
"IssuePriority",
|
||||
"IssueResponse",
|
||||
"IssueStats",
|
||||
"IssueStatus",
|
||||
"IssueSyncUpdate",
|
||||
"IssueUpdate",
|
||||
# Project schemas
|
||||
"ProjectCreate",
|
||||
"ProjectInDB",
|
||||
"ProjectListResponse",
|
||||
"ProjectResponse",
|
||||
"ProjectStatus",
|
||||
"ProjectUpdate",
|
||||
# Sprint schemas
|
||||
"SprintBurndown",
|
||||
"SprintComplete",
|
||||
"SprintCreate",
|
||||
"SprintInDB",
|
||||
"SprintListResponse",
|
||||
"SprintResponse",
|
||||
"SprintStart",
|
||||
"SprintStatus",
|
||||
"SprintUpdate",
|
||||
"SprintVelocity",
|
||||
"SyncStatus",
|
||||
]
|
||||
122
backend/app/schemas/syndarix/agent_instance.py
Normal file
@@ -0,0 +1,122 @@
|
||||
# app/schemas/syndarix/agent_instance.py
|
||||
"""
|
||||
Pydantic schemas for AgentInstance entity.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
from .enums import AgentStatus
|
||||
|
||||
|
||||
class AgentInstanceBase(BaseModel):
|
||||
"""Base agent instance schema with common fields."""
|
||||
|
||||
agent_type_id: UUID
|
||||
project_id: UUID
|
||||
status: AgentStatus = AgentStatus.IDLE
|
||||
current_task: str | None = None
|
||||
short_term_memory: dict[str, Any] = Field(default_factory=dict)
|
||||
long_term_memory_ref: str | None = Field(None, max_length=500)
|
||||
session_id: str | None = Field(None, max_length=255)
|
||||
|
||||
|
||||
class AgentInstanceCreate(BaseModel):
|
||||
"""Schema for creating a new agent instance."""
|
||||
|
||||
agent_type_id: UUID
|
||||
project_id: UUID
|
||||
status: AgentStatus = AgentStatus.IDLE
|
||||
current_task: str | None = None
|
||||
short_term_memory: dict[str, Any] = Field(default_factory=dict)
|
||||
long_term_memory_ref: str | None = Field(None, max_length=500)
|
||||
session_id: str | None = Field(None, max_length=255)
|
||||
|
||||
|
||||
class AgentInstanceUpdate(BaseModel):
|
||||
"""Schema for updating an agent instance."""
|
||||
|
||||
status: AgentStatus | None = None
|
||||
current_task: str | None = None
|
||||
short_term_memory: dict[str, Any] | None = None
|
||||
long_term_memory_ref: str | None = None
|
||||
session_id: str | None = None
|
||||
last_activity_at: datetime | None = None
|
||||
tasks_completed: int | None = Field(None, ge=0)
|
||||
tokens_used: int | None = Field(None, ge=0)
|
||||
cost_incurred: Decimal | None = Field(None, ge=0)
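Since every field on `AgentInstanceUpdate` is optional, callers can send partial payloads and the CRUD layer can apply only what was actually provided; a sketch of that pattern, where `exclude_unset` is the key detail:

```python
from app.schemas.syndarix import AgentInstanceUpdate, AgentStatus

payload = AgentInstanceUpdate(
    status=AgentStatus.WORKING,
    current_task="Implement the login endpoint",
)

# Only the fields the caller set survive, so unspecified columns stay untouched.
changes = payload.model_dump(exclude_unset=True)
assert set(changes) == {"status", "current_task"}
```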
|
||||
|
||||
|
||||
class AgentInstanceTerminate(BaseModel):
|
||||
"""Schema for terminating an agent instance."""
|
||||
|
||||
reason: str | None = None
|
||||
|
||||
|
||||
class AgentInstanceInDB(AgentInstanceBase):
|
||||
"""Schema for agent instance in database."""
|
||||
|
||||
id: UUID
|
||||
last_activity_at: datetime | None = None
|
||||
terminated_at: datetime | None = None
|
||||
tasks_completed: int = 0
|
||||
tokens_used: int = 0
|
||||
cost_incurred: Decimal = Decimal("0.0000")
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class AgentInstanceResponse(BaseModel):
|
||||
"""Schema for agent instance API responses."""
|
||||
|
||||
id: UUID
|
||||
agent_type_id: UUID
|
||||
project_id: UUID
|
||||
status: AgentStatus
|
||||
current_task: str | None = None
|
||||
short_term_memory: dict[str, Any] = Field(default_factory=dict)
|
||||
long_term_memory_ref: str | None = None
|
||||
session_id: str | None = None
|
||||
last_activity_at: datetime | None = None
|
||||
terminated_at: datetime | None = None
|
||||
tasks_completed: int = 0
|
||||
tokens_used: int = 0
|
||||
cost_incurred: Decimal = Decimal("0.0000")
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
# Expanded fields from relationships
|
||||
agent_type_name: str | None = None
|
||||
agent_type_slug: str | None = None
|
||||
project_name: str | None = None
|
||||
project_slug: str | None = None
|
||||
assigned_issues_count: int | None = 0
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class AgentInstanceListResponse(BaseModel):
|
||||
"""Schema for paginated agent instance list responses."""
|
||||
|
||||
agent_instances: list[AgentInstanceResponse]
|
||||
total: int
|
||||
page: int
|
||||
page_size: int
|
||||
pages: int
|
||||
|
||||
|
||||
class AgentInstanceMetrics(BaseModel):
|
||||
"""Schema for agent instance metrics summary."""
|
||||
|
||||
total_instances: int
|
||||
active_instances: int
|
||||
idle_instances: int
|
||||
total_tasks_completed: int
|
||||
total_tokens_used: int
|
||||
total_cost_incurred: Decimal
|
||||
151
backend/app/schemas/syndarix/agent_type.py
Normal file
@@ -0,0 +1,151 @@
|
||||
# app/schemas/syndarix/agent_type.py
|
||||
"""
|
||||
Pydantic schemas for AgentType entity.
|
||||
"""
|
||||
|
||||
import re
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
||||
|
||||
|
||||
class AgentTypeBase(BaseModel):
|
||||
"""Base agent type schema with common fields."""
|
||||
|
||||
name: str = Field(..., min_length=1, max_length=255)
|
||||
slug: str | None = Field(None, min_length=1, max_length=255)
|
||||
description: str | None = None
|
||||
expertise: list[str] = Field(default_factory=list)
|
||||
personality_prompt: str = Field(..., min_length=1)
|
||||
primary_model: str = Field(..., min_length=1, max_length=100)
|
||||
fallback_models: list[str] = Field(default_factory=list)
|
||||
model_params: dict[str, Any] = Field(default_factory=dict)
|
||||
mcp_servers: list[str] = Field(default_factory=list)
|
||||
tool_permissions: dict[str, Any] = Field(default_factory=dict)
|
||||
is_active: bool = True
|
||||
|
||||
@field_validator("slug")
|
||||
@classmethod
|
||||
def validate_slug(cls, v: str | None) -> str | None:
|
||||
"""Validate slug format: lowercase, alphanumeric, hyphens only."""
|
||||
if v is None:
|
||||
return v
|
||||
if not re.match(r"^[a-z0-9-]+$", v):
|
||||
raise ValueError(
|
||||
"Slug must contain only lowercase letters, numbers, and hyphens"
|
||||
)
|
||||
if v.startswith("-") or v.endswith("-"):
|
||||
raise ValueError("Slug cannot start or end with a hyphen")
|
||||
if "--" in v:
|
||||
raise ValueError("Slug cannot contain consecutive hyphens")
|
||||
return v
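The slug rules above reject a few easy-to-miss cases; a small illustration, with the required fields filled in using sample values:

```python
from pydantic import ValidationError

from app.schemas.syndarix import AgentTypeCreate


def _minimal(slug: str) -> AgentTypeCreate:
    # Helper for illustration only.
    return AgentTypeCreate(
        name="QA Engineer",
        slug=slug,
        personality_prompt="You are a meticulous QA engineer.",
        primary_model="claude-opus-4-5-20251101",
    )


_minimal("qa-engineer")  # accepted

for bad in ("QA-Engineer", "-qa", "qa-", "qa--engineer", "qa engineer"):
    try:
        _minimal(bad)
    except ValidationError:
        pass  # each of these violates one of the rules enforced above
    else:
        raise AssertionError(f"expected {bad!r} to be rejected")
```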
|
||||
|
||||
@field_validator("name")
|
||||
@classmethod
|
||||
def validate_name(cls, v: str) -> str:
|
||||
"""Validate agent type name."""
|
||||
if not v or v.strip() == "":
|
||||
raise ValueError("Agent type name cannot be empty")
|
||||
return v.strip()
|
||||
|
||||
@field_validator("expertise")
|
||||
@classmethod
|
||||
def validate_expertise(cls, v: list[str]) -> list[str]:
|
||||
"""Validate and normalize expertise list."""
|
||||
return [e.strip().lower() for e in v if e.strip()]
|
||||
|
||||
@field_validator("mcp_servers")
|
||||
@classmethod
|
||||
def validate_mcp_servers(cls, v: list[str]) -> list[str]:
|
||||
"""Validate MCP server list."""
|
||||
return [s.strip() for s in v if s.strip()]
|
||||
|
||||
|
||||
class AgentTypeCreate(AgentTypeBase):
|
||||
"""Schema for creating a new agent type."""
|
||||
|
||||
name: str = Field(..., min_length=1, max_length=255)
|
||||
slug: str = Field(..., min_length=1, max_length=255)
|
||||
personality_prompt: str = Field(..., min_length=1)
|
||||
primary_model: str = Field(..., min_length=1, max_length=100)
|
||||
|
||||
|
||||
class AgentTypeUpdate(BaseModel):
|
||||
"""Schema for updating an agent type."""
|
||||
|
||||
name: str | None = Field(None, min_length=1, max_length=255)
|
||||
slug: str | None = Field(None, min_length=1, max_length=255)
|
||||
description: str | None = None
|
||||
expertise: list[str] | None = None
|
||||
personality_prompt: str | None = None
|
||||
primary_model: str | None = Field(None, min_length=1, max_length=100)
|
||||
fallback_models: list[str] | None = None
|
||||
model_params: dict[str, Any] | None = None
|
||||
mcp_servers: list[str] | None = None
|
||||
tool_permissions: dict[str, Any] | None = None
|
||||
is_active: bool | None = None
|
||||
|
||||
@field_validator("slug")
|
||||
@classmethod
|
||||
def validate_slug(cls, v: str | None) -> str | None:
|
||||
"""Validate slug format."""
|
||||
if v is None:
|
||||
return v
|
||||
if not re.match(r"^[a-z0-9-]+$", v):
|
||||
raise ValueError(
|
||||
"Slug must contain only lowercase letters, numbers, and hyphens"
|
||||
)
|
||||
if v.startswith("-") or v.endswith("-"):
|
||||
raise ValueError("Slug cannot start or end with a hyphen")
|
||||
if "--" in v:
|
||||
raise ValueError("Slug cannot contain consecutive hyphens")
|
||||
return v
|
||||
|
||||
@field_validator("name")
|
||||
@classmethod
|
||||
def validate_name(cls, v: str | None) -> str | None:
|
||||
"""Validate agent type name."""
|
||||
if v is not None and (not v or v.strip() == ""):
|
||||
raise ValueError("Agent type name cannot be empty")
|
||||
return v.strip() if v else v
|
||||
|
||||
@field_validator("expertise")
|
||||
@classmethod
|
||||
def validate_expertise(cls, v: list[str] | None) -> list[str] | None:
|
||||
"""Validate and normalize expertise list."""
|
||||
if v is None:
|
||||
return v
|
||||
return [e.strip().lower() for e in v if e.strip()]
|
||||
|
||||
|
||||
class AgentTypeInDB(AgentTypeBase):
|
||||
"""Schema for agent type in database."""
|
||||
|
||||
id: UUID
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class AgentTypeResponse(AgentTypeBase):
|
||||
"""Schema for agent type API responses."""
|
||||
|
||||
id: UUID
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
instance_count: int | None = 0
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class AgentTypeListResponse(BaseModel):
|
||||
"""Schema for paginated agent type list responses."""
|
||||
|
||||
agent_types: list[AgentTypeResponse]
|
||||
total: int
|
||||
page: int
|
||||
page_size: int
|
||||
pages: int
|
||||
26
backend/app/schemas/syndarix/enums.py
Normal file
@@ -0,0 +1,26 @@
|
||||
# app/schemas/syndarix/enums.py
|
||||
"""
|
||||
Re-export enums from models for use in schemas.
|
||||
|
||||
This allows schemas to import enums without depending on SQLAlchemy models directly.
|
||||
"""
|
||||
|
||||
from app.models.syndarix.enums import (
|
||||
AgentStatus,
|
||||
AutonomyLevel,
|
||||
IssuePriority,
|
||||
IssueStatus,
|
||||
ProjectStatus,
|
||||
SprintStatus,
|
||||
SyncStatus,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"AgentStatus",
|
||||
"AutonomyLevel",
|
||||
"IssuePriority",
|
||||
"IssueStatus",
|
||||
"ProjectStatus",
|
||||
"SprintStatus",
|
||||
"SyncStatus",
|
||||
]
|
||||
193
backend/app/schemas/syndarix/issue.py
Normal file
@@ -0,0 +1,193 @@
|
||||
# app/schemas/syndarix/issue.py
|
||||
"""
|
||||
Pydantic schemas for Issue entity.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
|
||||
|
||||
from .enums import IssuePriority, IssueStatus, SyncStatus
|
||||
|
||||
|
||||
class IssueBase(BaseModel):
|
||||
"""Base issue schema with common fields."""
|
||||
|
||||
title: str = Field(..., min_length=1, max_length=500)
|
||||
body: str = ""
|
||||
status: IssueStatus = IssueStatus.OPEN
|
||||
priority: IssuePriority = IssuePriority.MEDIUM
|
||||
labels: list[str] = Field(default_factory=list)
|
||||
story_points: int | None = Field(None, ge=0, le=100)
|
||||
|
||||
@field_validator("title")
|
||||
@classmethod
|
||||
def validate_title(cls, v: str) -> str:
|
||||
"""Validate issue title."""
|
||||
if not v or v.strip() == "":
|
||||
raise ValueError("Issue title cannot be empty")
|
||||
return v.strip()
|
||||
|
||||
@field_validator("labels")
|
||||
@classmethod
|
||||
def validate_labels(cls, v: list[str]) -> list[str]:
|
||||
"""Validate and normalize labels."""
|
||||
return [label.strip().lower() for label in v if label.strip()]
|
||||
|
||||
|
||||
class IssueCreate(IssueBase):
|
||||
"""Schema for creating a new issue."""
|
||||
|
||||
project_id: UUID
|
||||
assigned_agent_id: UUID | None = None
|
||||
human_assignee: str | None = Field(None, max_length=255)
|
||||
sprint_id: UUID | None = None
|
||||
|
||||
# External tracker fields (optional, for importing from external systems)
|
||||
external_tracker: Literal["gitea", "github", "gitlab"] | None = None
|
||||
external_id: str | None = Field(None, max_length=255)
|
||||
external_url: str | None = Field(None, max_length=1000)
|
||||
external_number: int | None = None
|
||||
|
||||
|
||||
class IssueUpdate(BaseModel):
|
||||
"""Schema for updating an issue."""
|
||||
|
||||
title: str | None = Field(None, min_length=1, max_length=500)
|
||||
body: str | None = None
|
||||
status: IssueStatus | None = None
|
||||
priority: IssuePriority | None = None
|
||||
labels: list[str] | None = None
|
||||
assigned_agent_id: UUID | None = None
|
||||
human_assignee: str | None = Field(None, max_length=255)
|
||||
sprint_id: UUID | None = None
|
||||
story_points: int | None = Field(None, ge=0, le=100)
|
||||
sync_status: SyncStatus | None = None
|
||||
|
||||
@field_validator("title")
|
||||
@classmethod
|
||||
def validate_title(cls, v: str | None) -> str | None:
|
||||
"""Validate issue title."""
|
||||
if v is not None and (not v or v.strip() == ""):
|
||||
raise ValueError("Issue title cannot be empty")
|
||||
return v.strip() if v else v
|
||||
|
||||
@field_validator("labels")
|
||||
@classmethod
|
||||
def validate_labels(cls, v: list[str] | None) -> list[str] | None:
|
||||
"""Validate and normalize labels."""
|
||||
if v is None:
|
||||
return v
|
||||
return [label.strip().lower() for label in v if label.strip()]
|
||||
|
||||
|
||||
class IssueClose(BaseModel):
|
||||
"""Schema for closing an issue."""
|
||||
|
||||
resolution: str | None = None # Optional resolution note
|
||||
|
||||
|
||||
class IssueAssign(BaseModel):
|
||||
"""Schema for assigning an issue."""
|
||||
|
||||
assigned_agent_id: UUID | None = None
|
||||
human_assignee: str | None = Field(None, max_length=255)
|
||||
|
||||
@model_validator(mode="after")
|
||||
def validate_assignment(self) -> "IssueAssign":
|
||||
"""Ensure only one type of assignee is set."""
|
||||
if self.assigned_agent_id and self.human_assignee:
|
||||
raise ValueError(
|
||||
"Cannot assign to both an agent and a human. Choose one."
|
||||
)
|
||||
return self
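The validator above makes agent and human assignment mutually exclusive; for example:

```python
from uuid import uuid4

from pydantic import ValidationError

from app.schemas.syndarix import IssueAssign

IssueAssign(assigned_agent_id=uuid4())          # agent only: valid
IssueAssign(human_assignee="dev@example.com")   # human only: valid
IssueAssign()                                   # clearing both assignees: also valid

try:
    IssueAssign(assigned_agent_id=uuid4(), human_assignee="dev@example.com")
except ValidationError as exc:
    print(exc)  # "Cannot assign to both an agent and a human. Choose one."
```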
|
||||
|
||||
|
||||
class IssueSyncUpdate(BaseModel):
|
||||
"""Schema for updating sync-related fields."""
|
||||
|
||||
sync_status: SyncStatus
|
||||
last_synced_at: datetime | None = None
|
||||
external_updated_at: datetime | None = None
|
||||
|
||||
|
||||
class IssueInDB(IssueBase):
|
||||
"""Schema for issue in database."""
|
||||
|
||||
id: UUID
|
||||
project_id: UUID
|
||||
assigned_agent_id: UUID | None = None
|
||||
human_assignee: str | None = None
|
||||
sprint_id: UUID | None = None
|
||||
external_tracker: str | None = None
|
||||
external_id: str | None = None
|
||||
external_url: str | None = None
|
||||
external_number: int | None = None
|
||||
sync_status: SyncStatus = SyncStatus.SYNCED
|
||||
last_synced_at: datetime | None = None
|
||||
external_updated_at: datetime | None = None
|
||||
closed_at: datetime | None = None
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class IssueResponse(BaseModel):
|
||||
"""Schema for issue API responses."""
|
||||
|
||||
id: UUID
|
||||
project_id: UUID
|
||||
title: str
|
||||
body: str
|
||||
status: IssueStatus
|
||||
priority: IssuePriority
|
||||
labels: list[str] = Field(default_factory=list)
|
||||
assigned_agent_id: UUID | None = None
|
||||
human_assignee: str | None = None
|
||||
sprint_id: UUID | None = None
|
||||
story_points: int | None = None
|
||||
external_tracker: str | None = None
|
||||
external_id: str | None = None
|
||||
external_url: str | None = None
|
||||
external_number: int | None = None
|
||||
sync_status: SyncStatus = SyncStatus.SYNCED
|
||||
last_synced_at: datetime | None = None
|
||||
external_updated_at: datetime | None = None
|
||||
closed_at: datetime | None = None
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
# Expanded fields from relationships
|
||||
project_name: str | None = None
|
||||
project_slug: str | None = None
|
||||
sprint_name: str | None = None
|
||||
assigned_agent_type_name: str | None = None
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class IssueListResponse(BaseModel):
|
||||
"""Schema for paginated issue list responses."""
|
||||
|
||||
issues: list[IssueResponse]
|
||||
total: int
|
||||
page: int
|
||||
page_size: int
|
||||
pages: int
|
||||
|
||||
|
||||
class IssueStats(BaseModel):
|
||||
"""Schema for issue statistics."""
|
||||
|
||||
total: int
|
||||
open: int
|
||||
in_progress: int
|
||||
in_review: int
|
||||
blocked: int
|
||||
closed: int
|
||||
by_priority: dict[str, int]
|
||||
total_story_points: int | None = None
|
||||
completed_story_points: int | None = None
|
||||
127
backend/app/schemas/syndarix/project.py
Normal file
@@ -0,0 +1,127 @@
|
||||
# app/schemas/syndarix/project.py
|
||||
"""
|
||||
Pydantic schemas for Project entity.
|
||||
"""
|
||||
|
||||
import re
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
||||
|
||||
from .enums import AutonomyLevel, ProjectStatus
|
||||
|
||||
|
||||
class ProjectBase(BaseModel):
|
||||
"""Base project schema with common fields."""
|
||||
|
||||
name: str = Field(..., min_length=1, max_length=255)
|
||||
slug: str | None = Field(None, min_length=1, max_length=255)
|
||||
description: str | None = None
|
||||
autonomy_level: AutonomyLevel = AutonomyLevel.MILESTONE
|
||||
status: ProjectStatus = ProjectStatus.ACTIVE
|
||||
settings: dict[str, Any] = Field(default_factory=dict)
|
||||
|
||||
@field_validator("slug")
|
||||
@classmethod
|
||||
def validate_slug(cls, v: str | None) -> str | None:
|
||||
"""Validate slug format: lowercase, alphanumeric, hyphens only."""
|
||||
if v is None:
|
||||
return v
|
||||
if not re.match(r"^[a-z0-9-]+$", v):
|
||||
raise ValueError(
|
||||
"Slug must contain only lowercase letters, numbers, and hyphens"
|
||||
)
|
||||
if v.startswith("-") or v.endswith("-"):
|
||||
raise ValueError("Slug cannot start or end with a hyphen")
|
||||
if "--" in v:
|
||||
raise ValueError("Slug cannot contain consecutive hyphens")
|
||||
return v
|
||||
|
||||
@field_validator("name")
|
||||
@classmethod
|
||||
def validate_name(cls, v: str) -> str:
|
||||
"""Validate project name."""
|
||||
if not v or v.strip() == "":
|
||||
raise ValueError("Project name cannot be empty")
|
||||
return v.strip()
|
||||
|
||||
|
||||
class ProjectCreate(ProjectBase):
|
||||
"""Schema for creating a new project."""
|
||||
|
||||
name: str = Field(..., min_length=1, max_length=255)
|
||||
slug: str = Field(..., min_length=1, max_length=255)
|
||||
owner_id: UUID | None = None
|
||||
|
||||
|
||||
class ProjectUpdate(BaseModel):
|
||||
"""Schema for updating a project."""
|
||||
|
||||
name: str | None = Field(None, min_length=1, max_length=255)
|
||||
slug: str | None = Field(None, min_length=1, max_length=255)
|
||||
description: str | None = None
|
||||
autonomy_level: AutonomyLevel | None = None
|
||||
status: ProjectStatus | None = None
|
||||
settings: dict[str, Any] | None = None
|
||||
owner_id: UUID | None = None
|
||||
|
||||
@field_validator("slug")
|
||||
@classmethod
|
||||
def validate_slug(cls, v: str | None) -> str | None:
|
||||
"""Validate slug format."""
|
||||
if v is None:
|
||||
return v
|
||||
if not re.match(r"^[a-z0-9-]+$", v):
|
||||
raise ValueError(
|
||||
"Slug must contain only lowercase letters, numbers, and hyphens"
|
||||
)
|
||||
if v.startswith("-") or v.endswith("-"):
|
||||
raise ValueError("Slug cannot start or end with a hyphen")
|
||||
if "--" in v:
|
||||
raise ValueError("Slug cannot contain consecutive hyphens")
|
||||
return v
|
||||
|
||||
@field_validator("name")
|
||||
@classmethod
|
||||
def validate_name(cls, v: str | None) -> str | None:
|
||||
"""Validate project name."""
|
||||
if v is not None and (not v or v.strip() == ""):
|
||||
raise ValueError("Project name cannot be empty")
|
||||
return v.strip() if v else v
|
||||
|
||||
|
||||
class ProjectInDB(ProjectBase):
|
||||
"""Schema for project in database."""
|
||||
|
||||
id: UUID
|
||||
owner_id: UUID | None = None
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class ProjectResponse(ProjectBase):
|
||||
"""Schema for project API responses."""
|
||||
|
||||
id: UUID
|
||||
owner_id: UUID | None = None
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
agent_count: int | None = 0
|
||||
issue_count: int | None = 0
|
||||
active_sprint_name: str | None = None
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class ProjectListResponse(BaseModel):
|
||||
"""Schema for paginated project list responses."""
|
||||
|
||||
projects: list[ProjectResponse]
|
||||
total: int
|
||||
page: int
|
||||
page_size: int
|
||||
pages: int
|
||||
135
backend/app/schemas/syndarix/sprint.py
Normal file
@@ -0,0 +1,135 @@
|
||||
# app/schemas/syndarix/sprint.py
|
||||
"""
|
||||
Pydantic schemas for Sprint entity.
|
||||
"""
|
||||
|
||||
from datetime import date, datetime
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
|
||||
|
||||
from .enums import SprintStatus
|
||||
|
||||
|
||||
class SprintBase(BaseModel):
|
||||
"""Base sprint schema with common fields."""
|
||||
|
||||
name: str = Field(..., min_length=1, max_length=255)
|
||||
number: int = Field(..., ge=1)
|
||||
goal: str | None = None
|
||||
start_date: date
|
||||
end_date: date
|
||||
status: SprintStatus = SprintStatus.PLANNED
|
||||
planned_points: int | None = Field(None, ge=0)
|
||||
completed_points: int | None = Field(None, ge=0)
|
||||
|
||||
@field_validator("name")
|
||||
@classmethod
|
||||
def validate_name(cls, v: str) -> str:
|
||||
"""Validate sprint name."""
|
||||
if not v or v.strip() == "":
|
||||
raise ValueError("Sprint name cannot be empty")
|
||||
return v.strip()
|
||||
|
||||
@model_validator(mode="after")
|
||||
def validate_dates(self) -> "SprintBase":
|
||||
"""Validate that end_date is after start_date."""
|
||||
if self.end_date < self.start_date:
|
||||
raise ValueError("End date must be after or equal to start date")
|
||||
return self
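A quick illustration of the date check; the project id is a random placeholder:

```python
from datetime import date, timedelta
from uuid import uuid4

from pydantic import ValidationError

from app.schemas.syndarix import SprintCreate

start = date(2025, 1, 6)

SprintCreate(
    project_id=uuid4(),
    name="Sprint 1",
    number=1,
    start_date=start,
    end_date=start + timedelta(days=13),
)  # valid: a two-week window

try:
    SprintCreate(
        project_id=uuid4(),
        name="Sprint 2",
        number=2,
        start_date=start,
        end_date=start - timedelta(days=1),
    )
except ValidationError as exc:
    print(exc)  # end date precedes start date
```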
|
||||
|
||||
|
||||
class SprintCreate(SprintBase):
|
||||
"""Schema for creating a new sprint."""
|
||||
|
||||
project_id: UUID
|
||||
|
||||
|
||||
class SprintUpdate(BaseModel):
|
||||
"""Schema for updating a sprint."""
|
||||
|
||||
name: str | None = Field(None, min_length=1, max_length=255)
|
||||
goal: str | None = None
|
||||
start_date: date | None = None
|
||||
end_date: date | None = None
|
||||
status: SprintStatus | None = None
|
||||
planned_points: int | None = Field(None, ge=0)
|
||||
completed_points: int | None = Field(None, ge=0)
|
||||
|
||||
@field_validator("name")
|
||||
@classmethod
|
||||
def validate_name(cls, v: str | None) -> str | None:
|
||||
"""Validate sprint name."""
|
||||
if v is not None and (not v or v.strip() == ""):
|
||||
raise ValueError("Sprint name cannot be empty")
|
||||
return v.strip() if v else v
|
||||
|
||||
|
||||
class SprintStart(BaseModel):
|
||||
"""Schema for starting a sprint."""
|
||||
|
||||
start_date: date | None = None # Optionally override start date
|
||||
|
||||
|
||||
class SprintComplete(BaseModel):
|
||||
"""Schema for completing a sprint."""
|
||||
|
||||
completed_points: int | None = Field(None, ge=0)
|
||||
notes: str | None = None
|
||||
|
||||
|
||||
class SprintInDB(SprintBase):
|
||||
"""Schema for sprint in database."""
|
||||
|
||||
id: UUID
|
||||
project_id: UUID
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class SprintResponse(SprintBase):
|
||||
"""Schema for sprint API responses."""
|
||||
|
||||
id: UUID
|
||||
project_id: UUID
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
# Expanded fields from relationships
|
||||
project_name: str | None = None
|
||||
project_slug: str | None = None
|
||||
issue_count: int | None = 0
|
||||
open_issues: int | None = 0
|
||||
completed_issues: int | None = 0
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class SprintListResponse(BaseModel):
|
||||
"""Schema for paginated sprint list responses."""
|
||||
|
||||
sprints: list[SprintResponse]
|
||||
total: int
|
||||
page: int
|
||||
page_size: int
|
||||
pages: int
|
||||
|
||||
|
||||
class SprintVelocity(BaseModel):
|
||||
"""Schema for sprint velocity metrics."""
|
||||
|
||||
sprint_number: int
|
||||
sprint_name: str
|
||||
planned_points: int | None
|
||||
completed_points: int | None
|
||||
velocity: float | None # completed/planned ratio
|
||||
|
||||
|
||||
class SprintBurndown(BaseModel):
|
||||
"""Schema for sprint burndown data point."""
|
||||
|
||||
date: date
|
||||
remaining_points: int
|
||||
ideal_remaining: float
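One plausible way to build the ideal line for `SprintBurndown` points is to spread the planned total linearly across the sprint's days. The helper below is a sketch under that assumption, not something this diff ships; actual `remaining_points` would be filled in from live issue data elsewhere.

```python
from datetime import date, timedelta

from app.schemas.syndarix import SprintBurndown


def ideal_burndown(start: date, end: date, planned_points: int) -> list[SprintBurndown]:
    """Linear ideal line from planned_points down to 0 across the sprint window."""
    days = (end - start).days
    points: list[SprintBurndown] = []
    for offset in range(days + 1):
        fraction_done = offset / days if days else 1.0
        points.append(
            SprintBurndown(
                date=start + timedelta(days=offset),
                remaining_points=planned_points,  # placeholder; the actual value comes from issue data
                ideal_remaining=planned_points * (1 - fraction_done),
            )
        )
    return points
```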
|
||||
2
backend/tests/crud/syndarix/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# tests/crud/syndarix/__init__.py
|
||||
"""Syndarix CRUD operation tests."""
|
||||
218
backend/tests/crud/syndarix/conftest.py
Normal file
@@ -0,0 +1,218 @@
|
||||
# tests/crud/syndarix/conftest.py
|
||||
"""
|
||||
Shared fixtures for Syndarix CRUD tests.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import date, timedelta
|
||||
from decimal import Decimal
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
|
||||
from app.models.syndarix import (
|
||||
AgentInstance,
|
||||
AgentStatus,
|
||||
AgentType,
|
||||
AutonomyLevel,
|
||||
Issue,
|
||||
IssuePriority,
|
||||
IssueStatus,
|
||||
Project,
|
||||
ProjectStatus,
|
||||
Sprint,
|
||||
SprintStatus,
|
||||
SyncStatus,
|
||||
)
|
||||
from app.models.user import User
|
||||
from app.schemas.syndarix import (
|
||||
AgentInstanceCreate,
|
||||
AgentTypeCreate,
|
||||
IssueCreate,
|
||||
ProjectCreate,
|
||||
SprintCreate,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def project_create_data():
|
||||
"""Return data for creating a project via schema."""
|
||||
return ProjectCreate(
|
||||
name="Test Project",
|
||||
slug="test-project-crud",
|
||||
description="A test project for CRUD testing",
|
||||
autonomy_level=AutonomyLevel.MILESTONE,
|
||||
status=ProjectStatus.ACTIVE,
|
||||
settings={"mcp_servers": ["gitea"]},
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def agent_type_create_data():
|
||||
"""Return data for creating an agent type via schema."""
|
||||
return AgentTypeCreate(
|
||||
name="Backend Engineer",
|
||||
slug="backend-engineer-crud",
|
||||
description="Specialized in backend development",
|
||||
expertise=["python", "fastapi", "postgresql"],
|
||||
personality_prompt="You are an expert backend engineer with deep knowledge of Python and FastAPI.",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
fallback_models=["claude-sonnet-4-20250514"],
|
||||
model_params={"temperature": 0.7, "max_tokens": 4096},
|
||||
mcp_servers=["gitea", "file-system"],
|
||||
tool_permissions={"allowed": ["*"], "denied": []},
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sprint_create_data():
|
||||
"""Return data for creating a sprint via schema."""
|
||||
today = date.today()
|
||||
return {
|
||||
"name": "Sprint 1",
        "number": 1,
        "goal": "Complete initial setup and core features",
        "start_date": today,
        "end_date": today + timedelta(days=14),
        "status": SprintStatus.PLANNED,
        "planned_points": 21,
        "completed_points": 0,
    }


@pytest.fixture
def issue_create_data():
    """Return data for creating an issue via schema."""
    return {
        "title": "Implement user authentication",
        "body": "As a user, I want to log in securely so that I can access my account.",
        "status": IssueStatus.OPEN,
        "priority": IssuePriority.HIGH,
        "labels": ["backend", "security"],
        "story_points": 5,
    }


@pytest_asyncio.fixture
async def test_owner_crud(async_test_db):
    """Create a test user to be used as project owner in CRUD tests."""
    from app.core.auth import get_password_hash

    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        user = User(
            id=uuid.uuid4(),
            email="crud-owner@example.com",
            password_hash=get_password_hash("TestPassword123!"),
            first_name="CRUD",
            last_name="Owner",
            is_active=True,
            is_superuser=False,
        )
        session.add(user)
        await session.commit()
        await session.refresh(user)
        return user


@pytest_asyncio.fixture
async def test_project_crud(async_test_db, test_owner_crud, project_create_data):
    """Create a test project in the database for CRUD tests."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        project = Project(
            id=uuid.uuid4(),
            name=project_create_data.name,
            slug=project_create_data.slug,
            description=project_create_data.description,
            autonomy_level=project_create_data.autonomy_level,
            status=project_create_data.status,
            settings=project_create_data.settings,
            owner_id=test_owner_crud.id,
        )
        session.add(project)
        await session.commit()
        await session.refresh(project)
        return project


@pytest_asyncio.fixture
async def test_agent_type_crud(async_test_db, agent_type_create_data):
    """Create a test agent type in the database for CRUD tests."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        agent_type = AgentType(
            id=uuid.uuid4(),
            name=agent_type_create_data.name,
            slug=agent_type_create_data.slug,
            description=agent_type_create_data.description,
            expertise=agent_type_create_data.expertise,
            personality_prompt=agent_type_create_data.personality_prompt,
            primary_model=agent_type_create_data.primary_model,
            fallback_models=agent_type_create_data.fallback_models,
            model_params=agent_type_create_data.model_params,
            mcp_servers=agent_type_create_data.mcp_servers,
            tool_permissions=agent_type_create_data.tool_permissions,
            is_active=agent_type_create_data.is_active,
        )
        session.add(agent_type)
        await session.commit()
        await session.refresh(agent_type)
        return agent_type


@pytest_asyncio.fixture
async def test_agent_instance_crud(async_test_db, test_project_crud, test_agent_type_crud):
    """Create a test agent instance in the database for CRUD tests."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        agent_instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=test_agent_type_crud.id,
            project_id=test_project_crud.id,
            status=AgentStatus.IDLE,
            current_task=None,
            short_term_memory={},
            long_term_memory_ref=None,
            session_id=None,
            tasks_completed=0,
            tokens_used=0,
            cost_incurred=Decimal("0.0000"),
        )
        session.add(agent_instance)
        await session.commit()
        await session.refresh(agent_instance)
        return agent_instance


@pytest_asyncio.fixture
async def test_sprint_crud(async_test_db, test_project_crud, sprint_create_data):
    """Create a test sprint in the database for CRUD tests."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        sprint = Sprint(
            id=uuid.uuid4(),
            project_id=test_project_crud.id,
            **sprint_create_data,
        )
        session.add(sprint)
        await session.commit()
        await session.refresh(sprint)
        return sprint


@pytest_asyncio.fixture
async def test_issue_crud(async_test_db, test_project_crud, issue_create_data):
    """Create a test issue in the database for CRUD tests."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        issue = Issue(
            id=uuid.uuid4(),
            project_id=test_project_crud.id,
            **issue_create_data,
        )
        session.add(issue)
        await session.commit()
        await session.refresh(issue)
        return issue
386
backend/tests/crud/syndarix/test_agent_instance_crud.py
Normal file
@@ -0,0 +1,386 @@
# tests/crud/syndarix/test_agent_instance_crud.py
"""
Tests for AgentInstance CRUD operations.
"""

import uuid
from decimal import Decimal

import pytest

from app.crud.syndarix import agent_instance as agent_instance_crud
from app.models.syndarix import AgentStatus
from app.schemas.syndarix import AgentInstanceCreate, AgentInstanceUpdate


class TestAgentInstanceCreate:
    """Tests for agent instance creation."""

    @pytest.mark.asyncio
    async def test_create_agent_instance_success(self, async_test_db, test_project_crud, test_agent_type_crud):
        """Test successfully creating an agent instance."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            instance_data = AgentInstanceCreate(
                agent_type_id=test_agent_type_crud.id,
                project_id=test_project_crud.id,
                status=AgentStatus.IDLE,
                current_task=None,
                short_term_memory={"context": "initial"},
                long_term_memory_ref="project-123/agent-456",
                session_id="session-abc",
            )
            result = await agent_instance_crud.create(session, obj_in=instance_data)

            assert result.id is not None
            assert result.agent_type_id == test_agent_type_crud.id
            assert result.project_id == test_project_crud.id
            assert result.status == AgentStatus.IDLE
            assert result.short_term_memory == {"context": "initial"}

    @pytest.mark.asyncio
    async def test_create_agent_instance_minimal(self, async_test_db, test_project_crud, test_agent_type_crud):
        """Test creating agent instance with minimal fields."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            instance_data = AgentInstanceCreate(
                agent_type_id=test_agent_type_crud.id,
                project_id=test_project_crud.id,
            )
            result = await agent_instance_crud.create(session, obj_in=instance_data)

            assert result.status == AgentStatus.IDLE  # Default
            assert result.tasks_completed == 0
            assert result.tokens_used == 0


class TestAgentInstanceRead:
    """Tests for agent instance read operations."""

    @pytest.mark.asyncio
    async def test_get_agent_instance_by_id(self, async_test_db, test_agent_instance_crud):
        """Test getting agent instance by ID."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.get(session, id=str(test_agent_instance_crud.id))

            assert result is not None
            assert result.id == test_agent_instance_crud.id

    @pytest.mark.asyncio
    async def test_get_agent_instance_by_id_not_found(self, async_test_db):
        """Test getting non-existent agent instance returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.get(session, id=str(uuid.uuid4()))
            assert result is None

    @pytest.mark.asyncio
    async def test_get_with_details(self, async_test_db, test_agent_instance_crud):
        """Test getting agent instance with related details."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.get_with_details(
                session,
                instance_id=test_agent_instance_crud.id,
            )

            assert result is not None
            assert result["instance"].id == test_agent_instance_crud.id
            assert result["agent_type_name"] is not None
            assert result["project_name"] is not None


class TestAgentInstanceUpdate:
    """Tests for agent instance update operations."""

    @pytest.mark.asyncio
    async def test_update_agent_instance_status(self, async_test_db, test_agent_instance_crud):
        """Test updating agent instance status."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            instance = await agent_instance_crud.get(session, id=str(test_agent_instance_crud.id))

            update_data = AgentInstanceUpdate(
                status=AgentStatus.WORKING,
                current_task="Processing feature request",
            )
            result = await agent_instance_crud.update(session, db_obj=instance, obj_in=update_data)

            assert result.status == AgentStatus.WORKING
            assert result.current_task == "Processing feature request"

    @pytest.mark.asyncio
    async def test_update_agent_instance_memory(self, async_test_db, test_agent_instance_crud):
        """Test updating agent instance short-term memory."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            instance = await agent_instance_crud.get(session, id=str(test_agent_instance_crud.id))

            new_memory = {"conversation": ["msg1", "msg2"], "decisions": {"key": "value"}}
            update_data = AgentInstanceUpdate(short_term_memory=new_memory)
            result = await agent_instance_crud.update(session, db_obj=instance, obj_in=update_data)

            assert result.short_term_memory == new_memory


class TestAgentInstanceStatusUpdate:
    """Tests for agent instance status update method."""

    @pytest.mark.asyncio
    async def test_update_status(self, async_test_db, test_agent_instance_crud):
        """Test updating agent instance status via dedicated method."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.update_status(
                session,
                instance_id=test_agent_instance_crud.id,
                status=AgentStatus.WORKING,
                current_task="Working on feature X",
            )

            assert result is not None
            assert result.status == AgentStatus.WORKING
            assert result.current_task == "Working on feature X"
            assert result.last_activity_at is not None

    @pytest.mark.asyncio
    async def test_update_status_nonexistent(self, async_test_db):
        """Test updating status of non-existent instance returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.update_status(
                session,
                instance_id=uuid.uuid4(),
                status=AgentStatus.WORKING,
            )
            assert result is None


class TestAgentInstanceTerminate:
    """Tests for agent instance termination."""

    @pytest.mark.asyncio
    async def test_terminate_agent_instance(self, async_test_db, test_project_crud, test_agent_type_crud):
        """Test terminating an agent instance."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create an instance to terminate
        async with AsyncTestingSessionLocal() as session:
            instance_data = AgentInstanceCreate(
                agent_type_id=test_agent_type_crud.id,
                project_id=test_project_crud.id,
                status=AgentStatus.WORKING,
            )
            created = await agent_instance_crud.create(session, obj_in=instance_data)
            instance_id = created.id

        # Terminate
        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.terminate(session, instance_id=instance_id)

            assert result is not None
            assert result.status == AgentStatus.TERMINATED
            assert result.terminated_at is not None
            assert result.current_task is None
            assert result.session_id is None

    @pytest.mark.asyncio
    async def test_terminate_nonexistent_instance(self, async_test_db):
        """Test terminating non-existent instance returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.terminate(session, instance_id=uuid.uuid4())
            assert result is None

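The two classes above only pin down the observable behaviour of update_status and terminate; the helpers themselves are not shown in this hunk. A minimal sketch consistent with those assertions, assuming SQLAlchemy 2.0 async sessions and taking the column names (last_activity_at, terminated_at) from what the tests read back. It is written as free functions for brevity; in the repository these are presumably methods on the agent_instance CRUD object imported at the top of this file, so treat it as an illustration, not the committed code.

from datetime import UTC, datetime
from uuid import UUID

from sqlalchemy.ext.asyncio import AsyncSession

from app.models.syndarix import AgentInstance, AgentStatus


async def update_status(
    db: AsyncSession,
    *,
    instance_id: UUID,
    status: AgentStatus,
    current_task: str | None = None,
) -> AgentInstance | None:
    """Set status/current_task and stamp last_activity_at; None if the row is missing."""
    instance = await db.get(AgentInstance, instance_id)
    if instance is None:
        return None
    instance.status = status
    instance.current_task = current_task
    instance.last_activity_at = datetime.now(UTC)  # the tests assert this gets set
    await db.commit()
    await db.refresh(instance)
    return instance


async def terminate(db: AsyncSession, *, instance_id: UUID) -> AgentInstance | None:
    """Mark the instance TERMINATED and clear its task/session state."""
    instance = await db.get(AgentInstance, instance_id)
    if instance is None:
        return None
    instance.status = AgentStatus.TERMINATED
    instance.terminated_at = datetime.now(UTC)
    instance.current_task = None   # cleared, per the terminate assertions
    instance.session_id = None
    await db.commit()
    await db.refresh(instance)
    return instance
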
class TestAgentInstanceMetrics:
    """Tests for agent instance metrics operations."""

    @pytest.mark.asyncio
    async def test_record_task_completion(self, async_test_db, test_agent_instance_crud):
        """Test recording task completion with metrics."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.record_task_completion(
                session,
                instance_id=test_agent_instance_crud.id,
                tokens_used=1500,
                cost_incurred=Decimal("0.0150"),
            )

            assert result is not None
            assert result.tasks_completed == 1
            assert result.tokens_used == 1500
            assert result.cost_incurred == Decimal("0.0150")
            assert result.last_activity_at is not None

    @pytest.mark.asyncio
    async def test_record_multiple_task_completions(self, async_test_db, test_project_crud, test_agent_type_crud):
        """Test recording multiple task completions accumulates metrics."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create fresh instance
        async with AsyncTestingSessionLocal() as session:
            instance_data = AgentInstanceCreate(
                agent_type_id=test_agent_type_crud.id,
                project_id=test_project_crud.id,
            )
            created = await agent_instance_crud.create(session, obj_in=instance_data)
            instance_id = created.id

        # Record first task
        async with AsyncTestingSessionLocal() as session:
            await agent_instance_crud.record_task_completion(
                session,
                instance_id=instance_id,
                tokens_used=1000,
                cost_incurred=Decimal("0.0100"),
            )

        # Record second task
        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.record_task_completion(
                session,
                instance_id=instance_id,
                tokens_used=2000,
                cost_incurred=Decimal("0.0200"),
            )

            assert result.tasks_completed == 2
            assert result.tokens_used == 3000
            assert result.cost_incurred == Decimal("0.0300")

    @pytest.mark.asyncio
    async def test_get_project_metrics(self, async_test_db, test_project_crud, test_agent_instance_crud):
        """Test getting aggregated metrics for a project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_instance_crud.get_project_metrics(
                session,
                project_id=test_project_crud.id,
            )

            assert result is not None
            assert "total_instances" in result
            assert "active_instances" in result
            assert "idle_instances" in result
            assert "total_tasks_completed" in result
            assert "total_tokens_used" in result
            assert "total_cost_incurred" in result

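test_record_multiple_task_completions requires the counters to accumulate across calls (1 task / 1500 tokens after the first call, 2 tasks / 3000 tokens / 0.0300 total cost after the second). A sketch of the assumed accumulation, under the same caveats as above (free function, column names inferred from the assertions, not the committed CRUD method):

from datetime import UTC, datetime
from decimal import Decimal
from uuid import UUID

from sqlalchemy.ext.asyncio import AsyncSession

from app.models.syndarix import AgentInstance


async def record_task_completion(
    db: AsyncSession,
    *,
    instance_id: UUID,
    tokens_used: int,
    cost_incurred: Decimal,
) -> AgentInstance | None:
    """Accumulate per-task metrics onto the instance row."""
    instance = await db.get(AgentInstance, instance_id)
    if instance is None:
        return None
    # Increment rather than overwrite, so repeated calls add up as the test expects.
    instance.tasks_completed += 1
    instance.tokens_used += tokens_used
    instance.cost_incurred += cost_incurred
    instance.last_activity_at = datetime.now(UTC)
    await db.commit()
    await db.refresh(instance)
    return instance
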
class TestAgentInstanceByProject:
    """Tests for getting instances by project."""

    @pytest.mark.asyncio
    async def test_get_by_project(self, async_test_db, test_project_crud, test_agent_instance_crud):
        """Test getting instances by project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            instances, total = await agent_instance_crud.get_by_project(
                session,
                project_id=test_project_crud.id,
            )

            assert total >= 1
            assert all(i.project_id == test_project_crud.id for i in instances)

    @pytest.mark.asyncio
    async def test_get_by_project_with_status(self, async_test_db, test_project_crud, test_agent_type_crud):
        """Test getting instances by project filtered by status."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create instances with different statuses
        async with AsyncTestingSessionLocal() as session:
            idle_instance = AgentInstanceCreate(
                agent_type_id=test_agent_type_crud.id,
                project_id=test_project_crud.id,
                status=AgentStatus.IDLE,
            )
            await agent_instance_crud.create(session, obj_in=idle_instance)

            working_instance = AgentInstanceCreate(
                agent_type_id=test_agent_type_crud.id,
                project_id=test_project_crud.id,
                status=AgentStatus.WORKING,
            )
            await agent_instance_crud.create(session, obj_in=working_instance)

        async with AsyncTestingSessionLocal() as session:
            instances, total = await agent_instance_crud.get_by_project(
                session,
                project_id=test_project_crud.id,
                status=AgentStatus.WORKING,
            )

            assert all(i.status == AgentStatus.WORKING for i in instances)


class TestAgentInstanceByAgentType:
    """Tests for getting instances by agent type."""

    @pytest.mark.asyncio
    async def test_get_by_agent_type(self, async_test_db, test_agent_type_crud, test_agent_instance_crud):
        """Test getting instances by agent type."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            instances = await agent_instance_crud.get_by_agent_type(
                session,
                agent_type_id=test_agent_type_crud.id,
            )

            assert len(instances) >= 1
            assert all(i.agent_type_id == test_agent_type_crud.id for i in instances)


class TestBulkTerminate:
    """Tests for bulk termination of instances."""

    @pytest.mark.asyncio
    async def test_bulk_terminate_by_project(self, async_test_db, test_project_crud, test_agent_type_crud):
        """Test bulk terminating all instances in a project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create multiple instances
        async with AsyncTestingSessionLocal() as session:
            for i in range(3):
                instance_data = AgentInstanceCreate(
                    agent_type_id=test_agent_type_crud.id,
                    project_id=test_project_crud.id,
                    status=AgentStatus.WORKING if i < 2 else AgentStatus.IDLE,
                )
                await agent_instance_crud.create(session, obj_in=instance_data)

        # Bulk terminate
        async with AsyncTestingSessionLocal() as session:
            count = await agent_instance_crud.bulk_terminate_by_project(
                session,
                project_id=test_project_crud.id,
            )

            assert count >= 3

        # Verify all are terminated
        async with AsyncTestingSessionLocal() as session:
            instances, _ = await agent_instance_crud.get_by_project(
                session,
                project_id=test_project_crud.id,
            )

            for instance in instances:
                assert instance.status == AgentStatus.TERMINATED
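The bulk-terminate test only checks the returned count and the final statuses, so it does not distinguish between looping over instances and issuing a single UPDATE. One plausible single-statement shape, sketched under the same assumptions as above (field names inferred from the assertions; not necessarily how the committed method does it):

from datetime import UTC, datetime
from uuid import UUID

from sqlalchemy import update
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.syndarix import AgentInstance, AgentStatus


async def bulk_terminate_by_project(db: AsyncSession, *, project_id: UUID) -> int:
    """Terminate every non-terminated instance of a project in one UPDATE."""
    stmt = (
        update(AgentInstance)
        .where(
            AgentInstance.project_id == project_id,
            AgentInstance.status != AgentStatus.TERMINATED,
        )
        .values(
            status=AgentStatus.TERMINATED,
            terminated_at=datetime.now(UTC),
            current_task=None,
            session_id=None,
        )
    )
    result = await db.execute(stmt)
    await db.commit()
    # rowcount is the number of rows the UPDATE touched, which is what the test counts.
    return result.rowcount or 0
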
353
backend/tests/crud/syndarix/test_agent_type_crud.py
Normal file
@@ -0,0 +1,353 @@
# tests/crud/syndarix/test_agent_type_crud.py
"""
Tests for AgentType CRUD operations.
"""

import uuid

import pytest

from app.crud.syndarix import agent_type as agent_type_crud
from app.schemas.syndarix import AgentTypeCreate, AgentTypeUpdate


class TestAgentTypeCreate:
    """Tests for agent type creation."""

    @pytest.mark.asyncio
    async def test_create_agent_type_success(self, async_test_db):
        """Test successfully creating an agent type."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            agent_type_data = AgentTypeCreate(
                name="QA Engineer",
                slug="qa-engineer",
                description="Specialized in testing and quality assurance",
                expertise=["testing", "pytest", "playwright"],
                personality_prompt="You are an expert QA engineer...",
                primary_model="claude-opus-4-5-20251101",
                fallback_models=["claude-sonnet-4-20250514"],
                model_params={"temperature": 0.5},
                mcp_servers=["gitea"],
                tool_permissions={"allowed": ["*"]},
                is_active=True,
            )
            result = await agent_type_crud.create(session, obj_in=agent_type_data)

            assert result.id is not None
            assert result.name == "QA Engineer"
            assert result.slug == "qa-engineer"
            assert result.expertise == ["testing", "pytest", "playwright"]
            assert result.is_active is True

    @pytest.mark.asyncio
    async def test_create_agent_type_duplicate_slug_fails(self, async_test_db, test_agent_type_crud):
        """Test creating agent type with duplicate slug raises ValueError."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            agent_type_data = AgentTypeCreate(
                name="Duplicate Agent",
                slug=test_agent_type_crud.slug,  # Duplicate slug
                personality_prompt="Duplicate",
                primary_model="claude-opus-4-5-20251101",
            )

            with pytest.raises(ValueError) as exc_info:
                await agent_type_crud.create(session, obj_in=agent_type_data)

            assert "already exists" in str(exc_info.value).lower()

    @pytest.mark.asyncio
    async def test_create_agent_type_minimal_fields(self, async_test_db):
        """Test creating agent type with minimal required fields."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            agent_type_data = AgentTypeCreate(
                name="Minimal Agent",
                slug="minimal-agent",
                personality_prompt="You are an assistant.",
                primary_model="claude-opus-4-5-20251101",
            )
            result = await agent_type_crud.create(session, obj_in=agent_type_data)

            assert result.name == "Minimal Agent"
            assert result.expertise == []  # Default
            assert result.fallback_models == []  # Default
            assert result.is_active is True  # Default


class TestAgentTypeRead:
    """Tests for agent type read operations."""

    @pytest.mark.asyncio
    async def test_get_agent_type_by_id(self, async_test_db, test_agent_type_crud):
        """Test getting agent type by ID."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_type_crud.get(session, id=str(test_agent_type_crud.id))

            assert result is not None
            assert result.id == test_agent_type_crud.id
            assert result.name == test_agent_type_crud.name

    @pytest.mark.asyncio
    async def test_get_agent_type_by_id_not_found(self, async_test_db):
        """Test getting non-existent agent type returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_type_crud.get(session, id=str(uuid.uuid4()))
            assert result is None

    @pytest.mark.asyncio
    async def test_get_agent_type_by_slug(self, async_test_db, test_agent_type_crud):
        """Test getting agent type by slug."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_type_crud.get_by_slug(session, slug=test_agent_type_crud.slug)

            assert result is not None
            assert result.slug == test_agent_type_crud.slug

    @pytest.mark.asyncio
    async def test_get_agent_type_by_slug_not_found(self, async_test_db):
        """Test getting non-existent slug returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_type_crud.get_by_slug(session, slug="non-existent-agent")
            assert result is None


class TestAgentTypeUpdate:
    """Tests for agent type update operations."""

    @pytest.mark.asyncio
    async def test_update_agent_type_basic_fields(self, async_test_db, test_agent_type_crud):
        """Test updating basic agent type fields."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            agent_type = await agent_type_crud.get(session, id=str(test_agent_type_crud.id))

            update_data = AgentTypeUpdate(
                name="Updated Agent Name",
                description="Updated description",
            )
            result = await agent_type_crud.update(session, db_obj=agent_type, obj_in=update_data)

            assert result.name == "Updated Agent Name"
            assert result.description == "Updated description"

    @pytest.mark.asyncio
    async def test_update_agent_type_expertise(self, async_test_db, test_agent_type_crud):
        """Test updating agent type expertise."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            agent_type = await agent_type_crud.get(session, id=str(test_agent_type_crud.id))

            update_data = AgentTypeUpdate(
                expertise=["new-skill", "another-skill"],
            )
            result = await agent_type_crud.update(session, db_obj=agent_type, obj_in=update_data)

            assert "new-skill" in result.expertise

    @pytest.mark.asyncio
    async def test_update_agent_type_model_params(self, async_test_db, test_agent_type_crud):
        """Test updating agent type model parameters."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            agent_type = await agent_type_crud.get(session, id=str(test_agent_type_crud.id))

            new_params = {"temperature": 0.9, "max_tokens": 8192}
            update_data = AgentTypeUpdate(model_params=new_params)
            result = await agent_type_crud.update(session, db_obj=agent_type, obj_in=update_data)

            assert result.model_params == new_params


class TestAgentTypeDelete:
    """Tests for agent type delete operations."""

    @pytest.mark.asyncio
    async def test_delete_agent_type(self, async_test_db):
        """Test deleting an agent type."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create an agent type to delete
        async with AsyncTestingSessionLocal() as session:
            agent_type_data = AgentTypeCreate(
                name="Delete Me Agent",
                slug="delete-me-agent",
                personality_prompt="Delete test",
                primary_model="claude-opus-4-5-20251101",
            )
            created = await agent_type_crud.create(session, obj_in=agent_type_data)
            agent_type_id = created.id

        # Delete the agent type
        async with AsyncTestingSessionLocal() as session:
            result = await agent_type_crud.remove(session, id=str(agent_type_id))
            assert result is not None

        # Verify deletion
        async with AsyncTestingSessionLocal() as session:
            deleted = await agent_type_crud.get(session, id=str(agent_type_id))
            assert deleted is None


class TestAgentTypeFilters:
    """Tests for agent type filtering and search."""

    @pytest.mark.asyncio
    async def test_get_multi_with_filters_active(self, async_test_db):
        """Test filtering agent types by is_active."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create active and inactive agent types
        async with AsyncTestingSessionLocal() as session:
            active_type = AgentTypeCreate(
                name="Active Agent Type",
                slug="active-agent-type-filter",
                personality_prompt="Active",
                primary_model="claude-opus-4-5-20251101",
                is_active=True,
            )
            await agent_type_crud.create(session, obj_in=active_type)

            inactive_type = AgentTypeCreate(
                name="Inactive Agent Type",
                slug="inactive-agent-type-filter",
                personality_prompt="Inactive",
                primary_model="claude-opus-4-5-20251101",
                is_active=False,
            )
            await agent_type_crud.create(session, obj_in=inactive_type)

        async with AsyncTestingSessionLocal() as session:
            active_types, _ = await agent_type_crud.get_multi_with_filters(
                session,
                is_active=True,
            )

            assert all(at.is_active for at in active_types)

    @pytest.mark.asyncio
    async def test_get_multi_with_filters_search(self, async_test_db):
        """Test searching agent types by name."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            agent_type_data = AgentTypeCreate(
                name="Searchable Agent Type",
                slug="searchable-agent-type",
                description="This is searchable",
                personality_prompt="Searchable",
                primary_model="claude-opus-4-5-20251101",
            )
            await agent_type_crud.create(session, obj_in=agent_type_data)

        async with AsyncTestingSessionLocal() as session:
            agent_types, total = await agent_type_crud.get_multi_with_filters(
                session,
                search="Searchable",
            )

            assert total >= 1
            assert any(at.name == "Searchable Agent Type" for at in agent_types)

    @pytest.mark.asyncio
    async def test_get_multi_with_filters_pagination(self, async_test_db):
        """Test pagination of agent type results."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            for i in range(5):
                agent_type_data = AgentTypeCreate(
                    name=f"Page Agent Type {i}",
                    slug=f"page-agent-type-{i}",
                    personality_prompt=f"Page {i}",
                    primary_model="claude-opus-4-5-20251101",
                )
                await agent_type_crud.create(session, obj_in=agent_type_data)

        async with AsyncTestingSessionLocal() as session:
            page1, total = await agent_type_crud.get_multi_with_filters(
                session,
                skip=0,
                limit=2,
            )

            assert len(page1) <= 2


class TestAgentTypeSpecialMethods:
    """Tests for special agent type CRUD methods."""

    @pytest.mark.asyncio
    async def test_deactivate_agent_type(self, async_test_db):
        """Test deactivating an agent type."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create an active agent type
        async with AsyncTestingSessionLocal() as session:
            agent_type_data = AgentTypeCreate(
                name="Deactivate Me",
                slug="deactivate-me-agent",
                personality_prompt="Deactivate",
                primary_model="claude-opus-4-5-20251101",
                is_active=True,
            )
            created = await agent_type_crud.create(session, obj_in=agent_type_data)
            agent_type_id = created.id

        # Deactivate
        async with AsyncTestingSessionLocal() as session:
            result = await agent_type_crud.deactivate(session, agent_type_id=agent_type_id)

            assert result is not None
            assert result.is_active is False

    @pytest.mark.asyncio
    async def test_deactivate_nonexistent_agent_type(self, async_test_db):
        """Test deactivating non-existent agent type returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_type_crud.deactivate(session, agent_type_id=uuid.uuid4())
            assert result is None

    @pytest.mark.asyncio
    async def test_get_with_instance_count(self, async_test_db, test_agent_type_crud, test_agent_instance_crud):
        """Test getting agent type with instance count."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_type_crud.get_with_instance_count(
                session,
                agent_type_id=test_agent_type_crud.id,
            )

            assert result is not None
            assert result["agent_type"].id == test_agent_type_crud.id
            assert result["instance_count"] >= 1

    @pytest.mark.asyncio
    async def test_get_with_instance_count_not_found(self, async_test_db):
        """Test getting non-existent agent type with count returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await agent_type_crud.get_with_instance_count(
                session,
                agent_type_id=uuid.uuid4(),
            )
            assert result is None
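For reference, the get_with_instance_count tests above only assert on a dict with "agent_type" and "instance_count" keys. The query shape below is an assumption sketched as a free function (the committed method may join or subquery differently), but it produces exactly that shape:

from typing import Any
from uuid import UUID

from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.syndarix import AgentInstance, AgentType


async def get_with_instance_count(
    db: AsyncSession, *, agent_type_id: UUID
) -> dict[str, Any] | None:
    """Return the agent type plus how many instances reference it."""
    agent_type = await db.get(AgentType, agent_type_id)
    if agent_type is None:
        return None  # matches the not-found test
    count_stmt = select(func.count(AgentInstance.id)).where(
        AgentInstance.agent_type_id == agent_type_id
    )
    instance_count = (await db.execute(count_stmt)).scalar_one()
    return {"agent_type": agent_type, "instance_count": instance_count}
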
556
backend/tests/crud/syndarix/test_issue_crud.py
Normal file
@@ -0,0 +1,556 @@
# tests/crud/syndarix/test_issue_crud.py
"""
Tests for Issue CRUD operations.
"""

import uuid
from datetime import UTC, datetime

import pytest

from app.crud.syndarix import issue as issue_crud
from app.models.syndarix import IssuePriority, IssueStatus, SyncStatus
from app.schemas.syndarix import IssueCreate, IssueUpdate


class TestIssueCreate:
    """Tests for issue creation."""

    @pytest.mark.asyncio
    async def test_create_issue_success(self, async_test_db, test_project_crud):
        """Test successfully creating an issue."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            issue_data = IssueCreate(
                project_id=test_project_crud.id,
                title="Test Issue",
                body="This is a test issue body",
                status=IssueStatus.OPEN,
                priority=IssuePriority.HIGH,
                labels=["bug", "security"],
                story_points=5,
            )
            result = await issue_crud.create(session, obj_in=issue_data)

            assert result.id is not None
            assert result.title == "Test Issue"
            assert result.body == "This is a test issue body"
            assert result.status == IssueStatus.OPEN
            assert result.priority == IssuePriority.HIGH
            assert result.labels == ["bug", "security"]
            assert result.story_points == 5

    @pytest.mark.asyncio
    async def test_create_issue_with_external_tracker(self, async_test_db, test_project_crud):
        """Test creating issue with external tracker info."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            issue_data = IssueCreate(
                project_id=test_project_crud.id,
                title="External Issue",
                external_tracker="gitea",
                external_id="gitea-123",
                external_url="https://gitea.example.com/issues/123",
                external_number=123,
            )
            result = await issue_crud.create(session, obj_in=issue_data)

            assert result.external_tracker == "gitea"
            assert result.external_id == "gitea-123"
            assert result.external_number == 123

    @pytest.mark.asyncio
    async def test_create_issue_minimal(self, async_test_db, test_project_crud):
        """Test creating issue with minimal fields."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            issue_data = IssueCreate(
                project_id=test_project_crud.id,
                title="Minimal Issue",
            )
            result = await issue_crud.create(session, obj_in=issue_data)

            assert result.title == "Minimal Issue"
            assert result.body == ""  # Default
            assert result.status == IssueStatus.OPEN  # Default
            assert result.priority == IssuePriority.MEDIUM  # Default


class TestIssueRead:
    """Tests for issue read operations."""

    @pytest.mark.asyncio
    async def test_get_issue_by_id(self, async_test_db, test_issue_crud):
        """Test getting issue by ID."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.get(session, id=str(test_issue_crud.id))

            assert result is not None
            assert result.id == test_issue_crud.id

    @pytest.mark.asyncio
    async def test_get_issue_by_id_not_found(self, async_test_db):
        """Test getting non-existent issue returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.get(session, id=str(uuid.uuid4()))
            assert result is None

    @pytest.mark.asyncio
    async def test_get_with_details(self, async_test_db, test_issue_crud):
        """Test getting issue with related details."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.get_with_details(
                session,
                issue_id=test_issue_crud.id,
            )

            assert result is not None
            assert result["issue"].id == test_issue_crud.id
            assert result["project_name"] is not None


class TestIssueUpdate:
    """Tests for issue update operations."""

    @pytest.mark.asyncio
    async def test_update_issue_basic_fields(self, async_test_db, test_issue_crud):
        """Test updating basic issue fields."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            issue = await issue_crud.get(session, id=str(test_issue_crud.id))

            update_data = IssueUpdate(
                title="Updated Title",
                body="Updated body content",
            )
            result = await issue_crud.update(session, db_obj=issue, obj_in=update_data)

            assert result.title == "Updated Title"
            assert result.body == "Updated body content"

    @pytest.mark.asyncio
    async def test_update_issue_status(self, async_test_db, test_issue_crud):
        """Test updating issue status."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            issue = await issue_crud.get(session, id=str(test_issue_crud.id))

            update_data = IssueUpdate(status=IssueStatus.IN_PROGRESS)
            result = await issue_crud.update(session, db_obj=issue, obj_in=update_data)

            assert result.status == IssueStatus.IN_PROGRESS

    @pytest.mark.asyncio
    async def test_update_issue_priority(self, async_test_db, test_issue_crud):
        """Test updating issue priority."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            issue = await issue_crud.get(session, id=str(test_issue_crud.id))

            update_data = IssueUpdate(priority=IssuePriority.CRITICAL)
            result = await issue_crud.update(session, db_obj=issue, obj_in=update_data)

            assert result.priority == IssuePriority.CRITICAL

    @pytest.mark.asyncio
    async def test_update_issue_labels(self, async_test_db, test_issue_crud):
        """Test updating issue labels."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            issue = await issue_crud.get(session, id=str(test_issue_crud.id))

            update_data = IssueUpdate(labels=["new-label", "updated"])
            result = await issue_crud.update(session, db_obj=issue, obj_in=update_data)

            assert "new-label" in result.labels


class TestIssueAssignment:
    """Tests for issue assignment operations."""

    @pytest.mark.asyncio
    async def test_assign_to_agent(self, async_test_db, test_issue_crud, test_agent_instance_crud):
        """Test assigning issue to an agent."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.assign_to_agent(
                session,
                issue_id=test_issue_crud.id,
                agent_id=test_agent_instance_crud.id,
            )

            assert result is not None
            assert result.assigned_agent_id == test_agent_instance_crud.id
            assert result.human_assignee is None

    @pytest.mark.asyncio
    async def test_unassign_agent(self, async_test_db, test_issue_crud, test_agent_instance_crud):
        """Test unassigning agent from issue."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # First assign
        async with AsyncTestingSessionLocal() as session:
            await issue_crud.assign_to_agent(
                session,
                issue_id=test_issue_crud.id,
                agent_id=test_agent_instance_crud.id,
            )

        # Then unassign
        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.assign_to_agent(
                session,
                issue_id=test_issue_crud.id,
                agent_id=None,
            )

            assert result.assigned_agent_id is None

    @pytest.mark.asyncio
    async def test_assign_to_human(self, async_test_db, test_issue_crud):
        """Test assigning issue to a human."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.assign_to_human(
                session,
                issue_id=test_issue_crud.id,
                human_assignee="developer@example.com",
            )

            assert result is not None
            assert result.human_assignee == "developer@example.com"
            assert result.assigned_agent_id is None

    @pytest.mark.asyncio
    async def test_assign_to_human_clears_agent(self, async_test_db, test_issue_crud, test_agent_instance_crud):
        """Test assigning to human clears agent assignment."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # First assign to agent
        async with AsyncTestingSessionLocal() as session:
            await issue_crud.assign_to_agent(
                session,
                issue_id=test_issue_crud.id,
                agent_id=test_agent_instance_crud.id,
            )

        # Then assign to human
        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.assign_to_human(
                session,
                issue_id=test_issue_crud.id,
                human_assignee="developer@example.com",
            )

            assert result.human_assignee == "developer@example.com"
            assert result.assigned_agent_id is None

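These assignment tests imply that agent and human assignment are mutually exclusive: assigning a human clears assigned_agent_id, and passing agent_id=None unassigns the agent. A sketch consistent with that behaviour (whether assign_to_agent also clears a pre-existing human_assignee is an assumption the tests above do not fully pin down; again these are free-function illustrations, not the committed methods):

from uuid import UUID

from sqlalchemy.ext.asyncio import AsyncSession

from app.models.syndarix import Issue


async def assign_to_agent(
    db: AsyncSession, *, issue_id: UUID, agent_id: UUID | None
) -> Issue | None:
    """Assign an agent (or unassign with None); assumed to clear any human assignee."""
    issue = await db.get(Issue, issue_id)
    if issue is None:
        return None
    issue.assigned_agent_id = agent_id
    issue.human_assignee = None  # assumption: one assignee kind at a time
    await db.commit()
    await db.refresh(issue)
    return issue


async def assign_to_human(
    db: AsyncSession, *, issue_id: UUID, human_assignee: str
) -> Issue | None:
    """Assign a human; clears any agent assignment, as the tests require."""
    issue = await db.get(Issue, issue_id)
    if issue is None:
        return None
    issue.human_assignee = human_assignee
    issue.assigned_agent_id = None
    await db.commit()
    await db.refresh(issue)
    return issue
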
class TestIssueLifecycle:
    """Tests for issue lifecycle operations."""

    @pytest.mark.asyncio
    async def test_close_issue(self, async_test_db, test_issue_crud):
        """Test closing an issue."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.close_issue(session, issue_id=test_issue_crud.id)

            assert result is not None
            assert result.status == IssueStatus.CLOSED
            assert result.closed_at is not None

    @pytest.mark.asyncio
    async def test_reopen_issue(self, async_test_db, test_project_crud):
        """Test reopening a closed issue."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create and close an issue
        async with AsyncTestingSessionLocal() as session:
            issue_data = IssueCreate(
                project_id=test_project_crud.id,
                title="Issue to Reopen",
            )
            created = await issue_crud.create(session, obj_in=issue_data)
            await issue_crud.close_issue(session, issue_id=created.id)
            issue_id = created.id

        # Reopen
        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.reopen_issue(session, issue_id=issue_id)

            assert result is not None
            assert result.status == IssueStatus.OPEN
            assert result.closed_at is None


class TestIssueByProject:
    """Tests for getting issues by project."""

    @pytest.mark.asyncio
    async def test_get_by_project(self, async_test_db, test_project_crud, test_issue_crud):
        """Test getting issues by project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            issues, total = await issue_crud.get_by_project(
                session,
                project_id=test_project_crud.id,
            )

            assert total >= 1
            assert all(i.project_id == test_project_crud.id for i in issues)

    @pytest.mark.asyncio
    async def test_get_by_project_with_status(self, async_test_db, test_project_crud):
        """Test filtering issues by status."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create issues with different statuses
        async with AsyncTestingSessionLocal() as session:
            open_issue = IssueCreate(
                project_id=test_project_crud.id,
                title="Open Issue Filter",
                status=IssueStatus.OPEN,
            )
            await issue_crud.create(session, obj_in=open_issue)

            closed_issue = IssueCreate(
                project_id=test_project_crud.id,
                title="Closed Issue Filter",
                status=IssueStatus.CLOSED,
            )
            await issue_crud.create(session, obj_in=closed_issue)

        async with AsyncTestingSessionLocal() as session:
            issues, _ = await issue_crud.get_by_project(
                session,
                project_id=test_project_crud.id,
                status=IssueStatus.OPEN,
            )

            assert all(i.status == IssueStatus.OPEN for i in issues)

    @pytest.mark.asyncio
    async def test_get_by_project_with_priority(self, async_test_db, test_project_crud):
        """Test filtering issues by priority."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            high_issue = IssueCreate(
                project_id=test_project_crud.id,
                title="High Priority Issue",
                priority=IssuePriority.HIGH,
            )
            await issue_crud.create(session, obj_in=high_issue)

        async with AsyncTestingSessionLocal() as session:
            issues, _ = await issue_crud.get_by_project(
                session,
                project_id=test_project_crud.id,
                priority=IssuePriority.HIGH,
            )

            assert all(i.priority == IssuePriority.HIGH for i in issues)

    @pytest.mark.asyncio
    async def test_get_by_project_with_search(self, async_test_db, test_project_crud):
        """Test searching issues by title/body."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            searchable_issue = IssueCreate(
                project_id=test_project_crud.id,
                title="Searchable Unique Title",
                body="This body contains searchable content",
            )
            await issue_crud.create(session, obj_in=searchable_issue)

        async with AsyncTestingSessionLocal() as session:
            issues, total = await issue_crud.get_by_project(
                session,
                project_id=test_project_crud.id,
                search="Searchable Unique",
            )

            assert total >= 1
            assert any(i.title == "Searchable Unique Title" for i in issues)


class TestIssueBySprint:
    """Tests for getting issues by sprint."""

    @pytest.mark.asyncio
    async def test_get_by_sprint(self, async_test_db, test_project_crud, test_sprint_crud):
        """Test getting issues by sprint."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create issue in sprint
        async with AsyncTestingSessionLocal() as session:
            issue_data = IssueCreate(
                project_id=test_project_crud.id,
                title="Sprint Issue",
                sprint_id=test_sprint_crud.id,
            )
            await issue_crud.create(session, obj_in=issue_data)

        async with AsyncTestingSessionLocal() as session:
            issues = await issue_crud.get_by_sprint(
                session,
                sprint_id=test_sprint_crud.id,
            )

            assert len(issues) >= 1
            assert all(i.sprint_id == test_sprint_crud.id for i in issues)


class TestIssueSyncStatus:
    """Tests for issue sync status operations."""

    @pytest.mark.asyncio
    async def test_update_sync_status(self, async_test_db, test_project_crud):
        """Test updating issue sync status."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create issue with external tracker
        async with AsyncTestingSessionLocal() as session:
            issue_data = IssueCreate(
                project_id=test_project_crud.id,
                title="Sync Status Issue",
                external_tracker="gitea",
                external_id="gitea-456",
            )
            created = await issue_crud.create(session, obj_in=issue_data)
            issue_id = created.id

        # Update sync status
        now = datetime.now(UTC)
        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.update_sync_status(
                session,
                issue_id=issue_id,
                sync_status=SyncStatus.PENDING,
                last_synced_at=now,
            )

            assert result is not None
            assert result.sync_status == SyncStatus.PENDING
            assert result.last_synced_at is not None

    @pytest.mark.asyncio
    async def test_get_pending_sync(self, async_test_db, test_project_crud):
        """Test getting issues pending sync."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create issue with pending sync
        async with AsyncTestingSessionLocal() as session:
            issue_data = IssueCreate(
                project_id=test_project_crud.id,
                title="Pending Sync Issue",
                external_tracker="gitea",
                external_id="gitea-789",
            )
            created = await issue_crud.create(session, obj_in=issue_data)

            # Set to pending
            await issue_crud.update_sync_status(
                session,
                issue_id=created.id,
                sync_status=SyncStatus.PENDING,
            )

        async with AsyncTestingSessionLocal() as session:
            issues = await issue_crud.get_pending_sync(session)

            assert any(i.sync_status == SyncStatus.PENDING for i in issues)


class TestIssueExternalTracker:
    """Tests for external tracker operations."""

    @pytest.mark.asyncio
    async def test_get_by_external_id(self, async_test_db, test_project_crud):
        """Test getting issue by external tracker ID."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create issue with external ID
        async with AsyncTestingSessionLocal() as session:
            issue_data = IssueCreate(
                project_id=test_project_crud.id,
                title="External ID Issue",
                external_tracker="github",
                external_id="github-unique-123",
            )
            await issue_crud.create(session, obj_in=issue_data)

        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.get_by_external_id(
                session,
                external_tracker="github",
                external_id="github-unique-123",
            )

            assert result is not None
            assert result.external_id == "github-unique-123"

    @pytest.mark.asyncio
    async def test_get_by_external_id_not_found(self, async_test_db):
        """Test getting non-existent external ID returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await issue_crud.get_by_external_id(
                session,
                external_tracker="gitea",
                external_id="non-existent",
            )
            assert result is None


class TestIssueStats:
    """Tests for issue statistics."""

    @pytest.mark.asyncio
    async def test_get_project_stats(self, async_test_db, test_project_crud):
        """Test getting issue statistics for a project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create issues with various statuses and priorities
        async with AsyncTestingSessionLocal() as session:
            for status in [IssueStatus.OPEN, IssueStatus.IN_PROGRESS, IssueStatus.CLOSED]:
                issue_data = IssueCreate(
                    project_id=test_project_crud.id,
                    title=f"Stats Issue {status.value}",
                    status=status,
                    story_points=3,
                )
                await issue_crud.create(session, obj_in=issue_data)

        async with AsyncTestingSessionLocal() as session:
            stats = await issue_crud.get_project_stats(
                session,
                project_id=test_project_crud.id,
            )

            assert "total" in stats
            assert "open" in stats
            assert "in_progress" in stats
            assert "closed" in stats
            assert "by_priority" in stats
            assert "total_story_points" in stats
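The stats test only checks which keys are present, not how the numbers are computed. A sketch that produces that shape by loading the project's issues and aggregating in Python; the committed method more likely aggregates in SQL (counts and sums with GROUP BY), but that is not visible in this hunk, so take this purely as an illustration of the returned structure:

from typing import Any
from uuid import UUID

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.syndarix import Issue, IssueStatus


async def get_project_stats(db: AsyncSession, *, project_id: UUID) -> dict[str, Any]:
    """Aggregate issue counts and story points for one project."""
    result = await db.execute(select(Issue).where(Issue.project_id == project_id))
    issues = result.scalars().all()

    by_priority: dict[str, int] = {}
    for issue in issues:
        key = issue.priority.value
        by_priority[key] = by_priority.get(key, 0) + 1

    return {
        "total": len(issues),
        "open": sum(1 for i in issues if i.status == IssueStatus.OPEN),
        "in_progress": sum(1 for i in issues if i.status == IssueStatus.IN_PROGRESS),
        "closed": sum(1 for i in issues if i.status == IssueStatus.CLOSED),
        "by_priority": by_priority,
        "total_story_points": sum(i.story_points or 0 for i in issues),
    }
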
409
backend/tests/crud/syndarix/test_project_crud.py
Normal file
@@ -0,0 +1,409 @@
# tests/crud/syndarix/test_project_crud.py
"""
Tests for Project CRUD operations.
"""

import uuid

import pytest

from app.crud.syndarix import project as project_crud
from app.models.syndarix import AutonomyLevel, ProjectStatus
from app.schemas.syndarix import ProjectCreate, ProjectUpdate


class TestProjectCreate:
    """Tests for project creation."""

    @pytest.mark.asyncio
    async def test_create_project_success(self, async_test_db, test_owner_crud):
        """Test successfully creating a project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            project_data = ProjectCreate(
                name="New Project",
                slug="new-project",
                description="A brand new project",
                autonomy_level=AutonomyLevel.MILESTONE,
                status=ProjectStatus.ACTIVE,
                settings={"key": "value"},
                owner_id=test_owner_crud.id,
            )
            result = await project_crud.create(session, obj_in=project_data)

            assert result.id is not None
            assert result.name == "New Project"
            assert result.slug == "new-project"
            assert result.description == "A brand new project"
            assert result.autonomy_level == AutonomyLevel.MILESTONE
            assert result.status == ProjectStatus.ACTIVE
            assert result.settings == {"key": "value"}
            assert result.owner_id == test_owner_crud.id

    @pytest.mark.asyncio
    async def test_create_project_duplicate_slug_fails(self, async_test_db, test_project_crud):
        """Test creating project with duplicate slug raises ValueError."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            project_data = ProjectCreate(
                name="Duplicate Project",
                slug=test_project_crud.slug,  # Duplicate slug
                description="This should fail",
            )

            with pytest.raises(ValueError) as exc_info:
                await project_crud.create(session, obj_in=project_data)

            assert "already exists" in str(exc_info.value).lower()

    @pytest.mark.asyncio
    async def test_create_project_minimal_fields(self, async_test_db):
        """Test creating project with minimal required fields."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            project_data = ProjectCreate(
                name="Minimal Project",
                slug="minimal-project",
            )
            result = await project_crud.create(session, obj_in=project_data)

            assert result.name == "Minimal Project"
            assert result.slug == "minimal-project"
            assert result.autonomy_level == AutonomyLevel.MILESTONE  # Default
            assert result.status == ProjectStatus.ACTIVE  # Default


class TestProjectRead:
    """Tests for project read operations."""

    @pytest.mark.asyncio
    async def test_get_project_by_id(self, async_test_db, test_project_crud):
        """Test getting project by ID."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await project_crud.get(session, id=str(test_project_crud.id))

            assert result is not None
            assert result.id == test_project_crud.id
            assert result.name == test_project_crud.name

    @pytest.mark.asyncio
    async def test_get_project_by_id_not_found(self, async_test_db):
        """Test getting non-existent project returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await project_crud.get(session, id=str(uuid.uuid4()))
            assert result is None

    @pytest.mark.asyncio
    async def test_get_project_by_slug(self, async_test_db, test_project_crud):
        """Test getting project by slug."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await project_crud.get_by_slug(session, slug=test_project_crud.slug)

            assert result is not None
            assert result.slug == test_project_crud.slug

    @pytest.mark.asyncio
    async def test_get_project_by_slug_not_found(self, async_test_db):
        """Test getting non-existent slug returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await project_crud.get_by_slug(session, slug="non-existent-slug")
            assert result is None

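get_by_slug is exercised above but its body is not shown in this hunk. A minimal sketch of the assumed lookup (SQLAlchemy 2.0 select on a unique slug column, returning None when no row matches; an illustration rather than the committed method):

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.syndarix import Project


async def get_by_slug(db: AsyncSession, *, slug: str) -> Project | None:
    """Look a project up by its unique slug; None when no row matches."""
    result = await db.execute(select(Project).where(Project.slug == slug))
    return result.scalar_one_or_none()
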
class TestProjectUpdate:
    """Tests for project update operations."""

    @pytest.mark.asyncio
    async def test_update_project_basic_fields(self, async_test_db, test_project_crud):
        """Test updating basic project fields."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            project = await project_crud.get(session, id=str(test_project_crud.id))

            update_data = ProjectUpdate(
                name="Updated Project Name",
                description="Updated description",
            )
            result = await project_crud.update(session, db_obj=project, obj_in=update_data)

            assert result.name == "Updated Project Name"
            assert result.description == "Updated description"

    @pytest.mark.asyncio
    async def test_update_project_status(self, async_test_db, test_project_crud):
        """Test updating project status."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            project = await project_crud.get(session, id=str(test_project_crud.id))

            update_data = ProjectUpdate(status=ProjectStatus.PAUSED)
            result = await project_crud.update(session, db_obj=project, obj_in=update_data)

            assert result.status == ProjectStatus.PAUSED

    @pytest.mark.asyncio
    async def test_update_project_autonomy_level(self, async_test_db, test_project_crud):
        """Test updating project autonomy level."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            project = await project_crud.get(session, id=str(test_project_crud.id))

            update_data = ProjectUpdate(autonomy_level=AutonomyLevel.AUTONOMOUS)
            result = await project_crud.update(session, db_obj=project, obj_in=update_data)

            assert result.autonomy_level == AutonomyLevel.AUTONOMOUS

    @pytest.mark.asyncio
    async def test_update_project_settings(self, async_test_db, test_project_crud):
        """Test updating project settings."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            project = await project_crud.get(session, id=str(test_project_crud.id))

            new_settings = {"mcp_servers": ["gitea", "slack"], "webhook_url": "https://example.com"}
            update_data = ProjectUpdate(settings=new_settings)
            result = await project_crud.update(session, db_obj=project, obj_in=update_data)

            assert result.settings == new_settings


class TestProjectDelete:
    """Tests for project delete operations."""

    @pytest.mark.asyncio
    async def test_delete_project(self, async_test_db, test_owner_crud):
        """Test deleting a project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create a project to delete
        async with AsyncTestingSessionLocal() as session:
            project_data = ProjectCreate(
                name="Delete Me",
                slug="delete-me-project",
                owner_id=test_owner_crud.id,
            )
            created = await project_crud.create(session, obj_in=project_data)
            project_id = created.id

        # Delete the project
        async with AsyncTestingSessionLocal() as session:
            result = await project_crud.remove(session, id=str(project_id))
            assert result is not None
            assert result.id == project_id

        # Verify deletion
        async with AsyncTestingSessionLocal() as session:
            deleted = await project_crud.get(session, id=str(project_id))
            assert deleted is None

    @pytest.mark.asyncio
    async def test_delete_nonexistent_project(self, async_test_db):
|
||||
"""Test deleting non-existent project returns None."""
|
||||
_test_engine, AsyncTestingSessionLocal = async_test_db
|
||||
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
result = await project_crud.remove(session, id=str(uuid.uuid4()))
|
||||
assert result is None
|
||||
|
||||
|
||||
class TestProjectFilters:
|
||||
"""Tests for project filtering and search."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_multi_with_filters_status(self, async_test_db, test_owner_crud):
|
||||
"""Test filtering projects by status."""
|
||||
_test_engine, AsyncTestingSessionLocal = async_test_db
|
||||
|
||||
# Create multiple projects with different statuses
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
for i, status in enumerate(ProjectStatus):
|
||||
project_data = ProjectCreate(
|
||||
name=f"Project {status.value}",
|
||||
slug=f"project-filter-{status.value}-{i}",
|
||||
status=status,
|
||||
owner_id=test_owner_crud.id,
|
||||
)
|
||||
await project_crud.create(session, obj_in=project_data)
|
||||
|
||||
# Filter by ACTIVE status
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
projects, total = await project_crud.get_multi_with_filters(
|
||||
session,
|
||||
status=ProjectStatus.ACTIVE,
|
||||
)
|
||||
|
||||
assert all(p.status == ProjectStatus.ACTIVE for p in projects)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_multi_with_filters_search(self, async_test_db, test_owner_crud):
|
||||
"""Test searching projects by name/slug."""
|
||||
_test_engine, AsyncTestingSessionLocal = async_test_db
|
||||
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
project_data = ProjectCreate(
|
||||
name="Searchable Project",
|
||||
slug="searchable-unique-slug",
|
||||
description="This project is searchable",
|
||||
owner_id=test_owner_crud.id,
|
||||
)
|
||||
await project_crud.create(session, obj_in=project_data)
|
||||
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
projects, total = await project_crud.get_multi_with_filters(
|
||||
session,
|
||||
search="Searchable",
|
||||
)
|
||||
|
||||
assert total >= 1
|
||||
assert any(p.name == "Searchable Project" for p in projects)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_multi_with_filters_owner(self, async_test_db, test_owner_crud, test_project_crud):
|
||||
"""Test filtering projects by owner."""
|
||||
_test_engine, AsyncTestingSessionLocal = async_test_db
|
||||
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
projects, total = await project_crud.get_multi_with_filters(
|
||||
session,
|
||||
owner_id=test_owner_crud.id,
|
||||
)
|
||||
|
||||
assert total >= 1
|
||||
assert all(p.owner_id == test_owner_crud.id for p in projects)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_multi_with_filters_pagination(self, async_test_db, test_owner_crud):
|
||||
"""Test pagination of project results."""
|
||||
_test_engine, AsyncTestingSessionLocal = async_test_db
|
||||
|
||||
# Create multiple projects
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
for i in range(5):
|
||||
project_data = ProjectCreate(
|
||||
name=f"Page Project {i}",
|
||||
slug=f"page-project-{i}",
|
||||
owner_id=test_owner_crud.id,
|
||||
)
|
||||
await project_crud.create(session, obj_in=project_data)
|
||||
|
||||
# Get first page
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
page1, total = await project_crud.get_multi_with_filters(
|
||||
session,
|
||||
skip=0,
|
||||
limit=2,
|
||||
owner_id=test_owner_crud.id,
|
||||
)
|
||||
|
||||
assert len(page1) <= 2
|
||||
assert total >= 5
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_multi_with_filters_sorting(self, async_test_db, test_owner_crud):
|
||||
"""Test sorting project results."""
|
||||
_test_engine, AsyncTestingSessionLocal = async_test_db
|
||||
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
for i, name in enumerate(["Charlie", "Alice", "Bob"]):
|
||||
project_data = ProjectCreate(
|
||||
name=name,
|
||||
slug=f"sort-project-{name.lower()}",
|
||||
owner_id=test_owner_crud.id,
|
||||
)
|
||||
await project_crud.create(session, obj_in=project_data)
|
||||
|
||||
async with AsyncTestingSessionLocal() as session:
|
||||
projects, _ = await project_crud.get_multi_with_filters(
|
||||
session,
|
||||
sort_by="name",
|
||||
sort_order="asc",
|
||||
owner_id=test_owner_crud.id,
|
||||
)
|
||||
|
||||
names = [p.name for p in projects if p.name in ["Alice", "Bob", "Charlie"]]
|
||||
assert names == sorted(names)
|
||||
|
||||
|
||||
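
Editor's note: the filter, search, pagination, and sorting tests above all go through get_multi_with_filters, which returns an (items, total) tuple. Its implementation is outside this hunk; the following is a minimal sketch of the assumed query shape using SQLAlchemy 2.0 select, with the parameter names taken from the tests and everything else an assumption.

# Hypothetical sketch of the assumed (items, total) query shape.
from sqlalchemy import asc, desc, func, or_, select

from app.models.syndarix import Project


async def get_projects_with_filters(db, *, skip=0, limit=100, status=None,
                                    owner_id=None, search=None,
                                    sort_by="created_at", sort_order="desc"):
    query = select(Project)
    if status is not None:
        query = query.where(Project.status == status)
    if owner_id is not None:
        query = query.where(Project.owner_id == owner_id)
    if search:
        pattern = f"%{search}%"
        query = query.where(or_(Project.name.ilike(pattern), Project.slug.ilike(pattern)))

    # Count before pagination so `total` reflects all matching rows.
    total = (await db.execute(select(func.count()).select_from(query.subquery()))).scalar_one()

    order = asc if sort_order == "asc" else desc
    query = query.order_by(order(getattr(Project, sort_by))).offset(skip).limit(limit)
    items = (await db.execute(query)).scalars().all()
    return items, total
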
class TestProjectSpecialMethods:
    """Tests for special project CRUD methods."""

    @pytest.mark.asyncio
    async def test_archive_project(self, async_test_db, test_project_crud):
        """Test archiving a project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await project_crud.archive_project(session, project_id=test_project_crud.id)

            assert result is not None
            assert result.status == ProjectStatus.ARCHIVED

    @pytest.mark.asyncio
    async def test_archive_nonexistent_project(self, async_test_db):
        """Test archiving non-existent project returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await project_crud.archive_project(session, project_id=uuid.uuid4())
            assert result is None

    @pytest.mark.asyncio
    async def test_get_projects_by_owner(self, async_test_db, test_owner_crud, test_project_crud):
        """Test getting all projects by owner."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            projects = await project_crud.get_projects_by_owner(
                session,
                owner_id=test_owner_crud.id,
            )

            assert len(projects) >= 1
            assert all(p.owner_id == test_owner_crud.id for p in projects)

    @pytest.mark.asyncio
    async def test_get_projects_by_owner_with_status(self, async_test_db, test_owner_crud):
        """Test getting projects by owner filtered by status."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Create projects with different statuses
        async with AsyncTestingSessionLocal() as session:
            active_project = ProjectCreate(
                name="Active Owner Project",
                slug="active-owner-project",
                status=ProjectStatus.ACTIVE,
                owner_id=test_owner_crud.id,
            )
            await project_crud.create(session, obj_in=active_project)

            paused_project = ProjectCreate(
                name="Paused Owner Project",
                slug="paused-owner-project",
                status=ProjectStatus.PAUSED,
                owner_id=test_owner_crud.id,
            )
            await project_crud.create(session, obj_in=paused_project)

        async with AsyncTestingSessionLocal() as session:
            projects = await project_crud.get_projects_by_owner(
                session,
                owner_id=test_owner_crud.id,
                status=ProjectStatus.ACTIVE,
            )

            assert all(p.status == ProjectStatus.ACTIVE for p in projects)
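
Editor's note: archive_project is exercised above but defined elsewhere in this commit. A minimal sketch of the behaviour the tests imply (fetch the project, flip its status to ARCHIVED, return None for unknown IDs); the real implementation may differ.

# Hypothetical sketch of the archive behaviour implied by the tests above.
from sqlalchemy import select

from app.models.syndarix import Project, ProjectStatus


async def archive_project(db, *, project_id):
    project = (
        await db.execute(select(Project).where(Project.id == project_id))
    ).scalar_one_or_none()
    if project is None:
        return None  # matches test_archive_nonexistent_project
    project.status = ProjectStatus.ARCHIVED
    await db.commit()
    await db.refresh(project)
    return project
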
524
backend/tests/crud/syndarix/test_sprint_crud.py
Normal file
@@ -0,0 +1,524 @@
# tests/crud/syndarix/test_sprint_crud.py
"""
Tests for Sprint CRUD operations.
"""

import uuid
from datetime import date, timedelta

import pytest

from app.crud.syndarix import sprint as sprint_crud
from app.models.syndarix import SprintStatus
from app.schemas.syndarix import SprintCreate, SprintUpdate


class TestSprintCreate:
    """Tests for sprint creation."""

    @pytest.mark.asyncio
    async def test_create_sprint_success(self, async_test_db, test_project_crud):
        """Test successfully creating a sprint."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()
        async with AsyncTestingSessionLocal() as session:
            sprint_data = SprintCreate(
                project_id=test_project_crud.id,
                name="Sprint 1",
                number=1,
                goal="Complete initial setup",
                start_date=today,
                end_date=today + timedelta(days=14),
                status=SprintStatus.PLANNED,
                planned_points=21,
            )
            result = await sprint_crud.create(session, obj_in=sprint_data)

            assert result.id is not None
            assert result.name == "Sprint 1"
            assert result.number == 1
            assert result.goal == "Complete initial setup"
            assert result.status == SprintStatus.PLANNED
            assert result.planned_points == 21

    @pytest.mark.asyncio
    async def test_create_sprint_minimal(self, async_test_db, test_project_crud):
        """Test creating sprint with minimal fields."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()
        async with AsyncTestingSessionLocal() as session:
            sprint_data = SprintCreate(
                project_id=test_project_crud.id,
                name="Minimal Sprint",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
            )
            result = await sprint_crud.create(session, obj_in=sprint_data)

            assert result.name == "Minimal Sprint"
            assert result.status == SprintStatus.PLANNED  # Default
            assert result.goal is None
            assert result.planned_points is None


class TestSprintRead:
    """Tests for sprint read operations."""

    @pytest.mark.asyncio
    async def test_get_sprint_by_id(self, async_test_db, test_sprint_crud):
        """Test getting sprint by ID."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await sprint_crud.get(session, id=str(test_sprint_crud.id))

            assert result is not None
            assert result.id == test_sprint_crud.id

    @pytest.mark.asyncio
    async def test_get_sprint_by_id_not_found(self, async_test_db):
        """Test getting non-existent sprint returns None."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await sprint_crud.get(session, id=str(uuid.uuid4()))
            assert result is None

    @pytest.mark.asyncio
    async def test_get_with_details(self, async_test_db, test_sprint_crud):
        """Test getting sprint with related details."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await sprint_crud.get_with_details(
                session,
                sprint_id=test_sprint_crud.id,
            )

            assert result is not None
            assert result["sprint"].id == test_sprint_crud.id
            assert result["project_name"] is not None
            assert "issue_count" in result
            assert "open_issues" in result
            assert "completed_issues" in result


class TestSprintUpdate:
    """Tests for sprint update operations."""

    @pytest.mark.asyncio
    async def test_update_sprint_basic_fields(self, async_test_db, test_sprint_crud):
        """Test updating basic sprint fields."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            sprint = await sprint_crud.get(session, id=str(test_sprint_crud.id))

            update_data = SprintUpdate(
                name="Updated Sprint Name",
                goal="Updated goal",
            )
            result = await sprint_crud.update(session, db_obj=sprint, obj_in=update_data)

            assert result.name == "Updated Sprint Name"
            assert result.goal == "Updated goal"

    @pytest.mark.asyncio
    async def test_update_sprint_dates(self, async_test_db, test_sprint_crud):
        """Test updating sprint dates."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()
        async with AsyncTestingSessionLocal() as session:
            sprint = await sprint_crud.get(session, id=str(test_sprint_crud.id))

            update_data = SprintUpdate(
                start_date=today + timedelta(days=1),
                end_date=today + timedelta(days=21),
            )
            result = await sprint_crud.update(session, db_obj=sprint, obj_in=update_data)

            assert result.start_date == today + timedelta(days=1)
            assert result.end_date == today + timedelta(days=21)
class TestSprintLifecycle:
    """Tests for sprint lifecycle operations."""

    @pytest.mark.asyncio
    async def test_start_sprint(self, async_test_db, test_sprint_crud):
        """Test starting a planned sprint."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            result = await sprint_crud.start_sprint(
                session,
                sprint_id=test_sprint_crud.id,
            )

            assert result is not None
            assert result.status == SprintStatus.ACTIVE

    @pytest.mark.asyncio
    async def test_start_sprint_with_custom_date(self, async_test_db, test_project_crud):
        """Test starting sprint with custom start date."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        # Create a planned sprint
        async with AsyncTestingSessionLocal() as session:
            sprint_data = SprintCreate(
                project_id=test_project_crud.id,
                name="Start Date Sprint",
                number=10,
                start_date=today,
                end_date=today + timedelta(days=14),
                status=SprintStatus.PLANNED,
            )
            created = await sprint_crud.create(session, obj_in=sprint_data)
            sprint_id = created.id

        # Start with custom date
        new_start = today + timedelta(days=2)
        async with AsyncTestingSessionLocal() as session:
            result = await sprint_crud.start_sprint(
                session,
                sprint_id=sprint_id,
                start_date=new_start,
            )

            assert result.status == SprintStatus.ACTIVE
            assert result.start_date == new_start

    @pytest.mark.asyncio
    async def test_start_sprint_already_active_fails(self, async_test_db, test_project_crud):
        """Test starting an already active sprint raises ValueError."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        # Create and start a sprint
        async with AsyncTestingSessionLocal() as session:
            sprint_data = SprintCreate(
                project_id=test_project_crud.id,
                name="Already Active Sprint",
                number=20,
                start_date=today,
                end_date=today + timedelta(days=14),
                status=SprintStatus.ACTIVE,
            )
            created = await sprint_crud.create(session, obj_in=sprint_data)
            sprint_id = created.id

        # Try to start again
        async with AsyncTestingSessionLocal() as session:
            with pytest.raises(ValueError) as exc_info:
                await sprint_crud.start_sprint(session, sprint_id=sprint_id)

            assert "cannot start sprint" in str(exc_info.value).lower()

    @pytest.mark.asyncio
    async def test_complete_sprint(self, async_test_db, test_project_crud):
        """Test completing an active sprint."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        # Create an active sprint
        async with AsyncTestingSessionLocal() as session:
            sprint_data = SprintCreate(
                project_id=test_project_crud.id,
                name="Complete Me Sprint",
                number=30,
                start_date=today - timedelta(days=14),
                end_date=today,
                status=SprintStatus.ACTIVE,
                planned_points=21,
            )
            created = await sprint_crud.create(session, obj_in=sprint_data)
            sprint_id = created.id

        # Complete
        async with AsyncTestingSessionLocal() as session:
            result = await sprint_crud.complete_sprint(session, sprint_id=sprint_id)

            assert result is not None
            assert result.status == SprintStatus.COMPLETED

    @pytest.mark.asyncio
    async def test_complete_planned_sprint_fails(self, async_test_db, test_project_crud):
        """Test completing a planned sprint raises ValueError."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        async with AsyncTestingSessionLocal() as session:
            sprint_data = SprintCreate(
                project_id=test_project_crud.id,
                name="Planned Sprint",
                number=40,
                start_date=today,
                end_date=today + timedelta(days=14),
                status=SprintStatus.PLANNED,
            )
            created = await sprint_crud.create(session, obj_in=sprint_data)
            sprint_id = created.id

        async with AsyncTestingSessionLocal() as session:
            with pytest.raises(ValueError) as exc_info:
                await sprint_crud.complete_sprint(session, sprint_id=sprint_id)

            assert "cannot complete sprint" in str(exc_info.value).lower()

    @pytest.mark.asyncio
    async def test_cancel_sprint(self, async_test_db, test_project_crud):
        """Test cancelling a sprint."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        async with AsyncTestingSessionLocal() as session:
            sprint_data = SprintCreate(
                project_id=test_project_crud.id,
                name="Cancel Me Sprint",
                number=50,
                start_date=today,
                end_date=today + timedelta(days=14),
                status=SprintStatus.ACTIVE,
            )
            created = await sprint_crud.create(session, obj_in=sprint_data)
            sprint_id = created.id

        async with AsyncTestingSessionLocal() as session:
            result = await sprint_crud.cancel_sprint(session, sprint_id=sprint_id)

            assert result is not None
            assert result.status == SprintStatus.CANCELLED

    @pytest.mark.asyncio
    async def test_cancel_completed_sprint_fails(self, async_test_db, test_project_crud):
        """Test cancelling a completed sprint raises ValueError."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        async with AsyncTestingSessionLocal() as session:
            sprint_data = SprintCreate(
                project_id=test_project_crud.id,
                name="Completed Sprint",
                number=60,
                start_date=today - timedelta(days=14),
                end_date=today,
                status=SprintStatus.COMPLETED,
            )
            created = await sprint_crud.create(session, obj_in=sprint_data)
            sprint_id = created.id

        async with AsyncTestingSessionLocal() as session:
            with pytest.raises(ValueError) as exc_info:
                await sprint_crud.cancel_sprint(session, sprint_id=sprint_id)

            assert "cannot cancel sprint" in str(exc_info.value).lower()
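
Editor's note: the lifecycle tests above pin down the allowed transitions (only PLANNED sprints can start; only ACTIVE sprints can complete; COMPLETED sprints cannot be cancelled) and the lowercase "cannot start/complete/cancel sprint" error prefixes. A minimal sketch of a guard with those properties follows; treating cancellation of a PLANNED sprint as allowed is an assumption the tests do not confirm.

# Hypothetical sketch of the status guard the lifecycle tests imply.
from app.models.syndarix import SprintStatus

_ALLOWED = {
    "start": {SprintStatus.PLANNED},
    "complete": {SprintStatus.ACTIVE},
    "cancel": {SprintStatus.PLANNED, SprintStatus.ACTIVE},  # PLANNED case is assumed
}


def check_transition(action: str, current: SprintStatus) -> None:
    """Raise ValueError when the requested lifecycle action is not allowed."""
    if current not in _ALLOWED[action]:
        # The tests only assert on the "cannot <action> sprint" prefix (case-insensitive).
        raise ValueError(f"Cannot {action} sprint in status '{current.value}'")
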
class TestSprintByProject:
    """Tests for getting sprints by project."""

    @pytest.mark.asyncio
    async def test_get_by_project(self, async_test_db, test_project_crud, test_sprint_crud):
        """Test getting sprints by project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            sprints, total = await sprint_crud.get_by_project(
                session,
                project_id=test_project_crud.id,
            )

            assert total >= 1
            assert all(s.project_id == test_project_crud.id for s in sprints)

    @pytest.mark.asyncio
    async def test_get_by_project_with_status(self, async_test_db, test_project_crud):
        """Test filtering sprints by status."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        # Create sprints with different statuses
        async with AsyncTestingSessionLocal() as session:
            planned_sprint = SprintCreate(
                project_id=test_project_crud.id,
                name="Planned Filter Sprint",
                number=70,
                start_date=today,
                end_date=today + timedelta(days=14),
                status=SprintStatus.PLANNED,
            )
            await sprint_crud.create(session, obj_in=planned_sprint)

            active_sprint = SprintCreate(
                project_id=test_project_crud.id,
                name="Active Filter Sprint",
                number=71,
                start_date=today,
                end_date=today + timedelta(days=14),
                status=SprintStatus.ACTIVE,
            )
            await sprint_crud.create(session, obj_in=active_sprint)

        async with AsyncTestingSessionLocal() as session:
            sprints, _ = await sprint_crud.get_by_project(
                session,
                project_id=test_project_crud.id,
                status=SprintStatus.ACTIVE,
            )

            assert all(s.status == SprintStatus.ACTIVE for s in sprints)


class TestSprintActiveSprint:
    """Tests for active sprint operations."""

    @pytest.mark.asyncio
    async def test_get_active_sprint(self, async_test_db, test_project_crud):
        """Test getting active sprint for a project."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        # Create an active sprint
        async with AsyncTestingSessionLocal() as session:
            sprint_data = SprintCreate(
                project_id=test_project_crud.id,
                name="Active Sprint",
                number=80,
                start_date=today,
                end_date=today + timedelta(days=14),
                status=SprintStatus.ACTIVE,
            )
            await sprint_crud.create(session, obj_in=sprint_data)

        async with AsyncTestingSessionLocal() as session:
            result = await sprint_crud.get_active_sprint(
                session,
                project_id=test_project_crud.id,
            )

            assert result is not None
            assert result.status == SprintStatus.ACTIVE

    @pytest.mark.asyncio
    async def test_get_active_sprint_none(self, async_test_db, test_project_crud):
        """Test getting active sprint when none exists."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        # Note: test_sprint_crud has PLANNED status by default

        async with AsyncTestingSessionLocal() as session:
            result = await sprint_crud.get_active_sprint(
                session,
                project_id=test_project_crud.id,
            )

            # May or may not be None depending on other tests
            if result is not None:
                assert result.status == SprintStatus.ACTIVE
class TestSprintNextNumber:
    """Tests for getting next sprint number."""

    @pytest.mark.asyncio
    async def test_get_next_sprint_number(self, async_test_db, test_project_crud):
        """Test getting next sprint number."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        # Create sprints with numbers
        async with AsyncTestingSessionLocal() as session:
            for i in range(1, 4):
                sprint_data = SprintCreate(
                    project_id=test_project_crud.id,
                    name=f"Number Sprint {i}",
                    number=i,
                    start_date=today,
                    end_date=today + timedelta(days=14),
                )
                await sprint_crud.create(session, obj_in=sprint_data)

        async with AsyncTestingSessionLocal() as session:
            next_number = await sprint_crud.get_next_sprint_number(
                session,
                project_id=test_project_crud.id,
            )

            assert next_number >= 4
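
Editor's note: the test above only requires get_next_sprint_number to return at least one more than the highest existing number for the project. A minimal sketch of the assumed MAX-based query (not necessarily the shipped implementation):

# Hypothetical sketch: next sprint number as MAX(number) + 1 per project.
from sqlalchemy import func, select

from app.models.syndarix import Sprint


async def next_sprint_number(db, *, project_id) -> int:
    current_max = (
        await db.execute(
            select(func.max(Sprint.number)).where(Sprint.project_id == project_id)
        )
    ).scalar()
    return (current_max or 0) + 1
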
class TestSprintVelocity:
    """Tests for sprint velocity operations."""

    @pytest.mark.asyncio
    async def test_get_velocity(self, async_test_db, test_project_crud):
        """Test getting velocity data for completed sprints."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        today = date.today()

        # Create completed sprints with points
        async with AsyncTestingSessionLocal() as session:
            for i in range(1, 4):
                sprint_data = SprintCreate(
                    project_id=test_project_crud.id,
                    name=f"Velocity Sprint {i}",
                    number=100 + i,
                    start_date=today - timedelta(days=14 * i),
                    end_date=today - timedelta(days=14 * (i - 1)),
                    status=SprintStatus.COMPLETED,
                    planned_points=20,
                    completed_points=15 + i,
                )
                await sprint_crud.create(session, obj_in=sprint_data)

        async with AsyncTestingSessionLocal() as session:
            velocity_data = await sprint_crud.get_velocity(
                session,
                project_id=test_project_crud.id,
                limit=5,
            )

            assert len(velocity_data) >= 1
            for data in velocity_data:
                assert "sprint_number" in data
                assert "sprint_name" in data
                assert "planned_points" in data
                assert "completed_points" in data
                assert "velocity" in data


class TestSprintWithIssueCounts:
    """Tests for getting sprints with issue counts."""

    @pytest.mark.asyncio
    async def test_get_sprints_with_issue_counts(self, async_test_db, test_project_crud, test_sprint_crud):
        """Test getting sprints with issue counts."""
        _test_engine, AsyncTestingSessionLocal = async_test_db

        async with AsyncTestingSessionLocal() as session:
            results, total = await sprint_crud.get_sprints_with_issue_counts(
                session,
                project_id=test_project_crud.id,
            )

            assert total >= 1
            for result in results:
                assert "sprint" in result
                assert "issue_count" in result
                assert "open_issues" in result
                assert "completed_issues" in result
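
Editor's note: test_get_velocity only checks the keys of each entry, so the exact velocity formula is not fixed by this hunk. One plausible per-sprint entry shape is sketched below, treating "velocity" as completed points for the sprint; that definition is an assumption, not something the diff confirms.

# Hypothetical sketch of one velocity entry per completed sprint.
def velocity_entry(sprint) -> dict:
    return {
        "sprint_number": sprint.number,
        "sprint_name": sprint.name,
        "planned_points": sprint.planned_points,
        "completed_points": sprint.completed_points,
        # Assumed definition; the tests do not constrain the formula.
        "velocity": sprint.completed_points or 0,
    }
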
2
backend/tests/models/syndarix/__init__.py
Normal file
@@ -0,0 +1,2 @@
# tests/models/syndarix/__init__.py
"""Syndarix model unit tests."""
192
backend/tests/models/syndarix/conftest.py
Normal file
@@ -0,0 +1,192 @@
# tests/models/syndarix/conftest.py
"""
Shared fixtures for Syndarix model tests.
"""

import uuid
from datetime import date, timedelta

import pytest
import pytest_asyncio

from app.models.syndarix import (
    AgentInstance,
    AgentStatus,
    AgentType,
    AutonomyLevel,
    Issue,
    IssuePriority,
    IssueStatus,
    Project,
    ProjectStatus,
    Sprint,
    SprintStatus,
    SyncStatus,
)
from app.models.user import User


@pytest.fixture
def sample_project_data():
    """Return sample project data for testing."""
    return {
        "name": "Test Project",
        "slug": "test-project",
        "description": "A test project for unit testing",
        "autonomy_level": AutonomyLevel.MILESTONE,
        "status": ProjectStatus.ACTIVE,
        "settings": {"mcp_servers": ["gitea", "slack"]},
    }


@pytest.fixture
def sample_agent_type_data():
    """Return sample agent type data for testing."""
    return {
        "name": "Backend Engineer",
        "slug": "backend-engineer",
        "description": "Specialized in backend development",
        "expertise": ["python", "fastapi", "postgresql"],
        "personality_prompt": "You are an expert backend engineer...",
        "primary_model": "claude-opus-4-5-20251101",
        "fallback_models": ["claude-sonnet-4-20250514"],
        "model_params": {"temperature": 0.7, "max_tokens": 4096},
        "mcp_servers": ["gitea", "file-system"],
        "tool_permissions": {"allowed": ["*"], "denied": []},
        "is_active": True,
    }


@pytest.fixture
def sample_sprint_data():
    """Return sample sprint data for testing."""
    today = date.today()
    return {
        "name": "Sprint 1",
        "number": 1,
        "goal": "Complete initial setup and core features",
        "start_date": today,
        "end_date": today + timedelta(days=14),
        "status": SprintStatus.PLANNED,
        "planned_points": 21,
        "completed_points": 0,
    }


@pytest.fixture
def sample_issue_data():
    """Return sample issue data for testing."""
    return {
        "title": "Implement user authentication",
        "body": "As a user, I want to log in securely...",
        "status": IssueStatus.OPEN,
        "priority": IssuePriority.HIGH,
        "labels": ["backend", "security"],
        "story_points": 5,
    }


@pytest_asyncio.fixture
async def test_owner(async_test_db):
    """Create a test user to be used as project owner."""
    from app.core.auth import get_password_hash

    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        user = User(
            id=uuid.uuid4(),
            email="owner@example.com",
            password_hash=get_password_hash("TestPassword123!"),
            first_name="Test",
            last_name="Owner",
            is_active=True,
            is_superuser=False,
        )
        session.add(user)
        await session.commit()
        await session.refresh(user)
        return user


@pytest_asyncio.fixture
async def test_project(async_test_db, test_owner, sample_project_data):
    """Create a test project in the database."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        project = Project(
            id=uuid.uuid4(),
            owner_id=test_owner.id,
            **sample_project_data,
        )
        session.add(project)
        await session.commit()
        await session.refresh(project)
        return project


@pytest_asyncio.fixture
async def test_agent_type(async_test_db, sample_agent_type_data):
    """Create a test agent type in the database."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        agent_type = AgentType(
            id=uuid.uuid4(),
            **sample_agent_type_data,
        )
        session.add(agent_type)
        await session.commit()
        await session.refresh(agent_type)
        return agent_type


@pytest_asyncio.fixture
async def test_agent_instance(async_test_db, test_project, test_agent_type):
    """Create a test agent instance in the database."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        agent_instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=test_agent_type.id,
            project_id=test_project.id,
            status=AgentStatus.IDLE,
            current_task=None,
            short_term_memory={},
            long_term_memory_ref=None,
            session_id=None,
        )
        session.add(agent_instance)
        await session.commit()
        await session.refresh(agent_instance)
        return agent_instance


@pytest_asyncio.fixture
async def test_sprint(async_test_db, test_project, sample_sprint_data):
    """Create a test sprint in the database."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        sprint = Sprint(
            id=uuid.uuid4(),
            project_id=test_project.id,
            **sample_sprint_data,
        )
        session.add(sprint)
        await session.commit()
        await session.refresh(sprint)
        return sprint


@pytest_asyncio.fixture
async def test_issue(async_test_db, test_project, sample_issue_data):
    """Create a test issue in the database."""
    _test_engine, AsyncTestingSessionLocal = async_test_db
    async with AsyncTestingSessionLocal() as session:
        issue = Issue(
            id=uuid.uuid4(),
            project_id=test_project.id,
            **sample_issue_data,
        )
        session.add(issue)
        await session.commit()
        await session.refresh(issue)
        return issue
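
Editor's note: every fixture above depends on an async_test_db fixture that is not part of this diff. The sketch below shows what such a fixture typically provides (an engine plus a session factory over a throwaway database); the SQLite URL and the app.db.base import path are assumptions, and the real project fixture may differ.

# Hypothetical sketch of the async_test_db fixture assumed by this conftest.
import pytest_asyncio
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from app.db.base import Base  # assumed location of the declarative base


@pytest_asyncio.fixture
async def async_test_db():
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    session_factory = async_sessionmaker(engine, expire_on_commit=False)
    yield engine, session_factory
    await engine.dispose()
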
424
backend/tests/models/syndarix/test_agent_instance.py
Normal file
@@ -0,0 +1,424 @@
# tests/models/syndarix/test_agent_instance.py
"""
Unit tests for the AgentInstance model.
"""

import uuid
from datetime import UTC, datetime
from decimal import Decimal

import pytest

from app.models.syndarix import (
    AgentInstance,
    AgentStatus,
    AgentType,
    Project,
)


class TestAgentInstanceModel:
    """Tests for AgentInstance model creation and fields."""

    def test_create_agent_instance_with_required_fields(self, db_session):
        """Test creating an agent instance with only required fields."""
        # First create dependencies
        project = Project(
            id=uuid.uuid4(),
            name="Test Project",
            slug="test-project-instance",
        )
        db_session.add(project)

        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Test Agent",
            slug="test-agent-instance",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(agent_type)
        db_session.commit()

        # Create agent instance
        instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=agent_type.id,
            project_id=project.id,
        )
        db_session.add(instance)
        db_session.commit()

        retrieved = db_session.query(AgentInstance).filter_by(project_id=project.id).first()

        assert retrieved is not None
        assert retrieved.agent_type_id == agent_type.id
        assert retrieved.project_id == project.id
        assert retrieved.status == AgentStatus.IDLE  # Default
        assert retrieved.current_task is None
        assert retrieved.short_term_memory == {}
        assert retrieved.long_term_memory_ref is None
        assert retrieved.session_id is None
        assert retrieved.tasks_completed == 0
        assert retrieved.tokens_used == 0
        assert retrieved.cost_incurred == Decimal("0")

    def test_create_agent_instance_with_all_fields(self, db_session):
        """Test creating an agent instance with all optional fields."""
        # First create dependencies
        project = Project(
            id=uuid.uuid4(),
            name="Full Project",
            slug="full-project-instance",
        )
        db_session.add(project)

        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Full Agent",
            slug="full-agent-instance",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(agent_type)
        db_session.commit()

        instance_id = uuid.uuid4()
        now = datetime.now(UTC)

        instance = AgentInstance(
            id=instance_id,
            agent_type_id=agent_type.id,
            project_id=project.id,
            status=AgentStatus.WORKING,
            current_task="Implementing user authentication",
            short_term_memory={"context": "Working on auth", "recent_files": ["auth.py"]},
            long_term_memory_ref="project-123/agent-456",
            session_id="session-abc-123",
            last_activity_at=now,
            tasks_completed=5,
            tokens_used=10000,
            cost_incurred=Decimal("0.5000"),
        )
        db_session.add(instance)
        db_session.commit()

        retrieved = db_session.query(AgentInstance).filter_by(id=instance_id).first()

        assert retrieved.status == AgentStatus.WORKING
        assert retrieved.current_task == "Implementing user authentication"
        assert retrieved.short_term_memory == {"context": "Working on auth", "recent_files": ["auth.py"]}
        assert retrieved.long_term_memory_ref == "project-123/agent-456"
        assert retrieved.session_id == "session-abc-123"
        assert retrieved.tasks_completed == 5
        assert retrieved.tokens_used == 10000
        assert retrieved.cost_incurred == Decimal("0.5000")

    def test_agent_instance_timestamps(self, db_session):
        """Test that timestamps are automatically set."""
        project = Project(id=uuid.uuid4(), name="Timestamp Project", slug="timestamp-project-ai")
        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Timestamp Agent",
            slug="timestamp-agent-ai",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(project)
        db_session.add(agent_type)
        db_session.commit()

        instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=agent_type.id,
            project_id=project.id,
        )
        db_session.add(instance)
        db_session.commit()

        assert isinstance(instance.created_at, datetime)
        assert isinstance(instance.updated_at, datetime)

    def test_agent_instance_string_representation(self, db_session):
        """Test the string representation of an agent instance."""
        project = Project(id=uuid.uuid4(), name="Repr Project", slug="repr-project-ai")
        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Repr Agent",
            slug="repr-agent-ai",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(project)
        db_session.add(agent_type)
        db_session.commit()

        instance_id = uuid.uuid4()
        instance = AgentInstance(
            id=instance_id,
            agent_type_id=agent_type.id,
            project_id=project.id,
            status=AgentStatus.IDLE,
        )

        repr_str = repr(instance)
        assert str(instance_id) in repr_str
        assert str(agent_type.id) in repr_str
        assert str(project.id) in repr_str
        assert "idle" in repr_str
class TestAgentInstanceStatus:
    """Tests for AgentInstance status transitions."""

    def test_all_agent_statuses(self, db_session):
        """Test that all agent statuses can be stored."""
        project = Project(id=uuid.uuid4(), name="Status Project", slug="status-project-ai")
        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Status Agent",
            slug="status-agent-ai",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(project)
        db_session.add(agent_type)
        db_session.commit()

        for status in AgentStatus:
            instance = AgentInstance(
                id=uuid.uuid4(),
                agent_type_id=agent_type.id,
                project_id=project.id,
                status=status,
            )
            db_session.add(instance)
            db_session.commit()

            retrieved = db_session.query(AgentInstance).filter_by(id=instance.id).first()
            assert retrieved.status == status

    def test_status_update(self, db_session):
        """Test updating agent instance status."""
        project = Project(id=uuid.uuid4(), name="Update Status Project", slug="update-status-project-ai")
        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Update Status Agent",
            slug="update-status-agent-ai",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(project)
        db_session.add(agent_type)
        db_session.commit()

        instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=agent_type.id,
            project_id=project.id,
            status=AgentStatus.IDLE,
        )
        db_session.add(instance)
        db_session.commit()

        # Update to WORKING
        instance.status = AgentStatus.WORKING
        instance.current_task = "Processing feature request"
        db_session.commit()

        retrieved = db_session.query(AgentInstance).filter_by(id=instance.id).first()
        assert retrieved.status == AgentStatus.WORKING
        assert retrieved.current_task == "Processing feature request"

    def test_terminate_agent_instance(self, db_session):
        """Test terminating an agent instance."""
        project = Project(id=uuid.uuid4(), name="Terminate Project", slug="terminate-project-ai")
        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Terminate Agent",
            slug="terminate-agent-ai",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(project)
        db_session.add(agent_type)
        db_session.commit()

        instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=agent_type.id,
            project_id=project.id,
            status=AgentStatus.WORKING,
            current_task="Working on something",
            session_id="active-session",
        )
        db_session.add(instance)
        db_session.commit()

        # Terminate
        now = datetime.now(UTC)
        instance.status = AgentStatus.TERMINATED
        instance.terminated_at = now
        instance.current_task = None
        instance.session_id = None
        db_session.commit()

        retrieved = db_session.query(AgentInstance).filter_by(id=instance.id).first()
        assert retrieved.status == AgentStatus.TERMINATED
        assert retrieved.terminated_at is not None
        assert retrieved.current_task is None
        assert retrieved.session_id is None
class TestAgentInstanceMetrics:
    """Tests for AgentInstance usage metrics."""

    def test_increment_metrics(self, db_session):
        """Test incrementing usage metrics."""
        project = Project(id=uuid.uuid4(), name="Metrics Project", slug="metrics-project-ai")
        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Metrics Agent",
            slug="metrics-agent-ai",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(project)
        db_session.add(agent_type)
        db_session.commit()

        instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=agent_type.id,
            project_id=project.id,
        )
        db_session.add(instance)
        db_session.commit()

        # Record task completion
        instance.tasks_completed += 1
        instance.tokens_used += 1500
        instance.cost_incurred += Decimal("0.0150")
        db_session.commit()

        retrieved = db_session.query(AgentInstance).filter_by(id=instance.id).first()
        assert retrieved.tasks_completed == 1
        assert retrieved.tokens_used == 1500
        assert retrieved.cost_incurred == Decimal("0.0150")

        # Record another task
        retrieved.tasks_completed += 1
        retrieved.tokens_used += 2500
        retrieved.cost_incurred += Decimal("0.0250")
        db_session.commit()

        updated = db_session.query(AgentInstance).filter_by(id=instance.id).first()
        assert updated.tasks_completed == 2
        assert updated.tokens_used == 4000
        assert updated.cost_incurred == Decimal("0.0400")

    def test_large_token_count(self, db_session):
        """Test handling large token counts."""
        project = Project(id=uuid.uuid4(), name="Large Tokens Project", slug="large-tokens-project-ai")
        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Large Tokens Agent",
            slug="large-tokens-agent-ai",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(project)
        db_session.add(agent_type)
        db_session.commit()

        instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=agent_type.id,
            project_id=project.id,
            tokens_used=10_000_000_000,  # 10 billion tokens
            cost_incurred=Decimal("100000.0000"),  # $100,000
        )
        db_session.add(instance)
        db_session.commit()

        retrieved = db_session.query(AgentInstance).filter_by(id=instance.id).first()
        assert retrieved.tokens_used == 10_000_000_000
        assert retrieved.cost_incurred == Decimal("100000.0000")
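
Editor's note: the metrics tests above (10 billion tokens, four-decimal dollar costs) only pass if the underlying columns are wide enough. The column definitions live in the model, outside this hunk; the sketch below shows types consistent with these tests, where BigInteger and Numeric(12, 4) are assumptions rather than confirmed choices.

# Hypothetical sketch of column types consistent with the metrics tests.
from sqlalchemy import BigInteger, Integer, Numeric
from sqlalchemy.orm import Mapped, mapped_column


class AgentInstanceMetricsMixin:
    tasks_completed: Mapped[int] = mapped_column(Integer, default=0)
    tokens_used: Mapped[int] = mapped_column(BigInteger, default=0)  # fits 10_000_000_000
    cost_incurred = mapped_column(Numeric(12, 4), default=0)  # e.g. 100000.0000
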
class TestAgentInstanceShortTermMemory:
    """Tests for AgentInstance short-term memory JSON field."""

    def test_store_complex_memory(self, db_session):
        """Test storing complex short-term memory."""
        project = Project(id=uuid.uuid4(), name="Memory Project", slug="memory-project-ai")
        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Memory Agent",
            slug="memory-agent-ai",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(project)
        db_session.add(agent_type)
        db_session.commit()

        memory = {
            "conversation_history": [
                {"role": "user", "content": "Implement feature X"},
                {"role": "assistant", "content": "I'll start by..."},
            ],
            "recent_files": ["auth.py", "models.py", "test_auth.py"],
            "decisions": {
                "architecture": "Use repository pattern",
                "testing": "TDD approach",
            },
            "blockers": [],
            "context_tokens": 2048,
        }

        instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=agent_type.id,
            project_id=project.id,
            short_term_memory=memory,
        )
        db_session.add(instance)
        db_session.commit()

        retrieved = db_session.query(AgentInstance).filter_by(id=instance.id).first()
        assert retrieved.short_term_memory == memory
        assert len(retrieved.short_term_memory["conversation_history"]) == 2
        assert "auth.py" in retrieved.short_term_memory["recent_files"]

    def test_update_memory(self, db_session):
        """Test updating short-term memory."""
        project = Project(id=uuid.uuid4(), name="Update Memory Project", slug="update-memory-project-ai")
        agent_type = AgentType(
            id=uuid.uuid4(),
            name="Update Memory Agent",
            slug="update-memory-agent-ai",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )
        db_session.add(project)
        db_session.add(agent_type)
        db_session.commit()

        instance = AgentInstance(
            id=uuid.uuid4(),
            agent_type_id=agent_type.id,
            project_id=project.id,
            short_term_memory={"initial": "state"},
        )
        db_session.add(instance)
        db_session.commit()

        # Update memory
        instance.short_term_memory = {"updated": "state", "new_key": "new_value"}
        db_session.commit()

        retrieved = db_session.query(AgentInstance).filter_by(id=instance.id).first()
        assert "initial" not in retrieved.short_term_memory
        assert retrieved.short_term_memory["updated"] == "state"
        assert retrieved.short_term_memory["new_key"] == "new_value"
315
backend/tests/models/syndarix/test_agent_type.py
Normal file
@@ -0,0 +1,315 @@
# tests/models/syndarix/test_agent_type.py
|
||||
"""
|
||||
Unit tests for the AgentType model.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from app.models.syndarix import AgentType
|
||||
|
||||
|
||||
class TestAgentTypeModel:
|
||||
"""Tests for AgentType model creation and fields."""
|
||||
|
||||
def test_create_agent_type_with_required_fields(self, db_session):
|
||||
"""Test creating an agent type with only required fields."""
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Test Agent",
|
||||
slug="test-agent",
|
||||
personality_prompt="You are a helpful assistant.",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(slug="test-agent").first()
|
||||
|
||||
assert retrieved is not None
|
||||
assert retrieved.name == "Test Agent"
|
||||
assert retrieved.slug == "test-agent"
|
||||
assert retrieved.personality_prompt == "You are a helpful assistant."
|
||||
assert retrieved.primary_model == "claude-opus-4-5-20251101"
|
||||
assert retrieved.is_active is True # Default
|
||||
assert retrieved.expertise == [] # Default empty list
|
||||
assert retrieved.fallback_models == [] # Default empty list
|
||||
assert retrieved.model_params == {} # Default empty dict
|
||||
assert retrieved.mcp_servers == [] # Default empty list
|
||||
assert retrieved.tool_permissions == {} # Default empty dict
|
||||
|
||||
def test_create_agent_type_with_all_fields(self, db_session):
|
||||
"""Test creating an agent type with all optional fields."""
|
||||
agent_type_id = uuid.uuid4()
|
||||
|
||||
agent_type = AgentType(
|
||||
id=agent_type_id,
|
||||
name="Full Agent Type",
|
||||
slug="full-agent-type",
|
||||
description="A fully configured agent type",
|
||||
expertise=["python", "fastapi", "testing"],
|
||||
personality_prompt="You are an expert Python developer...",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
fallback_models=["claude-sonnet-4-20250514", "gpt-4o"],
|
||||
model_params={"temperature": 0.7, "max_tokens": 4096},
|
||||
mcp_servers=["gitea", "file-system", "slack"],
|
||||
tool_permissions={"allowed": ["*"], "denied": ["dangerous_tool"]},
|
||||
is_active=True,
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(id=agent_type_id).first()
|
||||
|
||||
assert retrieved.name == "Full Agent Type"
|
||||
assert retrieved.description == "A fully configured agent type"
|
||||
assert retrieved.expertise == ["python", "fastapi", "testing"]
|
||||
assert retrieved.fallback_models == ["claude-sonnet-4-20250514", "gpt-4o"]
|
||||
assert retrieved.model_params == {"temperature": 0.7, "max_tokens": 4096}
|
||||
assert retrieved.mcp_servers == ["gitea", "file-system", "slack"]
|
||||
assert retrieved.tool_permissions == {"allowed": ["*"], "denied": ["dangerous_tool"]}
|
||||
assert retrieved.is_active is True
|
||||
|
||||
def test_agent_type_unique_slug_constraint(self, db_session):
|
||||
"""Test that agent types cannot have duplicate slugs."""
|
||||
agent_type1 = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Agent One",
|
||||
slug="duplicate-agent-slug",
|
||||
personality_prompt="First agent",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
)
|
||||
db_session.add(agent_type1)
|
||||
db_session.commit()
|
||||
|
||||
agent_type2 = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Agent Two",
|
||||
slug="duplicate-agent-slug", # Same slug
|
||||
personality_prompt="Second agent",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
)
|
||||
db_session.add(agent_type2)
|
||||
|
||||
with pytest.raises(IntegrityError):
|
||||
db_session.commit()
|
||||
|
||||
db_session.rollback()
|
||||
|
||||
def test_agent_type_timestamps(self, db_session):
|
||||
"""Test that timestamps are automatically set."""
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Timestamp Agent",
|
||||
slug="timestamp-agent",
|
||||
personality_prompt="Test",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(slug="timestamp-agent").first()
|
||||
|
||||
assert isinstance(retrieved.created_at, datetime)
|
||||
assert isinstance(retrieved.updated_at, datetime)
|
||||
|
||||
def test_agent_type_update(self, db_session):
|
||||
"""Test updating agent type fields."""
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Original Agent",
|
||||
slug="original-agent",
|
||||
personality_prompt="Original prompt",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
is_active=True,
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
original_created_at = agent_type.created_at
|
||||
|
||||
# Update fields
|
||||
agent_type.name = "Updated Agent"
|
||||
agent_type.is_active = False
|
||||
agent_type.expertise = ["new", "skills"]
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(slug="original-agent").first()
|
||||
|
||||
assert retrieved.name == "Updated Agent"
|
||||
assert retrieved.is_active is False
|
||||
assert retrieved.expertise == ["new", "skills"]
|
||||
assert retrieved.created_at == original_created_at
|
||||
assert retrieved.updated_at > original_created_at
|
||||
|
||||
def test_agent_type_delete(self, db_session):
|
||||
"""Test deleting an agent type."""
|
||||
agent_type_id = uuid.uuid4()
|
||||
agent_type = AgentType(
|
||||
id=agent_type_id,
|
||||
name="Delete Me",
|
||||
slug="delete-me-agent",
|
||||
personality_prompt="Delete test",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
db_session.delete(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
deleted = db_session.query(AgentType).filter_by(id=agent_type_id).first()
|
||||
assert deleted is None
|
||||
|
||||
def test_agent_type_string_representation(self, db_session):
|
||||
"""Test the string representation of an agent type."""
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Repr Agent",
|
||||
slug="repr-agent",
|
||||
personality_prompt="Test",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
assert str(agent_type) == "<AgentType Repr Agent (repr-agent) active=True>"
|
||||
assert repr(agent_type) == "<AgentType Repr Agent (repr-agent) active=True>"
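The model's actual __repr__ is not included in this diff; as a minimal, dependency-free sketch (an assumption, not the shipped implementation), a format that satisfies both assertions above could be:

from dataclasses import dataclass


@dataclass
class _AgentTypeReprSketch:
    # Stand-in for the mapped AgentType columns the assumed format interpolates.
    name: str
    slug: str
    is_active: bool = True

    def __repr__(self) -> str:  # explicit __repr__ overrides the dataclass-generated one
        return f"<AgentType {self.name} ({self.slug}) active={self.is_active}>"


# str() falls back to __repr__ when __str__ is undefined, so both assertions can
# compare against the same string.
assert repr(_AgentTypeReprSketch("Repr Agent", "repr-agent")) == (
    "<AgentType Repr Agent (repr-agent) active=True>"
)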
|
||||
|
||||
|
||||
class TestAgentTypeJsonFields:
|
||||
"""Tests for AgentType JSON fields."""
|
||||
|
||||
def test_complex_expertise_list(self, db_session):
|
||||
"""Test storing a list of expertise areas."""
|
||||
expertise = ["python", "fastapi", "sqlalchemy", "postgresql", "redis", "docker"]
|
||||
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Expert Agent",
|
||||
slug="expert-agent",
|
||||
personality_prompt="Prompt",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
expertise=expertise,
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(slug="expert-agent").first()
|
||||
assert retrieved.expertise == expertise
|
||||
assert "python" in retrieved.expertise
|
||||
assert len(retrieved.expertise) == 6
|
||||
|
||||
def test_complex_model_params(self, db_session):
|
||||
"""Test storing complex model parameters."""
|
||||
model_params = {
|
||||
"temperature": 0.7,
|
||||
"max_tokens": 4096,
|
||||
"top_p": 0.9,
|
||||
"frequency_penalty": 0.1,
|
||||
"presence_penalty": 0.1,
|
||||
"stop_sequences": ["###", "END"],
|
||||
}
|
||||
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Params Agent",
|
||||
slug="params-agent",
|
||||
personality_prompt="Prompt",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
model_params=model_params,
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(slug="params-agent").first()
|
||||
assert retrieved.model_params == model_params
|
||||
assert retrieved.model_params["temperature"] == 0.7
|
||||
assert retrieved.model_params["stop_sequences"] == ["###", "END"]
|
||||
|
||||
def test_complex_tool_permissions(self, db_session):
|
||||
"""Test storing complex tool permissions."""
|
||||
tool_permissions = {
|
||||
"allowed": ["file:read", "file:write", "git:commit"],
|
||||
"denied": ["file:delete", "system:exec"],
|
||||
"require_approval": ["git:push", "gitea:create_pr"],
|
||||
"limits": {
|
||||
"file:write": {"max_size_mb": 10},
|
||||
"git:commit": {"require_message": True},
|
||||
},
|
||||
}
|
||||
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Permissions Agent",
|
||||
slug="permissions-agent",
|
||||
personality_prompt="Prompt",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
tool_permissions=tool_permissions,
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(slug="permissions-agent").first()
|
||||
assert retrieved.tool_permissions == tool_permissions
|
||||
assert "file:read" in retrieved.tool_permissions["allowed"]
|
||||
assert retrieved.tool_permissions["limits"]["file:write"]["max_size_mb"] == 10
|
||||
|
||||
def test_empty_json_fields_default(self, db_session):
|
||||
"""Test that JSON fields default to empty structures."""
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Empty JSON Agent",
|
||||
slug="empty-json-agent",
|
||||
personality_prompt="Prompt",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(slug="empty-json-agent").first()
|
||||
assert retrieved.expertise == []
|
||||
assert retrieved.fallback_models == []
|
||||
assert retrieved.model_params == {}
|
||||
assert retrieved.mcp_servers == []
|
||||
assert retrieved.tool_permissions == {}
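The column declarations that produce these per-row empty defaults are not shown here; a minimal SQLAlchemy 2.0 sketch (an assumption about how the real model is written) uses callable defaults so rows never share a mutable container:

from sqlalchemy import JSON
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class _Base(DeclarativeBase):
    pass


class _AgentTypeDefaultsSketch(_Base):
    __tablename__ = "agent_type_defaults_sketch"

    id: Mapped[int] = mapped_column(primary_key=True)
    # list/dict callables give each new row its own fresh, empty container.
    expertise: Mapped[list] = mapped_column(JSON, default=list)
    model_params: Mapped[dict] = mapped_column(JSON, default=dict)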
|
||||
|
||||
|
||||
class TestAgentTypeIsActive:
|
||||
"""Tests for AgentType is_active field."""
|
||||
|
||||
def test_default_is_active(self, db_session):
|
||||
"""Test that is_active defaults to True."""
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Default Active",
|
||||
slug="default-active",
|
||||
personality_prompt="Prompt",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(slug="default-active").first()
|
||||
assert retrieved.is_active is True
|
||||
|
||||
def test_deactivate_agent_type(self, db_session):
|
||||
"""Test deactivating an agent type."""
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Deactivate Me",
|
||||
slug="deactivate-me",
|
||||
personality_prompt="Prompt",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
is_active=True,
|
||||
)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
agent_type.is_active = False
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(AgentType).filter_by(slug="deactivate-me").first()
|
||||
assert retrieved.is_active is False
|
||||
463
backend/tests/models/syndarix/test_issue.py
Normal file
@@ -0,0 +1,463 @@
|
||||
# tests/models/syndarix/test_issue.py
|
||||
"""
|
||||
Unit tests for the Issue model.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import pytest
|
||||
|
||||
from app.models.syndarix import (
|
||||
AgentInstance,
|
||||
AgentType,
|
||||
Issue,
|
||||
IssuePriority,
|
||||
IssueStatus,
|
||||
Project,
|
||||
Sprint,
|
||||
SprintStatus,
|
||||
SyncStatus,
|
||||
)
|
||||
|
||||
|
||||
class TestIssueModel:
|
||||
"""Tests for Issue model creation and fields."""
|
||||
|
||||
def test_create_issue_with_required_fields(self, db_session):
|
||||
"""Test creating an issue with only required fields."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Issue Project",
|
||||
slug="issue-project",
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Test Issue",
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="Test Issue").first()
|
||||
|
||||
assert retrieved is not None
|
||||
assert retrieved.title == "Test Issue"
|
||||
assert retrieved.body == "" # Default empty string
|
||||
assert retrieved.status == IssueStatus.OPEN # Default
|
||||
assert retrieved.priority == IssuePriority.MEDIUM # Default
|
||||
assert retrieved.labels == [] # Default empty list
|
||||
assert retrieved.story_points is None
|
||||
assert retrieved.assigned_agent_id is None
|
||||
assert retrieved.human_assignee is None
|
||||
assert retrieved.sprint_id is None
|
||||
assert retrieved.sync_status == SyncStatus.SYNCED # Default
|
||||
|
||||
def test_create_issue_with_all_fields(self, db_session):
|
||||
"""Test creating an issue with all optional fields."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Full Issue Project",
|
||||
slug="full-issue-project",
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
issue_id = uuid.uuid4()
|
||||
now = datetime.now(UTC)
|
||||
|
||||
issue = Issue(
|
||||
id=issue_id,
|
||||
project_id=project.id,
|
||||
title="Full Issue",
|
||||
body="A complete issue with all fields set",
|
||||
status=IssueStatus.IN_PROGRESS,
|
||||
priority=IssuePriority.CRITICAL,
|
||||
labels=["bug", "security", "urgent"],
|
||||
story_points=8,
|
||||
human_assignee="john.doe@example.com",
|
||||
external_tracker="gitea",
|
||||
external_id="gitea-123",
|
||||
external_url="https://gitea.example.com/issues/123",
|
||||
external_number=123,
|
||||
sync_status=SyncStatus.SYNCED,
|
||||
last_synced_at=now,
|
||||
external_updated_at=now,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(id=issue_id).first()
|
||||
|
||||
assert retrieved.title == "Full Issue"
|
||||
assert retrieved.body == "A complete issue with all fields set"
|
||||
assert retrieved.status == IssueStatus.IN_PROGRESS
|
||||
assert retrieved.priority == IssuePriority.CRITICAL
|
||||
assert retrieved.labels == ["bug", "security", "urgent"]
|
||||
assert retrieved.story_points == 8
|
||||
assert retrieved.human_assignee == "john.doe@example.com"
|
||||
assert retrieved.external_tracker == "gitea"
|
||||
assert retrieved.external_id == "gitea-123"
|
||||
assert retrieved.external_number == 123
|
||||
assert retrieved.sync_status == SyncStatus.SYNCED
|
||||
|
||||
def test_issue_timestamps(self, db_session):
|
||||
"""Test that timestamps are automatically set."""
|
||||
project = Project(id=uuid.uuid4(), name="Timestamp Issue Project", slug="timestamp-issue-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Timestamp Issue",
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
assert isinstance(issue.created_at, datetime)
|
||||
assert isinstance(issue.updated_at, datetime)
|
||||
|
||||
def test_issue_string_representation(self, db_session):
|
||||
"""Test the string representation of an issue."""
|
||||
project = Project(id=uuid.uuid4(), name="Repr Issue Project", slug="repr-issue-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="This is a very long issue title that should be truncated in repr",
|
||||
status=IssueStatus.OPEN,
|
||||
priority=IssuePriority.HIGH,
|
||||
)
|
||||
|
||||
repr_str = repr(issue)
|
||||
assert "This is a very long issue tit" in repr_str # First 30 chars
|
||||
assert "open" in repr_str
|
||||
assert "high" in repr_str
|
||||
|
||||
|
||||
class TestIssueStatus:
|
||||
"""Tests for Issue status field."""
|
||||
|
||||
def test_all_issue_statuses(self, db_session):
|
||||
"""Test that all issue statuses can be stored."""
|
||||
project = Project(id=uuid.uuid4(), name="Status Issue Project", slug="status-issue-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
for status in IssueStatus:
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title=f"Issue {status.value}",
|
||||
status=status,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(id=issue.id).first()
|
||||
assert retrieved.status == status
|
||||
|
||||
|
||||
class TestIssuePriority:
|
||||
"""Tests for Issue priority field."""
|
||||
|
||||
def test_all_issue_priorities(self, db_session):
|
||||
"""Test that all issue priorities can be stored."""
|
||||
project = Project(id=uuid.uuid4(), name="Priority Issue Project", slug="priority-issue-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
for priority in IssuePriority:
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title=f"Issue {priority.value}",
|
||||
priority=priority,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(id=issue.id).first()
|
||||
assert retrieved.priority == priority
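The concrete enum members live in app.models.syndarix.enums and are not reproduced in this diff; the sketch below covers only the values these tests exercise (the real enums may define more):

from enum import Enum


class _IssueStatusSketch(str, Enum):
    OPEN = "open"            # default asserted in test_create_issue_with_required_fields
    IN_PROGRESS = "in_progress"
    CLOSED = "closed"


class _IssuePrioritySketch(str, Enum):
    MEDIUM = "medium"        # default priority
    HIGH = "high"
    CRITICAL = "critical"


# str-valued enums interpolate cleanly into the f"Issue {status.value}" titles above.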
|
||||
|
||||
|
||||
class TestIssueSyncStatus:
|
||||
"""Tests for Issue sync status field."""
|
||||
|
||||
def test_all_sync_statuses(self, db_session):
|
||||
"""Test that all sync statuses can be stored."""
|
||||
project = Project(id=uuid.uuid4(), name="Sync Issue Project", slug="sync-issue-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
for sync_status in SyncStatus:
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title=f"Issue {sync_status.value}",
|
||||
external_tracker="gitea",
|
||||
external_id=f"ext-{sync_status.value}",
|
||||
sync_status=sync_status,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(id=issue.id).first()
|
||||
assert retrieved.sync_status == sync_status
|
||||
|
||||
|
||||
class TestIssueLabels:
|
||||
"""Tests for Issue labels JSON field."""
|
||||
|
||||
def test_store_labels(self, db_session):
|
||||
"""Test storing labels list."""
|
||||
project = Project(id=uuid.uuid4(), name="Labels Issue Project", slug="labels-issue-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
labels = ["bug", "security", "high-priority", "needs-review"]
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Issue with Labels",
|
||||
labels=labels,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="Issue with Labels").first()
|
||||
assert retrieved.labels == labels
|
||||
assert "security" in retrieved.labels
|
||||
|
||||
def test_update_labels(self, db_session):
|
||||
"""Test updating labels."""
|
||||
project = Project(id=uuid.uuid4(), name="Update Labels Project", slug="update-labels-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Update Labels Issue",
|
||||
labels=["initial"],
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
issue.labels = ["updated", "new-label"]
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="Update Labels Issue").first()
|
||||
assert "initial" not in retrieved.labels
|
||||
assert "updated" in retrieved.labels
|
||||
|
||||
|
||||
class TestIssueAssignment:
|
||||
"""Tests for Issue assignment fields."""
|
||||
|
||||
def test_assign_to_agent(self, db_session):
|
||||
"""Test assigning an issue to an agent."""
|
||||
project = Project(id=uuid.uuid4(), name="Agent Assign Project", slug="agent-assign-project")
|
||||
agent_type = AgentType(
|
||||
id=uuid.uuid4(),
|
||||
name="Test Agent Type",
|
||||
slug="test-agent-type-assign",
|
||||
personality_prompt="Test",
|
||||
primary_model="claude-opus-4-5-20251101",
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.add(agent_type)
|
||||
db_session.commit()
|
||||
|
||||
agent_instance = AgentInstance(
|
||||
id=uuid.uuid4(),
|
||||
agent_type_id=agent_type.id,
|
||||
project_id=project.id,
|
||||
)
|
||||
db_session.add(agent_instance)
|
||||
db_session.commit()
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Agent Assignment Issue",
|
||||
assigned_agent_id=agent_instance.id,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="Agent Assignment Issue").first()
|
||||
assert retrieved.assigned_agent_id == agent_instance.id
|
||||
assert retrieved.human_assignee is None
|
||||
|
||||
def test_assign_to_human(self, db_session):
|
||||
"""Test assigning an issue to a human."""
|
||||
project = Project(id=uuid.uuid4(), name="Human Assign Project", slug="human-assign-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Human Assignment Issue",
|
||||
human_assignee="developer@example.com",
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="Human Assignment Issue").first()
|
||||
assert retrieved.human_assignee == "developer@example.com"
|
||||
assert retrieved.assigned_agent_id is None
|
||||
|
||||
|
||||
class TestIssueSprintAssociation:
|
||||
"""Tests for Issue sprint association."""
|
||||
|
||||
def test_assign_issue_to_sprint(self, db_session):
|
||||
"""Test assigning an issue to a sprint."""
|
||||
project = Project(id=uuid.uuid4(), name="Sprint Assign Project", slug="sprint-assign-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
from datetime import date
|
||||
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Sprint 1",
|
||||
number=1,
|
||||
start_date=date.today(),
|
||||
end_date=date.today() + timedelta(days=14),
|
||||
status=SprintStatus.ACTIVE,
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Sprint Issue",
|
||||
sprint_id=sprint.id,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="Sprint Issue").first()
|
||||
assert retrieved.sprint_id == sprint.id
|
||||
|
||||
|
||||
class TestIssueExternalTracker:
|
||||
"""Tests for Issue external tracker integration."""
|
||||
|
||||
def test_gitea_integration(self, db_session):
|
||||
"""Test Gitea external tracker fields."""
|
||||
project = Project(id=uuid.uuid4(), name="Gitea Project", slug="gitea-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
now = datetime.now(UTC)
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Gitea Synced Issue",
|
||||
external_tracker="gitea",
|
||||
external_id="abc123xyz",
|
||||
external_url="https://gitea.example.com/org/repo/issues/42",
|
||||
external_number=42,
|
||||
sync_status=SyncStatus.SYNCED,
|
||||
last_synced_at=now,
|
||||
external_updated_at=now,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="Gitea Synced Issue").first()
|
||||
assert retrieved.external_tracker == "gitea"
|
||||
assert retrieved.external_id == "abc123xyz"
|
||||
assert retrieved.external_number == 42
|
||||
assert "/issues/42" in retrieved.external_url
|
||||
|
||||
def test_github_integration(self, db_session):
|
||||
"""Test GitHub external tracker fields."""
|
||||
project = Project(id=uuid.uuid4(), name="GitHub Project", slug="github-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="GitHub Synced Issue",
|
||||
external_tracker="github",
|
||||
external_id="gh-12345",
|
||||
external_url="https://github.com/org/repo/issues/100",
|
||||
external_number=100,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="GitHub Synced Issue").first()
|
||||
assert retrieved.external_tracker == "github"
|
||||
assert retrieved.external_number == 100
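The sync job that consumes these columns is not part of this commit; as a loose, hypothetical sketch (payload shape and helper name are assumptions), incoming tracker events would be matched back to a local Issue via the stored (external_tracker, external_id) pair, with external_number kept for display only:

from typing import Any


def _match_key(tracker: str, payload: dict[str, Any]) -> tuple[str, str]:
    # The stable upstream id goes into external_id; the human-facing issue number
    # is mirrored separately in external_number.
    return tracker, str(payload["id"])


assert _match_key("github", {"id": "gh-12345", "number": 100}) == ("github", "gh-12345")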
|
||||
|
||||
|
||||
class TestIssueLifecycle:
|
||||
"""Tests for Issue lifecycle operations."""
|
||||
|
||||
def test_close_issue(self, db_session):
|
||||
"""Test closing an issue."""
|
||||
project = Project(id=uuid.uuid4(), name="Close Issue Project", slug="close-issue-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Issue to Close",
|
||||
status=IssueStatus.OPEN,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
# Close the issue
|
||||
now = datetime.now(UTC)
|
||||
issue.status = IssueStatus.CLOSED
|
||||
issue.closed_at = now
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="Issue to Close").first()
|
||||
assert retrieved.status == IssueStatus.CLOSED
|
||||
assert retrieved.closed_at is not None
|
||||
|
||||
def test_reopen_issue(self, db_session):
|
||||
"""Test reopening a closed issue."""
|
||||
project = Project(id=uuid.uuid4(), name="Reopen Issue Project", slug="reopen-issue-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
now = datetime.now(UTC)
|
||||
issue = Issue(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
title="Issue to Reopen",
|
||||
status=IssueStatus.CLOSED,
|
||||
closed_at=now,
|
||||
)
|
||||
db_session.add(issue)
|
||||
db_session.commit()
|
||||
|
||||
# Reopen the issue
|
||||
issue.status = IssueStatus.OPEN
|
||||
issue.closed_at = None
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Issue).filter_by(title="Issue to Reopen").first()
|
||||
assert retrieved.status == IssueStatus.OPEN
|
||||
assert retrieved.closed_at is None
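These lifecycle tests flip status and closed_at by hand; a hedged sketch of the helpers a service layer might offer (hypothetical names, reusing the IssueStatus import at the top of this file):

from datetime import UTC, datetime


def _close_issue(issue) -> None:
    issue.status = IssueStatus.CLOSED
    issue.closed_at = datetime.now(UTC)


def _reopen_issue(issue) -> None:
    issue.status = IssueStatus.OPEN
    issue.closed_at = None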
|
||||
262
backend/tests/models/syndarix/test_project.py
Normal file
@@ -0,0 +1,262 @@
|
||||
# tests/models/syndarix/test_project.py
|
||||
"""
|
||||
Unit tests for the Project model.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from app.models.syndarix import (
|
||||
AutonomyLevel,
|
||||
Project,
|
||||
ProjectStatus,
|
||||
)
|
||||
|
||||
|
||||
class TestProjectModel:
|
||||
"""Tests for Project model creation and fields."""
|
||||
|
||||
def test_create_project_with_required_fields(self, db_session):
|
||||
"""Test creating a project with only required fields."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Test Project",
|
||||
slug="test-project",
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Project).filter_by(slug="test-project").first()
|
||||
|
||||
assert retrieved is not None
|
||||
assert retrieved.name == "Test Project"
|
||||
assert retrieved.slug == "test-project"
|
||||
assert retrieved.autonomy_level == AutonomyLevel.MILESTONE # Default
|
||||
assert retrieved.status == ProjectStatus.ACTIVE # Default
|
||||
assert retrieved.settings == {} # Default empty dict
|
||||
assert retrieved.description is None
|
||||
assert retrieved.owner_id is None
|
||||
|
||||
def test_create_project_with_all_fields(self, db_session):
|
||||
"""Test creating a project with all optional fields."""
|
||||
project_id = uuid.uuid4()
|
||||
owner_id = uuid.uuid4()
|
||||
|
||||
project = Project(
|
||||
id=project_id,
|
||||
name="Full Project",
|
||||
slug="full-project",
|
||||
description="A complete project with all fields",
|
||||
autonomy_level=AutonomyLevel.AUTONOMOUS,
|
||||
status=ProjectStatus.PAUSED,
|
||||
settings={"webhook_url": "https://example.com/webhook"},
|
||||
owner_id=owner_id,
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Project).filter_by(id=project_id).first()
|
||||
|
||||
assert retrieved.name == "Full Project"
|
||||
assert retrieved.slug == "full-project"
|
||||
assert retrieved.description == "A complete project with all fields"
|
||||
assert retrieved.autonomy_level == AutonomyLevel.AUTONOMOUS
|
||||
assert retrieved.status == ProjectStatus.PAUSED
|
||||
assert retrieved.settings == {"webhook_url": "https://example.com/webhook"}
|
||||
assert retrieved.owner_id == owner_id
|
||||
|
||||
def test_project_unique_slug_constraint(self, db_session):
|
||||
"""Test that projects cannot have duplicate slugs."""
|
||||
project1 = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Project One",
|
||||
slug="duplicate-slug",
|
||||
)
|
||||
db_session.add(project1)
|
||||
db_session.commit()
|
||||
|
||||
project2 = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Project Two",
|
||||
slug="duplicate-slug", # Same slug
|
||||
)
|
||||
db_session.add(project2)
|
||||
|
||||
with pytest.raises(IntegrityError):
|
||||
db_session.commit()
|
||||
|
||||
db_session.rollback()
|
||||
|
||||
def test_project_timestamps(self, db_session):
|
||||
"""Test that timestamps are automatically set."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Timestamp Project",
|
||||
slug="timestamp-project",
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Project).filter_by(slug="timestamp-project").first()
|
||||
|
||||
assert isinstance(retrieved.created_at, datetime)
|
||||
assert isinstance(retrieved.updated_at, datetime)
|
||||
|
||||
def test_project_update(self, db_session):
|
||||
"""Test updating project fields."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Original Name",
|
||||
slug="original-slug",
|
||||
status=ProjectStatus.ACTIVE,
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
original_created_at = project.created_at
|
||||
|
||||
# Update fields
|
||||
project.name = "Updated Name"
|
||||
project.status = ProjectStatus.COMPLETED
|
||||
project.settings = {"new_setting": "value"}
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Project).filter_by(slug="original-slug").first()
|
||||
|
||||
assert retrieved.name == "Updated Name"
|
||||
assert retrieved.status == ProjectStatus.COMPLETED
|
||||
assert retrieved.settings == {"new_setting": "value"}
|
||||
assert retrieved.created_at == original_created_at
|
||||
assert retrieved.updated_at > original_created_at
|
||||
|
||||
def test_project_delete(self, db_session):
|
||||
"""Test deleting a project."""
|
||||
project_id = uuid.uuid4()
|
||||
project = Project(
|
||||
id=project_id,
|
||||
name="Delete Me",
|
||||
slug="delete-me",
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
db_session.delete(project)
|
||||
db_session.commit()
|
||||
|
||||
deleted = db_session.query(Project).filter_by(id=project_id).first()
|
||||
assert deleted is None
|
||||
|
||||
def test_project_string_representation(self, db_session):
|
||||
"""Test the string representation of a project."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Repr Project",
|
||||
slug="repr-project",
|
||||
status=ProjectStatus.ACTIVE,
|
||||
)
|
||||
|
||||
assert str(project) == "<Project Repr Project (repr-project) status=active>"
|
||||
assert repr(project) == "<Project Repr Project (repr-project) status=active>"
|
||||
|
||||
|
||||
class TestProjectEnums:
|
||||
"""Tests for Project enum fields."""
|
||||
|
||||
def test_all_autonomy_levels(self, db_session):
|
||||
"""Test that all autonomy levels can be stored."""
|
||||
for level in AutonomyLevel:
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name=f"Project {level.value}",
|
||||
slug=f"project-{level.value}",
|
||||
autonomy_level=level,
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Project).filter_by(slug=f"project-{level.value}").first()
|
||||
assert retrieved.autonomy_level == level
|
||||
|
||||
def test_all_project_statuses(self, db_session):
|
||||
"""Test that all project statuses can be stored."""
|
||||
for status in ProjectStatus:
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name=f"Project {status.value}",
|
||||
slug=f"project-status-{status.value}",
|
||||
status=status,
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Project).filter_by(slug=f"project-status-{status.value}").first()
|
||||
assert retrieved.status == status
|
||||
|
||||
|
||||
class TestProjectSettings:
|
||||
"""Tests for Project JSON settings field."""
|
||||
|
||||
def test_complex_json_settings(self, db_session):
|
||||
"""Test storing complex JSON in settings."""
|
||||
complex_settings = {
|
||||
"mcp_servers": ["gitea", "slack", "file-system"],
|
||||
"webhook_urls": {
|
||||
"on_issue_created": "https://example.com/issue",
|
||||
"on_sprint_completed": "https://example.com/sprint",
|
||||
},
|
||||
"notification_settings": {
|
||||
"email": True,
|
||||
"slack_channel": "#syndarix-updates",
|
||||
},
|
||||
"tags": ["important", "client-a"],
|
||||
}
|
||||
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Complex Settings Project",
|
||||
slug="complex-settings",
|
||||
settings=complex_settings,
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Project).filter_by(slug="complex-settings").first()
|
||||
|
||||
assert retrieved.settings == complex_settings
|
||||
assert retrieved.settings["mcp_servers"] == ["gitea", "slack", "file-system"]
|
||||
assert retrieved.settings["webhook_urls"]["on_issue_created"] == "https://example.com/issue"
|
||||
assert "important" in retrieved.settings["tags"]
|
||||
|
||||
def test_empty_settings(self, db_session):
|
||||
"""Test that empty settings defaults correctly."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Empty Settings",
|
||||
slug="empty-settings",
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Project).filter_by(slug="empty-settings").first()
|
||||
assert retrieved.settings == {}
|
||||
|
||||
def test_update_settings(self, db_session):
|
||||
"""Test updating settings field."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Update Settings",
|
||||
slug="update-settings",
|
||||
settings={"initial": "value"},
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
# Update settings
|
||||
project.settings = {"updated": "new_value", "additional": "data"}
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Project).filter_by(slug="update-settings").first()
|
||||
assert retrieved.settings == {"updated": "new_value", "additional": "data"}
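A note on the update pattern above: reassigning the whole settings dict is always detected by the ORM, whereas in-place edits (retrieved.settings["k"] = "v") are only change-tracked if the column opts into MutableDict; whether the real Project model does so is not visible in this diff. A minimal sketch of that opt-in:

from sqlalchemy import JSON
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class _Base(DeclarativeBase):
    pass


class _ProjectSettingsSketch(_Base):
    __tablename__ = "project_settings_sketch"

    id: Mapped[int] = mapped_column(primary_key=True)
    # MutableDict makes dict mutations mark the row dirty without reassignment.
    settings: Mapped[dict] = mapped_column(MutableDict.as_mutable(JSON), default=dict)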
|
||||
507
backend/tests/models/syndarix/test_sprint.py
Normal file
@@ -0,0 +1,507 @@
|
||||
# tests/models/syndarix/test_sprint.py
|
||||
"""
|
||||
Unit tests for the Sprint model.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import date, datetime, timedelta
|
||||
|
||||
import pytest
|
||||
|
||||
from app.models.syndarix import (
|
||||
Project,
|
||||
Sprint,
|
||||
SprintStatus,
|
||||
)
|
||||
|
||||
|
||||
class TestSprintModel:
|
||||
"""Tests for Sprint model creation and fields."""
|
||||
|
||||
def test_create_sprint_with_required_fields(self, db_session):
|
||||
"""Test creating a sprint with only required fields."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Sprint Project",
|
||||
slug="sprint-project",
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Sprint 1",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Sprint 1").first()
|
||||
|
||||
assert retrieved is not None
|
||||
assert retrieved.name == "Sprint 1"
|
||||
assert retrieved.number == 1
|
||||
assert retrieved.start_date == today
|
||||
assert retrieved.end_date == today + timedelta(days=14)
|
||||
assert retrieved.status == SprintStatus.PLANNED # Default
|
||||
assert retrieved.goal is None
|
||||
assert retrieved.planned_points is None
|
||||
assert retrieved.completed_points is None
|
||||
|
||||
def test_create_sprint_with_all_fields(self, db_session):
|
||||
"""Test creating a sprint with all optional fields."""
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
name="Full Sprint Project",
|
||||
slug="full-sprint-project",
|
||||
)
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint_id = uuid.uuid4()
|
||||
|
||||
sprint = Sprint(
|
||||
id=sprint_id,
|
||||
project_id=project.id,
|
||||
name="Full Sprint",
|
||||
number=5,
|
||||
goal="Complete all authentication features",
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
status=SprintStatus.ACTIVE,
|
||||
planned_points=34,
|
||||
completed_points=21,
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(id=sprint_id).first()
|
||||
|
||||
assert retrieved.name == "Full Sprint"
|
||||
assert retrieved.number == 5
|
||||
assert retrieved.goal == "Complete all authentication features"
|
||||
assert retrieved.status == SprintStatus.ACTIVE
|
||||
assert retrieved.planned_points == 34
|
||||
assert retrieved.completed_points == 21
|
||||
|
||||
def test_sprint_timestamps(self, db_session):
|
||||
"""Test that timestamps are automatically set."""
|
||||
project = Project(id=uuid.uuid4(), name="Timestamp Sprint Project", slug="timestamp-sprint-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Timestamp Sprint",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
assert isinstance(sprint.created_at, datetime)
|
||||
assert isinstance(sprint.updated_at, datetime)
|
||||
|
||||
def test_sprint_string_representation(self, db_session):
|
||||
"""Test the string representation of a sprint."""
|
||||
project = Project(id=uuid.uuid4(), name="Repr Sprint Project", slug="repr-sprint-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Sprint Alpha",
|
||||
number=3,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
status=SprintStatus.ACTIVE,
|
||||
)
|
||||
|
||||
repr_str = repr(sprint)
|
||||
assert "Sprint Alpha" in repr_str
|
||||
assert "#3" in repr_str
|
||||
assert str(project.id) in repr_str
|
||||
assert "active" in repr_str
|
||||
|
||||
|
||||
class TestSprintStatus:
|
||||
"""Tests for Sprint status field."""
|
||||
|
||||
def test_all_sprint_statuses(self, db_session):
|
||||
"""Test that all sprint statuses can be stored."""
|
||||
project = Project(id=uuid.uuid4(), name="Status Sprint Project", slug="status-sprint-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
for idx, status in enumerate(SprintStatus):
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name=f"Sprint {status.value}",
|
||||
number=idx + 1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
status=status,
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(id=sprint.id).first()
|
||||
assert retrieved.status == status
|
||||
|
||||
|
||||
class TestSprintLifecycle:
|
||||
"""Tests for Sprint lifecycle operations."""
|
||||
|
||||
def test_start_sprint(self, db_session):
|
||||
"""Test starting a planned sprint."""
|
||||
project = Project(id=uuid.uuid4(), name="Start Sprint Project", slug="start-sprint-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Sprint to Start",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
status=SprintStatus.PLANNED,
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
# Start the sprint
|
||||
sprint.status = SprintStatus.ACTIVE
|
||||
sprint.planned_points = 21
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Sprint to Start").first()
|
||||
assert retrieved.status == SprintStatus.ACTIVE
|
||||
assert retrieved.planned_points == 21
|
||||
|
||||
def test_complete_sprint(self, db_session):
|
||||
"""Test completing an active sprint."""
|
||||
project = Project(id=uuid.uuid4(), name="Complete Sprint Project", slug="complete-sprint-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Sprint to Complete",
|
||||
number=1,
|
||||
start_date=today - timedelta(days=14),
|
||||
end_date=today,
|
||||
status=SprintStatus.ACTIVE,
|
||||
planned_points=21,
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
# Complete the sprint
|
||||
sprint.status = SprintStatus.COMPLETED
|
||||
sprint.completed_points = 18
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Sprint to Complete").first()
|
||||
assert retrieved.status == SprintStatus.COMPLETED
|
||||
assert retrieved.completed_points == 18
|
||||
|
||||
def test_cancel_sprint(self, db_session):
|
||||
"""Test cancelling a sprint."""
|
||||
project = Project(id=uuid.uuid4(), name="Cancel Sprint Project", slug="cancel-sprint-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Sprint to Cancel",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
status=SprintStatus.ACTIVE,
|
||||
planned_points=21,
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
# Cancel the sprint
|
||||
sprint.status = SprintStatus.CANCELLED
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Sprint to Cancel").first()
|
||||
assert retrieved.status == SprintStatus.CANCELLED
|
||||
|
||||
|
||||
class TestSprintDates:
|
||||
"""Tests for Sprint date fields."""
|
||||
|
||||
def test_sprint_date_range(self, db_session):
|
||||
"""Test storing sprint date range."""
|
||||
project = Project(id=uuid.uuid4(), name="Date Range Project", slug="date-range-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
start = date(2024, 1, 1)
|
||||
end = date(2024, 1, 14)
|
||||
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Date Range Sprint",
|
||||
number=1,
|
||||
start_date=start,
|
||||
end_date=end,
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Date Range Sprint").first()
|
||||
assert retrieved.start_date == start
|
||||
assert retrieved.end_date == end
|
||||
|
||||
def test_one_day_sprint(self, db_session):
|
||||
"""Test creating a one-day sprint."""
|
||||
project = Project(id=uuid.uuid4(), name="One Day Project", slug="one-day-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="One Day Sprint",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today, # Same day
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="One Day Sprint").first()
|
||||
assert retrieved.start_date == retrieved.end_date
|
||||
|
||||
def test_long_sprint(self, db_session):
|
||||
"""Test creating a long sprint (e.g., 4 weeks)."""
|
||||
project = Project(id=uuid.uuid4(), name="Long Sprint Project", slug="long-sprint-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Long Sprint",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=28), # 4 weeks
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Long Sprint").first()
|
||||
delta = retrieved.end_date - retrieved.start_date
|
||||
assert delta.days == 28
|
||||
|
||||
|
||||
class TestSprintPoints:
|
||||
"""Tests for Sprint story points fields."""
|
||||
|
||||
def test_sprint_with_zero_points(self, db_session):
|
||||
"""Test sprint with zero planned points."""
|
||||
project = Project(id=uuid.uuid4(), name="Zero Points Project", slug="zero-points-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Zero Points Sprint",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
planned_points=0,
|
||||
completed_points=0,
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Zero Points Sprint").first()
|
||||
assert retrieved.planned_points == 0
|
||||
assert retrieved.completed_points == 0
|
||||
|
||||
def test_sprint_velocity_calculation(self, db_session):
|
||||
"""Test that we can calculate velocity from points."""
|
||||
project = Project(id=uuid.uuid4(), name="Velocity Project", slug="velocity-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Velocity Sprint",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
status=SprintStatus.COMPLETED,
|
||||
planned_points=21,
|
||||
completed_points=18,
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Velocity Sprint").first()
|
||||
|
||||
# Calculate velocity
|
||||
velocity = retrieved.completed_points / retrieved.planned_points
|
||||
assert velocity == pytest.approx(18 / 21, rel=0.01)
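The commit message mentions velocity tracking, but no velocity helper appears in this diff; a dependency-free sketch of the ratio computed above, guarding the None/zero cases (name and placement are assumptions):

def _velocity(planned_points: int | None, completed_points: int | None) -> float | None:
    """Fraction of planned points delivered; None when nothing was planned."""
    if not planned_points:
        return None
    return (completed_points or 0) / planned_points


assert _velocity(21, 18) == 18 / 21
assert _velocity(0, 0) is None
assert _velocity(None, 5) is None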
|
||||
|
||||
def test_sprint_overdelivery(self, db_session):
|
||||
"""Test sprint where completed > planned (stretch goals)."""
|
||||
project = Project(id=uuid.uuid4(), name="Overdelivery Project", slug="overdelivery-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Overdelivery Sprint",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
status=SprintStatus.COMPLETED,
|
||||
planned_points=20,
|
||||
completed_points=25, # Completed more than planned
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Overdelivery Sprint").first()
|
||||
assert retrieved.completed_points > retrieved.planned_points
|
||||
|
||||
|
||||
class TestSprintNumber:
|
||||
"""Tests for Sprint number field."""
|
||||
|
||||
def test_sequential_sprint_numbers(self, db_session):
|
||||
"""Test creating sprints with sequential numbers."""
|
||||
project = Project(id=uuid.uuid4(), name="Sequential Project", slug="sequential-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
for i in range(1, 6):
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name=f"Sprint {i}",
|
||||
number=i,
|
||||
start_date=today + timedelta(days=(i - 1) * 14),
|
||||
end_date=today + timedelta(days=i * 14 - 1),
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
sprints = db_session.query(Sprint).filter_by(project_id=project.id).order_by(Sprint.number).all()
|
||||
assert len(sprints) == 5
|
||||
for i, sprint in enumerate(sprints, 1):
|
||||
assert sprint.number == i
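How the next sprint number is chosen is not shown in this commit; a hypothetical sketch using the same synchronous session these tests use (helper name is invented, Sprint comes from the module-level import):

from sqlalchemy import func, select


def _next_sprint_number(db_session, project_id) -> int:
    current = db_session.execute(
        select(func.max(Sprint.number)).where(Sprint.project_id == project_id)
    ).scalar()
    return (current or 0) + 1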
|
||||
|
||||
def test_large_sprint_number(self, db_session):
|
||||
"""Test sprint with large number (e.g., long-running project)."""
|
||||
project = Project(id=uuid.uuid4(), name="Large Number Project", slug="large-number-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Sprint 100",
|
||||
number=100,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Sprint 100").first()
|
||||
assert retrieved.number == 100
|
||||
|
||||
|
||||
class TestSprintUpdate:
|
||||
"""Tests for Sprint update operations."""
|
||||
|
||||
def test_update_sprint_goal(self, db_session):
|
||||
"""Test updating sprint goal."""
|
||||
project = Project(id=uuid.uuid4(), name="Update Goal Project", slug="update-goal-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Update Goal Sprint",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
goal="Original goal",
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
original_created_at = sprint.created_at
|
||||
|
||||
sprint.goal = "Updated goal with more detail"
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Update Goal Sprint").first()
|
||||
assert retrieved.goal == "Updated goal with more detail"
|
||||
assert retrieved.created_at == original_created_at
|
||||
assert retrieved.updated_at > original_created_at
|
||||
|
||||
def test_update_sprint_dates(self, db_session):
|
||||
"""Test updating sprint dates."""
|
||||
project = Project(id=uuid.uuid4(), name="Update Dates Project", slug="update-dates-project")
|
||||
db_session.add(project)
|
||||
db_session.commit()
|
||||
|
||||
today = date.today()
|
||||
sprint = Sprint(
|
||||
id=uuid.uuid4(),
|
||||
project_id=project.id,
|
||||
name="Update Dates Sprint",
|
||||
number=1,
|
||||
start_date=today,
|
||||
end_date=today + timedelta(days=14),
|
||||
)
|
||||
db_session.add(sprint)
|
||||
db_session.commit()
|
||||
|
||||
# Extend sprint by a week
|
||||
sprint.end_date = today + timedelta(days=21)
|
||||
db_session.commit()
|
||||
|
||||
retrieved = db_session.query(Sprint).filter_by(name="Update Dates Sprint").first()
|
||||
delta = retrieved.end_date - retrieved.start_date
|
||||
assert delta.days == 21
|
||||
2
backend/tests/schemas/syndarix/__init__.py
Normal file
@@ -0,0 +1,2 @@
# tests/schemas/syndarix/__init__.py
"""Syndarix schema validation tests."""
68
backend/tests/schemas/syndarix/conftest.py
Normal file
@@ -0,0 +1,68 @@
# tests/schemas/syndarix/conftest.py
"""
Shared fixtures for Syndarix schema tests.
"""

import uuid
from datetime import date, timedelta

import pytest


@pytest.fixture
def valid_uuid():
    """Return a valid UUID for testing."""
    return uuid.uuid4()


@pytest.fixture
def valid_project_data():
    """Return valid project data for schema testing."""
    return {
        "name": "Test Project",
        "slug": "test-project",
        "description": "A test project",
    }


@pytest.fixture
def valid_agent_type_data():
    """Return valid agent type data for schema testing."""
    return {
        "name": "Backend Engineer",
        "slug": "backend-engineer",
        "personality_prompt": "You are an expert backend engineer.",
        "primary_model": "claude-opus-4-5-20251101",
    }


@pytest.fixture
def valid_sprint_data(valid_uuid):
    """Return valid sprint data for schema testing."""
    today = date.today()
    return {
        "project_id": valid_uuid,
        "name": "Sprint 1",
        "number": 1,
        "start_date": today,
        "end_date": today + timedelta(days=14),
    }


@pytest.fixture
def valid_issue_data(valid_uuid):
    """Return valid issue data for schema testing."""
    return {
        "project_id": valid_uuid,
        "title": "Test Issue",
        "body": "Issue description",
    }


@pytest.fixture
def valid_agent_instance_data(valid_uuid):
    """Return valid agent instance data for schema testing."""
    return {
        "agent_type_id": valid_uuid,
        "project_id": valid_uuid,
    }
244
backend/tests/schemas/syndarix/test_agent_instance_schemas.py
Normal file
@@ -0,0 +1,244 @@
|
||||
# tests/schemas/syndarix/test_agent_instance_schemas.py
|
||||
"""
|
||||
Tests for AgentInstance schema validation.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from decimal import Decimal
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from app.schemas.syndarix import (
|
||||
AgentInstanceCreate,
|
||||
AgentInstanceUpdate,
|
||||
AgentStatus,
|
||||
)
|
||||
|
||||
|
||||
class TestAgentInstanceCreateValidation:
|
||||
"""Tests for AgentInstanceCreate schema validation."""
|
||||
|
||||
def test_valid_agent_instance_create(self, valid_agent_instance_data):
|
||||
"""Test creating agent instance with valid data."""
|
||||
instance = AgentInstanceCreate(**valid_agent_instance_data)
|
||||
|
||||
assert instance.agent_type_id is not None
|
||||
assert instance.project_id is not None
|
||||
|
||||
def test_agent_instance_create_defaults(self, valid_agent_instance_data):
|
||||
"""Test that defaults are applied correctly."""
|
||||
instance = AgentInstanceCreate(**valid_agent_instance_data)
|
||||
|
||||
assert instance.status == AgentStatus.IDLE
|
||||
assert instance.current_task is None
|
||||
assert instance.short_term_memory == {}
|
||||
assert instance.long_term_memory_ref is None
|
||||
assert instance.session_id is None
|
||||
|
||||
def test_agent_instance_create_with_all_fields(self, valid_uuid):
|
||||
"""Test creating agent instance with all optional fields."""
|
||||
instance = AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
project_id=valid_uuid,
|
||||
status=AgentStatus.WORKING,
|
||||
current_task="Processing feature request",
|
||||
short_term_memory={"context": "working"},
|
||||
long_term_memory_ref="project-123/agent-456",
|
||||
session_id="session-abc",
|
||||
)
|
||||
|
||||
assert instance.status == AgentStatus.WORKING
|
||||
assert instance.current_task == "Processing feature request"
|
||||
assert instance.short_term_memory == {"context": "working"}
|
||||
assert instance.long_term_memory_ref == "project-123/agent-456"
|
||||
assert instance.session_id == "session-abc"
|
||||
|
||||
def test_agent_instance_create_agent_type_id_required(self, valid_uuid):
|
||||
"""Test that agent_type_id is required."""
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
AgentInstanceCreate(
|
||||
project_id=valid_uuid,
|
||||
)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert any("agent_type_id" in str(e).lower() for e in errors)
|
||||
|
||||
def test_agent_instance_create_project_id_required(self, valid_uuid):
|
||||
"""Test that project_id is required."""
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert any("project_id" in str(e).lower() for e in errors)
|
||||
|
||||
|
||||
class TestAgentInstanceUpdateValidation:
|
||||
"""Tests for AgentInstanceUpdate schema validation."""
|
||||
|
||||
def test_agent_instance_update_partial(self):
|
||||
"""Test updating only some fields."""
|
||||
update = AgentInstanceUpdate(
|
||||
status=AgentStatus.WORKING,
|
||||
)
|
||||
|
||||
assert update.status == AgentStatus.WORKING
|
||||
assert update.current_task is None
|
||||
assert update.short_term_memory is None
|
||||
|
||||
def test_agent_instance_update_all_fields(self):
|
||||
"""Test updating all fields."""
|
||||
from datetime import UTC, datetime
|
||||
|
||||
now = datetime.now(UTC)
|
||||
update = AgentInstanceUpdate(
|
||||
status=AgentStatus.WORKING,
|
||||
current_task="New task",
|
||||
short_term_memory={"new": "context"},
|
||||
long_term_memory_ref="new-ref",
|
||||
session_id="new-session",
|
||||
last_activity_at=now,
|
||||
tasks_completed=5,
|
||||
tokens_used=10000,
|
||||
cost_incurred=Decimal("1.5000"),
|
||||
)
|
||||
|
||||
assert update.status == AgentStatus.WORKING
|
||||
assert update.current_task == "New task"
|
||||
assert update.tasks_completed == 5
|
||||
assert update.tokens_used == 10000
|
||||
assert update.cost_incurred == Decimal("1.5000")
|
||||
|
||||
def test_agent_instance_update_tasks_completed_negative_fails(self):
|
||||
"""Test that negative tasks_completed raises ValidationError."""
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
AgentInstanceUpdate(tasks_completed=-1)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert any("tasks_completed" in str(e).lower() for e in errors)
|
||||
|
||||
def test_agent_instance_update_tokens_used_negative_fails(self):
|
||||
"""Test that negative tokens_used raises ValidationError."""
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
AgentInstanceUpdate(tokens_used=-1)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert any("tokens_used" in str(e).lower() for e in errors)
|
||||
|
||||
def test_agent_instance_update_cost_incurred_negative_fails(self):
|
||||
"""Test that negative cost_incurred raises ValidationError."""
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
AgentInstanceUpdate(cost_incurred=Decimal("-0.01"))
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert any("cost_incurred" in str(e).lower() for e in errors)
|
||||
|
||||
|
||||
class TestAgentStatusEnum:
|
||||
"""Tests for AgentStatus enum validation."""
|
||||
|
||||
def test_valid_agent_statuses(self, valid_uuid):
|
||||
"""Test all valid agent statuses."""
|
||||
for status in AgentStatus:
|
||||
instance = AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
project_id=valid_uuid,
|
||||
status=status,
|
||||
)
|
||||
assert instance.status == status
|
||||
|
||||
def test_invalid_agent_status(self, valid_uuid):
|
||||
"""Test that invalid agent status raises ValidationError."""
|
||||
with pytest.raises(ValidationError):
|
||||
AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
project_id=valid_uuid,
|
||||
status="invalid", # type: ignore
|
||||
)
|
||||
|
||||
|
||||
class TestAgentInstanceShortTermMemory:
|
||||
"""Tests for AgentInstance short_term_memory validation."""
|
||||
|
||||
def test_short_term_memory_empty_dict(self, valid_uuid):
|
||||
"""Test that empty short_term_memory is valid."""
|
||||
instance = AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
project_id=valid_uuid,
|
||||
short_term_memory={},
|
||||
)
|
||||
assert instance.short_term_memory == {}
|
||||
|
||||
def test_short_term_memory_complex(self, valid_uuid):
|
||||
"""Test complex short_term_memory structure."""
|
||||
memory = {
|
||||
"conversation_history": [
|
||||
{"role": "user", "content": "Hello"},
|
||||
{"role": "assistant", "content": "Hi there"},
|
||||
],
|
||||
"recent_files": ["file1.py", "file2.py"],
|
||||
"decisions": {"key": "value"},
|
||||
"context_tokens": 1024,
|
||||
}
|
||||
instance = AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
project_id=valid_uuid,
|
||||
short_term_memory=memory,
|
||||
)
|
||||
assert instance.short_term_memory == memory
|
||||
|
||||
|
||||
class TestAgentInstanceStringFields:
|
||||
"""Tests for AgentInstance string field validation."""
|
||||
|
||||
def test_long_term_memory_ref_max_length(self, valid_uuid):
|
||||
"""Test long_term_memory_ref max length."""
|
||||
long_ref = "a" * 500 # Max length is 500
|
||||
|
||||
instance = AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
project_id=valid_uuid,
|
||||
long_term_memory_ref=long_ref,
|
||||
)
|
||||
assert instance.long_term_memory_ref == long_ref
|
||||
|
||||
def test_long_term_memory_ref_too_long(self, valid_uuid):
|
||||
"""Test that too long long_term_memory_ref raises ValidationError."""
|
||||
too_long = "a" * 501
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
project_id=valid_uuid,
|
||||
long_term_memory_ref=too_long,
|
||||
)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert any("long_term_memory_ref" in str(e).lower() for e in errors)
|
||||
|
||||
def test_session_id_max_length(self, valid_uuid):
|
||||
"""Test session_id max length."""
|
||||
long_session = "a" * 255 # Max length is 255
|
||||
|
||||
instance = AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
project_id=valid_uuid,
|
||||
session_id=long_session,
|
||||
)
|
||||
assert instance.session_id == long_session
|
||||
|
||||
def test_session_id_too_long(self, valid_uuid):
|
||||
"""Test that too long session_id raises ValidationError."""
|
||||
too_long = "a" * 256
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
AgentInstanceCreate(
|
||||
agent_type_id=valid_uuid,
|
||||
project_id=valid_uuid,
|
||||
session_id=too_long,
|
||||
)
|
||||
|
||||
errors = exc_info.value.errors()
|
||||
assert any("session_id" in str(e).lower() for e in errors)
|
||||
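For context, a minimal sketch of the constraints these AgentInstanceUpdate tests assume: non-negative counters and bounded string lengths expressed as Pydantic v2 Field constraints. The class and field names below mirror the tests, but this is illustrative only; the real schema lives in app/schemas/syndarix and may differ.

# Illustrative sketch, not the actual app/schemas/syndarix definition.
from decimal import Decimal

from pydantic import BaseModel, Field


class AgentInstanceUpdateSketch(BaseModel):
    """Hypothetical partial-update schema with the constraints the tests exercise."""

    tasks_completed: int | None = Field(default=None, ge=0)
    tokens_used: int | None = Field(default=None, ge=0)
    cost_incurred: Decimal | None = Field(default=None, ge=0)
    long_term_memory_ref: str | None = Field(default=None, max_length=500)
    session_id: str | None = Field(default=None, max_length=255)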
318
backend/tests/schemas/syndarix/test_agent_type_schemas.py
Normal file
@@ -0,0 +1,318 @@
# tests/schemas/syndarix/test_agent_type_schemas.py
"""
Tests for AgentType schema validation.
"""

import pytest
from pydantic import ValidationError

from app.schemas.syndarix import (
    AgentTypeCreate,
    AgentTypeUpdate,
)


class TestAgentTypeCreateValidation:
    """Tests for AgentTypeCreate schema validation."""

    def test_valid_agent_type_create(self, valid_agent_type_data):
        """Test creating agent type with valid data."""
        agent_type = AgentTypeCreate(**valid_agent_type_data)

        assert agent_type.name == "Backend Engineer"
        assert agent_type.slug == "backend-engineer"
        assert agent_type.personality_prompt == "You are an expert backend engineer."
        assert agent_type.primary_model == "claude-opus-4-5-20251101"

    def test_agent_type_create_defaults(self, valid_agent_type_data):
        """Test that defaults are applied correctly."""
        agent_type = AgentTypeCreate(**valid_agent_type_data)

        assert agent_type.expertise == []
        assert agent_type.fallback_models == []
        assert agent_type.model_params == {}
        assert agent_type.mcp_servers == []
        assert agent_type.tool_permissions == {}
        assert agent_type.is_active is True

    def test_agent_type_create_with_all_fields(self, valid_agent_type_data):
        """Test creating agent type with all optional fields."""
        agent_type = AgentTypeCreate(
            **valid_agent_type_data,
            description="Detailed description",
            expertise=["python", "fastapi"],
            fallback_models=["claude-sonnet-4-20250514"],
            model_params={"temperature": 0.7},
            mcp_servers=["gitea", "slack"],
            tool_permissions={"allowed": ["*"]},
            is_active=True,
        )

        assert agent_type.description == "Detailed description"
        assert agent_type.expertise == ["python", "fastapi"]
        assert agent_type.fallback_models == ["claude-sonnet-4-20250514"]

    def test_agent_type_create_name_empty_fails(self):
        """Test that empty name raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            AgentTypeCreate(
                name="",
                slug="valid-slug",
                personality_prompt="Test prompt",
                primary_model="claude-opus-4-5-20251101",
            )

        errors = exc_info.value.errors()
        assert any("name" in str(e) for e in errors)

    def test_agent_type_create_name_stripped(self):
        """Test that name is stripped of whitespace."""
        agent_type = AgentTypeCreate(
            name=" Padded Name ",
            slug="padded-slug",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
        )

        assert agent_type.name == "Padded Name"

    def test_agent_type_create_personality_prompt_required(self):
        """Test that personality_prompt is required."""
        with pytest.raises(ValidationError) as exc_info:
            AgentTypeCreate(
                name="Test Agent",
                slug="test-agent",
                primary_model="claude-opus-4-5-20251101",
            )

        errors = exc_info.value.errors()
        assert any("personality_prompt" in str(e).lower() for e in errors)

    def test_agent_type_create_primary_model_required(self):
        """Test that primary_model is required."""
        with pytest.raises(ValidationError) as exc_info:
            AgentTypeCreate(
                name="Test Agent",
                slug="test-agent",
                personality_prompt="Test prompt",
            )

        errors = exc_info.value.errors()
        assert any("primary_model" in str(e).lower() for e in errors)


class TestAgentTypeSlugValidation:
    """Tests for AgentType slug validation."""

    def test_valid_slugs(self):
        """Test various valid slug formats."""
        valid_slugs = [
            "simple",
            "with-hyphens",
            "has123numbers",
        ]

        for slug in valid_slugs:
            agent_type = AgentTypeCreate(
                name="Test Agent",
                slug=slug,
                personality_prompt="Test",
                primary_model="claude-opus-4-5-20251101",
            )
            assert agent_type.slug == slug

    def test_invalid_slug_uppercase(self):
        """Test that uppercase letters in slug raise ValidationError."""
        with pytest.raises(ValidationError):
            AgentTypeCreate(
                name="Test Agent",
                slug="Invalid-Uppercase",
                personality_prompt="Test",
                primary_model="claude-opus-4-5-20251101",
            )

    def test_invalid_slug_special_chars(self):
        """Test that special characters raise ValidationError."""
        with pytest.raises(ValidationError):
            AgentTypeCreate(
                name="Test Agent",
                slug="has_underscore",
                personality_prompt="Test",
                primary_model="claude-opus-4-5-20251101",
            )


class TestAgentTypeExpertiseValidation:
    """Tests for AgentType expertise validation."""

    def test_expertise_normalized_lowercase(self):
        """Test that expertise is normalized to lowercase."""
        agent_type = AgentTypeCreate(
            name="Test Agent",
            slug="test-agent",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
            expertise=["Python", "FastAPI", "PostgreSQL"],
        )

        assert agent_type.expertise == ["python", "fastapi", "postgresql"]

    def test_expertise_stripped(self):
        """Test that expertise items are stripped."""
        agent_type = AgentTypeCreate(
            name="Test Agent",
            slug="test-agent",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
            expertise=[" python ", " fastapi "],
        )

        assert agent_type.expertise == ["python", "fastapi"]

    def test_expertise_empty_strings_removed(self):
        """Test that empty expertise strings are removed."""
        agent_type = AgentTypeCreate(
            name="Test Agent",
            slug="test-agent",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
            expertise=["python", "", " ", "fastapi"],
        )

        assert agent_type.expertise == ["python", "fastapi"]


class TestAgentTypeMcpServersValidation:
    """Tests for AgentType MCP servers validation."""

    def test_mcp_servers_stripped(self):
        """Test that MCP server names are stripped."""
        agent_type = AgentTypeCreate(
            name="Test Agent",
            slug="test-agent",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
            mcp_servers=[" gitea ", " slack "],
        )

        assert agent_type.mcp_servers == ["gitea", "slack"]

    def test_mcp_servers_empty_strings_removed(self):
        """Test that empty MCP server strings are removed."""
        agent_type = AgentTypeCreate(
            name="Test Agent",
            slug="test-agent",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
            mcp_servers=["gitea", "", " ", "slack"],
        )

        assert agent_type.mcp_servers == ["gitea", "slack"]


class TestAgentTypeUpdateValidation:
    """Tests for AgentTypeUpdate schema validation."""

    def test_agent_type_update_partial(self):
        """Test updating only some fields."""
        update = AgentTypeUpdate(
            name="Updated Name",
        )

        assert update.name == "Updated Name"
        assert update.slug is None
        assert update.description is None
        assert update.expertise is None

    def test_agent_type_update_all_fields(self):
        """Test updating all fields."""
        update = AgentTypeUpdate(
            name="Updated Name",
            slug="updated-slug",
            description="Updated description",
            expertise=["new-skill"],
            personality_prompt="Updated prompt",
            primary_model="new-model",
            fallback_models=["fallback-1"],
            model_params={"temp": 0.5},
            mcp_servers=["server-1"],
            tool_permissions={"key": "value"},
            is_active=False,
        )

        assert update.name == "Updated Name"
        assert update.slug == "updated-slug"
        assert update.is_active is False

    def test_agent_type_update_empty_name_fails(self):
        """Test that empty name in update raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            AgentTypeUpdate(name="")

        errors = exc_info.value.errors()
        assert any("name" in str(e) for e in errors)

    def test_agent_type_update_slug_validation(self):
        """Test that slug validation applies to updates."""
        with pytest.raises(ValidationError):
            AgentTypeUpdate(slug="Invalid-Slug")

    def test_agent_type_update_expertise_normalized(self):
        """Test that expertise is normalized in updates."""
        update = AgentTypeUpdate(
            expertise=["Python", "FastAPI"],
        )

        assert update.expertise == ["python", "fastapi"]


class TestAgentTypeJsonFields:
    """Tests for AgentType JSON field validation."""

    def test_model_params_complex(self):
        """Test complex model_params structure."""
        params = {
            "temperature": 0.7,
            "max_tokens": 4096,
            "top_p": 0.9,
            "stop_sequences": ["###"],
        }
        agent_type = AgentTypeCreate(
            name="Test Agent",
            slug="test-agent",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
            model_params=params,
        )

        assert agent_type.model_params == params

    def test_tool_permissions_complex(self):
        """Test complex tool_permissions structure."""
        permissions = {
            "allowed": ["file:read", "git:commit"],
            "denied": ["file:delete"],
            "require_approval": ["git:push"],
        }
        agent_type = AgentTypeCreate(
            name="Test Agent",
            slug="test-agent",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
            tool_permissions=permissions,
        )

        assert agent_type.tool_permissions == permissions

    def test_fallback_models_list(self):
        """Test fallback_models as a list."""
        models = ["claude-sonnet-4-20250514", "gpt-4o", "mistral-large"]
        agent_type = AgentTypeCreate(
            name="Test Agent",
            slug="test-agent",
            personality_prompt="Test",
            primary_model="claude-opus-4-5-20251101",
            fallback_models=models,
        )

        assert agent_type.fallback_models == models
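The expertise and MCP-server tests above depend on list normalization at validation time: items are stripped, blanks are dropped, and expertise is lowercased. A hedged sketch of that behavior, assuming Pydantic v2 field validators (illustrative names, not the actual app/schemas/syndarix code):

from pydantic import BaseModel, field_validator


class AgentTypeNormalizationSketch(BaseModel):
    """Hypothetical model showing the list clean-up the tests expect."""

    expertise: list[str] = []
    mcp_servers: list[str] = []

    @field_validator("expertise")
    @classmethod
    def normalize_expertise(cls, v: list[str]) -> list[str]:
        # Strip whitespace, lowercase, and drop empty entries.
        return [item.strip().lower() for item in v if item.strip()]

    @field_validator("mcp_servers")
    @classmethod
    def normalize_mcp_servers(cls, v: list[str]) -> list[str]:
        # Server names are stripped and de-blanked but keep their case.
        return [item.strip() for item in v if item.strip()]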
342
backend/tests/schemas/syndarix/test_issue_schemas.py
Normal file
@@ -0,0 +1,342 @@
# tests/schemas/syndarix/test_issue_schemas.py
"""
Tests for Issue schema validation.
"""

import uuid

import pytest
from pydantic import ValidationError

from app.schemas.syndarix import (
    IssueAssign,
    IssueCreate,
    IssuePriority,
    IssueStatus,
    IssueUpdate,
    SyncStatus,
)


class TestIssueCreateValidation:
    """Tests for IssueCreate schema validation."""

    def test_valid_issue_create(self, valid_issue_data):
        """Test creating issue with valid data."""
        issue = IssueCreate(**valid_issue_data)

        assert issue.title == "Test Issue"
        assert issue.body == "Issue description"

    def test_issue_create_defaults(self, valid_issue_data):
        """Test that defaults are applied correctly."""
        issue = IssueCreate(**valid_issue_data)

        assert issue.status == IssueStatus.OPEN
        assert issue.priority == IssuePriority.MEDIUM
        assert issue.labels == []
        assert issue.story_points is None
        assert issue.assigned_agent_id is None
        assert issue.human_assignee is None
        assert issue.sprint_id is None

    def test_issue_create_with_all_fields(self, valid_uuid):
        """Test creating issue with all optional fields."""
        agent_id = uuid.uuid4()
        sprint_id = uuid.uuid4()

        issue = IssueCreate(
            project_id=valid_uuid,
            title="Full Issue",
            body="Detailed body",
            status=IssueStatus.IN_PROGRESS,
            priority=IssuePriority.HIGH,
            labels=["bug", "security"],
            story_points=5,
            assigned_agent_id=agent_id,
            sprint_id=sprint_id,
            external_tracker="gitea",
            external_id="gitea-123",
            external_url="https://gitea.example.com/issues/123",
            external_number=123,
        )

        assert issue.status == IssueStatus.IN_PROGRESS
        assert issue.priority == IssuePriority.HIGH
        assert issue.labels == ["bug", "security"]
        assert issue.story_points == 5
        assert issue.external_tracker == "gitea"

    def test_issue_create_title_empty_fails(self, valid_uuid):
        """Test that empty title raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            IssueCreate(
                project_id=valid_uuid,
                title="",
            )

        errors = exc_info.value.errors()
        assert any("title" in str(e) for e in errors)

    def test_issue_create_title_whitespace_only_fails(self, valid_uuid):
        """Test that whitespace-only title raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            IssueCreate(
                project_id=valid_uuid,
                title=" ",
            )

        errors = exc_info.value.errors()
        assert any("title" in str(e) for e in errors)

    def test_issue_create_title_stripped(self, valid_uuid):
        """Test that title is stripped."""
        issue = IssueCreate(
            project_id=valid_uuid,
            title=" Padded Title ",
        )

        assert issue.title == "Padded Title"

    def test_issue_create_project_id_required(self):
        """Test that project_id is required."""
        with pytest.raises(ValidationError) as exc_info:
            IssueCreate(title="No Project Issue")

        errors = exc_info.value.errors()
        assert any("project_id" in str(e).lower() for e in errors)


class TestIssueLabelsValidation:
    """Tests for Issue labels validation."""

    def test_labels_normalized_lowercase(self, valid_uuid):
        """Test that labels are normalized to lowercase."""
        issue = IssueCreate(
            project_id=valid_uuid,
            title="Test Issue",
            labels=["Bug", "SECURITY", "FrontEnd"],
        )

        assert issue.labels == ["bug", "security", "frontend"]

    def test_labels_stripped(self, valid_uuid):
        """Test that labels are stripped."""
        issue = IssueCreate(
            project_id=valid_uuid,
            title="Test Issue",
            labels=[" bug ", " security "],
        )

        assert issue.labels == ["bug", "security"]

    def test_labels_empty_strings_removed(self, valid_uuid):
        """Test that empty label strings are removed."""
        issue = IssueCreate(
            project_id=valid_uuid,
            title="Test Issue",
            labels=["bug", "", " ", "security"],
        )

        assert issue.labels == ["bug", "security"]


class TestIssueStoryPointsValidation:
    """Tests for Issue story_points validation."""

    def test_story_points_valid_range(self, valid_uuid):
        """Test valid story_points values."""
        for points in [0, 1, 5, 13, 21, 100]:
            issue = IssueCreate(
                project_id=valid_uuid,
                title="Test Issue",
                story_points=points,
            )
            assert issue.story_points == points

    def test_story_points_negative_fails(self, valid_uuid):
        """Test that negative story_points raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            IssueCreate(
                project_id=valid_uuid,
                title="Test Issue",
                story_points=-1,
            )

        errors = exc_info.value.errors()
        assert any("story_points" in str(e).lower() for e in errors)

    def test_story_points_over_100_fails(self, valid_uuid):
        """Test that story_points > 100 raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            IssueCreate(
                project_id=valid_uuid,
                title="Test Issue",
                story_points=101,
            )

        errors = exc_info.value.errors()
        assert any("story_points" in str(e).lower() for e in errors)


class TestIssueExternalTrackerValidation:
    """Tests for Issue external tracker validation."""

    def test_valid_external_trackers(self, valid_uuid):
        """Test valid external tracker values."""
        for tracker in ["gitea", "github", "gitlab"]:
            issue = IssueCreate(
                project_id=valid_uuid,
                title="Test Issue",
                external_tracker=tracker,
                external_id="ext-123",
            )
            assert issue.external_tracker == tracker

    def test_invalid_external_tracker(self, valid_uuid):
        """Test that invalid external tracker raises ValidationError."""
        with pytest.raises(ValidationError):
            IssueCreate(
                project_id=valid_uuid,
                title="Test Issue",
                external_tracker="invalid",  # type: ignore
                external_id="ext-123",
            )


class TestIssueUpdateValidation:
    """Tests for IssueUpdate schema validation."""

    def test_issue_update_partial(self):
        """Test updating only some fields."""
        update = IssueUpdate(
            title="Updated Title",
        )

        assert update.title == "Updated Title"
        assert update.body is None
        assert update.status is None

    def test_issue_update_all_fields(self):
        """Test updating all fields."""
        agent_id = uuid.uuid4()
        sprint_id = uuid.uuid4()

        update = IssueUpdate(
            title="Updated Title",
            body="Updated body",
            status=IssueStatus.CLOSED,
            priority=IssuePriority.CRITICAL,
            labels=["updated"],
            assigned_agent_id=agent_id,
            human_assignee=None,
            sprint_id=sprint_id,
            story_points=8,
            sync_status=SyncStatus.PENDING,
        )

        assert update.title == "Updated Title"
        assert update.status == IssueStatus.CLOSED
        assert update.priority == IssuePriority.CRITICAL

    def test_issue_update_empty_title_fails(self):
        """Test that empty title in update raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            IssueUpdate(title="")

        errors = exc_info.value.errors()
        assert any("title" in str(e) for e in errors)

    def test_issue_update_labels_normalized(self):
        """Test that labels are normalized in updates."""
        update = IssueUpdate(
            labels=["Bug", "SECURITY"],
        )

        assert update.labels == ["bug", "security"]


class TestIssueAssignValidation:
    """Tests for IssueAssign schema validation."""

    def test_assign_to_agent(self):
        """Test assigning to an agent."""
        agent_id = uuid.uuid4()
        assign = IssueAssign(assigned_agent_id=agent_id)

        assert assign.assigned_agent_id == agent_id
        assert assign.human_assignee is None

    def test_assign_to_human(self):
        """Test assigning to a human."""
        assign = IssueAssign(human_assignee="developer@example.com")

        assert assign.human_assignee == "developer@example.com"
        assert assign.assigned_agent_id is None

    def test_unassign(self):
        """Test unassigning (both None)."""
        assign = IssueAssign()

        assert assign.assigned_agent_id is None
        assert assign.human_assignee is None

    def test_assign_both_fails(self):
        """Test that assigning to both agent and human raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            IssueAssign(
                assigned_agent_id=uuid.uuid4(),
                human_assignee="developer@example.com",
            )

        errors = exc_info.value.errors()
        # Check for the validation error message
        assert len(errors) > 0


class TestIssueEnums:
    """Tests for Issue enum validation."""

    def test_valid_issue_statuses(self, valid_uuid):
        """Test all valid issue statuses."""
        for status in IssueStatus:
            issue = IssueCreate(
                project_id=valid_uuid,
                title=f"Issue {status.value}",
                status=status,
            )
            assert issue.status == status

    def test_invalid_issue_status(self, valid_uuid):
        """Test that invalid issue status raises ValidationError."""
        with pytest.raises(ValidationError):
            IssueCreate(
                project_id=valid_uuid,
                title="Test Issue",
                status="invalid",  # type: ignore
            )

    def test_valid_issue_priorities(self, valid_uuid):
        """Test all valid issue priorities."""
        for priority in IssuePriority:
            issue = IssueCreate(
                project_id=valid_uuid,
                title=f"Issue {priority.value}",
                priority=priority,
            )
            assert issue.priority == priority

    def test_invalid_issue_priority(self, valid_uuid):
        """Test that invalid issue priority raises ValidationError."""
        with pytest.raises(ValidationError):
            IssueCreate(
                project_id=valid_uuid,
                title="Test Issue",
                priority="invalid",  # type: ignore
            )

    def test_valid_sync_statuses(self):
        """Test all valid sync statuses in update."""
        for status in SyncStatus:
            update = IssueUpdate(sync_status=status)
            assert update.sync_status == status
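The IssueAssign tests encode a mutual-exclusion rule: an issue can be assigned to an agent or to a human, never both. A minimal sketch of how such a rule looks as a Pydantic v2 model validator (hypothetical class name; the actual schema is in app/schemas/syndarix and may differ):

from uuid import UUID

from pydantic import BaseModel, model_validator


class IssueAssignSketch(BaseModel):
    """Hypothetical assignment payload enforcing at most one assignee."""

    assigned_agent_id: UUID | None = None
    human_assignee: str | None = None

    @model_validator(mode="after")
    def check_single_assignee(self) -> "IssueAssignSketch":
        # Raising ValueError here surfaces as a ValidationError on construction.
        if self.assigned_agent_id is not None and self.human_assignee is not None:
            raise ValueError("cannot assign to both an agent and a human")
        return self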
300
backend/tests/schemas/syndarix/test_project_schemas.py
Normal file
@@ -0,0 +1,300 @@
# tests/schemas/syndarix/test_project_schemas.py
"""
Tests for Project schema validation.
"""

import uuid

import pytest
from pydantic import ValidationError

from app.schemas.syndarix import (
    AutonomyLevel,
    ProjectCreate,
    ProjectStatus,
    ProjectUpdate,
)


class TestProjectCreateValidation:
    """Tests for ProjectCreate schema validation."""

    def test_valid_project_create(self, valid_project_data):
        """Test creating project with valid data."""
        project = ProjectCreate(**valid_project_data)

        assert project.name == "Test Project"
        assert project.slug == "test-project"
        assert project.description == "A test project"

    def test_project_create_defaults(self):
        """Test that defaults are applied correctly."""
        project = ProjectCreate(
            name="Minimal Project",
            slug="minimal-project",
        )

        assert project.autonomy_level == AutonomyLevel.MILESTONE
        assert project.status == ProjectStatus.ACTIVE
        assert project.settings == {}
        assert project.owner_id is None

    def test_project_create_with_owner(self, valid_project_data):
        """Test creating project with owner ID."""
        owner_id = uuid.uuid4()
        project = ProjectCreate(
            **valid_project_data,
            owner_id=owner_id,
        )

        assert project.owner_id == owner_id

    def test_project_create_name_empty_fails(self):
        """Test that empty name raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            ProjectCreate(
                name="",
                slug="valid-slug",
            )

        errors = exc_info.value.errors()
        assert any("name" in str(e) for e in errors)

    def test_project_create_name_whitespace_only_fails(self):
        """Test that whitespace-only name raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            ProjectCreate(
                name=" ",
                slug="valid-slug",
            )

        errors = exc_info.value.errors()
        assert any("name" in str(e) for e in errors)

    def test_project_create_name_stripped(self):
        """Test that name is stripped of leading/trailing whitespace."""
        project = ProjectCreate(
            name=" Padded Name ",
            slug="padded-slug",
        )

        assert project.name == "Padded Name"

    def test_project_create_slug_required(self):
        """Test that slug is required for create."""
        with pytest.raises(ValidationError) as exc_info:
            ProjectCreate(name="No Slug Project")

        errors = exc_info.value.errors()
        assert any("slug" in str(e).lower() for e in errors)


class TestProjectSlugValidation:
    """Tests for Project slug validation."""

    def test_valid_slugs(self):
        """Test various valid slug formats."""
        valid_slugs = [
            "simple",
            "with-hyphens",
            "has123numbers",
            "mix3d-with-hyphen5",
            "a",  # Single character
        ]

        for slug in valid_slugs:
            project = ProjectCreate(
                name="Test Project",
                slug=slug,
            )
            assert project.slug == slug

    def test_invalid_slug_uppercase(self):
        """Test that uppercase letters in slug raise ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            ProjectCreate(
                name="Test Project",
                slug="Invalid-Uppercase",
            )

        errors = exc_info.value.errors()
        assert any("slug" in str(e).lower() for e in errors)

    def test_invalid_slug_special_chars(self):
        """Test that special characters in slug raise ValidationError."""
        invalid_slugs = [
            "has_underscore",
            "has.dot",
            "has@symbol",
            "has space",
            "has/slash",
        ]

        for slug in invalid_slugs:
            with pytest.raises(ValidationError):
                ProjectCreate(
                    name="Test Project",
                    slug=slug,
                )

    def test_invalid_slug_starts_with_hyphen(self):
        """Test that slug starting with hyphen raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            ProjectCreate(
                name="Test Project",
                slug="-invalid-start",
            )

        errors = exc_info.value.errors()
        assert any("hyphen" in str(e).lower() for e in errors)

    def test_invalid_slug_ends_with_hyphen(self):
        """Test that slug ending with hyphen raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            ProjectCreate(
                name="Test Project",
                slug="invalid-end-",
            )

        errors = exc_info.value.errors()
        assert any("hyphen" in str(e).lower() for e in errors)

    def test_invalid_slug_consecutive_hyphens(self):
        """Test that consecutive hyphens in slug raise ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            ProjectCreate(
                name="Test Project",
                slug="invalid--consecutive",
            )

        errors = exc_info.value.errors()
        assert any("consecutive" in str(e).lower() for e in errors)


class TestProjectUpdateValidation:
    """Tests for ProjectUpdate schema validation."""

    def test_project_update_partial(self):
        """Test updating only some fields."""
        update = ProjectUpdate(
            name="Updated Name",
        )

        assert update.name == "Updated Name"
        assert update.slug is None
        assert update.description is None
        assert update.autonomy_level is None
        assert update.status is None

    def test_project_update_all_fields(self):
        """Test updating all fields."""
        owner_id = uuid.uuid4()
        update = ProjectUpdate(
            name="Updated Name",
            slug="updated-slug",
            description="Updated description",
            autonomy_level=AutonomyLevel.AUTONOMOUS,
            status=ProjectStatus.PAUSED,
            settings={"key": "value"},
            owner_id=owner_id,
        )

        assert update.name == "Updated Name"
        assert update.slug == "updated-slug"
        assert update.autonomy_level == AutonomyLevel.AUTONOMOUS
        assert update.status == ProjectStatus.PAUSED

    def test_project_update_empty_name_fails(self):
        """Test that empty name in update raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            ProjectUpdate(name="")

        errors = exc_info.value.errors()
        assert any("name" in str(e) for e in errors)

    def test_project_update_slug_validation(self):
        """Test that slug validation applies to updates too."""
        with pytest.raises(ValidationError):
            ProjectUpdate(slug="Invalid-Slug")


class TestProjectEnums:
    """Tests for Project enum validation."""

    def test_valid_autonomy_levels(self):
        """Test all valid autonomy levels."""
        for level in AutonomyLevel:
            # Replace underscores with hyphens for valid slug
            slug_suffix = level.value.replace("_", "-")
            project = ProjectCreate(
                name="Test Project",
                slug=f"project-{slug_suffix}",
                autonomy_level=level,
            )
            assert project.autonomy_level == level

    def test_invalid_autonomy_level(self):
        """Test that invalid autonomy level raises ValidationError."""
        with pytest.raises(ValidationError):
            ProjectCreate(
                name="Test Project",
                slug="invalid-autonomy",
                autonomy_level="invalid",  # type: ignore
            )

    def test_valid_project_statuses(self):
        """Test all valid project statuses."""
        for status in ProjectStatus:
            project = ProjectCreate(
                name="Test Project",
                slug=f"project-status-{status.value}",
                status=status,
            )
            assert project.status == status

    def test_invalid_project_status(self):
        """Test that invalid project status raises ValidationError."""
        with pytest.raises(ValidationError):
            ProjectCreate(
                name="Test Project",
                slug="invalid-status",
                status="invalid",  # type: ignore
            )


class TestProjectSettings:
    """Tests for Project settings validation."""

    def test_settings_empty_dict(self):
        """Test that empty settings dict is valid."""
        project = ProjectCreate(
            name="Test Project",
            slug="empty-settings",
            settings={},
        )
        assert project.settings == {}

    def test_settings_complex_structure(self):
        """Test that complex settings structure is valid."""
        complex_settings = {
            "mcp_servers": ["gitea", "slack"],
            "webhooks": {
                "on_issue_created": "https://example.com",
            },
            "flags": True,
            "count": 42,
        }
        project = ProjectCreate(
            name="Test Project",
            slug="complex-settings",
            settings=complex_settings,
        )
        assert project.settings == complex_settings

    def test_settings_default_to_empty_dict(self):
        """Test that settings default to empty dict when not provided."""
        project = ProjectCreate(
            name="Test Project",
            slug="default-settings",
        )
        assert project.settings == {}
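The slug tests above pin down the slug grammar: lowercase letters, digits, and hyphens, with no leading, trailing, or consecutive hyphens. A hedged sketch of a validator consistent with those tests; the real validator in app/schemas/syndarix may differ in wording or structure, and the explicit checks exist only to produce the specific error messages the tests look for:

import re

from pydantic import BaseModel, field_validator

# Lowercase alphanumeric runs separated by single hyphens.
SLUG_RE = re.compile(r"^[a-z0-9]+(?:-[a-z0-9]+)*$")


class SlugSketch(BaseModel):
    """Hypothetical model exposing just the slug rule."""

    slug: str

    @field_validator("slug")
    @classmethod
    def validate_slug(cls, v: str) -> str:
        if v.startswith("-") or v.endswith("-"):
            raise ValueError("slug must not start or end with a hyphen")
        if "--" in v:
            raise ValueError("slug must not contain consecutive hyphens")
        if not SLUG_RE.fullmatch(v):
            raise ValueError("slug may contain only lowercase letters, digits, and hyphens")
        return v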
366
backend/tests/schemas/syndarix/test_sprint_schemas.py
Normal file
@@ -0,0 +1,366 @@
# tests/schemas/syndarix/test_sprint_schemas.py
"""
Tests for Sprint schema validation.
"""

from datetime import date, timedelta

import pytest
from pydantic import ValidationError

from app.schemas.syndarix import (
    SprintCreate,
    SprintStatus,
    SprintUpdate,
)


class TestSprintCreateValidation:
    """Tests for SprintCreate schema validation."""

    def test_valid_sprint_create(self, valid_sprint_data):
        """Test creating sprint with valid data."""
        sprint = SprintCreate(**valid_sprint_data)

        assert sprint.name == "Sprint 1"
        assert sprint.number == 1
        assert sprint.start_date is not None
        assert sprint.end_date is not None

    def test_sprint_create_defaults(self, valid_sprint_data):
        """Test that defaults are applied correctly."""
        sprint = SprintCreate(**valid_sprint_data)

        assert sprint.status == SprintStatus.PLANNED
        assert sprint.goal is None
        assert sprint.planned_points is None
        assert sprint.completed_points is None

    def test_sprint_create_with_all_fields(self, valid_uuid):
        """Test creating sprint with all optional fields."""
        today = date.today()

        sprint = SprintCreate(
            project_id=valid_uuid,
            name="Full Sprint",
            number=5,
            goal="Complete all features",
            start_date=today,
            end_date=today + timedelta(days=14),
            status=SprintStatus.PLANNED,
            planned_points=21,
            completed_points=0,
        )

        assert sprint.name == "Full Sprint"
        assert sprint.number == 5
        assert sprint.goal == "Complete all features"
        assert sprint.planned_points == 21

    def test_sprint_create_name_empty_fails(self, valid_uuid):
        """Test that empty name raises ValidationError."""
        today = date.today()

        with pytest.raises(ValidationError) as exc_info:
            SprintCreate(
                project_id=valid_uuid,
                name="",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
            )

        errors = exc_info.value.errors()
        assert any("name" in str(e) for e in errors)

    def test_sprint_create_name_whitespace_only_fails(self, valid_uuid):
        """Test that whitespace-only name raises ValidationError."""
        today = date.today()

        with pytest.raises(ValidationError) as exc_info:
            SprintCreate(
                project_id=valid_uuid,
                name=" ",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
            )

        errors = exc_info.value.errors()
        assert any("name" in str(e) for e in errors)

    def test_sprint_create_name_stripped(self, valid_uuid):
        """Test that name is stripped."""
        today = date.today()

        sprint = SprintCreate(
            project_id=valid_uuid,
            name=" Padded Sprint Name ",
            number=1,
            start_date=today,
            end_date=today + timedelta(days=14),
        )

        assert sprint.name == "Padded Sprint Name"

    def test_sprint_create_project_id_required(self):
        """Test that project_id is required."""
        today = date.today()

        with pytest.raises(ValidationError) as exc_info:
            SprintCreate(
                name="Sprint 1",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
            )

        errors = exc_info.value.errors()
        assert any("project_id" in str(e).lower() for e in errors)


class TestSprintNumberValidation:
    """Tests for Sprint number validation."""

    def test_sprint_number_valid(self, valid_uuid):
        """Test valid sprint numbers."""
        today = date.today()

        for number in [1, 10, 100]:
            sprint = SprintCreate(
                project_id=valid_uuid,
                name=f"Sprint {number}",
                number=number,
                start_date=today,
                end_date=today + timedelta(days=14),
            )
            assert sprint.number == number

    def test_sprint_number_zero_fails(self, valid_uuid):
        """Test that sprint number 0 raises ValidationError."""
        today = date.today()

        with pytest.raises(ValidationError) as exc_info:
            SprintCreate(
                project_id=valid_uuid,
                name="Sprint Zero",
                number=0,
                start_date=today,
                end_date=today + timedelta(days=14),
            )

        errors = exc_info.value.errors()
        assert any("number" in str(e).lower() for e in errors)

    def test_sprint_number_negative_fails(self, valid_uuid):
        """Test that negative sprint number raises ValidationError."""
        today = date.today()

        with pytest.raises(ValidationError) as exc_info:
            SprintCreate(
                project_id=valid_uuid,
                name="Negative Sprint",
                number=-1,
                start_date=today,
                end_date=today + timedelta(days=14),
            )

        errors = exc_info.value.errors()
        assert any("number" in str(e).lower() for e in errors)


class TestSprintDateValidation:
    """Tests for Sprint date validation."""

    def test_valid_date_range(self, valid_uuid):
        """Test valid date range (end > start)."""
        today = date.today()

        sprint = SprintCreate(
            project_id=valid_uuid,
            name="Sprint 1",
            number=1,
            start_date=today,
            end_date=today + timedelta(days=14),
        )

        assert sprint.end_date > sprint.start_date

    def test_same_day_sprint(self, valid_uuid):
        """Test that same day sprint is valid."""
        today = date.today()

        sprint = SprintCreate(
            project_id=valid_uuid,
            name="One Day Sprint",
            number=1,
            start_date=today,
            end_date=today,  # Same day is allowed
        )

        assert sprint.start_date == sprint.end_date

    def test_end_before_start_fails(self, valid_uuid):
        """Test that end date before start date raises ValidationError."""
        today = date.today()

        with pytest.raises(ValidationError) as exc_info:
            SprintCreate(
                project_id=valid_uuid,
                name="Invalid Sprint",
                number=1,
                start_date=today,
                end_date=today - timedelta(days=1),  # Before start
            )

        errors = exc_info.value.errors()
        assert len(errors) > 0


class TestSprintPointsValidation:
    """Tests for Sprint points validation."""

    def test_valid_planned_points(self, valid_uuid):
        """Test valid planned_points values."""
        today = date.today()

        for points in [0, 1, 21, 100]:
            sprint = SprintCreate(
                project_id=valid_uuid,
                name=f"Sprint {points}",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
                planned_points=points,
            )
            assert sprint.planned_points == points

    def test_planned_points_negative_fails(self, valid_uuid):
        """Test that negative planned_points raises ValidationError."""
        today = date.today()

        with pytest.raises(ValidationError) as exc_info:
            SprintCreate(
                project_id=valid_uuid,
                name="Negative Points Sprint",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
                planned_points=-1,
            )

        errors = exc_info.value.errors()
        assert any("planned_points" in str(e).lower() for e in errors)

    def test_valid_completed_points(self, valid_uuid):
        """Test valid completed_points values."""
        today = date.today()

        for points in [0, 5, 21]:
            sprint = SprintCreate(
                project_id=valid_uuid,
                name=f"Sprint {points}",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
                completed_points=points,
            )
            assert sprint.completed_points == points

    def test_completed_points_negative_fails(self, valid_uuid):
        """Test that negative completed_points raises ValidationError."""
        today = date.today()

        with pytest.raises(ValidationError) as exc_info:
            SprintCreate(
                project_id=valid_uuid,
                name="Negative Completed Sprint",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
                completed_points=-1,
            )

        errors = exc_info.value.errors()
        assert any("completed_points" in str(e).lower() for e in errors)


class TestSprintUpdateValidation:
    """Tests for SprintUpdate schema validation."""

    def test_sprint_update_partial(self):
        """Test updating only some fields."""
        update = SprintUpdate(
            name="Updated Name",
        )

        assert update.name == "Updated Name"
        assert update.goal is None
        assert update.start_date is None
        assert update.end_date is None

    def test_sprint_update_all_fields(self):
        """Test updating all fields."""
        today = date.today()

        update = SprintUpdate(
            name="Updated Name",
            goal="Updated goal",
            start_date=today,
            end_date=today + timedelta(days=21),
            status=SprintStatus.ACTIVE,
            planned_points=34,
            completed_points=20,
        )

        assert update.name == "Updated Name"
        assert update.goal == "Updated goal"
        assert update.status == SprintStatus.ACTIVE
        assert update.planned_points == 34

    def test_sprint_update_empty_name_fails(self):
        """Test that empty name in update raises ValidationError."""
        with pytest.raises(ValidationError) as exc_info:
            SprintUpdate(name="")

        errors = exc_info.value.errors()
        assert any("name" in str(e) for e in errors)

    def test_sprint_update_name_stripped(self):
        """Test that name is stripped in updates."""
        update = SprintUpdate(name=" Updated ")

        assert update.name == "Updated"


class TestSprintStatusEnum:
    """Tests for SprintStatus enum validation."""

    def test_valid_sprint_statuses(self, valid_uuid):
        """Test all valid sprint statuses."""
        today = date.today()

        for status in SprintStatus:
            sprint = SprintCreate(
                project_id=valid_uuid,
                name=f"Sprint {status.value}",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
                status=status,
            )
            assert sprint.status == status

    def test_invalid_sprint_status(self, valid_uuid):
        """Test that invalid sprint status raises ValidationError."""
        today = date.today()

        with pytest.raises(ValidationError):
            SprintCreate(
                project_id=valid_uuid,
                name="Invalid Status Sprint",
                number=1,
                start_date=today,
                end_date=today + timedelta(days=14),
                status="invalid",  # type: ignore
            )
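The sprint tests combine per-field bounds (number >= 1, points >= 0) with a cross-field date rule: end_date may equal start_date but must not precede it. A minimal sketch under those assumptions (illustrative class name, not the actual app/schemas/syndarix code):

from datetime import date

from pydantic import BaseModel, Field, model_validator


class SprintDatesSketch(BaseModel):
    """Hypothetical model with the bounds and date rule the tests check."""

    number: int = Field(ge=1)
    start_date: date
    end_date: date
    planned_points: int | None = Field(default=None, ge=0)
    completed_points: int | None = Field(default=None, ge=0)

    @model_validator(mode="after")
    def check_date_range(self) -> "SprintDatesSketch":
        # Same-day sprints are allowed; end before start is not.
        if self.end_date < self.start_date:
            raise ValueError("end_date must not be earlier than start_date")
        return self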