Clean up Alembic migrations

- Removed outdated and redundant Alembic migration files, squashing the history into a fresh initial schema migration (0001) and a manual performance-index migration (0002). This streamlines the migration directory, improves maintainability, and eliminates duplicate or unused scripts.
Felipe Cardoso
2025-11-27 09:12:30 +01:00
parent 4a06b96b2e
commit 2bbe925cef
26 changed files with 883 additions and 971 deletions

View File

@@ -1,9 +1,6 @@
# Development stage
FROM python:3.12-slim AS development
# Create non-root user
RUN groupadd -r appuser && useradd -r -g appuser appuser
WORKDIR /app
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
@@ -31,11 +28,8 @@ COPY . .
COPY entrypoint.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/entrypoint.sh
# Set ownership to non-root user
RUN chown -R appuser:appuser /app
# Switch to non-root user
USER appuser
# Note: Running as root in development for bind mount compatibility
# Production stage uses non-root user for security
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]

View File

@@ -1,4 +1,4 @@
.PHONY: help lint lint-fix format format-check type-check test test-cov validate clean install-dev sync check-docker install-e2e test-e2e test-e2e-schema test-all
.PHONY: help lint lint-fix format format-check type-check test test-cov validate clean install-dev sync check-docker install-e2e test-e2e test-e2e-schema test-all drop-db reset-db
# Default target
help:
@@ -25,6 +25,10 @@ help:
@echo " make test-all - Run all tests (unit + E2E)"
@echo " make check-docker - Check if Docker is available"
@echo ""
@echo "Database:"
@echo " make drop-db - Drop local database entirely (requires Docker)"
@echo " make reset-db - Drop and recreate database with migrations"
@echo ""
@echo "Cleanup:"
@echo " make clean - Remove cache and build artifacts"
@@ -119,6 +123,22 @@ test-all:
@$(MAKE) test
@$(MAKE) test-e2e
# ============================================================================
# Database Management
# ============================================================================
drop-db: check-docker
@echo "🗑️ Dropping local database..."
@cd .. && docker compose -f docker-compose.dev.yml exec -T db psql -U postgres -c "DROP DATABASE IF EXISTS app WITH (FORCE);" 2>/dev/null || \
cd .. && docker compose -f docker-compose.dev.yml exec -T db psql -U postgres -c "DROP DATABASE IF EXISTS app;"
@cd .. && docker compose -f docker-compose.dev.yml exec -T db psql -U postgres -c "CREATE DATABASE app;"
@echo "✅ Database dropped and recreated (empty)"
reset-db: drop-db
@echo "🔄 Applying migrations..."
@uv run python migrate.py --local apply
@echo "✅ Database reset complete!"
# ============================================================================
# Cleanup
# ============================================================================

View File

@@ -2,6 +2,13 @@
script_location = app/alembic
sqlalchemy.url = postgresql://postgres:postgres@db:5432/app
# Use sequential naming: 0001_message.py, 0002_message.py, etc.
# The rev_id is still used internally but filename is cleaner
file_template = %%(rev)s_%%(slug)s
# Allow specifying custom revision IDs via --rev-id flag
revision_environment = true
[loggers]
keys = root,sqlalchemy,alembic
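
This sequential scheme pairs with migrate.py's auto-incrementing rev-ids (0001, 0002, ...). A minimal sketch of how the next ID could be derived from the versions directory under the convention above; `next_rev_id` is a hypothetical helper, not the script's actual code:

```python
# A minimal sketch (not migrate.py's actual implementation) of deriving the
# next sequential rev-id from filenames produced by the template above.
# The versions path is an assumption.
import re
from pathlib import Path

def next_rev_id(versions_dir: str = "app/alembic/versions") -> str:
    """Return the next zero-padded revision ID, e.g. '0003'."""
    pattern = re.compile(r"^(\d{4})_")  # matches %(rev)s_%(slug)s filenames
    numbers = [
        int(m.group(1))
        for path in Path(versions_dir).glob("*.py")
        if (m := pattern.match(path.name))
    ]
    return f"{max(numbers, default=0) + 1:04d}"
```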

View File

@@ -22,6 +22,24 @@ from app.models import *
# access to the values within the .ini file in use.
config = context.config
def include_object(object, name, type_, reflected, compare_to):
"""
Filter objects for autogenerate.
Skip comparing functional indexes (like LOWER(column)) and partial indexes
(with WHERE clauses) as Alembic cannot reliably detect these from models.
These should be managed manually via dedicated performance migrations.
Convention: Any index starting with "ix_perf_" is automatically excluded.
This allows adding new performance indexes without updating this file.
"""
if type_ == "index" and name:
# Convention-based: any index prefixed with ix_perf_ is manual
if name.startswith("ix_perf_"):
return False
return True
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
@@ -100,6 +118,8 @@ def run_migrations_offline() -> None:
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
compare_type=True,
include_object=include_object,
)
with context.begin_transaction():
@@ -123,7 +143,12 @@ def run_migrations_online() -> None:
)
with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
include_object=include_object,
)
with context.begin_transaction():
context.run_migrations()
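
With compare_type and the include_object hook in place, the same convention can back an automated drift check. A hedged pytest-style sketch; the database URL and Base import path are assumptions, and the filter is re-declared locally rather than imported from env.py:

```python
# Hedged sketch of an automated drift check mirroring the env.py setup.
from alembic.autogenerate import compare_metadata
from alembic.migration import MigrationContext
from sqlalchemy import create_engine

from app.models.base import Base  # assumed declarative Base location

def _include_object(obj, name, type_, reflected, compare_to):
    # Mirrors env.py's convention: skip manual ix_perf_ indexes.
    return not (type_ == "index" and name and name.startswith("ix_perf_"))

def test_no_model_drift() -> None:
    engine = create_engine("postgresql://postgres:postgres@localhost:5432/app")
    with engine.connect() as conn:
        ctx = MigrationContext.configure(
            conn,
            opts={"compare_type": True, "include_object": _include_object},
        )
        # compare_metadata() returns a list of detected diff operations;
        # an empty list means models and schema agree.
        assert compare_metadata(ctx, Base.metadata) == []
```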

View File

@@ -0,0 +1,262 @@
"""initial models
Revision ID: 0001
Revises:
Create Date: 2025-11-27 09:08:09.464506
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '0001'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('oauth_states',
sa.Column('state', sa.String(length=255), nullable=False),
sa.Column('code_verifier', sa.String(length=128), nullable=True),
sa.Column('nonce', sa.String(length=255), nullable=True),
sa.Column('provider', sa.String(length=50), nullable=False),
sa.Column('redirect_uri', sa.String(length=500), nullable=True),
sa.Column('user_id', sa.UUID(), nullable=True),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_oauth_states_state'), 'oauth_states', ['state'], unique=True)
op.create_table('organizations',
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('slug', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('settings', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_organizations_is_active'), 'organizations', ['is_active'], unique=False)
op.create_index(op.f('ix_organizations_name'), 'organizations', ['name'], unique=False)
op.create_index('ix_organizations_name_active', 'organizations', ['name', 'is_active'], unique=False)
op.create_index(op.f('ix_organizations_slug'), 'organizations', ['slug'], unique=True)
op.create_index('ix_organizations_slug_active', 'organizations', ['slug', 'is_active'], unique=False)
op.create_table('users',
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('password_hash', sa.String(length=255), nullable=True),
sa.Column('first_name', sa.String(length=100), nullable=False),
sa.Column('last_name', sa.String(length=100), nullable=True),
sa.Column('phone_number', sa.String(length=20), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('is_superuser', sa.Boolean(), nullable=False),
sa.Column('preferences', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('locale', sa.String(length=10), nullable=True),
sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_deleted_at'), 'users', ['deleted_at'], unique=False)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_is_active'), 'users', ['is_active'], unique=False)
op.create_index(op.f('ix_users_is_superuser'), 'users', ['is_superuser'], unique=False)
op.create_index(op.f('ix_users_locale'), 'users', ['locale'], unique=False)
op.create_table('oauth_accounts',
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('provider', sa.String(length=50), nullable=False),
sa.Column('provider_user_id', sa.String(length=255), nullable=False),
sa.Column('provider_email', sa.String(length=255), nullable=True),
sa.Column('access_token_encrypted', sa.String(length=2048), nullable=True),
sa.Column('refresh_token_encrypted', sa.String(length=2048), nullable=True),
sa.Column('token_expires_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('provider', 'provider_user_id', name='uq_oauth_provider_user')
)
op.create_index(op.f('ix_oauth_accounts_provider'), 'oauth_accounts', ['provider'], unique=False)
op.create_index(op.f('ix_oauth_accounts_provider_email'), 'oauth_accounts', ['provider_email'], unique=False)
op.create_index(op.f('ix_oauth_accounts_user_id'), 'oauth_accounts', ['user_id'], unique=False)
op.create_index('ix_oauth_accounts_user_provider', 'oauth_accounts', ['user_id', 'provider'], unique=False)
op.create_table('oauth_clients',
sa.Column('client_id', sa.String(length=64), nullable=False),
sa.Column('client_secret_hash', sa.String(length=255), nullable=True),
sa.Column('client_name', sa.String(length=255), nullable=False),
sa.Column('client_description', sa.String(length=1000), nullable=True),
sa.Column('client_type', sa.String(length=20), nullable=False),
sa.Column('redirect_uris', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('allowed_scopes', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('access_token_lifetime', sa.String(length=10), nullable=False),
sa.Column('refresh_token_lifetime', sa.String(length=10), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('owner_user_id', sa.UUID(), nullable=True),
sa.Column('mcp_server_url', sa.String(length=2048), nullable=True),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['owner_user_id'], ['users.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_oauth_clients_client_id'), 'oauth_clients', ['client_id'], unique=True)
op.create_index(op.f('ix_oauth_clients_is_active'), 'oauth_clients', ['is_active'], unique=False)
op.create_table('user_organizations',
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('organization_id', sa.UUID(), nullable=False),
sa.Column('role', sa.Enum('OWNER', 'ADMIN', 'MEMBER', 'GUEST', name='organizationrole'), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('custom_permissions', sa.String(length=500), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('user_id', 'organization_id')
)
op.create_index('ix_user_org_org_active', 'user_organizations', ['organization_id', 'is_active'], unique=False)
op.create_index('ix_user_org_role', 'user_organizations', ['role'], unique=False)
op.create_index('ix_user_org_user_active', 'user_organizations', ['user_id', 'is_active'], unique=False)
op.create_index(op.f('ix_user_organizations_is_active'), 'user_organizations', ['is_active'], unique=False)
op.create_table('user_sessions',
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('refresh_token_jti', sa.String(length=255), nullable=False),
sa.Column('device_name', sa.String(length=255), nullable=True),
sa.Column('device_id', sa.String(length=255), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.String(length=500), nullable=True),
sa.Column('last_used_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('location_city', sa.String(length=100), nullable=True),
sa.Column('location_country', sa.String(length=100), nullable=True),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_sessions_is_active'), 'user_sessions', ['is_active'], unique=False)
op.create_index('ix_user_sessions_jti_active', 'user_sessions', ['refresh_token_jti', 'is_active'], unique=False)
op.create_index(op.f('ix_user_sessions_refresh_token_jti'), 'user_sessions', ['refresh_token_jti'], unique=True)
op.create_index('ix_user_sessions_user_active', 'user_sessions', ['user_id', 'is_active'], unique=False)
op.create_index(op.f('ix_user_sessions_user_id'), 'user_sessions', ['user_id'], unique=False)
op.create_table('oauth_authorization_codes',
sa.Column('code', sa.String(length=128), nullable=False),
sa.Column('client_id', sa.String(length=64), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('redirect_uri', sa.String(length=2048), nullable=False),
sa.Column('scope', sa.String(length=1000), nullable=False),
sa.Column('code_challenge', sa.String(length=128), nullable=True),
sa.Column('code_challenge_method', sa.String(length=10), nullable=True),
sa.Column('state', sa.String(length=256), nullable=True),
sa.Column('nonce', sa.String(length=256), nullable=True),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('used', sa.Boolean(), nullable=False),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['client_id'], ['oauth_clients.client_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_oauth_authorization_codes_client_user', 'oauth_authorization_codes', ['client_id', 'user_id'], unique=False)
op.create_index(op.f('ix_oauth_authorization_codes_code'), 'oauth_authorization_codes', ['code'], unique=True)
op.create_index('ix_oauth_authorization_codes_expires_at', 'oauth_authorization_codes', ['expires_at'], unique=False)
op.create_table('oauth_consents',
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('client_id', sa.String(length=64), nullable=False),
sa.Column('granted_scopes', sa.String(length=1000), nullable=False),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['client_id'], ['oauth_clients.client_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_oauth_consents_user_client', 'oauth_consents', ['user_id', 'client_id'], unique=True)
op.create_table('oauth_provider_refresh_tokens',
sa.Column('token_hash', sa.String(length=64), nullable=False),
sa.Column('jti', sa.String(length=64), nullable=False),
sa.Column('client_id', sa.String(length=64), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('scope', sa.String(length=1000), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('revoked', sa.Boolean(), nullable=False),
sa.Column('last_used_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('device_info', sa.String(length=500), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['client_id'], ['oauth_clients.client_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_oauth_provider_refresh_tokens_client_user', 'oauth_provider_refresh_tokens', ['client_id', 'user_id'], unique=False)
op.create_index('ix_oauth_provider_refresh_tokens_expires_at', 'oauth_provider_refresh_tokens', ['expires_at'], unique=False)
op.create_index(op.f('ix_oauth_provider_refresh_tokens_jti'), 'oauth_provider_refresh_tokens', ['jti'], unique=True)
op.create_index(op.f('ix_oauth_provider_refresh_tokens_revoked'), 'oauth_provider_refresh_tokens', ['revoked'], unique=False)
op.create_index(op.f('ix_oauth_provider_refresh_tokens_token_hash'), 'oauth_provider_refresh_tokens', ['token_hash'], unique=True)
op.create_index('ix_oauth_provider_refresh_tokens_user_revoked', 'oauth_provider_refresh_tokens', ['user_id', 'revoked'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index('ix_oauth_provider_refresh_tokens_user_revoked', table_name='oauth_provider_refresh_tokens')
op.drop_index(op.f('ix_oauth_provider_refresh_tokens_token_hash'), table_name='oauth_provider_refresh_tokens')
op.drop_index(op.f('ix_oauth_provider_refresh_tokens_revoked'), table_name='oauth_provider_refresh_tokens')
op.drop_index(op.f('ix_oauth_provider_refresh_tokens_jti'), table_name='oauth_provider_refresh_tokens')
op.drop_index('ix_oauth_provider_refresh_tokens_expires_at', table_name='oauth_provider_refresh_tokens')
op.drop_index('ix_oauth_provider_refresh_tokens_client_user', table_name='oauth_provider_refresh_tokens')
op.drop_table('oauth_provider_refresh_tokens')
op.drop_index('ix_oauth_consents_user_client', table_name='oauth_consents')
op.drop_table('oauth_consents')
op.drop_index('ix_oauth_authorization_codes_expires_at', table_name='oauth_authorization_codes')
op.drop_index(op.f('ix_oauth_authorization_codes_code'), table_name='oauth_authorization_codes')
op.drop_index('ix_oauth_authorization_codes_client_user', table_name='oauth_authorization_codes')
op.drop_table('oauth_authorization_codes')
op.drop_index(op.f('ix_user_sessions_user_id'), table_name='user_sessions')
op.drop_index('ix_user_sessions_user_active', table_name='user_sessions')
op.drop_index(op.f('ix_user_sessions_refresh_token_jti'), table_name='user_sessions')
op.drop_index('ix_user_sessions_jti_active', table_name='user_sessions')
op.drop_index(op.f('ix_user_sessions_is_active'), table_name='user_sessions')
op.drop_table('user_sessions')
op.drop_index(op.f('ix_user_organizations_is_active'), table_name='user_organizations')
op.drop_index('ix_user_org_user_active', table_name='user_organizations')
op.drop_index('ix_user_org_role', table_name='user_organizations')
op.drop_index('ix_user_org_org_active', table_name='user_organizations')
op.drop_table('user_organizations')
op.drop_index(op.f('ix_oauth_clients_is_active'), table_name='oauth_clients')
op.drop_index(op.f('ix_oauth_clients_client_id'), table_name='oauth_clients')
op.drop_table('oauth_clients')
op.drop_index('ix_oauth_accounts_user_provider', table_name='oauth_accounts')
op.drop_index(op.f('ix_oauth_accounts_user_id'), table_name='oauth_accounts')
op.drop_index(op.f('ix_oauth_accounts_provider_email'), table_name='oauth_accounts')
op.drop_index(op.f('ix_oauth_accounts_provider'), table_name='oauth_accounts')
op.drop_table('oauth_accounts')
op.drop_index(op.f('ix_users_locale'), table_name='users')
op.drop_index(op.f('ix_users_is_superuser'), table_name='users')
op.drop_index(op.f('ix_users_is_active'), table_name='users')
op.drop_index(op.f('ix_users_email'), table_name='users')
op.drop_index(op.f('ix_users_deleted_at'), table_name='users')
op.drop_table('users')
op.drop_index('ix_organizations_slug_active', table_name='organizations')
op.drop_index(op.f('ix_organizations_slug'), table_name='organizations')
op.drop_index('ix_organizations_name_active', table_name='organizations')
op.drop_index(op.f('ix_organizations_name'), table_name='organizations')
op.drop_index(op.f('ix_organizations_is_active'), table_name='organizations')
op.drop_table('organizations')
op.drop_index(op.f('ix_oauth_states_state'), table_name='oauth_states')
op.drop_table('oauth_states')
# ### end Alembic commands ###

View File

@@ -0,0 +1,122 @@
"""Add performance indexes
Revision ID: 0002
Revises: 0001
Create Date: 2025-11-27
Performance indexes that Alembic cannot auto-detect:
- Functional indexes (LOWER expressions)
- Partial indexes (WHERE clauses)
These indexes use the ix_perf_ prefix and are excluded from autogenerate
via the include_object() function in env.py.
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "0002"
down_revision: str | None = "0001"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# ==========================================================================
# USERS TABLE - Performance indexes for authentication
# ==========================================================================
# Case-insensitive email lookup for login/registration
# Query: SELECT * FROM users WHERE LOWER(email) = LOWER(:email) AND deleted_at IS NULL
# Impact: High - every login, registration check, password reset
op.create_index(
"ix_perf_users_email_lower",
"users",
[sa.text("LOWER(email)")],
unique=False,
postgresql_where=sa.text("deleted_at IS NULL"),
)
# Active users lookup (non-soft-deleted)
# Query: SELECT * FROM users WHERE deleted_at IS NULL AND ...
# Impact: Medium - user listings, admin queries
op.create_index(
"ix_perf_users_active",
"users",
["is_active"],
unique=False,
postgresql_where=sa.text("deleted_at IS NULL"),
)
# ==========================================================================
# ORGANIZATIONS TABLE - Performance indexes for multi-tenant lookups
# ==========================================================================
# Case-insensitive slug lookup for URL routing
# Query: SELECT * FROM organizations WHERE LOWER(slug) = LOWER(:slug) AND is_active = true
# Impact: Medium - every organization page load
op.create_index(
"ix_perf_organizations_slug_lower",
"organizations",
[sa.text("LOWER(slug)")],
unique=False,
postgresql_where=sa.text("is_active = true"),
)
# ==========================================================================
# USER SESSIONS TABLE - Performance indexes for session management
# ==========================================================================
# Expired session cleanup
# Query: SELECT * FROM user_sessions WHERE expires_at < NOW() AND is_active = true
# Impact: Medium - background cleanup jobs
op.create_index(
"ix_perf_user_sessions_expires",
"user_sessions",
["expires_at"],
unique=False,
postgresql_where=sa.text("is_active = true"),
)
# ==========================================================================
# OAUTH PROVIDER TOKENS - Performance indexes for token management
# ==========================================================================
# Expired refresh token cleanup
# Query: SELECT * FROM oauth_provider_refresh_tokens WHERE expires_at < NOW() AND revoked = false
# Impact: Medium - OAuth token cleanup, validation
op.create_index(
"ix_perf_oauth_refresh_tokens_expires",
"oauth_provider_refresh_tokens",
["expires_at"],
unique=False,
postgresql_where=sa.text("revoked = false"),
)
# ==========================================================================
# OAUTH AUTHORIZATION CODES - Performance indexes for auth flow
# ==========================================================================
# Expired authorization code cleanup
# Query: DELETE FROM oauth_authorization_codes WHERE expires_at < NOW() AND used = false
# Impact: Low-Medium - OAuth cleanup jobs
op.create_index(
"ix_perf_oauth_auth_codes_expires",
"oauth_authorization_codes",
["expires_at"],
unique=False,
postgresql_where=sa.text("used = false"),
)
def downgrade() -> None:
# Drop indexes in reverse order
op.drop_index("ix_perf_oauth_auth_codes_expires", table_name="oauth_authorization_codes")
op.drop_index("ix_perf_oauth_refresh_tokens_expires", table_name="oauth_provider_refresh_tokens")
op.drop_index("ix_perf_user_sessions_expires", table_name="user_sessions")
op.drop_index("ix_perf_organizations_slug_lower", table_name="organizations")
op.drop_index("ix_perf_users_active", table_name="users")
op.drop_index("ix_perf_users_email_lower", table_name="users")

View File

@@ -1,78 +0,0 @@
"""add_performance_indexes
Revision ID: 1174fffbe3e4
Revises: fbf6318a8a36
Create Date: 2025-11-01 04:15:25.367010
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "1174fffbe3e4"
down_revision: str | None = "fbf6318a8a36"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
"""Add performance indexes for optimized queries."""
# Index for session cleanup queries
# Optimizes: DELETE WHERE is_active = FALSE AND expires_at < now AND created_at < cutoff
op.create_index(
"ix_user_sessions_cleanup",
"user_sessions",
["is_active", "expires_at", "created_at"],
unique=False,
postgresql_where=sa.text("is_active = false"),
)
# Index for user search queries (basic trigram support without pg_trgm extension)
# Optimizes: WHERE email ILIKE '%search%' OR first_name ILIKE '%search%'
# Note: For better performance, consider enabling pg_trgm extension
op.create_index(
"ix_users_email_lower",
"users",
[sa.text("LOWER(email)")],
unique=False,
postgresql_where=sa.text("deleted_at IS NULL"),
)
op.create_index(
"ix_users_first_name_lower",
"users",
[sa.text("LOWER(first_name)")],
unique=False,
postgresql_where=sa.text("deleted_at IS NULL"),
)
op.create_index(
"ix_users_last_name_lower",
"users",
[sa.text("LOWER(last_name)")],
unique=False,
postgresql_where=sa.text("deleted_at IS NULL"),
)
# Index for organization search
op.create_index(
"ix_organizations_name_lower",
"organizations",
[sa.text("LOWER(name)")],
unique=False,
)
def downgrade() -> None:
"""Remove performance indexes."""
# Drop indexes in reverse order
op.drop_index("ix_organizations_name_lower", table_name="organizations")
op.drop_index("ix_users_last_name_lower", table_name="users")
op.drop_index("ix_users_first_name_lower", table_name="users")
op.drop_index("ix_users_email_lower", table_name="users")
op.drop_index("ix_user_sessions_cleanup", table_name="user_sessions")

View File

@@ -1,36 +0,0 @@
"""add_soft_delete_to_users
Revision ID: 2d0fcec3b06d
Revises: 9e4f2a1b8c7d
Create Date: 2025-10-30 16:40:21.000021
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "2d0fcec3b06d"
down_revision: str | None = "9e4f2a1b8c7d"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Add deleted_at column for soft deletes
op.add_column(
"users", sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True)
)
# Add index on deleted_at for efficient queries
op.create_index("ix_users_deleted_at", "users", ["deleted_at"])
def downgrade() -> None:
# Remove index
op.drop_index("ix_users_deleted_at", table_name="users")
# Remove column
op.drop_column("users", "deleted_at")

View File

@@ -1,46 +0,0 @@
"""Add all initial models
Revision ID: 38bf9e7e74b3
Revises: 7396957cbe80
Create Date: 2025-02-28 09:19:33.212278
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "38bf9e7e74b3"
down_revision: str | None = "7396957cbe80"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
op.create_table(
"users",
sa.Column("email", sa.String(), nullable=False),
sa.Column("password_hash", sa.String(), nullable=False),
sa.Column("first_name", sa.String(), nullable=False),
sa.Column("last_name", sa.String(), nullable=True),
sa.Column("phone_number", sa.String(), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=False),
sa.Column("is_superuser", sa.Boolean(), nullable=False),
sa.Column("preferences", sa.JSON(), nullable=True),
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f("ix_users_email"), table_name="users")
op.drop_table("users")
# ### end Alembic commands ###

View File

@@ -1,89 +0,0 @@
"""add_user_sessions_table
Revision ID: 549b50ea888d
Revises: b76c725fc3cf
Create Date: 2025-10-31 07:41:18.729544
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "549b50ea888d"
down_revision: str | None = "b76c725fc3cf"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Create user_sessions table for per-device session management
op.create_table(
"user_sessions",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("user_id", sa.UUID(), nullable=False),
sa.Column("refresh_token_jti", sa.String(length=255), nullable=False),
sa.Column("device_name", sa.String(length=255), nullable=True),
sa.Column("device_id", sa.String(length=255), nullable=True),
sa.Column("ip_address", sa.String(length=45), nullable=True),
sa.Column("user_agent", sa.String(length=500), nullable=True),
sa.Column("last_used_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
sa.Column("location_city", sa.String(length=100), nullable=True),
sa.Column("location_country", sa.String(length=100), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# Create foreign key to users table
op.create_foreign_key(
"fk_user_sessions_user_id",
"user_sessions",
"users",
["user_id"],
["id"],
ondelete="CASCADE",
)
# Create indexes for performance
# 1. Lookup session by refresh token JTI (most common query)
op.create_index(
"ix_user_sessions_jti", "user_sessions", ["refresh_token_jti"], unique=True
)
# 2. Lookup sessions by user ID
op.create_index("ix_user_sessions_user_id", "user_sessions", ["user_id"])
# 3. Composite index for active sessions by user
op.create_index(
"ix_user_sessions_user_active", "user_sessions", ["user_id", "is_active"]
)
# 4. Index on expires_at for cleanup job
op.create_index("ix_user_sessions_expires_at", "user_sessions", ["expires_at"])
# 5. Composite index for active session lookup by JTI
op.create_index(
"ix_user_sessions_jti_active",
"user_sessions",
["refresh_token_jti", "is_active"],
)
def downgrade() -> None:
# Drop indexes first
op.drop_index("ix_user_sessions_jti_active", table_name="user_sessions")
op.drop_index("ix_user_sessions_expires_at", table_name="user_sessions")
op.drop_index("ix_user_sessions_user_active", table_name="user_sessions")
op.drop_index("ix_user_sessions_user_id", table_name="user_sessions")
op.drop_index("ix_user_sessions_jti", table_name="user_sessions")
# Drop foreign key
op.drop_constraint("fk_user_sessions_user_id", "user_sessions", type_="foreignkey")
# Drop table
op.drop_table("user_sessions")

View File

@@ -1,23 +0,0 @@
"""Initial empty migration
Revision ID: 7396957cbe80
Revises:
Create Date: 2025-02-27 12:47:46.445313
"""
from collections.abc import Sequence
# revision identifiers, used by Alembic.
revision: str = "7396957cbe80"
down_revision: str | None = None
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -1,116 +0,0 @@
"""Add missing indexes and fix column types
Revision ID: 9e4f2a1b8c7d
Revises: 38bf9e7e74b3
Create Date: 2025-10-30 10:00:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "9e4f2a1b8c7d"
down_revision: str | None = "38bf9e7e74b3"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Add missing indexes for is_active and is_superuser
op.create_index(op.f("ix_users_is_active"), "users", ["is_active"], unique=False)
op.create_index(
op.f("ix_users_is_superuser"), "users", ["is_superuser"], unique=False
)
# Fix column types to match model definitions with explicit lengths
op.alter_column(
"users",
"email",
existing_type=sa.String(),
type_=sa.String(length=255),
nullable=False,
)
op.alter_column(
"users",
"password_hash",
existing_type=sa.String(),
type_=sa.String(length=255),
nullable=False,
)
op.alter_column(
"users",
"first_name",
existing_type=sa.String(),
type_=sa.String(length=100),
nullable=False,
server_default="user",
) # Add server default
op.alter_column(
"users",
"last_name",
existing_type=sa.String(),
type_=sa.String(length=100),
nullable=True,
)
op.alter_column(
"users",
"phone_number",
existing_type=sa.String(),
type_=sa.String(length=20),
nullable=True,
)
def downgrade() -> None:
# Revert column types
op.alter_column(
"users",
"phone_number",
existing_type=sa.String(length=20),
type_=sa.String(),
nullable=True,
)
op.alter_column(
"users",
"last_name",
existing_type=sa.String(length=100),
type_=sa.String(),
nullable=True,
)
op.alter_column(
"users",
"first_name",
existing_type=sa.String(length=100),
type_=sa.String(),
nullable=False,
server_default=None,
) # Remove server default
op.alter_column(
"users",
"password_hash",
existing_type=sa.String(length=255),
type_=sa.String(),
nullable=False,
)
op.alter_column(
"users",
"email",
existing_type=sa.String(length=255),
type_=sa.String(),
nullable=False,
)
# Drop indexes
op.drop_index(op.f("ix_users_is_superuser"), table_name="users")
op.drop_index(op.f("ix_users_is_active"), table_name="users")

View File

@@ -1,48 +0,0 @@
"""add_composite_indexes
Revision ID: b76c725fc3cf
Revises: 2d0fcec3b06d
Create Date: 2025-10-30 16:41:33.273135
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "b76c725fc3cf"
down_revision: str | None = "2d0fcec3b06d"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Add composite indexes for common query patterns
# Composite index for filtering active users by role
op.create_index(
"ix_users_active_superuser",
"users",
["is_active", "is_superuser"],
postgresql_where=sa.text("deleted_at IS NULL"),
)
# Composite index for sorting active users by creation date
op.create_index(
"ix_users_active_created",
"users",
["is_active", "created_at"],
postgresql_where=sa.text("deleted_at IS NULL"),
)
# Composite index for email lookup of non-deleted users
op.create_index("ix_users_email_not_deleted", "users", ["email", "deleted_at"])
def downgrade() -> None:
# Remove composite indexes
op.drop_index("ix_users_email_not_deleted", table_name="users")
op.drop_index("ix_users_active_created", table_name="users")
op.drop_index("ix_users_active_superuser", table_name="users")

View File

@@ -1,39 +0,0 @@
"""add user locale preference column
Revision ID: c8e9f3a2d1b4
Revises: 1174fffbe3e4
Create Date: 2025-11-17 18:00:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "c8e9f3a2d1b4"
down_revision: str | None = "1174fffbe3e4"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Add locale column to users table
# VARCHAR(10) supports BCP 47 format (e.g., "en", "it", "en-US", "it-IT")
# Nullable: NULL means "not set yet", will use Accept-Language header fallback
# Indexed: For analytics queries and filtering by locale
op.add_column("users", sa.Column("locale", sa.String(length=10), nullable=True))
# Create index on locale column for performance
op.create_index(
"ix_users_locale",
"users",
["locale"],
)
def downgrade() -> None:
# Remove locale index and column
op.drop_index("ix_users_locale", table_name="users")
op.drop_column("users", "locale")

View File

@@ -1,144 +0,0 @@
"""add oauth models
Revision ID: d5a7b2c9e1f3
Revises: c8e9f3a2d1b4
Create Date: 2025-11-24 20:00:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = "d5a7b2c9e1f3"
down_revision: str | None = "c8e9f3a2d1b4"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# 1. Make password_hash nullable on users table (for OAuth-only users)
op.alter_column(
"users",
"password_hash",
existing_type=sa.String(length=255),
nullable=True,
)
# 2. Create oauth_accounts table (links OAuth providers to users)
op.create_table(
"oauth_accounts",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("user_id", sa.UUID(), nullable=False),
sa.Column("provider", sa.String(length=50), nullable=False),
sa.Column("provider_user_id", sa.String(length=255), nullable=False),
sa.Column("provider_email", sa.String(length=255), nullable=True),
sa.Column("access_token_encrypted", sa.String(length=2048), nullable=True),
sa.Column("refresh_token_encrypted", sa.String(length=2048), nullable=True),
sa.Column("token_expires_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
name="fk_oauth_accounts_user_id",
ondelete="CASCADE",
),
sa.UniqueConstraint(
"provider", "provider_user_id", name="uq_oauth_provider_user"
),
)
# Create indexes for oauth_accounts
op.create_index("ix_oauth_accounts_user_id", "oauth_accounts", ["user_id"])
op.create_index("ix_oauth_accounts_provider", "oauth_accounts", ["provider"])
op.create_index(
"ix_oauth_accounts_provider_email", "oauth_accounts", ["provider_email"]
)
op.create_index(
"ix_oauth_accounts_user_provider", "oauth_accounts", ["user_id", "provider"]
)
# 3. Create oauth_states table (CSRF protection during OAuth flow)
op.create_table(
"oauth_states",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("state", sa.String(length=255), nullable=False),
sa.Column("code_verifier", sa.String(length=128), nullable=True),
sa.Column("nonce", sa.String(length=255), nullable=True),
sa.Column("provider", sa.String(length=50), nullable=False),
sa.Column("redirect_uri", sa.String(length=500), nullable=True),
sa.Column("user_id", sa.UUID(), nullable=True),
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# Create indexes for oauth_states
op.create_index("ix_oauth_states_state", "oauth_states", ["state"], unique=True)
op.create_index("ix_oauth_states_expires_at", "oauth_states", ["expires_at"])
# 4. Create oauth_clients table (OAuth provider mode - skeleton for MCP)
op.create_table(
"oauth_clients",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("client_id", sa.String(length=64), nullable=False),
sa.Column("client_secret_hash", sa.String(length=255), nullable=True),
sa.Column("client_name", sa.String(length=255), nullable=False),
sa.Column("client_description", sa.String(length=1000), nullable=True),
sa.Column("client_type", sa.String(length=20), nullable=False),
sa.Column("redirect_uris", postgresql.JSONB(), nullable=False),
sa.Column("allowed_scopes", postgresql.JSONB(), nullable=False),
sa.Column("access_token_lifetime", sa.String(length=10), nullable=False),
sa.Column("refresh_token_lifetime", sa.String(length=10), nullable=False),
sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
sa.Column("owner_user_id", sa.UUID(), nullable=True),
sa.Column("mcp_server_url", sa.String(length=2048), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.ForeignKeyConstraint(
["owner_user_id"],
["users.id"],
name="fk_oauth_clients_owner_user_id",
ondelete="SET NULL",
),
)
# Create indexes for oauth_clients
op.create_index(
"ix_oauth_clients_client_id", "oauth_clients", ["client_id"], unique=True
)
op.create_index("ix_oauth_clients_is_active", "oauth_clients", ["is_active"])
def downgrade() -> None:
# Drop oauth_clients table and indexes
op.drop_index("ix_oauth_clients_is_active", table_name="oauth_clients")
op.drop_index("ix_oauth_clients_client_id", table_name="oauth_clients")
op.drop_table("oauth_clients")
# Drop oauth_states table and indexes
op.drop_index("ix_oauth_states_expires_at", table_name="oauth_states")
op.drop_index("ix_oauth_states_state", table_name="oauth_states")
op.drop_table("oauth_states")
# Drop oauth_accounts table and indexes
op.drop_index("ix_oauth_accounts_user_provider", table_name="oauth_accounts")
op.drop_index("ix_oauth_accounts_provider_email", table_name="oauth_accounts")
op.drop_index("ix_oauth_accounts_provider", table_name="oauth_accounts")
op.drop_index("ix_oauth_accounts_user_id", table_name="oauth_accounts")
op.drop_table("oauth_accounts")
# Revert password_hash to non-nullable
op.alter_column(
"users",
"password_hash",
existing_type=sa.String(length=255),
nullable=False,
)

View File

@@ -1,194 +0,0 @@
"""Add OAuth provider models for MCP integration.
Revision ID: f8c3d2e1a4b5
Revises: d5a7b2c9e1f3
Create Date: 2025-01-15 10:00:00.000000
This migration adds tables for OAuth provider mode:
- oauth_authorization_codes: Temporary authorization codes
- oauth_provider_refresh_tokens: Long-lived refresh tokens
- oauth_consents: User consent records
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "f8c3d2e1a4b5"
down_revision = "d5a7b2c9e1f3"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create oauth_authorization_codes table
op.create_table(
"oauth_authorization_codes",
sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
sa.Column("code", sa.String(128), nullable=False),
sa.Column("client_id", sa.String(64), nullable=False),
sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False),
sa.Column("redirect_uri", sa.String(2048), nullable=False),
sa.Column("scope", sa.String(1000), nullable=False, server_default=""),
sa.Column("code_challenge", sa.String(128), nullable=True),
sa.Column("code_challenge_method", sa.String(10), nullable=True),
sa.Column("state", sa.String(256), nullable=True),
sa.Column("nonce", sa.String(256), nullable=True),
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("used", sa.Boolean(), nullable=False, server_default="false"),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(
["client_id"],
["oauth_clients.client_id"],
ondelete="CASCADE",
),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
"ix_oauth_authorization_codes_code",
"oauth_authorization_codes",
["code"],
unique=True,
)
op.create_index(
"ix_oauth_authorization_codes_expires_at",
"oauth_authorization_codes",
["expires_at"],
)
op.create_index(
"ix_oauth_authorization_codes_client_user",
"oauth_authorization_codes",
["client_id", "user_id"],
)
# Create oauth_provider_refresh_tokens table
op.create_table(
"oauth_provider_refresh_tokens",
sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
sa.Column("token_hash", sa.String(64), nullable=False),
sa.Column("jti", sa.String(64), nullable=False),
sa.Column("client_id", sa.String(64), nullable=False),
sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False),
sa.Column("scope", sa.String(1000), nullable=False, server_default=""),
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("revoked", sa.Boolean(), nullable=False, server_default="false"),
sa.Column("last_used_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("device_info", sa.String(500), nullable=True),
sa.Column("ip_address", sa.String(45), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(
["client_id"],
["oauth_clients.client_id"],
ondelete="CASCADE",
),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
"ix_oauth_provider_refresh_tokens_token_hash",
"oauth_provider_refresh_tokens",
["token_hash"],
unique=True,
)
op.create_index(
"ix_oauth_provider_refresh_tokens_jti",
"oauth_provider_refresh_tokens",
["jti"],
unique=True,
)
op.create_index(
"ix_oauth_provider_refresh_tokens_expires_at",
"oauth_provider_refresh_tokens",
["expires_at"],
)
op.create_index(
"ix_oauth_provider_refresh_tokens_client_user",
"oauth_provider_refresh_tokens",
["client_id", "user_id"],
)
op.create_index(
"ix_oauth_provider_refresh_tokens_user_revoked",
"oauth_provider_refresh_tokens",
["user_id", "revoked"],
)
op.create_index(
"ix_oauth_provider_refresh_tokens_revoked",
"oauth_provider_refresh_tokens",
["revoked"],
)
# Create oauth_consents table
op.create_table(
"oauth_consents",
sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False),
sa.Column("client_id", sa.String(64), nullable=False),
sa.Column("granted_scopes", sa.String(1000), nullable=False, server_default=""),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(
["client_id"],
["oauth_clients.client_id"],
ondelete="CASCADE",
),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
"ix_oauth_consents_user_client",
"oauth_consents",
["user_id", "client_id"],
unique=True,
)
def downgrade() -> None:
op.drop_table("oauth_consents")
op.drop_table("oauth_provider_refresh_tokens")
op.drop_table("oauth_authorization_codes")

View File

@@ -1,127 +0,0 @@
"""add_organizations_and_user_organizations
Revision ID: fbf6318a8a36
Revises: 549b50ea888d
Create Date: 2025-10-31 12:08:05.141353
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "fbf6318a8a36"
down_revision: str | None = "549b50ea888d"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Create organizations table
op.create_table(
"organizations",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("slug", sa.String(length=255), nullable=False),
sa.Column("description", sa.Text(), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
sa.Column("settings", sa.JSON(), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# Create indexes for organizations
op.create_index("ix_organizations_name", "organizations", ["name"])
op.create_index("ix_organizations_slug", "organizations", ["slug"], unique=True)
op.create_index("ix_organizations_is_active", "organizations", ["is_active"])
op.create_index(
"ix_organizations_name_active", "organizations", ["name", "is_active"]
)
op.create_index(
"ix_organizations_slug_active", "organizations", ["slug", "is_active"]
)
# Create user_organizations junction table
op.create_table(
"user_organizations",
sa.Column("user_id", sa.UUID(), nullable=False),
sa.Column("organization_id", sa.UUID(), nullable=False),
sa.Column(
"role",
sa.Enum("OWNER", "ADMIN", "MEMBER", "GUEST", name="organizationrole"),
nullable=False,
server_default="MEMBER",
),
sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
sa.Column("custom_permissions", sa.String(length=500), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("user_id", "organization_id"),
)
# Create foreign keys
op.create_foreign_key(
"fk_user_organizations_user_id",
"user_organizations",
"users",
["user_id"],
["id"],
ondelete="CASCADE",
)
op.create_foreign_key(
"fk_user_organizations_organization_id",
"user_organizations",
"organizations",
["organization_id"],
["id"],
ondelete="CASCADE",
)
# Create indexes for user_organizations
op.create_index("ix_user_organizations_role", "user_organizations", ["role"])
op.create_index(
"ix_user_organizations_is_active", "user_organizations", ["is_active"]
)
op.create_index(
"ix_user_org_user_active", "user_organizations", ["user_id", "is_active"]
)
op.create_index(
"ix_user_org_org_active", "user_organizations", ["organization_id", "is_active"]
)
def downgrade() -> None:
# Drop indexes for user_organizations
op.drop_index("ix_user_org_org_active", table_name="user_organizations")
op.drop_index("ix_user_org_user_active", table_name="user_organizations")
op.drop_index("ix_user_organizations_is_active", table_name="user_organizations")
op.drop_index("ix_user_organizations_role", table_name="user_organizations")
# Drop foreign keys
op.drop_constraint(
"fk_user_organizations_organization_id",
"user_organizations",
type_="foreignkey",
)
op.drop_constraint(
"fk_user_organizations_user_id", "user_organizations", type_="foreignkey"
)
# Drop user_organizations table
op.drop_table("user_organizations")
# Drop indexes for organizations
op.drop_index("ix_organizations_slug_active", table_name="organizations")
op.drop_index("ix_organizations_name_active", table_name="organizations")
op.drop_index("ix_organizations_is_active", table_name="organizations")
op.drop_index("ix_organizations_slug", table_name="organizations")
op.drop_index("ix_organizations_name", table_name="organizations")
# Drop organizations table
op.drop_table("organizations")
# Drop enum type
op.execute("DROP TYPE IF EXISTS organizationrole")

View File

@@ -24,6 +24,9 @@ class OAuthAuthorizationCode(Base, UUIDMixin, TimestampMixin):
- Must validate redirect_uri matches exactly
- Must verify PKCE code_verifier for public clients
- Must be consumed within expiration time
Performance indexes (defined in migration 0002_add_performance_indexes.py):
- ix_perf_oauth_auth_codes_expires: expires_at WHERE used = false
"""
__tablename__ = "oauth_authorization_codes"

View File

@@ -27,6 +27,9 @@ class OAuthProviderRefreshToken(Base, UUIDMixin, TimestampMixin):
- Support token rotation (new refresh token on use)
- Track last used time for security auditing
- Support revocation by user, client, or admin
Performance indexes (defined in migration 0002_add_performance_indexes.py):
- ix_perf_oauth_refresh_tokens_expires: expires_at WHERE revoked = false
"""
__tablename__ = "oauth_provider_refresh_tokens"

View File

@@ -10,6 +10,9 @@ class Organization(Base, UUIDMixin, TimestampMixin):
"""
Organization model for multi-tenant support.
Users can belong to multiple organizations with different roles.
Performance indexes (defined in migration 0002_add_performance_indexes.py):
- ix_perf_organizations_slug_lower: LOWER(slug) WHERE is_active = true
"""
__tablename__ = "organizations"

View File

@@ -6,6 +6,14 @@ from .base import Base, TimestampMixin, UUIDMixin
class User(Base, UUIDMixin, TimestampMixin):
"""
User model for authentication and profile data.
Performance indexes (defined in migration 0002_add_performance_indexes.py):
- ix_perf_users_email_lower: LOWER(email) WHERE deleted_at IS NULL
- ix_perf_users_active: is_active WHERE deleted_at IS NULL
"""
__tablename__ = "users"
email = Column(String(255), unique=True, nullable=False, index=True)

View File

@@ -44,7 +44,7 @@ class UserOrganization(Base, TimestampMixin):
Enum(OrganizationRole),
default=OrganizationRole.MEMBER,
nullable=False,
index=True,
# Note: index defined in __table_args__ as ix_user_org_role
)
is_active = Column(Boolean, default=True, nullable=False, index=True)

View File

@@ -22,6 +22,9 @@ class UserSession(Base, UUIDMixin, TimestampMixin):
Each time a user logs in from a device, a new session is created.
Sessions are identified by the refresh token JTI (JWT ID).
Performance indexes (defined in migration 0002_add_performance_indexes.py):
- ix_perf_user_sessions_expires: expires_at WHERE is_active = true
"""
__tablename__ = "user_sessions"

View File

@@ -8,6 +8,7 @@ This document outlines the coding standards and best practices for the FastAPI b
- [Code Organization](#code-organization)
- [Naming Conventions](#naming-conventions)
- [Error Handling](#error-handling)
- [Data Models and Migrations](#data-models-and-migrations)
- [Database Operations](#database-operations)
- [API Endpoints](#api-endpoints)
- [Authentication & Security](#authentication--security)
@@ -282,6 +283,151 @@ All error responses follow this structure:
}
```
## Data Models and Migrations
### Model Definition Best Practices
To ensure Alembic autogenerate works reliably without drift, follow these rules:
#### 1. Simple Indexes: Use Column-Level or `__table_args__`, Not Both
```python
# ❌ BAD - Creates DUPLICATE indexes with different names
class User(Base):
role = Column(String(50), index=True) # Creates ix_users_role
__table_args__ = (
Index("ix_user_role", "role"), # Creates ANOTHER index!
)
# ✅ GOOD - Choose ONE approach
class User(Base):
role = Column(String(50)) # No index=True
__table_args__ = (
Index("ix_user_role", "role"), # Single index with explicit name
)
# ✅ ALSO GOOD - For simple single-column indexes
class User(Base):
role = Column(String(50), index=True) # Auto-named ix_users_role
```
#### 2. Composite Indexes: Always Use `__table_args__`
```python
class UserOrganization(Base):
__tablename__ = "user_organizations"
user_id = Column(UUID, nullable=False)
organization_id = Column(UUID, nullable=False)
is_active = Column(Boolean, default=True, nullable=False, index=True)
__table_args__ = (
Index("ix_user_org_user_active", "user_id", "is_active"),
Index("ix_user_org_org_active", "organization_id", "is_active"),
)
```
#### 3. Functional/Partial Indexes: Use `ix_perf_` Prefix
Alembic **cannot** auto-detect:
- **Functional indexes**: `LOWER(column)`, `UPPER(column)`, expressions
- **Partial indexes**: Indexes with `WHERE` clauses
**Solution**: Use the `ix_perf_` naming prefix. Any index with this prefix is automatically excluded from autogenerate by `env.py`.
```python
# In migration file (NOT in model) - use ix_perf_ prefix:
op.create_index(
"ix_perf_users_email_lower", # <-- ix_perf_ prefix!
"users",
[sa.text("LOWER(email)")], # Functional
postgresql_where=sa.text("deleted_at IS NULL"), # Partial
)
```
**No need to update `env.py`** - the prefix convention handles it automatically:
```python
# env.py - already configured:
def include_object(object, name, type_, reflected, compare_to):
if type_ == "index" and name:
if name.startswith("ix_perf_"): # Auto-excluded!
return False
return True
```
**To add new performance indexes:**
1. Create a new migration file (`python migrate.py generate "your message" --offline` scaffolds an empty template without a database connection)
2. Name your indexes with the `ix_perf_` prefix
3. Done - Alembic will ignore them automatically
#### 4. Use Correct Types
```python
# ✅ GOOD - PostgreSQL-native types
from sqlalchemy.dialects.postgresql import JSONB, UUID
class User(Base):
id = Column(UUID(as_uuid=True), primary_key=True)
preferences = Column(JSONB) # Not JSON!
# ❌ BAD - Generic types may cause migration drift
from sqlalchemy import JSON
preferences = Column(JSON) # May detect as different from JSONB
```
### Migration Workflow
#### Creating Migrations
```bash
# Generate an autogenerated migration (from the host, use --local):
python migrate.py --local generate "Add new field"
# Or inside Docker (no --local needed):
docker exec -w /app backend uv run alembic revision --autogenerate -m "Add new field"
# Apply migrations:
python migrate.py --local apply
# Or: docker exec -w /app backend uv run alembic upgrade head
```
#### Testing for Drift
After any model changes, verify no unintended drift:
```bash
# Generate test migration
docker exec -w /app backend uv run alembic revision --autogenerate -m "test_drift"
# Check the generated file - should be empty (just 'pass')
# If it has operations, investigate why
# Delete test file
rm backend/app/alembic/versions/*_test_drift.py
```
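The same check can be scripted. A minimal sketch using `alembic check` (available in Alembic 1.9+; it is not currently wired into `migrate.py`, so treat it as an optional helper, and note that like autogenerate it needs a live database connection):
```python
import subprocess
import sys

def assert_no_drift() -> None:
    """Fail if autogenerate would emit any operations (i.e., models drifted)."""
    result = subprocess.run(["alembic", "check"], capture_output=True, text=True)
    if result.returncode != 0:
        print("Schema drift detected:")
        print(result.stdout, result.stderr)
        sys.exit(1)
    print("No drift: models and migrations are in sync.")

if __name__ == "__main__":
    assert_no_drift()
```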
#### Migration File Structure
```
backend/app/alembic/versions/
├── cbddc8aa6eda_initial_models.py # Auto-generated, tracks all models
├── 0002_add_performance_indexes.py # Manual, functional/partial indexes
└── __init__.py
```
### Summary: What Goes Where
| Index Type | In Model? | Alembic Detects? | Where to Define |
|------------|-----------|------------------|-----------------|
| Simple column (`index=True`) | Yes | Yes | Column definition |
| Composite (`col1, col2`) | Yes | Yes | `__table_args__` |
| Unique composite | Yes | Yes | `__table_args__` with `unique=True` |
| Functional (`LOWER(col)`) | No | No | Migration with `ix_perf_` prefix |
| Partial (`WHERE ...`) | No | No | Migration with `ix_perf_` prefix |
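Putting these rules together, a sketch of a model that follows all of them (illustrative names, not actual application models):
```python
from sqlalchemy import Boolean, Column, DateTime, Index, String
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Account(Base):
    """Illustrative model combining the index rules above.

    Performance indexes (managed manually in an ix_perf_ migration):
    - ix_perf_accounts_email_lower: LOWER(email) WHERE deleted_at IS NULL
    """
    __tablename__ = "accounts"

    id = Column(UUID(as_uuid=True), primary_key=True)
    email = Column(String(255), unique=True, nullable=False, index=True)  # simple: column-level
    role = Column(String(50), nullable=False)
    is_active = Column(Boolean, default=True, nullable=False)
    deleted_at = Column(DateTime(timezone=True))
    preferences = Column(JSONB)  # PostgreSQL-native type, avoids JSON/JSONB drift

    __table_args__ = (
        # Composite: always in __table_args__ with an explicit name
        Index("ix_accounts_role_active", "role", "is_active"),
    )
```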
## Database Operations
### Async CRUD Pattern

View File

@@ -2,8 +2,32 @@
"""
Database migration helper script.
Provides convenient commands for generating and applying Alembic migrations.
Usage:
# Generate migration (auto-increments revision ID: 0001, 0002, etc.)
python migrate.py --local generate "Add new field"
python migrate.py --local auto "Add new field"
# Apply migrations
python migrate.py --local apply
# Show next revision ID
python migrate.py next
# Reset after deleting migrations (clears alembic_version table)
python migrate.py --local reset
# Override auto-increment with custom revision ID
python migrate.py --local generate "initial_models" --rev-id custom_id
# Generate empty migration template without database (no autogenerate)
python migrate.py generate "Add performance indexes" --offline
# Inside Docker (without --local flag):
python migrate.py auto "Add new field"
"""
import argparse
import os
import subprocess
import sys
from pathlib import Path
@@ -13,15 +37,21 @@ project_root = Path(__file__).resolve().parent
if str(project_root) not in sys.path:
sys.path.append(str(project_root))
try:
# Import settings to check if configuration is working
from app.core.config import settings
print(f"Using database URL: {settings.database_url}")
except ImportError as e:
print(f"Error importing settings: {e}")
print("Make sure your Python path includes the project root.")
sys.exit(1)
def setup_database_url(use_local: bool) -> str:
"""Setup database URL, optionally using localhost for local development."""
if use_local:
# Override DATABASE_URL to use localhost instead of Docker hostname
local_url = os.environ.get(
"LOCAL_DATABASE_URL",
"postgresql://postgres:postgres@localhost:5432/app"
)
os.environ["DATABASE_URL"] = local_url
return local_url
# Use the configured DATABASE_URL from environment/.env
from app.core.config import settings
return settings.database_url
def check_models():
@@ -40,11 +70,30 @@ def check_models():
return False
def generate_migration(message):
"""Generate an Alembic migration with the given message"""
def generate_migration(message, rev_id=None, auto_rev_id=True, offline=False):
"""Generate an Alembic migration with the given message.
Args:
message: Migration message
rev_id: Custom revision ID (overrides auto_rev_id)
auto_rev_id: If True and rev_id is None, auto-generate sequential ID
offline: If True, generate empty migration without database (no autogenerate)
"""
# Auto-generate sequential revision ID if not provided
if rev_id is None and auto_rev_id:
rev_id = get_next_rev_id()
print(f"Generating migration: {message}")
if rev_id:
print(f"Using revision ID: {rev_id}")
if offline:
# Generate migration file directly without database connection
return generate_offline_migration(message, rev_id)
cmd = ["alembic", "revision", "--autogenerate", "-m", message]
if rev_id:
cmd.extend(["--rev-id", rev_id])
result = subprocess.run(cmd, capture_output=True, text=True)
print(result.stdout)
@@ -64,8 +113,9 @@ def generate_migration(message):
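            # Note: this hex test matches Alembic's default 12-character revision
            # hashes; 4-digit sequential IDs (0001, 0002, ...) fail the length
            # check, so `revision` stays unset for them.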
if len(part) >= 12 and all(c in "0123456789abcdef" for c in part[:12]):
revision = part[:12]
break
except Exception:
pass
except Exception as e:
# If parsing fails, we can still proceed without a detected revision
print(f"Warning: could not parse revision from line '{line}': {e}")
if revision:
print(f"Generated revision: {revision}")
@@ -131,8 +181,14 @@ def check_database_connection():
from sqlalchemy.exc import SQLAlchemyError
try:
engine = create_engine(settings.database_url)
with engine.connect() as conn:
# Use DATABASE_URL from environment (set by setup_database_url)
db_url = os.environ.get("DATABASE_URL")
if not db_url:
from app.core.config import settings
db_url = settings.database_url
engine = create_engine(db_url)
with engine.connect():
print("✓ Database connection successful!")
return True
except SQLAlchemyError as e:
@@ -140,16 +196,172 @@ def check_database_connection():
return False
def get_next_rev_id():
"""Get the next sequential revision ID based on existing migrations."""
import re
versions_dir = project_root / "app" / "alembic" / "versions"
if not versions_dir.exists():
return "0001"
# Find all migration files with numeric prefixes
max_num = 0
pattern = re.compile(r"^(\d{4})_.*\.py$")
for f in versions_dir.iterdir():
if f.is_file() and f.suffix == ".py":
match = pattern.match(f.name)
if match:
num = int(match.group(1))
max_num = max(max_num, num)
next_num = max_num + 1
return f"{next_num:04d}"
def get_current_rev_id():
"""Get the current (latest) revision ID from existing migrations."""
import re
versions_dir = project_root / "app" / "alembic" / "versions"
if not versions_dir.exists():
return None
# Find all migration files with numeric prefixes and get the highest
max_num = 0
max_rev_id = None
pattern = re.compile(r"^(\d{4})_.*\.py$")
for f in versions_dir.iterdir():
if f.is_file() and f.suffix == ".py":
match = pattern.match(f.name)
if match:
num = int(match.group(1))
if num > max_num:
max_num = num
max_rev_id = match.group(1)
return max_rev_id
def generate_offline_migration(message, rev_id):
"""Generate a migration file without database connection.
Creates an empty migration template that can be filled in manually.
Useful for performance indexes or when database is not available.
"""
from datetime import datetime
versions_dir = project_root / "app" / "alembic" / "versions"
versions_dir.mkdir(parents=True, exist_ok=True)
# Slugify the message for filename
slug = message.lower().replace(" ", "_").replace("-", "_")
slug = "".join(c for c in slug if c.isalnum() or c == "_")
filename = f"{rev_id}_{slug}.py"
filepath = versions_dir / filename
# Get the previous revision ID
down_revision = get_current_rev_id()
down_rev_str = f'"{down_revision}"' if down_revision else "None"
# Generate the migration file content
content = f'''"""{message}
Revision ID: {rev_id}
Revises: {down_revision or ''}
Create Date: {datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')}
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "{rev_id}"
down_revision: str | None = {down_rev_str}
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# TODO: Add your upgrade operations here
pass
def downgrade() -> None:
# TODO: Add your downgrade operations here
pass
'''
filepath.write_text(content)
print(f"Generated offline migration: {filepath}")
return rev_id
def show_next_rev_id():
"""Show the next sequential revision ID."""
next_id = get_next_rev_id()
print(f"Next revision ID: {next_id}")
print(f"\nUsage:")
print(f" python migrate.py --local generate 'your_message' --rev-id {next_id}")
print(f" python migrate.py --local auto 'your_message' --rev-id {next_id}")
return next_id
def reset_alembic_version():
"""Reset the alembic_version table (for fresh start after deleting migrations)."""
from sqlalchemy import create_engine, text
from sqlalchemy.exc import SQLAlchemyError
db_url = os.environ.get("DATABASE_URL")
if not db_url:
from app.core.config import settings
db_url = settings.database_url
try:
engine = create_engine(db_url)
with engine.connect() as conn:
conn.execute(text("DROP TABLE IF EXISTS alembic_version"))
conn.commit()
print("✓ Alembic version table reset successfully")
print(" You can now run migrations from scratch")
return True
except SQLAlchemyError as e:
print(f"✗ Error resetting alembic version: {e}")
return False
def main():
"""Main function"""
parser = argparse.ArgumentParser(
description='Database migration helper for PragmaStack template'
description='Database migration helper for Generative Models Arena'
)
# Global options
parser.add_argument(
'--local', '-l',
action='store_true',
help='Use localhost instead of Docker hostname (for local development)'
)
subparsers = parser.add_subparsers(dest='command', help='Command to run')
# Generate command
generate_parser = subparsers.add_parser('generate', help='Generate a migration')
generate_parser.add_argument('message', help='Migration message')
generate_parser.add_argument(
'--rev-id',
help='Custom revision ID (e.g., 0001, 0002 for sequential naming)'
)
generate_parser.add_argument(
'--offline',
action='store_true',
help='Generate empty migration template without database connection'
)
# Apply command
apply_parser = subparsers.add_parser('apply', help='Apply migrations')
@@ -164,15 +376,56 @@ def main():
# Check command
subparsers.add_parser('check', help='Check database connection and models')
# Next command (show next revision ID)
subparsers.add_parser('next', help='Show the next sequential revision ID')
# Reset command (clear alembic_version table)
subparsers.add_parser(
'reset',
help='Reset alembic_version table (use after deleting all migrations)'
)
# Auto command (generate and apply)
auto_parser = subparsers.add_parser('auto', help='Generate and apply migration')
auto_parser.add_argument('message', help='Migration message')
auto_parser.add_argument(
'--rev-id',
help='Custom revision ID (e.g., 0001, 0002 for sequential naming)'
)
auto_parser.add_argument(
'--offline',
action='store_true',
help='Generate empty migration template without database connection'
)
args = parser.parse_args()
# Commands that don't need database connection
if args.command == 'next':
show_next_rev_id()
return
# Check if offline mode is requested
offline = getattr(args, 'offline', False)
# Offline generate doesn't need database or model check
if args.command == 'generate' and offline:
generate_migration(args.message, rev_id=args.rev_id, offline=True)
return
if args.command == 'auto' and offline:
generate_migration(args.message, rev_id=args.rev_id, offline=True)
print("\nOffline migration generated. Apply it later with:")
print(f" python migrate.py --local apply")
return
# Setup database URL (must be done before importing settings elsewhere)
db_url = setup_database_url(args.local)
print(f"Using database URL: {db_url}")
if args.command == 'generate':
check_models()
generate_migration(args.message)
generate_migration(args.message, rev_id=args.rev_id)
elif args.command == 'apply':
apply_migration(args.revision)
@@ -187,11 +440,14 @@ def main():
check_database_connection()
check_models()
elif args.command == 'reset':
reset_alembic_version()
elif args.command == 'auto':
check_models()
revision = generate_migration(args.message)
revision = generate_migration(args.message, rev_id=args.rev_id)
if revision:
proceed = input("\nPress Enter to apply migration or Ctrl+C to abort... ")
input("\nPress Enter to apply migration or Ctrl+C to abort... ")
apply_migration()
else:

View File

@@ -25,7 +25,8 @@ services:
volumes:
- ./backend:/app
- ./uploads:/app/uploads
- backend_dev_modules:/app/.venv
# Exclude local .venv from bind mount to use container's .venv
- /app/.venv
ports:
- "8000:8000"
env_file:
@@ -67,7 +68,6 @@ services:
volumes:
postgres_data_dev:
backend_dev_modules:
frontend_dev_modules:
frontend_dev_next: