Add pyproject.toml for consolidated project configuration and replace Black, isort, and Flake8 with Ruff

- Introduced `pyproject.toml` to centralize backend tool configurations (e.g., Ruff, mypy, coverage, pytest).
- Replaced Black, isort, and Flake8 with Ruff for linting, formatting, and import sorting.
- Updated `requirements.txt` to include Ruff and remove the replaced tools.
- Added `Makefile` to streamline development workflows with commands for linting, formatting, type-checking, testing, and cleanup (illustrative sketches of the new configuration and Makefile follow below).
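
Neither of the new files is visible in the hunks below (those only show the Alembic migrations after Ruff's reformatting), so here is a minimal sketch of what the consolidated `pyproject.toml` might contain. Every section, value, rule code, and name below is an assumption for illustration, not the committed configuration:

```toml
# Illustrative sketch only; the actual committed settings may differ.
[tool.ruff]
line-length = 88           # assumed, matching Black's former default
target-version = "py312"   # assumed Python version

[tool.ruff.lint]
# "E"/"F" cover the checks Flake8 ran; "I" replaces isort's import sorting.
select = ["E", "F", "I", "UP", "B"]

[tool.ruff.format]
quote-style = "double"     # consistent with the double-quoted strings in the reformatted migrations

[tool.mypy]
python_version = "3.12"    # assumed
strict = true              # assumed

[tool.pytest.ini_options]
testpaths = ["tests"]      # assumed layout

[tool.coverage.run]
source = ["app"]           # assumed package name
```

And a matching sketch of the Makefile targets the last bullet describes, with target names and commands likewise assumed:

```makefile
# Illustrative targets only; the actual Makefile may differ.
.PHONY: lint format typecheck test clean

lint:
	ruff check .        # linting + import-order checks (replaces Flake8 and isort)

format:
	ruff format .       # code formatting (replaces Black)

typecheck:
	mypy .

test:
	pytest --cov        # assumes pytest-cov is installed

clean:
	rm -rf .pytest_cache .ruff_cache .mypy_cache htmlcov .coverage
```

The practical upshot is that a single tool (and a single cache directory) now covers what previously required separate Black, isort, and Flake8 runs.
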
Commit c589b565f0 (parent a5c671c133), 2025-11-10 11:55:15 +01:00
86 changed files with 4572 additions and 3956 deletions


@@ -5,17 +5,17 @@ Revises: fbf6318a8a36
Create Date: 2025-11-01 04:15:25.367010
"""
from typing import Sequence, Union
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '1174fffbe3e4'
down_revision: Union[str, None] = 'fbf6318a8a36'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
revision: str = "1174fffbe3e4"
down_revision: str | None = "fbf6318a8a36"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
@@ -24,46 +24,46 @@ def upgrade() -> None:
# Index for session cleanup queries
# Optimizes: DELETE WHERE is_active = FALSE AND expires_at < now AND created_at < cutoff
op.create_index(
'ix_user_sessions_cleanup',
'user_sessions',
['is_active', 'expires_at', 'created_at'],
"ix_user_sessions_cleanup",
"user_sessions",
["is_active", "expires_at", "created_at"],
unique=False,
postgresql_where=sa.text('is_active = false')
postgresql_where=sa.text("is_active = false"),
)
# Index for user search queries (basic trigram support without pg_trgm extension)
# Optimizes: WHERE email ILIKE '%search%' OR first_name ILIKE '%search%'
# Note: For better performance, consider enabling pg_trgm extension
op.create_index(
'ix_users_email_lower',
'users',
[sa.text('LOWER(email)')],
"ix_users_email_lower",
"users",
[sa.text("LOWER(email)")],
unique=False,
postgresql_where=sa.text('deleted_at IS NULL')
postgresql_where=sa.text("deleted_at IS NULL"),
)
op.create_index(
'ix_users_first_name_lower',
'users',
[sa.text('LOWER(first_name)')],
"ix_users_first_name_lower",
"users",
[sa.text("LOWER(first_name)")],
unique=False,
postgresql_where=sa.text('deleted_at IS NULL')
postgresql_where=sa.text("deleted_at IS NULL"),
)
op.create_index(
'ix_users_last_name_lower',
'users',
[sa.text('LOWER(last_name)')],
"ix_users_last_name_lower",
"users",
[sa.text("LOWER(last_name)")],
unique=False,
postgresql_where=sa.text('deleted_at IS NULL')
postgresql_where=sa.text("deleted_at IS NULL"),
)
# Index for organization search
op.create_index(
'ix_organizations_name_lower',
'organizations',
[sa.text('LOWER(name)')],
unique=False
"ix_organizations_name_lower",
"organizations",
[sa.text("LOWER(name)")],
unique=False,
)
@@ -71,8 +71,8 @@ def downgrade() -> None:
"""Remove performance indexes."""
# Drop indexes in reverse order
op.drop_index('ix_organizations_name_lower', table_name='organizations')
op.drop_index('ix_users_last_name_lower', table_name='users')
op.drop_index('ix_users_first_name_lower', table_name='users')
op.drop_index('ix_users_email_lower', table_name='users')
op.drop_index('ix_user_sessions_cleanup', table_name='user_sessions')
op.drop_index("ix_organizations_name_lower", table_name="organizations")
op.drop_index("ix_users_last_name_lower", table_name="users")
op.drop_index("ix_users_first_name_lower", table_name="users")
op.drop_index("ix_users_email_lower", table_name="users")
op.drop_index("ix_user_sessions_cleanup", table_name="user_sessions")


@@ -5,30 +5,32 @@ Revises: 9e4f2a1b8c7d
Create Date: 2025-10-30 16:40:21.000021
"""
from typing import Sequence, Union
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '2d0fcec3b06d'
down_revision: Union[str, None] = '9e4f2a1b8c7d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
revision: str = "2d0fcec3b06d"
down_revision: str | None = "9e4f2a1b8c7d"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Add deleted_at column for soft deletes
op.add_column('users', sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True))
op.add_column(
"users", sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True)
)
# Add index on deleted_at for efficient queries
op.create_index('ix_users_deleted_at', 'users', ['deleted_at'])
op.create_index("ix_users_deleted_at", "users", ["deleted_at"])
def downgrade() -> None:
# Remove index
op.drop_index('ix_users_deleted_at', table_name='users')
op.drop_index("ix_users_deleted_at", table_name="users")
# Remove column
op.drop_column('users', 'deleted_at')
op.drop_column("users", "deleted_at")


@@ -5,42 +5,42 @@ Revises: 7396957cbe80
Create Date: 2025-02-28 09:19:33.212278
"""
from typing import Sequence, Union
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '38bf9e7e74b3'
down_revision: Union[str, None] = '7396957cbe80'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
revision: str = "38bf9e7e74b3"
down_revision: str | None = "7396957cbe80"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
op.create_table('users',
sa.Column('email', sa.String(), nullable=False),
sa.Column('password_hash', sa.String(), nullable=False),
sa.Column('first_name', sa.String(), nullable=False),
sa.Column('last_name', sa.String(), nullable=True),
sa.Column('phone_number', sa.String(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('is_superuser', sa.Boolean(), nullable=False),
sa.Column('preferences', sa.JSON(), nullable=True),
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
op.create_table(
"users",
sa.Column("email", sa.String(), nullable=False),
sa.Column("password_hash", sa.String(), nullable=False),
sa.Column("first_name", sa.String(), nullable=False),
sa.Column("last_name", sa.String(), nullable=True),
sa.Column("phone_number", sa.String(), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=False),
sa.Column("is_superuser", sa.Boolean(), nullable=False),
sa.Column("preferences", sa.JSON(), nullable=True),
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_users_email'), table_name='users')
op.drop_table('users')
op.drop_index(op.f("ix_users_email"), table_name="users")
op.drop_table("users")
# ### end Alembic commands ###


@@ -5,98 +5,85 @@ Revises: b76c725fc3cf
Create Date: 2025-10-31 07:41:18.729544
"""
from typing import Sequence, Union
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '549b50ea888d'
down_revision: Union[str, None] = 'b76c725fc3cf'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
revision: str = "549b50ea888d"
down_revision: str | None = "b76c725fc3cf"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Create user_sessions table for per-device session management
op.create_table(
'user_sessions',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('refresh_token_jti', sa.String(length=255), nullable=False),
sa.Column('device_name', sa.String(length=255), nullable=True),
sa.Column('device_id', sa.String(length=255), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.String(length=500), nullable=True),
sa.Column('last_used_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'),
sa.Column('location_city', sa.String(length=100), nullable=True),
sa.Column('location_country', sa.String(length=100), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
"user_sessions",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("user_id", sa.UUID(), nullable=False),
sa.Column("refresh_token_jti", sa.String(length=255), nullable=False),
sa.Column("device_name", sa.String(length=255), nullable=True),
sa.Column("device_id", sa.String(length=255), nullable=True),
sa.Column("ip_address", sa.String(length=45), nullable=True),
sa.Column("user_agent", sa.String(length=500), nullable=True),
sa.Column("last_used_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
sa.Column("location_city", sa.String(length=100), nullable=True),
sa.Column("location_country", sa.String(length=100), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# Create foreign key to users table
op.create_foreign_key(
'fk_user_sessions_user_id',
'user_sessions',
'users',
['user_id'],
['id'],
ondelete='CASCADE'
"fk_user_sessions_user_id",
"user_sessions",
"users",
["user_id"],
["id"],
ondelete="CASCADE",
)
# Create indexes for performance
# 1. Lookup session by refresh token JTI (most common query)
op.create_index(
'ix_user_sessions_jti',
'user_sessions',
['refresh_token_jti'],
unique=True
"ix_user_sessions_jti", "user_sessions", ["refresh_token_jti"], unique=True
)
# 2. Lookup sessions by user ID
op.create_index(
'ix_user_sessions_user_id',
'user_sessions',
['user_id']
)
op.create_index("ix_user_sessions_user_id", "user_sessions", ["user_id"])
# 3. Composite index for active sessions by user
op.create_index(
'ix_user_sessions_user_active',
'user_sessions',
['user_id', 'is_active']
"ix_user_sessions_user_active", "user_sessions", ["user_id", "is_active"]
)
# 4. Index on expires_at for cleanup job
op.create_index(
'ix_user_sessions_expires_at',
'user_sessions',
['expires_at']
)
op.create_index("ix_user_sessions_expires_at", "user_sessions", ["expires_at"])
# 5. Composite index for active session lookup by JTI
op.create_index(
'ix_user_sessions_jti_active',
'user_sessions',
['refresh_token_jti', 'is_active']
"ix_user_sessions_jti_active",
"user_sessions",
["refresh_token_jti", "is_active"],
)
def downgrade() -> None:
# Drop indexes first
op.drop_index('ix_user_sessions_jti_active', table_name='user_sessions')
op.drop_index('ix_user_sessions_expires_at', table_name='user_sessions')
op.drop_index('ix_user_sessions_user_active', table_name='user_sessions')
op.drop_index('ix_user_sessions_user_id', table_name='user_sessions')
op.drop_index('ix_user_sessions_jti', table_name='user_sessions')
op.drop_index("ix_user_sessions_jti_active", table_name="user_sessions")
op.drop_index("ix_user_sessions_expires_at", table_name="user_sessions")
op.drop_index("ix_user_sessions_user_active", table_name="user_sessions")
op.drop_index("ix_user_sessions_user_id", table_name="user_sessions")
op.drop_index("ix_user_sessions_jti", table_name="user_sessions")
# Drop foreign key
op.drop_constraint('fk_user_sessions_user_id', 'user_sessions', type_='foreignkey')
op.drop_constraint("fk_user_sessions_user_id", "user_sessions", type_="foreignkey")
# Drop table
op.drop_table('user_sessions')
op.drop_table("user_sessions")


@@ -1,19 +1,18 @@
"""Initial empty migration
Revision ID: 7396957cbe80
Revises:
Create Date: 2025-02-27 12:47:46.445313
"""
from typing import Sequence, Union
from alembic import op
from collections.abc import Sequence
# revision identifiers, used by Alembic.
revision: str = '7396957cbe80'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
revision: str = "7396957cbe80"
down_revision: str | None = None
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:


@@ -5,80 +5,112 @@ Revises: 38bf9e7e74b3
Create Date: 2025-10-30 10:00:00.000000
"""
from typing import Sequence, Union
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '9e4f2a1b8c7d'
down_revision: Union[str, None] = '38bf9e7e74b3'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
revision: str = "9e4f2a1b8c7d"
down_revision: str | None = "38bf9e7e74b3"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Add missing indexes for is_active and is_superuser
op.create_index(op.f('ix_users_is_active'), 'users', ['is_active'], unique=False)
op.create_index(op.f('ix_users_is_superuser'), 'users', ['is_superuser'], unique=False)
op.create_index(op.f("ix_users_is_active"), "users", ["is_active"], unique=False)
op.create_index(
op.f("ix_users_is_superuser"), "users", ["is_superuser"], unique=False
)
# Fix column types to match model definitions with explicit lengths
op.alter_column('users', 'email',
existing_type=sa.String(),
type_=sa.String(length=255),
nullable=False)
op.alter_column(
"users",
"email",
existing_type=sa.String(),
type_=sa.String(length=255),
nullable=False,
)
op.alter_column('users', 'password_hash',
existing_type=sa.String(),
type_=sa.String(length=255),
nullable=False)
op.alter_column(
"users",
"password_hash",
existing_type=sa.String(),
type_=sa.String(length=255),
nullable=False,
)
op.alter_column('users', 'first_name',
existing_type=sa.String(),
type_=sa.String(length=100),
nullable=False,
server_default='user') # Add server default
op.alter_column(
"users",
"first_name",
existing_type=sa.String(),
type_=sa.String(length=100),
nullable=False,
server_default="user",
) # Add server default
op.alter_column('users', 'last_name',
existing_type=sa.String(),
type_=sa.String(length=100),
nullable=True)
op.alter_column(
"users",
"last_name",
existing_type=sa.String(),
type_=sa.String(length=100),
nullable=True,
)
op.alter_column('users', 'phone_number',
existing_type=sa.String(),
type_=sa.String(length=20),
nullable=True)
op.alter_column(
"users",
"phone_number",
existing_type=sa.String(),
type_=sa.String(length=20),
nullable=True,
)
def downgrade() -> None:
# Revert column types
op.alter_column('users', 'phone_number',
existing_type=sa.String(length=20),
type_=sa.String(),
nullable=True)
op.alter_column(
"users",
"phone_number",
existing_type=sa.String(length=20),
type_=sa.String(),
nullable=True,
)
op.alter_column('users', 'last_name',
existing_type=sa.String(length=100),
type_=sa.String(),
nullable=True)
op.alter_column(
"users",
"last_name",
existing_type=sa.String(length=100),
type_=sa.String(),
nullable=True,
)
op.alter_column('users', 'first_name',
existing_type=sa.String(length=100),
type_=sa.String(),
nullable=False,
server_default=None) # Remove server default
op.alter_column(
"users",
"first_name",
existing_type=sa.String(length=100),
type_=sa.String(),
nullable=False,
server_default=None,
) # Remove server default
op.alter_column('users', 'password_hash',
existing_type=sa.String(length=255),
type_=sa.String(),
nullable=False)
op.alter_column(
"users",
"password_hash",
existing_type=sa.String(length=255),
type_=sa.String(),
nullable=False,
)
op.alter_column('users', 'email',
existing_type=sa.String(length=255),
type_=sa.String(),
nullable=False)
op.alter_column(
"users",
"email",
existing_type=sa.String(length=255),
type_=sa.String(),
nullable=False,
)
# Drop indexes
op.drop_index(op.f('ix_users_is_superuser'), table_name='users')
op.drop_index(op.f('ix_users_is_active'), table_name='users')
op.drop_index(op.f("ix_users_is_superuser"), table_name="users")
op.drop_index(op.f("ix_users_is_active"), table_name="users")


@@ -5,17 +5,17 @@ Revises: 2d0fcec3b06d
Create Date: 2025-10-30 16:41:33.273135
"""
from typing import Sequence, Union
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = 'b76c725fc3cf'
down_revision: Union[str, None] = '2d0fcec3b06d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
revision: str = "b76c725fc3cf"
down_revision: str | None = "2d0fcec3b06d"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
@@ -23,30 +23,26 @@ def upgrade() -> None:
# Composite index for filtering active users by role
op.create_index(
'ix_users_active_superuser',
'users',
['is_active', 'is_superuser'],
postgresql_where=sa.text('deleted_at IS NULL')
"ix_users_active_superuser",
"users",
["is_active", "is_superuser"],
postgresql_where=sa.text("deleted_at IS NULL"),
)
# Composite index for sorting active users by creation date
op.create_index(
'ix_users_active_created',
'users',
['is_active', 'created_at'],
postgresql_where=sa.text('deleted_at IS NULL')
"ix_users_active_created",
"users",
["is_active", "created_at"],
postgresql_where=sa.text("deleted_at IS NULL"),
)
# Composite index for email lookup of non-deleted users
op.create_index(
'ix_users_email_not_deleted',
'users',
['email', 'deleted_at']
)
op.create_index("ix_users_email_not_deleted", "users", ["email", "deleted_at"])
def downgrade() -> None:
# Remove composite indexes
op.drop_index('ix_users_email_not_deleted', table_name='users')
op.drop_index('ix_users_active_created', table_name='users')
op.drop_index('ix_users_active_superuser', table_name='users')
op.drop_index("ix_users_email_not_deleted", table_name="users")
op.drop_index("ix_users_active_created", table_name="users")
op.drop_index("ix_users_active_superuser", table_name="users")


@@ -5,102 +5,123 @@ Revises: 549b50ea888d
Create Date: 2025-10-31 12:08:05.141353
"""
from typing import Sequence, Union
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = 'fbf6318a8a36'
down_revision: Union[str, None] = '549b50ea888d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
revision: str = "fbf6318a8a36"
down_revision: str | None = "549b50ea888d"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
# Create organizations table
op.create_table(
'organizations',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('slug', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'),
sa.Column('settings', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
"organizations",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("slug", sa.String(length=255), nullable=False),
sa.Column("description", sa.Text(), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
sa.Column("settings", sa.JSON(), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# Create indexes for organizations
op.create_index('ix_organizations_name', 'organizations', ['name'])
op.create_index('ix_organizations_slug', 'organizations', ['slug'], unique=True)
op.create_index('ix_organizations_is_active', 'organizations', ['is_active'])
op.create_index('ix_organizations_name_active', 'organizations', ['name', 'is_active'])
op.create_index('ix_organizations_slug_active', 'organizations', ['slug', 'is_active'])
op.create_index("ix_organizations_name", "organizations", ["name"])
op.create_index("ix_organizations_slug", "organizations", ["slug"], unique=True)
op.create_index("ix_organizations_is_active", "organizations", ["is_active"])
op.create_index(
"ix_organizations_name_active", "organizations", ["name", "is_active"]
)
op.create_index(
"ix_organizations_slug_active", "organizations", ["slug", "is_active"]
)
# Create user_organizations junction table
op.create_table(
'user_organizations',
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('organization_id', sa.UUID(), nullable=False),
sa.Column('role', sa.Enum('OWNER', 'ADMIN', 'MEMBER', 'GUEST', name='organizationrole'), nullable=False, server_default='MEMBER'),
sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'),
sa.Column('custom_permissions', sa.String(length=500), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('user_id', 'organization_id')
"user_organizations",
sa.Column("user_id", sa.UUID(), nullable=False),
sa.Column("organization_id", sa.UUID(), nullable=False),
sa.Column(
"role",
sa.Enum("OWNER", "ADMIN", "MEMBER", "GUEST", name="organizationrole"),
nullable=False,
server_default="MEMBER",
),
sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
sa.Column("custom_permissions", sa.String(length=500), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint("user_id", "organization_id"),
)
# Create foreign keys
op.create_foreign_key(
'fk_user_organizations_user_id',
'user_organizations',
'users',
['user_id'],
['id'],
ondelete='CASCADE'
"fk_user_organizations_user_id",
"user_organizations",
"users",
["user_id"],
["id"],
ondelete="CASCADE",
)
op.create_foreign_key(
'fk_user_organizations_organization_id',
'user_organizations',
'organizations',
['organization_id'],
['id'],
ondelete='CASCADE'
"fk_user_organizations_organization_id",
"user_organizations",
"organizations",
["organization_id"],
["id"],
ondelete="CASCADE",
)
# Create indexes for user_organizations
op.create_index('ix_user_organizations_role', 'user_organizations', ['role'])
op.create_index('ix_user_organizations_is_active', 'user_organizations', ['is_active'])
op.create_index('ix_user_org_user_active', 'user_organizations', ['user_id', 'is_active'])
op.create_index('ix_user_org_org_active', 'user_organizations', ['organization_id', 'is_active'])
op.create_index("ix_user_organizations_role", "user_organizations", ["role"])
op.create_index(
"ix_user_organizations_is_active", "user_organizations", ["is_active"]
)
op.create_index(
"ix_user_org_user_active", "user_organizations", ["user_id", "is_active"]
)
op.create_index(
"ix_user_org_org_active", "user_organizations", ["organization_id", "is_active"]
)
def downgrade() -> None:
# Drop indexes for user_organizations
op.drop_index('ix_user_org_org_active', table_name='user_organizations')
op.drop_index('ix_user_org_user_active', table_name='user_organizations')
op.drop_index('ix_user_organizations_is_active', table_name='user_organizations')
op.drop_index('ix_user_organizations_role', table_name='user_organizations')
op.drop_index("ix_user_org_org_active", table_name="user_organizations")
op.drop_index("ix_user_org_user_active", table_name="user_organizations")
op.drop_index("ix_user_organizations_is_active", table_name="user_organizations")
op.drop_index("ix_user_organizations_role", table_name="user_organizations")
# Drop foreign keys
op.drop_constraint('fk_user_organizations_organization_id', 'user_organizations', type_='foreignkey')
op.drop_constraint('fk_user_organizations_user_id', 'user_organizations', type_='foreignkey')
op.drop_constraint(
"fk_user_organizations_organization_id",
"user_organizations",
type_="foreignkey",
)
op.drop_constraint(
"fk_user_organizations_user_id", "user_organizations", type_="foreignkey"
)
# Drop user_organizations table
op.drop_table('user_organizations')
op.drop_table("user_organizations")
# Drop indexes for organizations
op.drop_index('ix_organizations_slug_active', table_name='organizations')
op.drop_index('ix_organizations_name_active', table_name='organizations')
op.drop_index('ix_organizations_is_active', table_name='organizations')
op.drop_index('ix_organizations_slug', table_name='organizations')
op.drop_index('ix_organizations_name', table_name='organizations')
op.drop_index("ix_organizations_slug_active", table_name="organizations")
op.drop_index("ix_organizations_name_active", table_name="organizations")
op.drop_index("ix_organizations_is_active", table_name="organizations")
op.drop_index("ix_organizations_slug", table_name="organizations")
op.drop_index("ix_organizations_name", table_name="organizations")
# Drop organizations table
op.drop_table('organizations')
op.drop_table("organizations")
# Drop enum type
op.execute('DROP TYPE IF EXISTS organizationrole')
op.execute("DROP TYPE IF EXISTS organizationrole")