Compare commits

...

15 Commits

Author SHA1 Message Date
Felipe Cardoso
a94e29d99c chore(frontend): remove unnecessary newline in overrides field of package.json 2026-03-01 19:40:11 +01:00
Felipe Cardoso
81e48c73ca fix(tests): handle missing schemathesis gracefully in API contract tests
- Replaced `pytest.mark.skipif` with `pytest.skip` to better manage scenarios where `schemathesis` is not installed.
- Added a fallback test function to ensure explicit handling for missing dependencies.
2026-03-01 19:32:49 +01:00
Felipe Cardoso
a3f78dc801 refactor(tests): replace crud references with repo across repository test files
- Updated import statements and test logic to align with `repositories` naming changes.
- Adjusted documentation and test names for consistency with the updated naming convention.
- Improved test descriptions to reflect the repository-based structure.
2026-03-01 19:22:16 +01:00
Felipe Cardoso
07309013d7 chore(frontend): update scripts and docs to use bun run test for consistency
- Replaced `bun test` with `bun run test` in all documentation and scripts for uniformity.
- Removed outdated `glob` override in package configurations.
2026-03-01 18:44:48 +01:00
Felipe Cardoso
846fc31190 feat(api): enhance KeyMap and FieldsConfig handling for improved flexibility
- Added support for unmapped fields in `KeyMap` definitions and parsing.
- Updated `buildKeyMap` to allow aliasing keys without transport layer mappings.
- Improved parameter assignment logic to handle optional `in` mappings.
- Enhanced handling of `allowExtra` fields for more concise and robust configurations.
2026-03-01 18:01:34 +01:00
Felipe Cardoso
ff7a67cb58 chore(frontend): migrate from npm to Bun for dependency management and scripts
- Updated README to replace npm commands with Bun equivalents.
- Added `bun.lock` file to track Bun-managed dependencies.
2026-03-01 18:00:43 +01:00
Felipe Cardoso
0760a8284d feat(tests): add comprehensive benchmarks for auth and performance-critical endpoints
- Introduced benchmarks for password hashing, verification, and JWT token operations.
- Added latency tests for `/register`, `/refresh`, `/sessions`, and `/users/me` endpoints.
- Updated `BENCHMARKS.md` with new tests, thresholds, and execution details.
2026-03-01 17:01:44 +01:00
Felipe Cardoso
ce4d0c7b0d feat(backend): enhance performance benchmarking with baseline detection and documentation
- Updated `make benchmark-check` in Makefile to detect and handle missing baselines, creating them if not found.
- Added `.benchmarks` directory to `.gitignore` for local baseline exclusions.
- Linked benchmarking documentation in `ARCHITECTURE.md` and added comprehensive `BENCHMARKS.md` guide.
2026-03-01 16:30:06 +01:00
Felipe Cardoso
4ceb8ad98c feat(backend): add performance benchmarks and API security tests
- Introduced `benchmark`, `benchmark-save`, and `benchmark-check` Makefile targets for performance testing.
- Added API security fuzzing through the `test-api-security` Makefile target, leveraging Schemathesis.
- Updated Dockerfiles to use Alpine for security and CVE mitigation.
- Enhanced security with `scan-image` and `scan-images` targets for Docker image vulnerability scanning via Trivy.
- Integrated `pytest-benchmark` for performance regression detection, with tests for key API endpoints.
- Extended `uv.lock` and `pyproject.toml` to include performance benchmarking dependencies.
2026-03-01 16:16:18 +01:00
Felipe Cardoso
f8aafb250d fix(backend): suppress license-check output in Makefile for cleaner logs
- Redirect pip-licenses output to `/dev/null` to reduce noise during license checks.
- Retain success and compliance messages for clear feedback.
2026-03-01 14:24:22 +01:00
Felipe Cardoso
4385d20ca6 fix(tests): simplify invalid token test logic in test_auth_security.py
- Removed unnecessary try-except block for JWT encoding failures.
- Adjusted test to directly verify `TokenInvalidError` during decoding.
- Clarified comment on HMAC algorithm compatibility (`HS384` vs. `HS256`).
2026-03-01 14:24:17 +01:00
Felipe Cardoso
1a36907f10 refactor(backend): replace python-jose and passlib with PyJWT and bcrypt for security and simplicity
- Migrated JWT token handling from `python-jose` to `PyJWT`, reducing dependencies and improving error clarity.
- Replaced `passlib` bcrypt integration with direct `bcrypt` usage for password hashing.
- Updated `Makefile`, removing unused CVE ignore based on the replaced dependencies.
- Reflected changes in `ARCHITECTURE.md` and adjusted function headers in `auth.py`.
- Cleaned up `uv.lock` and `pyproject.toml` to remove unused dependencies (`ecdsa`, `rsa`, etc.) and add `PyJWT`.
- Refactored tests and services to align with the updated libraries (`PyJWT` error handling, decoding, and validation).
2026-03-01 14:02:04 +01:00
Felipe Cardoso
0553a1fc53 refactor(logging): switch to parameterized logging for improved performance and clarity
- Replaced f-strings with parameterized logging calls across routes, services, and repositories to optimize log message evaluation.
- Improved exception handling by using `logger.exception` where appropriate for automatic traceback logging.
2026-03-01 13:38:15 +01:00
Felipe Cardoso
57e969ed67 chore(backend): extend Makefile with audit, validation, and security targets
- Added `dep-audit`, `license-check`, `audit`, `validate-all`, and `check` targets for security and quality checks.
- Updated `.PHONY` to include new targets.
- Enhanced `help` command documentation with descriptions of the new commands.
- Updated `ARCHITECTURE.md`, `CLAUDE.md`, and `uv.lock` to reflect related changes. Upgraded dependencies where necessary.
2026-03-01 12:03:34 +01:00
Felipe Cardoso
68275b1dd3 refactor(docs): update architecture to reflect repository migration
- Rename CRUD layer to Repository layer throughout architecture documentation.
- Update dependency injection examples to use repository classes.
- Add async SQLAlchemy pattern for Repository methods (`select()` and transactions).
- Replace CRUD references in FEATURE_EXAMPLE.md with Repository-focused implementation details.
- Highlight repository class responsibilities and remove outdated CRUD patterns.
2026-03-01 11:13:51 +01:00
85 changed files with 6630 additions and 20976 deletions

View File

@@ -41,7 +41,7 @@ To enable CI/CD workflows:
- Runs on: Push to main/develop, PRs affecting frontend code - Runs on: Push to main/develop, PRs affecting frontend code
- Tests: Frontend unit tests (Jest) - Tests: Frontend unit tests (Jest)
- Coverage: Uploads to Codecov - Coverage: Uploads to Codecov
- Fast: Uses npm cache - Fast: Uses bun cache
### `e2e-tests.yml` ### `e2e-tests.yml`
- Runs on: All pushes and PRs - Runs on: All pushes and PRs

2
.gitignore vendored
View File

@@ -187,7 +187,7 @@ coverage.xml
.hypothesis/ .hypothesis/
.pytest_cache/ .pytest_cache/
cover/ cover/
backend/.benchmarks
# Translations # Translations
*.mo *.mo
*.pot *.pot

View File

@@ -13,10 +13,10 @@ uv run uvicorn app.main:app --reload # Start dev server
# Frontend (Node.js) # Frontend (Node.js)
cd frontend cd frontend
npm install # Install dependencies bun install # Install dependencies
npm run dev # Start dev server bun run dev # Start dev server
npm run generate:api # Generate API client from OpenAPI bun run generate:api # Generate API client from OpenAPI
npm run test:e2e # Run E2E tests bun run test:e2e # Run E2E tests
``` ```
**Access points:** **Access points:**
@@ -37,7 +37,7 @@ Default superuser (change in production):
│ ├── app/ │ ├── app/
│ │ ├── api/ # API routes (auth, users, organizations, admin) │ │ ├── api/ # API routes (auth, users, organizations, admin)
│ │ ├── core/ # Core functionality (auth, config, database) │ │ ├── core/ # Core functionality (auth, config, database)
│ │ ├── crud/ # Database CRUD operations │ │ ├── repositories/ # Repository pattern (database operations)
│ │ ├── models/ # SQLAlchemy ORM models │ │ ├── models/ # SQLAlchemy ORM models
│ │ ├── schemas/ # Pydantic request/response schemas │ │ ├── schemas/ # Pydantic request/response schemas
│ │ ├── services/ # Business logic layer │ │ ├── services/ # Business logic layer
@@ -113,7 +113,7 @@ OAUTH_ISSUER=https://api.yourdomain.com # JWT issuer URL (must be HTTPS in
### Database Pattern ### Database Pattern
- **Async SQLAlchemy 2.0** with PostgreSQL - **Async SQLAlchemy 2.0** with PostgreSQL
- **Connection pooling**: 20 base connections, 50 max overflow - **Connection pooling**: 20 base connections, 50 max overflow
- **CRUD base class**: `crud/base.py` with common operations - **Repository base class**: `repositories/base.py` with common operations
- **Migrations**: Alembic with helper script `migrate.py` - **Migrations**: Alembic with helper script `migrate.py`
- `python migrate.py auto "message"` - Generate and apply - `python migrate.py auto "message"` - Generate and apply
- `python migrate.py list` - View history - `python migrate.py list` - View history
@@ -121,7 +121,7 @@ OAUTH_ISSUER=https://api.yourdomain.com # JWT issuer URL (must be HTTPS in
### Frontend State Management ### Frontend State Management
- **Zustand stores**: Lightweight state management - **Zustand stores**: Lightweight state management
- **TanStack Query**: API data fetching/caching - **TanStack Query**: API data fetching/caching
- **Auto-generated client**: From OpenAPI spec via `npm run generate:api` - **Auto-generated client**: From OpenAPI spec via `bun run generate:api`
- **Dependency Injection**: ALWAYS use `useAuth()` from `AuthContext`, NEVER import `useAuthStore` directly - **Dependency Injection**: ALWAYS use `useAuth()` from `AuthContext`, NEVER import `useAuthStore` directly
### Internationalization (i18n) ### Internationalization (i18n)
@@ -165,21 +165,25 @@ Permission dependencies in `api/dependencies/permissions.py`:
**Frontend Unit Tests (Jest):** **Frontend Unit Tests (Jest):**
- 97% coverage - 97% coverage
- Component, hook, and utility testing - Component, hook, and utility testing
- Run: `npm test` - Run: `bun run test`
- Coverage: `npm run test:coverage` - Coverage: `bun run test:coverage`
**Frontend E2E Tests (Playwright):** **Frontend E2E Tests (Playwright):**
- 56 passing, 1 skipped (zero flaky tests) - 56 passing, 1 skipped (zero flaky tests)
- Complete user flows (auth, navigation, settings) - Complete user flows (auth, navigation, settings)
- Run: `npm run test:e2e` - Run: `bun run test:e2e`
- UI mode: `npm run test:e2e:ui` - UI mode: `bun run test:e2e:ui`
### Development Tooling ### Development Tooling
**Backend:** **Backend:**
- **uv**: Modern Python package manager (10-100x faster than pip) - **uv**: Modern Python package manager (10-100x faster than pip)
- **Ruff**: All-in-one linting/formatting (replaces Black, Flake8, isort) - **Ruff**: All-in-one linting/formatting (replaces Black, Flake8, isort)
- **mypy**: Type checking with Pydantic plugin - **Pyright**: Static type checking (strict mode)
- **pip-audit**: Dependency vulnerability scanning (OSV database)
- **detect-secrets**: Hardcoded secrets detection
- **pip-licenses**: License compliance checking
- **pre-commit**: Git hook framework (Ruff, detect-secrets, standard checks)
- **Makefile**: `make help` for all commands - **Makefile**: `make help` for all commands
**Frontend:** **Frontend:**
@@ -218,11 +222,11 @@ NEXT_PUBLIC_API_URL=http://localhost:8000/api/v1
### Adding a New API Endpoint ### Adding a New API Endpoint
1. **Define schema** in `backend/app/schemas/` 1. **Define schema** in `backend/app/schemas/`
2. **Create CRUD operations** in `backend/app/crud/` 2. **Create repository** in `backend/app/repositories/`
3. **Implement route** in `backend/app/api/routes/` 3. **Implement route** in `backend/app/api/routes/`
4. **Register router** in `backend/app/api/main.py` 4. **Register router** in `backend/app/api/main.py`
5. **Write tests** in `backend/tests/api/` 5. **Write tests** in `backend/tests/api/`
6. **Generate frontend client**: `npm run generate:api` 6. **Generate frontend client**: `bun run generate:api`
### Database Migrations ### Database Migrations
@@ -239,7 +243,7 @@ python migrate.py auto "description" # Generate + apply
2. **Follow design system** (see `frontend/docs/design-system/`) 2. **Follow design system** (see `frontend/docs/design-system/`)
3. **Use dependency injection** for auth (`useAuth()` not `useAuthStore`) 3. **Use dependency injection** for auth (`useAuth()` not `useAuthStore`)
4. **Write tests** in `frontend/tests/` or `__tests__/` 4. **Write tests** in `frontend/tests/` or `__tests__/`
5. **Run type check**: `npm run type-check` 5. **Run type check**: `bun run type-check`
## Security Features ## Security Features
@@ -249,6 +253,10 @@ python migrate.py auto "description" # Generate + apply
- **CSRF protection**: Built into FastAPI - **CSRF protection**: Built into FastAPI
- **Session revocation**: Database-backed session tracking - **Session revocation**: Database-backed session tracking
- **Comprehensive security tests**: JWT algorithm attacks, session hijacking, privilege escalation - **Comprehensive security tests**: JWT algorithm attacks, session hijacking, privilege escalation
- **Dependency vulnerability scanning**: `make dep-audit` (pip-audit against OSV database)
- **License compliance**: `make license-check` (blocks GPL-3.0/AGPL)
- **Secrets detection**: Pre-commit hook blocks hardcoded secrets
- **Unified security pipeline**: `make audit` (all security checks), `make check` (quality + security + tests)
## Docker Deployment ## Docker Deployment
@@ -281,7 +289,7 @@ docker-compose exec backend python -c "from app.init_db import init_db; import a
- Authentication system (JWT with refresh tokens, OAuth/social login) - Authentication system (JWT with refresh tokens, OAuth/social login)
- **OAuth Provider Mode (MCP-ready)**: Full OAuth 2.0 Authorization Server - **OAuth Provider Mode (MCP-ready)**: Full OAuth 2.0 Authorization Server
- Session management (device tracking, revocation) - Session management (device tracking, revocation)
- User management (CRUD, password change) - User management (full lifecycle, password change)
- Organization system (multi-tenant with RBAC) - Organization system (multi-tenant with RBAC)
- Admin panel (user/org management, bulk operations) - Admin panel (user/org management, bulk operations)
- **Internationalization (i18n)** with English and Italian - **Internationalization (i18n)** with English and Italian

View File

@@ -43,7 +43,7 @@ EOF
- Check current state: `python migrate.py current` - Check current state: `python migrate.py current`
**Frontend API Client Generation:** **Frontend API Client Generation:**
- Run `npm run generate:api` after backend schema changes - Run `bun run generate:api` after backend schema changes
- Client is auto-generated from OpenAPI spec - Client is auto-generated from OpenAPI spec
- Located in `frontend/src/lib/api/generated/` - Located in `frontend/src/lib/api/generated/`
- NEVER manually edit generated files - NEVER manually edit generated files
@@ -51,10 +51,16 @@ EOF
**Testing Commands:** **Testing Commands:**
- Backend unit/integration: `IS_TEST=True uv run pytest` (always prefix with `IS_TEST=True`) - Backend unit/integration: `IS_TEST=True uv run pytest` (always prefix with `IS_TEST=True`)
- Backend E2E (requires Docker): `make test-e2e` - Backend E2E (requires Docker): `make test-e2e`
- Frontend unit: `npm test` - Frontend unit: `bun run test`
- Frontend E2E: `npm run test:e2e` - Frontend E2E: `bun run test:e2e`
- Use `make test` or `make test-cov` in backend for convenience - Use `make test` or `make test-cov` in backend for convenience
**Security & Quality Commands (Backend):**
- `make validate` — lint + format + type checks
- `make audit` — dependency vulnerabilities + license compliance
- `make validate-all` — quality + security checks
- `make check` — **full pipeline**: quality + security + tests
**Backend E2E Testing (requires Docker):** **Backend E2E Testing (requires Docker):**
- Install deps: `make install-e2e` - Install deps: `make install-e2e`
- Run all E2E tests: `make test-e2e` - Run all E2E tests: `make test-e2e`
@@ -142,7 +148,7 @@ async def mock_commit():
with patch.object(session, 'commit', side_effect=mock_commit): with patch.object(session, 'commit', side_effect=mock_commit):
with patch.object(session, 'rollback', new_callable=AsyncMock) as mock_rollback: with patch.object(session, 'rollback', new_callable=AsyncMock) as mock_rollback:
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await crud_method(session, obj_in=data) await repo_method(session, obj_in=data)
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
``` ```
@@ -157,14 +163,18 @@ with patch.object(session, 'commit', side_effect=mock_commit):
- Never skip security headers in production - Never skip security headers in production
- Rate limiting is configured in route decorators: `@limiter.limit("10/minute")` - Rate limiting is configured in route decorators: `@limiter.limit("10/minute")`
- Session revocation is database-backed, not just JWT expiry - Session revocation is database-backed, not just JWT expiry
- Run `make audit` to check for dependency vulnerabilities and license compliance
- Run `make check` for the full pipeline: quality + security + tests
- Pre-commit hooks enforce Ruff lint/format and detect-secrets on every commit
- Setup hooks: `cd backend && uv run pre-commit install`
### Common Workflows Guidance ### Common Workflows Guidance
**When Adding a New Feature:** **When Adding a New Feature:**
1. Start with backend schema and CRUD 1. Start with backend schema and repository
2. Implement API route with proper authorization 2. Implement API route with proper authorization
3. Write backend tests (aim for >90% coverage) 3. Write backend tests (aim for >90% coverage)
4. Generate frontend API client: `npm run generate:api` 4. Generate frontend API client: `bun run generate:api`
5. Implement frontend components 5. Implement frontend components
6. Write frontend unit tests 6. Write frontend unit tests
7. Add E2E tests for critical flows 7. Add E2E tests for critical flows
@@ -177,8 +187,8 @@ with patch.object(session, 'commit', side_effect=mock_commit):
**When Debugging:** **When Debugging:**
- Backend: Check `IS_TEST=True` environment variable is set - Backend: Check `IS_TEST=True` environment variable is set
- Frontend: Run `npm run type-check` first - Frontend: Run `bun run type-check` first
- E2E: Use `npm run test:e2e:debug` for step-by-step debugging - E2E: Use `bun run test:e2e:debug` for step-by-step debugging
- Check logs: Backend has detailed error logging - Check logs: Backend has detailed error logging
**Demo Mode (Frontend-Only Showcase):** **Demo Mode (Frontend-Only Showcase):**
@@ -186,7 +196,7 @@ with patch.object(session, 'commit', side_effect=mock_commit):
- Uses MSW (Mock Service Worker) to intercept API calls in browser - Uses MSW (Mock Service Worker) to intercept API calls in browser
- Zero backend required - perfect for Vercel deployments - Zero backend required - perfect for Vercel deployments
- **Fully Automated**: MSW handlers auto-generated from OpenAPI spec - **Fully Automated**: MSW handlers auto-generated from OpenAPI spec
- Run `npm run generate:api` → updates both API client AND MSW handlers - Run `bun run generate:api` → updates both API client AND MSW handlers
- No manual synchronization needed! - No manual synchronization needed!
- Demo credentials (any password ≥8 chars works): - Demo credentials (any password ≥8 chars works):
- User: `demo@example.com` / `DemoPass123` - User: `demo@example.com` / `DemoPass123`
@@ -214,7 +224,7 @@ with patch.object(session, 'commit', side_effect=mock_commit):
No Claude Code Skills installed yet. To create one, invoke the built-in "skill-creator" skill. No Claude Code Skills installed yet. To create one, invoke the built-in "skill-creator" skill.
**Potential skill ideas for this project:** **Potential skill ideas for this project:**
- API endpoint generator workflow (schema → CRUD → route → tests → frontend client) - API endpoint generator workflow (schema → repository → route → tests → frontend client)
- Component generator with design system compliance - Component generator with design system compliance
- Database migration troubleshooting helper - Database migration troubleshooting helper
- Test coverage analyzer and improvement suggester - Test coverage analyzer and improvement suggester

View File

@@ -91,7 +91,10 @@ Ready to write some code? Awesome!
cd backend cd backend
# Install dependencies (uv manages virtual environment automatically) # Install dependencies (uv manages virtual environment automatically)
uv sync make install-dev
# Setup pre-commit hooks
uv run pre-commit install
# Setup environment # Setup environment
cp .env.example .env cp .env.example .env
@@ -100,8 +103,14 @@ cp .env.example .env
# Run migrations # Run migrations
python migrate.py apply python migrate.py apply
# Run quality + security checks
make validate-all
# Run tests # Run tests
IS_TEST=True uv run pytest make test
# Run full pipeline (quality + security + tests)
make check
# Start dev server # Start dev server
uvicorn app.main:app --reload uvicorn app.main:app --reload
@@ -113,20 +122,20 @@ uvicorn app.main:app --reload
cd frontend cd frontend
# Install dependencies # Install dependencies
npm install bun install
# Setup environment # Setup environment
cp .env.local.example .env.local cp .env.local.example .env.local
# Generate API client # Generate API client
npm run generate:api bun run generate:api
# Run tests # Run tests
npm test bun run test
npm run test:e2e:ui bun run test:e2e:ui
# Start dev server # Start dev server
npm run dev bun run dev
``` ```
--- ---
@@ -195,7 +204,7 @@ export function UserProfile({ userId }: UserProfileProps) {
### Key Patterns ### Key Patterns
- **Backend**: Use CRUD pattern, keep routes thin, business logic in services - **Backend**: Use repository pattern, keep routes thin, business logic in services
- **Frontend**: Use React Query for server state, Zustand for client state - **Frontend**: Use React Query for server state, Zustand for client state
- **Both**: Handle errors gracefully, log appropriately, write tests - **Both**: Handle errors gracefully, log appropriately, write tests
@@ -316,7 +325,7 @@ Fixed stuff
### Before Submitting ### Before Submitting
- [ ] Code follows project style guidelines - [ ] Code follows project style guidelines
- [ ] All tests pass locally - [ ] `make check` passes (quality + security + tests) in backend
- [ ] New tests added for new features - [ ] New tests added for new features
- [ ] Documentation updated if needed - [ ] Documentation updated if needed
- [ ] No merge conflicts with `main` - [ ] No merge conflicts with `main`

View File

@@ -1,4 +1,4 @@
.PHONY: help dev dev-full prod down logs logs-dev clean clean-slate drop-db reset-db push-images deploy .PHONY: help dev dev-full prod down logs logs-dev clean clean-slate drop-db reset-db push-images deploy scan-images
VERSION ?= latest VERSION ?= latest
REGISTRY ?= ghcr.io/cardosofelipe/pragma-stack REGISTRY ?= ghcr.io/cardosofelipe/pragma-stack
@@ -21,6 +21,7 @@ help:
@echo " make prod - Start production stack" @echo " make prod - Start production stack"
@echo " make deploy - Pull and deploy latest images" @echo " make deploy - Pull and deploy latest images"
@echo " make push-images - Build and push images to registry" @echo " make push-images - Build and push images to registry"
@echo " make scan-images - Scan production images for CVEs (requires trivy)"
@echo " make logs - Follow production container logs" @echo " make logs - Follow production container logs"
@echo "" @echo ""
@echo "Cleanup:" @echo "Cleanup:"
@@ -89,6 +90,28 @@ push-images:
docker push $(REGISTRY)/backend:$(VERSION) docker push $(REGISTRY)/backend:$(VERSION)
docker push $(REGISTRY)/frontend:$(VERSION) docker push $(REGISTRY)/frontend:$(VERSION)
scan-images:
@docker info > /dev/null 2>&1 || (echo "❌ Docker is not running!"; exit 1)
@echo "🐳 Building and scanning production images for CVEs..."
docker build -t $(REGISTRY)/backend:scan --target production ./backend
docker build -t $(REGISTRY)/frontend:scan --target runner ./frontend
@echo ""
@echo "=== Backend Image Scan ==="
@if command -v trivy > /dev/null 2>&1; then \
trivy image --severity HIGH,CRITICAL --exit-code 1 $(REGISTRY)/backend:scan; \
else \
echo " Trivy not found locally, using Docker to run Trivy..."; \
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock aquasec/trivy image --severity HIGH,CRITICAL --exit-code 1 $(REGISTRY)/backend:scan; \
fi
@echo ""
@echo "=== Frontend Image Scan ==="
@if command -v trivy > /dev/null 2>&1; then \
trivy image --severity HIGH,CRITICAL --exit-code 1 $(REGISTRY)/frontend:scan; \
else \
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock aquasec/trivy image --severity HIGH,CRITICAL --exit-code 1 $(REGISTRY)/frontend:scan; \
fi
@echo "✅ No HIGH/CRITICAL CVEs found in production images!"
# ============================================================================ # ============================================================================
# Cleanup # Cleanup
# ============================================================================ # ============================================================================

View File

@@ -58,7 +58,7 @@ Full OAuth 2.0 Authorization Server for Model Context Protocol (MCP) and third-p
- User can belong to multiple organizations - User can belong to multiple organizations
### 🛠️ **Admin Panel** ### 🛠️ **Admin Panel**
- Complete user management (CRUD, activate/deactivate, bulk operations) - Complete user management (full lifecycle, activate/deactivate, bulk operations)
- Organization management (create, edit, delete, member management) - Organization management (create, edit, delete, member management)
- Session monitoring across all users - Session monitoring across all users
- Real-time statistics dashboard - Real-time statistics dashboard
@@ -166,7 +166,7 @@ Full OAuth 2.0 Authorization Server for Model Context Protocol (MCP) and third-p
```bash ```bash
cd frontend cd frontend
echo "NEXT_PUBLIC_DEMO_MODE=true" > .env.local echo "NEXT_PUBLIC_DEMO_MODE=true" > .env.local
npm run dev bun run dev
``` ```
**Demo Credentials:** **Demo Credentials:**
@@ -298,17 +298,17 @@ uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
cd frontend cd frontend
# Install dependencies # Install dependencies
npm install bun install
# Setup environment # Setup environment
cp .env.local.example .env.local cp .env.local.example .env.local
# Edit .env.local with your backend URL # Edit .env.local with your backend URL
# Generate API client # Generate API client
npm run generate:api bun run generate:api
# Start development server # Start development server
npm run dev bun run dev
``` ```
Visit http://localhost:3000 to see your app! Visit http://localhost:3000 to see your app!
@@ -322,7 +322,7 @@ Visit http://localhost:3000 to see your app!
│ ├── app/ │ ├── app/
│ │ ├── api/ # API routes and dependencies │ │ ├── api/ # API routes and dependencies
│ │ ├── core/ # Core functionality (auth, config, database) │ │ ├── core/ # Core functionality (auth, config, database)
│ │ ├── crud/ # Database operations │ │ ├── repositories/ # Repository pattern (database operations)
│ │ ├── models/ # SQLAlchemy models │ │ ├── models/ # SQLAlchemy models
│ │ ├── schemas/ # Pydantic schemas │ │ ├── schemas/ # Pydantic schemas
│ │ ├── services/ # Business logic │ │ ├── services/ # Business logic
@@ -377,7 +377,7 @@ open htmlcov/index.html
``` ```
**Test types:** **Test types:**
- **Unit tests**: CRUD operations, utilities, business logic - **Unit tests**: Repository operations, utilities, business logic
- **Integration tests**: API endpoints with database - **Integration tests**: API endpoints with database
- **Security tests**: JWT algorithm attacks, session hijacking, privilege escalation - **Security tests**: JWT algorithm attacks, session hijacking, privilege escalation
- **Error handling tests**: Database failures, validation errors - **Error handling tests**: Database failures, validation errors
@@ -390,13 +390,13 @@ open htmlcov/index.html
cd frontend cd frontend
# Run unit tests # Run unit tests
npm test bun run test
# Run with coverage # Run with coverage
npm run test:coverage bun run test:coverage
# Watch mode # Watch mode
npm run test:watch bun run test:watch
``` ```
**Test types:** **Test types:**
@@ -414,10 +414,10 @@ npm run test:watch
cd frontend cd frontend
# Run E2E tests # Run E2E tests
npm run test:e2e bun run test:e2e
# Run E2E tests in UI mode (recommended for development) # Run E2E tests in UI mode (recommended for development)
npm run test:e2e:ui bun run test:e2e:ui
# Run specific test file # Run specific test file
npx playwright test auth-login.spec.ts npx playwright test auth-login.spec.ts
@@ -542,7 +542,7 @@ docker-compose down
### ✅ Completed ### ✅ Completed
- [x] Authentication system (JWT, refresh tokens, session management, OAuth) - [x] Authentication system (JWT, refresh tokens, session management, OAuth)
- [x] User management (CRUD, profile, password change) - [x] User management (full lifecycle, profile, password change)
- [x] Organization system with RBAC (Owner, Admin, Member) - [x] Organization system with RBAC (Owner, Admin, Member)
- [x] Admin panel (users, organizations, sessions, statistics) - [x] Admin panel (users, organizations, sessions, statistics)
- [x] **Internationalization (i18n)** with next-intl (English + Italian) - [x] **Internationalization (i18n)** with next-intl (English + Italian)

View File

@@ -11,7 +11,7 @@ omit =
app/utils/auth_test_utils.py app/utils/auth_test_utils.py
# Async implementations not yet in use # Async implementations not yet in use
app/crud/base_async.py app/repositories/base_async.py
app/core/database_async.py app/core/database_async.py
# CLI scripts - run manually, not tested # CLI scripts - run manually, not tested
@@ -23,7 +23,7 @@ omit =
app/api/routes/__init__.py app/api/routes/__init__.py
app/api/dependencies/__init__.py app/api/dependencies/__init__.py
app/core/__init__.py app/core/__init__.py
app/crud/__init__.py app/repositories/__init__.py
app/models/__init__.py app/models/__init__.py
app/schemas/__init__.py app/schemas/__init__.py
app/services/__init__.py app/services/__init__.py

View File

@@ -0,0 +1,44 @@
# Pre-commit hooks for backend quality and security checks.
#
# Install:
# cd backend && uv run pre-commit install
#
# Run manually on all files:
# cd backend && uv run pre-commit run --all-files
#
# Skip hooks temporarily:
# git commit --no-verify
#
repos:
# ── Code Quality ──────────────────────────────────────────────────────────
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.4
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- id: ruff-format
# ── General File Hygiene ──────────────────────────────────────────────────
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-toml
- id: check-merge-conflict
- id: check-added-large-files
args: [--maxkb=500]
- id: debug-statements
# ── Security ──────────────────────────────────────────────────────────────
- repo: https://github.com/Yelp/detect-secrets
rev: v1.5.0
hooks:
- id: detect-secrets
args: ['--baseline', '.secrets.baseline']
exclude: |
(?x)^(
.*\.lock$|
.*\.svg$
)$

1073
backend/.secrets.baseline Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -33,11 +33,11 @@ RUN chmod +x /usr/local/bin/entrypoint.sh
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
# Production stage # Production stage — Alpine eliminates glibc CVEs (e.g. CVE-2026-0861)
FROM python:3.12-slim AS production FROM python:3.12-alpine AS production
# Create non-root user # Create non-root user
RUN groupadd -r appuser && useradd -r -g appuser appuser RUN addgroup -S appuser && adduser -S -G appuser appuser
WORKDIR /app WORKDIR /app
ENV PYTHONDONTWRITEBYTECODE=1 \ ENV PYTHONDONTWRITEBYTECODE=1 \
@@ -48,18 +48,18 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
UV_NO_CACHE=1 UV_NO_CACHE=1
# Install system dependencies and uv # Install system dependencies and uv
RUN apt-get update && \ RUN apk add --no-cache postgresql-client curl ca-certificates && \
apt-get install -y --no-install-recommends postgresql-client curl ca-certificates && \
curl -LsSf https://astral.sh/uv/install.sh | sh && \ curl -LsSf https://astral.sh/uv/install.sh | sh && \
mv /root/.local/bin/uv* /usr/local/bin/ && \ mv /root/.local/bin/uv* /usr/local/bin/
apt-get clean && \
rm -rf /var/lib/apt/lists/*
# Copy dependency files # Copy dependency files
COPY pyproject.toml uv.lock ./ COPY pyproject.toml uv.lock ./
# Install only production dependencies using uv (no dev dependencies) # Install build dependencies, compile Python packages, then remove build deps
RUN uv sync --frozen --no-dev RUN apk add --no-cache --virtual .build-deps \
gcc g++ musl-dev python3-dev linux-headers libffi-dev openssl-dev && \
uv sync --frozen --no-dev && \
apk del .build-deps
# Copy application code # Copy application code
COPY . . COPY . .

View File

@@ -1,4 +1,4 @@
.PHONY: help lint lint-fix format format-check type-check test test-cov validate clean install-dev sync check-docker install-e2e test-e2e test-e2e-schema test-all .PHONY: help lint lint-fix format format-check type-check test test-cov validate clean install-dev sync check-docker install-e2e test-e2e test-e2e-schema test-all dep-audit license-check audit validate-all check benchmark benchmark-check benchmark-save scan-image test-api-security
# Prevent a stale VIRTUAL_ENV in the caller's shell from confusing uv # Prevent a stale VIRTUAL_ENV in the caller's shell from confusing uv
unexport VIRTUAL_ENV unexport VIRTUAL_ENV
@@ -18,7 +18,20 @@ help:
@echo " make format - Format code with Ruff" @echo " make format - Format code with Ruff"
@echo " make format-check - Check if code is formatted" @echo " make format-check - Check if code is formatted"
@echo " make type-check - Run pyright type checking" @echo " make type-check - Run pyright type checking"
@echo " make validate - Run all checks (lint + format + types)" @echo " make validate - Run all checks (lint + format + types + schema fuzz)"
@echo ""
@echo "Performance:"
@echo " make benchmark - Run performance benchmarks"
@echo " make benchmark-save - Run benchmarks and save as baseline"
@echo " make benchmark-check - Run benchmarks and compare against baseline"
@echo ""
@echo "Security & Audit:"
@echo " make dep-audit - Scan dependencies for known vulnerabilities"
@echo " make license-check - Check dependency license compliance"
@echo " make audit - Run all security audits (deps + licenses)"
@echo " make scan-image - Scan Docker image for CVEs (requires trivy)"
@echo " make validate-all - Run all quality + security checks"
@echo " make check - Full pipeline: quality + security + tests"
@echo "" @echo ""
@echo "Testing:" @echo "Testing:"
@echo " make test - Run pytest (unit/integration, SQLite)" @echo " make test - Run pytest (unit/integration, SQLite)"
@@ -27,6 +40,7 @@ help:
@echo " make test-e2e-schema - Run Schemathesis API schema tests" @echo " make test-e2e-schema - Run Schemathesis API schema tests"
@echo " make test-all - Run all tests (unit + E2E)" @echo " make test-all - Run all tests (unit + E2E)"
@echo " make check-docker - Check if Docker is available" @echo " make check-docker - Check if Docker is available"
@echo " make check - Full pipeline: quality + security + tests"
@echo "" @echo ""
@echo "Cleanup:" @echo "Cleanup:"
@echo " make clean - Remove cache and build artifacts" @echo " make clean - Remove cache and build artifacts"
@@ -69,9 +83,49 @@ type-check:
@echo "🔎 Running pyright type checking..." @echo "🔎 Running pyright type checking..."
@uv run pyright app/ @uv run pyright app/
validate: lint format-check type-check validate: lint format-check type-check test-api-security
@echo "✅ All quality checks passed!" @echo "✅ All quality checks passed!"
# API Security Testing (Schemathesis property-based fuzzing)
test-api-security: check-docker
@echo "🔐 Running Schemathesis API security fuzzing..."
@IS_TEST=True PYTHONPATH=. uv run pytest tests/e2e/ -v -m "schemathesis" --tb=short -n 0
@echo "✅ API schema security tests passed!"
# ============================================================================
# Security & Audit
# ============================================================================
dep-audit:
@echo "🔒 Scanning dependencies for known vulnerabilities..."
@uv run pip-audit --desc --skip-editable
@echo "✅ No known vulnerabilities found!"
license-check:
@echo "📜 Checking dependency license compliance..."
@uv run pip-licenses --fail-on="GPL-3.0-or-later;AGPL-3.0-or-later" --format=plain > /dev/null
@echo "✅ All dependency licenses are compliant!"
audit: dep-audit license-check
@echo "✅ All security audits passed!"
scan-image: check-docker
@echo "🐳 Scanning Docker image for OS-level CVEs with Trivy..."
@docker build -t pragma-backend:scan -q --target production .
@if command -v trivy > /dev/null 2>&1; then \
trivy image --severity HIGH,CRITICAL --exit-code 1 pragma-backend:scan; \
else \
echo " Trivy not found locally, using Docker to run Trivy..."; \
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock aquasec/trivy image --severity HIGH,CRITICAL --exit-code 1 pragma-backend:scan; \
fi
@echo "✅ No HIGH/CRITICAL CVEs found in Docker image!"
validate-all: validate audit
@echo "✅ All quality + security checks passed!"
check: validate-all test
@echo "✅ Full validation pipeline complete!"
# ============================================================================ # ============================================================================
# Testing # Testing
# ============================================================================ # ============================================================================
@@ -117,6 +171,31 @@ test-e2e-schema: check-docker
@echo "🧪 Running Schemathesis API schema tests..." @echo "🧪 Running Schemathesis API schema tests..."
@IS_TEST=True PYTHONPATH=. uv run pytest tests/e2e/ -v -m "schemathesis" --tb=short -n 0 @IS_TEST=True PYTHONPATH=. uv run pytest tests/e2e/ -v -m "schemathesis" --tb=short -n 0
# ============================================================================
# Performance Benchmarks
# ============================================================================
benchmark:
@echo "⏱️ Running performance benchmarks..."
@IS_TEST=True PYTHONPATH=. uv run pytest tests/benchmarks/ -v --benchmark-only --benchmark-sort=mean -p no:xdist --override-ini='addopts='
benchmark-save:
@echo "⏱️ Running benchmarks and saving baseline..."
@IS_TEST=True PYTHONPATH=. uv run pytest tests/benchmarks/ -v --benchmark-only --benchmark-save=baseline --benchmark-sort=mean -p no:xdist --override-ini='addopts='
@echo "✅ Benchmark baseline saved to .benchmarks/"
benchmark-check:
@echo "⏱️ Running benchmarks and comparing against baseline..."
@if find .benchmarks -name '*_baseline*' -print -quit 2>/dev/null | grep -q .; then \
IS_TEST=True PYTHONPATH=. uv run pytest tests/benchmarks/ -v --benchmark-only --benchmark-compare=0001_baseline --benchmark-sort=mean --benchmark-compare-fail=mean:200% -p no:xdist --override-ini='addopts='; \
echo "✅ No performance regressions detected!"; \
else \
echo "⚠️ No benchmark baseline found. Run 'make benchmark-save' first to create one."; \
echo " Running benchmarks without comparison..."; \
IS_TEST=True PYTHONPATH=. uv run pytest tests/benchmarks/ -v --benchmark-only --benchmark-save=baseline --benchmark-sort=mean -p no:xdist --override-ini='addopts='; \
echo "✅ Benchmark baseline created. Future runs of 'make benchmark-check' will compare against it."; \
fi
test-all: test-all:
@echo "🧪 Running ALL tests (unit + E2E)..." @echo "🧪 Running ALL tests (unit + E2E)..."
@$(MAKE) test @$(MAKE) test

View File

@@ -14,7 +14,9 @@ Features:
- **Multi-tenancy**: Organization-based access control with roles (Owner/Admin/Member) - **Multi-tenancy**: Organization-based access control with roles (Owner/Admin/Member)
- **Testing**: 97%+ coverage with security-focused test suite - **Testing**: 97%+ coverage with security-focused test suite
- **Performance**: Async throughout, connection pooling, optimized queries - **Performance**: Async throughout, connection pooling, optimized queries
- **Modern Tooling**: uv for dependencies, Ruff for linting/formatting, mypy for type checking - **Modern Tooling**: uv for dependencies, Ruff for linting/formatting, Pyright for type checking
- **Security Auditing**: Automated dependency vulnerability scanning, license compliance, secrets detection
- **Pre-commit Hooks**: Ruff, detect-secrets, and standard checks on every commit
## Quick Start ## Quick Start
@@ -149,7 +151,7 @@ uv pip list --outdated
# Run any Python command via uv (no activation needed) # Run any Python command via uv (no activation needed)
uv run python script.py uv run python script.py
uv run pytest uv run pytest
uv run mypy app/ uv run pyright app/
# Or activate the virtual environment # Or activate the virtual environment
source .venv/bin/activate source .venv/bin/activate
@@ -171,12 +173,22 @@ make lint # Run Ruff linter (check only)
make lint-fix # Run Ruff with auto-fix make lint-fix # Run Ruff with auto-fix
make format # Format code with Ruff make format # Format code with Ruff
make format-check # Check if code is formatted make format-check # Check if code is formatted
make type-check # Run mypy type checking make type-check # Run Pyright type checking
make validate # Run all checks (lint + format + types) make validate # Run all checks (lint + format + types)
# Security & Audit
make dep-audit # Scan dependencies for known vulnerabilities (CVEs)
make license-check # Check dependency license compliance
make audit # Run all security audits (deps + licenses)
make validate-all # Run all quality + security checks
make check # Full pipeline: quality + security + tests
# Testing # Testing
make test # Run all tests make test # Run all tests
make test-cov # Run tests with coverage report make test-cov # Run tests with coverage report
make test-e2e # Run E2E tests (PostgreSQL, requires Docker)
make test-e2e-schema # Run Schemathesis API schema tests
make test-all # Run all tests (unit + E2E)
# Utilities # Utilities
make clean # Remove cache and build artifacts make clean # Remove cache and build artifacts
@@ -252,7 +264,7 @@ app/
│ ├── database.py # Database engine setup │ ├── database.py # Database engine setup
│ ├── auth.py # JWT token handling │ ├── auth.py # JWT token handling
│ └── exceptions.py # Custom exceptions │ └── exceptions.py # Custom exceptions
├── crud/ # Database operations ├── repositories/ # Repository pattern (database operations)
├── models/ # SQLAlchemy ORM models ├── models/ # SQLAlchemy ORM models
├── schemas/ # Pydantic request/response schemas ├── schemas/ # Pydantic request/response schemas
├── services/ # Business logic layer ├── services/ # Business logic layer
@@ -352,18 +364,29 @@ open htmlcov/index.html
# Using Makefile (recommended) # Using Makefile (recommended)
make lint # Ruff linting make lint # Ruff linting
make format # Ruff formatting make format # Ruff formatting
make type-check # mypy type checking make type-check # Pyright type checking
make validate # All checks at once make validate # All checks at once
# Security audits
make dep-audit # Scan dependencies for CVEs
make license-check # Check license compliance
make audit # All security audits
make validate-all # Quality + security checks
make check # Full pipeline: quality + security + tests
# Using uv directly # Using uv directly
uv run ruff check app/ tests/ uv run ruff check app/ tests/
uv run ruff format app/ tests/ uv run ruff format app/ tests/
uv run mypy app/ uv run pyright app/
``` ```
**Tools:** **Tools:**
- **Ruff**: All-in-one linting, formatting, and import sorting (replaces Black, Flake8, isort) - **Ruff**: All-in-one linting, formatting, and import sorting (replaces Black, Flake8, isort)
- **mypy**: Static type checking with Pydantic plugin - **Pyright**: Static type checking (strict mode)
- **pip-audit**: Dependency vulnerability scanning against the OSV database
- **pip-licenses**: Dependency license compliance checking
- **detect-secrets**: Hardcoded secrets/credentials detection
- **pre-commit**: Git hook framework for automated checks on every commit
All configurations are in `pyproject.toml`. All configurations are in `pyproject.toml`.
@@ -439,7 +462,7 @@ See [docs/FEATURE_EXAMPLE.md](docs/FEATURE_EXAMPLE.md) for step-by-step guide.
Quick overview: Quick overview:
1. Create Pydantic schemas in `app/schemas/` 1. Create Pydantic schemas in `app/schemas/`
2. Create CRUD operations in `app/crud/` 2. Create repository in `app/repositories/`
3. Create route in `app/api/routes/` 3. Create route in `app/api/routes/`
4. Register router in `app/api/main.py` 4. Register router in `app/api/main.py`
5. Write tests in `tests/api/` 5. Write tests in `tests/api/`
@@ -589,13 +612,42 @@ Configured in `app/core/config.py`:
- **Security Headers**: CSP, HSTS, X-Frame-Options, etc. - **Security Headers**: CSP, HSTS, X-Frame-Options, etc.
- **Input Validation**: Pydantic schemas, SQL injection prevention (ORM) - **Input Validation**: Pydantic schemas, SQL injection prevention (ORM)
### Security Auditing
Automated, deterministic security checks are built into the development workflow:
```bash
# Scan dependencies for known vulnerabilities (CVEs)
make dep-audit
# Check dependency license compliance (blocks GPL-3.0/AGPL)
make license-check
# Run all security audits
make audit
# Full pipeline: quality + security + tests
make check
```
**Pre-commit hooks** automatically run on every commit:
- **Ruff** lint + format checks
- **detect-secrets** blocks commits containing hardcoded secrets
- **Standard checks**: trailing whitespace, YAML/TOML validation, merge conflict detection, large file prevention
Setup pre-commit hooks:
```bash
uv run pre-commit install
```
### Security Best Practices ### Security Best Practices
1. **Never commit secrets**: Use `.env` files (git-ignored) 1. **Never commit secrets**: Use `.env` files (git-ignored), enforced by detect-secrets pre-commit hook
2. **Strong SECRET_KEY**: Min 32 chars, cryptographically random 2. **Strong SECRET_KEY**: Min 32 chars, cryptographically random
3. **HTTPS in production**: Required for token security 3. **HTTPS in production**: Required for token security
4. **Regular updates**: Keep dependencies current (`uv sync --upgrade`) 4. **Regular updates**: Keep dependencies current (`uv sync --upgrade`), run `make dep-audit` to check for CVEs
5. **Audit logs**: Monitor authentication events 5. **Audit logs**: Monitor authentication events
6. **Run `make check` before pushing**: Validates quality, security, and tests in one command
--- ---
@@ -645,7 +697,11 @@ logging.basicConfig(level=logging.INFO)
**Built with modern Python tooling:** **Built with modern Python tooling:**
- 🚀 **uv** - 10-100x faster dependency management - 🚀 **uv** - 10-100x faster dependency management
-**Ruff** - 10-100x faster linting & formatting -**Ruff** - 10-100x faster linting & formatting
- 🔍 **mypy** - Static type checking - 🔍 **Pyright** - Static type checking (strict mode)
-**pytest** - Comprehensive test suite -**pytest** - Comprehensive test suite
- 🔒 **pip-audit** - Dependency vulnerability scanning
- 🔑 **detect-secrets** - Hardcoded secrets detection
- 📜 **pip-licenses** - License compliance checking
- 🪝 **pre-commit** - Automated git hooks
**All configured in a single `pyproject.toml` file!** **All configured in a single `pyproject.toml` file!**

View File

@@ -243,7 +243,7 @@ async def admin_get_stats(
# 4. User Status - Active vs Inactive # 4. User Status - Active vs Inactive
logger.info( logger.info(
f"User status counts - Active: {active_count}, Inactive: {inactive_count}" "User status counts - Active: %s, Inactive: %s", active_count, inactive_count
) )
user_status = [ user_status = [
@@ -312,7 +312,7 @@ async def admin_list_users(
return PaginatedResponse(data=users, pagination=pagination_meta) return PaginatedResponse(data=users, pagination=pagination_meta)
except Exception as e: except Exception as e:
logger.error(f"Error listing users (admin): {e!s}", exc_info=True) logger.exception("Error listing users (admin): %s", e)
raise raise
@@ -336,13 +336,13 @@ async def admin_create_user(
""" """
try: try:
user = await user_service.create_user(db, user_in) user = await user_service.create_user(db, user_in)
logger.info(f"Admin {admin.email} created user {user.email}") logger.info("Admin %s created user %s", admin.email, user.email)
return user return user
except DuplicateEntryError as e: except DuplicateEntryError as e:
logger.warning(f"Failed to create user: {e!s}") logger.warning("Failed to create user: %s", e)
raise DuplicateError(message=str(e), error_code=ErrorCode.USER_ALREADY_EXISTS) raise DuplicateError(message=str(e), error_code=ErrorCode.USER_ALREADY_EXISTS)
except Exception as e: except Exception as e:
logger.error(f"Error creating user (admin): {e!s}", exc_info=True) logger.exception("Error creating user (admin): %s", e)
raise raise
@@ -380,11 +380,11 @@ async def admin_update_user(
try: try:
user = await user_service.get_user(db, str(user_id)) user = await user_service.get_user(db, str(user_id))
updated_user = await user_service.update_user(db, user=user, obj_in=user_in) updated_user = await user_service.update_user(db, user=user, obj_in=user_in)
logger.info(f"Admin {admin.email} updated user {updated_user.email}") logger.info("Admin %s updated user %s", admin.email, updated_user.email)
return updated_user return updated_user
except Exception as e: except Exception as e:
logger.error(f"Error updating user (admin): {e!s}", exc_info=True) logger.exception("Error updating user (admin): %s", e)
raise raise
@@ -413,14 +413,14 @@ async def admin_delete_user(
) )
await user_service.soft_delete_user(db, str(user_id)) await user_service.soft_delete_user(db, str(user_id))
logger.info(f"Admin {admin.email} deleted user {user.email}") logger.info("Admin %s deleted user %s", admin.email, user.email)
return MessageResponse( return MessageResponse(
success=True, message=f"User {user.email} has been deleted" success=True, message=f"User {user.email} has been deleted"
) )
except Exception as e: except Exception as e:
logger.error(f"Error deleting user (admin): {e!s}", exc_info=True) logger.exception("Error deleting user (admin): %s", e)
raise raise
@@ -440,14 +440,14 @@ async def admin_activate_user(
try: try:
user = await user_service.get_user(db, str(user_id)) user = await user_service.get_user(db, str(user_id))
await user_service.update_user(db, user=user, obj_in={"is_active": True}) await user_service.update_user(db, user=user, obj_in={"is_active": True})
logger.info(f"Admin {admin.email} activated user {user.email}") logger.info("Admin %s activated user %s", admin.email, user.email)
return MessageResponse( return MessageResponse(
success=True, message=f"User {user.email} has been activated" success=True, message=f"User {user.email} has been activated"
) )
except Exception as e: except Exception as e:
logger.error(f"Error activating user (admin): {e!s}", exc_info=True) logger.exception("Error activating user (admin): %s", e)
raise raise
@@ -476,14 +476,14 @@ async def admin_deactivate_user(
) )
await user_service.update_user(db, user=user, obj_in={"is_active": False}) await user_service.update_user(db, user=user, obj_in={"is_active": False})
logger.info(f"Admin {admin.email} deactivated user {user.email}") logger.info("Admin %s deactivated user %s", admin.email, user.email)
return MessageResponse( return MessageResponse(
success=True, message=f"User {user.email} has been deactivated" success=True, message=f"User {user.email} has been deactivated"
) )
except Exception as e: except Exception as e:
logger.error(f"Error deactivating user (admin): {e!s}", exc_info=True) logger.exception("Error deactivating user (admin): %s", e)
raise raise
@@ -528,8 +528,11 @@ async def admin_bulk_user_action(
failed_count = requested_count - affected_count failed_count = requested_count - affected_count
logger.info( logger.info(
f"Admin {admin.email} performed bulk {bulk_action.action.value} " "Admin %s performed bulk %s on %s users (%s skipped/failed)",
f"on {affected_count} users ({failed_count} skipped/failed)" admin.email,
bulk_action.action.value,
affected_count,
failed_count,
) )
return BulkActionResult( return BulkActionResult(
@@ -541,7 +544,7 @@ async def admin_bulk_user_action(
) )
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error(f"Error in bulk user action: {e!s}", exc_info=True) logger.exception("Error in bulk user action: %s", e)
raise raise
@@ -602,7 +605,7 @@ async def admin_list_organizations(
return PaginatedResponse(data=orgs_with_count, pagination=pagination_meta) return PaginatedResponse(data=orgs_with_count, pagination=pagination_meta)
except Exception as e: except Exception as e:
logger.error(f"Error listing organizations (admin): {e!s}", exc_info=True) logger.exception("Error listing organizations (admin): %s", e)
raise raise
@@ -622,7 +625,7 @@ async def admin_create_organization(
"""Create a new organization.""" """Create a new organization."""
try: try:
org = await organization_service.create_organization(db, obj_in=org_in) org = await organization_service.create_organization(db, obj_in=org_in)
logger.info(f"Admin {admin.email} created organization {org.name}") logger.info("Admin %s created organization %s", admin.email, org.name)
# Add member count # Add member count
org_dict = { org_dict = {
@@ -639,10 +642,10 @@ async def admin_create_organization(
return OrganizationResponse(**org_dict) return OrganizationResponse(**org_dict)
except DuplicateEntryError as e: except DuplicateEntryError as e:
logger.warning(f"Failed to create organization: {e!s}") logger.warning("Failed to create organization: %s", e)
raise DuplicateError(message=str(e), error_code=ErrorCode.ALREADY_EXISTS) raise DuplicateError(message=str(e), error_code=ErrorCode.ALREADY_EXISTS)
except Exception as e: except Exception as e:
logger.error(f"Error creating organization (admin): {e!s}", exc_info=True) logger.exception("Error creating organization (admin): %s", e)
raise raise
@@ -695,7 +698,7 @@ async def admin_update_organization(
updated_org = await organization_service.update_organization( updated_org = await organization_service.update_organization(
db, org=org, obj_in=org_in db, org=org, obj_in=org_in
) )
logger.info(f"Admin {admin.email} updated organization {updated_org.name}") logger.info("Admin %s updated organization %s", admin.email, updated_org.name)
org_dict = { org_dict = {
"id": updated_org.id, "id": updated_org.id,
@@ -713,7 +716,7 @@ async def admin_update_organization(
return OrganizationResponse(**org_dict) return OrganizationResponse(**org_dict)
except Exception as e: except Exception as e:
logger.error(f"Error updating organization (admin): {e!s}", exc_info=True) logger.exception("Error updating organization (admin): %s", e)
raise raise
@@ -733,14 +736,14 @@ async def admin_delete_organization(
try: try:
org = await organization_service.get_organization(db, str(org_id)) org = await organization_service.get_organization(db, str(org_id))
await organization_service.remove_organization(db, str(org_id)) await organization_service.remove_organization(db, str(org_id))
logger.info(f"Admin {admin.email} deleted organization {org.name}") logger.info("Admin %s deleted organization %s", admin.email, org.name)
return MessageResponse( return MessageResponse(
success=True, message=f"Organization {org.name} has been deleted" success=True, message=f"Organization {org.name} has been deleted"
) )
except Exception as e: except Exception as e:
logger.error(f"Error deleting organization (admin): {e!s}", exc_info=True) logger.exception("Error deleting organization (admin): %s", e)
raise raise
@@ -784,9 +787,7 @@ async def admin_list_organization_members(
except NotFoundError: except NotFoundError:
raise raise
except Exception as e: except Exception as e:
logger.error( logger.exception("Error listing organization members (admin): %s", e)
f"Error listing organization members (admin): {e!s}", exc_info=True
)
raise raise
@@ -822,8 +823,11 @@ async def admin_add_organization_member(
) )
logger.info( logger.info(
f"Admin {admin.email} added user {user.email} to organization {org.name} " "Admin %s added user %s to organization %s with role %s",
f"with role {request.role.value}" admin.email,
user.email,
org.name,
request.role.value,
) )
return MessageResponse( return MessageResponse(
@@ -831,14 +835,12 @@ async def admin_add_organization_member(
) )
except DuplicateEntryError as e: except DuplicateEntryError as e:
logger.warning(f"Failed to add user to organization: {e!s}") logger.warning("Failed to add user to organization: %s", e)
raise DuplicateError( raise DuplicateError(
message=str(e), error_code=ErrorCode.USER_ALREADY_EXISTS, field="user_id" message=str(e), error_code=ErrorCode.USER_ALREADY_EXISTS, field="user_id"
) )
except Exception as e: except Exception as e:
logger.error( logger.exception("Error adding member to organization (admin): %s", e)
f"Error adding member to organization (admin): {e!s}", exc_info=True
)
raise raise
@@ -871,7 +873,10 @@ async def admin_remove_organization_member(
) )
logger.info( logger.info(
f"Admin {admin.email} removed user {user.email} from organization {org.name}" "Admin %s removed user %s from organization %s",
admin.email,
user.email,
org.name,
) )
return MessageResponse( return MessageResponse(
@@ -882,9 +887,7 @@ async def admin_remove_organization_member(
except NotFoundError: except NotFoundError:
raise raise
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error( logger.exception("Error removing member from organization (admin): %s", e)
f"Error removing member from organization (admin): {e!s}", exc_info=True
)
raise raise
@@ -953,7 +956,10 @@ async def admin_list_sessions(
session_responses.append(session_response) session_responses.append(session_response)
logger.info( logger.info(
f"Admin {admin.email} listed {len(session_responses)} sessions (total: {total})" "Admin %s listed %s sessions (total: %s)",
admin.email,
len(session_responses),
total,
) )
pagination_meta = create_pagination_meta( pagination_meta = create_pagination_meta(
@@ -966,5 +972,5 @@ async def admin_list_sessions(
return PaginatedResponse(data=session_responses, pagination=pagination_meta) return PaginatedResponse(data=session_responses, pagination=pagination_meta)
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error(f"Error listing sessions (admin): {e!s}", exc_info=True) logger.exception("Error listing sessions (admin): %s", e)
raise raise

View File

@@ -94,14 +94,15 @@ async def _create_login_session(
await session_service.create_session(db, obj_in=session_data) await session_service.create_session(db, obj_in=session_data)
logger.info( logger.info(
f"{login_type.capitalize()} successful: {user.email} from {device_info.device_name} " "%s successful: %s from %s (IP: %s)",
f"(IP: {device_info.ip_address})" login_type.capitalize(),
user.email,
device_info.device_name,
device_info.ip_address,
) )
except Exception as session_err: except Exception as session_err:
# Log but don't fail login if session creation fails # Log but don't fail login if session creation fails
logger.error( logger.exception("Failed to create session for %s: %s", user.email, session_err)
f"Failed to create session for {user.email}: {session_err!s}", exc_info=True
)
@router.post( @router.post(
@@ -125,19 +126,19 @@ async def register_user(
return user return user
except DuplicateError: except DuplicateError:
# SECURITY: Don't reveal if email exists - generic error message # SECURITY: Don't reveal if email exists - generic error message
logger.warning(f"Registration failed: duplicate email {user_data.email}") logger.warning("Registration failed: duplicate email %s", user_data.email)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, status_code=status.HTTP_400_BAD_REQUEST,
detail="Registration failed. Please check your information and try again.", detail="Registration failed. Please check your information and try again.",
) )
except AuthError as e: except AuthError as e:
logger.warning(f"Registration failed: {e!s}") logger.warning("Registration failed: %s", e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, status_code=status.HTTP_400_BAD_REQUEST,
detail="Registration failed. Please check your information and try again.", detail="Registration failed. Please check your information and try again.",
) )
except Exception as e: except Exception as e:
logger.error(f"Unexpected error during registration: {e!s}", exc_info=True) logger.exception("Unexpected error during registration: %s", e)
raise DatabaseError( raise DatabaseError(
message="An unexpected error occurred. Please try again later.", message="An unexpected error occurred. Please try again later.",
error_code=ErrorCode.INTERNAL_ERROR, error_code=ErrorCode.INTERNAL_ERROR,
@@ -165,7 +166,7 @@ async def login(
# Explicitly check for None result and raise correct exception # Explicitly check for None result and raise correct exception
if user is None: if user is None:
logger.warning(f"Invalid login attempt for: {login_data.email}") logger.warning("Invalid login attempt for: %s", login_data.email)
raise AuthError( raise AuthError(
message="Invalid email or password", message="Invalid email or password",
error_code=ErrorCode.INVALID_CREDENTIALS, error_code=ErrorCode.INVALID_CREDENTIALS,
@@ -181,11 +182,11 @@ async def login(
except AuthenticationError as e: except AuthenticationError as e:
# Handle specific authentication errors like inactive accounts # Handle specific authentication errors like inactive accounts
logger.warning(f"Authentication failed: {e!s}") logger.warning("Authentication failed: %s", e)
raise AuthError(message=str(e), error_code=ErrorCode.INVALID_CREDENTIALS) raise AuthError(message=str(e), error_code=ErrorCode.INVALID_CREDENTIALS)
except Exception as e: except Exception as e:
# Handle unexpected errors # Handle unexpected errors
logger.error(f"Unexpected error during login: {e!s}", exc_info=True) logger.exception("Unexpected error during login: %s", e)
raise DatabaseError( raise DatabaseError(
message="An unexpected error occurred. Please try again later.", message="An unexpected error occurred. Please try again later.",
error_code=ErrorCode.INTERNAL_ERROR, error_code=ErrorCode.INTERNAL_ERROR,
@@ -227,10 +228,10 @@ async def login_oauth(
# Return full token response with user data # Return full token response with user data
return tokens return tokens
except AuthenticationError as e: except AuthenticationError as e:
logger.warning(f"OAuth authentication failed: {e!s}") logger.warning("OAuth authentication failed: %s", e)
raise AuthError(message=str(e), error_code=ErrorCode.INVALID_CREDENTIALS) raise AuthError(message=str(e), error_code=ErrorCode.INVALID_CREDENTIALS)
except Exception as e: except Exception as e:
logger.error(f"Unexpected error during OAuth login: {e!s}", exc_info=True) logger.exception("Unexpected error during OAuth login: %s", e)
raise DatabaseError( raise DatabaseError(
message="An unexpected error occurred. Please try again later.", message="An unexpected error occurred. Please try again later.",
error_code=ErrorCode.INTERNAL_ERROR, error_code=ErrorCode.INTERNAL_ERROR,
@@ -263,7 +264,8 @@ async def refresh_token(
if not session: if not session:
logger.warning( logger.warning(
f"Refresh token used for inactive or non-existent session: {refresh_payload.jti}" "Refresh token used for inactive or non-existent session: %s",
refresh_payload.jti,
) )
raise HTTPException( raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, status_code=status.HTTP_401_UNAUTHORIZED,
@@ -286,9 +288,7 @@ async def refresh_token(
new_expires_at=datetime.fromtimestamp(new_refresh_payload.exp, tz=UTC), new_expires_at=datetime.fromtimestamp(new_refresh_payload.exp, tz=UTC),
) )
except Exception as session_err: except Exception as session_err:
logger.error( logger.exception("Failed to update session %s: %s", session.id, session_err)
f"Failed to update session {session.id}: {session_err!s}", exc_info=True
)
# Continue anyway - tokens are already issued # Continue anyway - tokens are already issued
return tokens return tokens
@@ -311,7 +311,7 @@ async def refresh_token(
# Re-raise HTTP exceptions (like session revoked) # Re-raise HTTP exceptions (like session revoked)
raise raise
except Exception as e: except Exception as e:
logger.error(f"Unexpected error during token refresh: {e!s}") logger.error("Unexpected error during token refresh: %s", e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="An unexpected error occurred. Please try again later.", detail="An unexpected error occurred. Please try again later.",
@@ -358,11 +358,12 @@ async def request_password_reset(
await email_service.send_password_reset_email( await email_service.send_password_reset_email(
to_email=user.email, reset_token=reset_token, user_name=user.first_name to_email=user.email, reset_token=reset_token, user_name=user.first_name
) )
logger.info(f"Password reset requested for {user.email}") logger.info("Password reset requested for %s", user.email)
else: else:
# Log attempt but don't reveal if email exists # Log attempt but don't reveal if email exists
logger.warning( logger.warning(
f"Password reset requested for non-existent or inactive email: {reset_request.email}" "Password reset requested for non-existent or inactive email: %s",
reset_request.email,
) )
# Always return success to prevent email enumeration # Always return success to prevent email enumeration
@@ -371,7 +372,7 @@ async def request_password_reset(
message="If your email is registered, you will receive a password reset link shortly", message="If your email is registered, you will receive a password reset link shortly",
) )
except Exception as e: except Exception as e:
logger.error(f"Error processing password reset request: {e!s}", exc_info=True) logger.exception("Error processing password reset request: %s", e)
# Still return success to prevent information leakage # Still return success to prevent information leakage
return MessageResponse( return MessageResponse(
success=True, success=True,
@@ -432,12 +433,14 @@ async def confirm_password_reset(
db, user_id=str(user.id) db, user_id=str(user.id)
) )
logger.info( logger.info(
f"Password reset successful for {user.email}, invalidated {deactivated_count} sessions" "Password reset successful for %s, invalidated %s sessions",
user.email,
deactivated_count,
) )
except Exception as session_error: except Exception as session_error:
# Log but don't fail password reset if session invalidation fails # Log but don't fail password reset if session invalidation fails
logger.error( logger.error(
f"Failed to invalidate sessions after password reset: {session_error!s}" "Failed to invalidate sessions after password reset: %s", session_error
) )
return MessageResponse( return MessageResponse(
@@ -448,7 +451,7 @@ async def confirm_password_reset(
except HTTPException: except HTTPException:
raise raise
except Exception as e: except Exception as e:
logger.error(f"Error confirming password reset: {e!s}", exc_info=True) logger.exception("Error confirming password reset: %s", e)
await db.rollback() await db.rollback()
raise HTTPException( raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -498,7 +501,7 @@ async def logout(
) )
except (TokenExpiredError, TokenInvalidError) as e: except (TokenExpiredError, TokenInvalidError) as e:
# Even if token is expired/invalid, try to deactivate session # Even if token is expired/invalid, try to deactivate session
logger.warning(f"Logout with invalid/expired token: {e!s}") logger.warning("Logout with invalid/expired token: %s", e)
# Don't fail - return success anyway # Don't fail - return success anyway
return MessageResponse(success=True, message="Logged out successfully") return MessageResponse(success=True, message="Logged out successfully")
@@ -509,8 +512,10 @@ async def logout(
# Verify session belongs to current user (security check) # Verify session belongs to current user (security check)
if str(session.user_id) != str(current_user.id): if str(session.user_id) != str(current_user.id):
logger.warning( logger.warning(
f"User {current_user.id} attempted to logout session {session.id} " "User %s attempted to logout session %s belonging to user %s",
f"belonging to user {session.user_id}" current_user.id,
session.id,
session.user_id,
) )
raise HTTPException( raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN, status_code=status.HTTP_403_FORBIDDEN,
@@ -521,14 +526,17 @@ async def logout(
await session_service.deactivate(db, session_id=str(session.id)) await session_service.deactivate(db, session_id=str(session.id))
logger.info( logger.info(
f"User {current_user.id} logged out from {session.device_name} " "User %s logged out from %s (session %s)",
f"(session {session.id})" current_user.id,
session.device_name,
session.id,
) )
else: else:
# Session not found - maybe already deleted or never existed # Session not found - maybe already deleted or never existed
# Return success anyway (idempotent) # Return success anyway (idempotent)
logger.info( logger.info(
f"Logout requested for non-existent session (JTI: {refresh_payload.jti})" "Logout requested for non-existent session (JTI: %s)",
refresh_payload.jti,
) )
return MessageResponse(success=True, message="Logged out successfully") return MessageResponse(success=True, message="Logged out successfully")
@@ -536,9 +544,7 @@ async def logout(
except HTTPException: except HTTPException:
raise raise
except Exception as e: except Exception as e:
logger.error( logger.exception("Error during logout for user %s: %s", current_user.id, e)
f"Error during logout for user {current_user.id}: {e!s}", exc_info=True
)
# Don't expose error details # Don't expose error details
return MessageResponse(success=True, message="Logged out successfully") return MessageResponse(success=True, message="Logged out successfully")
@@ -581,7 +587,7 @@ async def logout_all(
) )
logger.info( logger.info(
f"User {current_user.id} logged out from all devices ({count} sessions)" "User %s logged out from all devices (%s sessions)", current_user.id, count
) )
return MessageResponse( return MessageResponse(
@@ -590,9 +596,7 @@ async def logout_all(
) )
except Exception as e: except Exception as e:
logger.error( logger.exception("Error during logout-all for user %s: %s", current_user.id, e)
f"Error during logout-all for user {current_user.id}: {e!s}", exc_info=True
)
await db.rollback() await db.rollback()
raise HTTPException( raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,

View File

@@ -84,14 +84,16 @@ async def _create_oauth_login_session(
await session_service.create_session(db, obj_in=session_data) await session_service.create_session(db, obj_in=session_data)
logger.info( logger.info(
f"OAuth login successful: {user.email} via {provider} " "OAuth login successful: %s via %s from %s (IP: %s)",
f"from {device_info.device_name} (IP: {device_info.ip_address})" user.email,
provider,
device_info.device_name,
device_info.ip_address,
) )
except Exception as session_err: except Exception as session_err:
# Log but don't fail login if session creation fails # Log but don't fail login if session creation fails
logger.error( logger.exception(
f"Failed to create session for OAuth login {user.email}: {session_err!s}", "Failed to create session for OAuth login %s: %s", user.email, session_err
exc_info=True,
) )
@@ -176,13 +178,13 @@ async def get_authorization_url(
} }
except AuthError as e: except AuthError as e:
logger.warning(f"OAuth authorization failed: {e!s}") logger.warning("OAuth authorization failed: %s", e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e), detail=str(e),
) )
except Exception as e: except Exception as e:
logger.error(f"OAuth authorization error: {e!s}", exc_info=True) logger.exception("OAuth authorization error: %s", e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to create authorization URL", detail="Failed to create authorization URL",
@@ -250,13 +252,13 @@ async def handle_callback(
return result return result
except AuthError as e: except AuthError as e:
logger.warning(f"OAuth callback failed: {e!s}") logger.warning("OAuth callback failed: %s", e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, status_code=status.HTTP_401_UNAUTHORIZED,
detail=str(e), detail=str(e),
) )
except Exception as e: except Exception as e:
logger.error(f"OAuth callback error: {e!s}", exc_info=True) logger.exception("OAuth callback error: %s", e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="OAuth authentication failed", detail="OAuth authentication failed",
@@ -337,13 +339,13 @@ async def unlink_account(
) )
except AuthError as e: except AuthError as e:
logger.warning(f"OAuth unlink failed for {current_user.email}: {e!s}") logger.warning("OAuth unlink failed for %s: %s", current_user.email, e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e), detail=str(e),
) )
except Exception as e: except Exception as e:
logger.error(f"OAuth unlink error: {e!s}", exc_info=True) logger.exception("OAuth unlink error: %s", e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to unlink OAuth account", detail="Failed to unlink OAuth account",
@@ -419,13 +421,13 @@ async def start_link(
} }
except AuthError as e: except AuthError as e:
logger.warning(f"OAuth link authorization failed: {e!s}") logger.warning("OAuth link authorization failed: %s", e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e), detail=str(e),
) )
except Exception as e: except Exception as e:
logger.error(f"OAuth link error: {e!s}", exc_info=True) logger.exception("OAuth link error: %s", e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to create authorization URL", detail="Failed to create authorization URL",

View File

@@ -452,7 +452,7 @@ async def token(
except Exception as e: except Exception as e:
# Log malformed Basic auth for security monitoring # Log malformed Basic auth for security monitoring
logger.warning( logger.warning(
f"Malformed Basic auth header in token request: {type(e).__name__}" "Malformed Basic auth header in token request: %s", type(e).__name__
) )
# Fall back to form body # Fall back to form body
@@ -563,7 +563,8 @@ async def revoke(
except Exception as e: except Exception as e:
# Log malformed Basic auth for security monitoring # Log malformed Basic auth for security monitoring
logger.warning( logger.warning(
f"Malformed Basic auth header in revoke request: {type(e).__name__}" "Malformed Basic auth header in revoke request: %s",
type(e).__name__,
) )
# Fall back to form body # Fall back to form body
@@ -585,7 +586,7 @@ async def revoke(
) )
except Exception as e: except Exception as e:
# Log but don't expose errors per RFC 7009 # Log but don't expose errors per RFC 7009
logger.warning(f"Token revocation error: {e}") logger.warning("Token revocation error: %s", e)
# Always return 200 OK per RFC 7009 # Always return 200 OK per RFC 7009
return {"status": "ok"} return {"status": "ok"}
@@ -634,7 +635,8 @@ async def introspect(
except Exception as e: except Exception as e:
# Log malformed Basic auth for security monitoring # Log malformed Basic auth for security monitoring
logger.warning( logger.warning(
f"Malformed Basic auth header in introspect request: {type(e).__name__}" "Malformed Basic auth header in introspect request: %s",
type(e).__name__,
) )
# Fall back to form body # Fall back to form body
@@ -654,7 +656,7 @@ async def introspect(
headers={"WWW-Authenticate": "Basic"}, headers={"WWW-Authenticate": "Basic"},
) )
except Exception as e: except Exception as e:
logger.warning(f"Token introspection error: {e}") logger.warning("Token introspection error: %s", e)
return OAuthTokenIntrospectionResponse(active=False) # pyright: ignore[reportCallIssue] return OAuthTokenIntrospectionResponse(active=False) # pyright: ignore[reportCallIssue]

View File

@@ -77,7 +77,7 @@ async def get_my_organizations(
return orgs_with_data return orgs_with_data
except Exception as e: except Exception as e:
logger.error(f"Error getting user organizations: {e!s}", exc_info=True) logger.exception("Error getting user organizations: %s", e)
raise raise
@@ -116,7 +116,7 @@ async def get_organization(
return OrganizationResponse(**org_dict) return OrganizationResponse(**org_dict)
except Exception as e: except Exception as e:
logger.error(f"Error getting organization: {e!s}", exc_info=True) logger.exception("Error getting organization: %s", e)
raise raise
@@ -160,7 +160,7 @@ async def get_organization_members(
return PaginatedResponse(data=member_responses, pagination=pagination_meta) return PaginatedResponse(data=member_responses, pagination=pagination_meta)
except Exception as e: except Exception as e:
logger.error(f"Error getting organization members: {e!s}", exc_info=True) logger.exception("Error getting organization members: %s", e)
raise raise
@@ -188,7 +188,7 @@ async def update_organization(
db, org=org, obj_in=org_in db, org=org, obj_in=org_in
) )
logger.info( logger.info(
f"User {current_user.email} updated organization {updated_org.name}" "User %s updated organization %s", current_user.email, updated_org.name
) )
org_dict = { org_dict = {
@@ -207,5 +207,5 @@ async def update_organization(
return OrganizationResponse(**org_dict) return OrganizationResponse(**org_dict)
except Exception as e: except Exception as e:
logger.error(f"Error updating organization: {e!s}", exc_info=True) logger.exception("Error updating organization: %s", e)
raise raise

View File

@@ -74,9 +74,7 @@ async def list_my_sessions(
# For now, we'll mark current based on most recent activity # For now, we'll mark current based on most recent activity
except Exception as e: except Exception as e:
# Optional token parsing - silently ignore failures # Optional token parsing - silently ignore failures
logger.debug( logger.debug("Failed to decode access token for session marking: %s", e)
f"Failed to decode access token for session marking: {e!s}"
)
# Convert to response format # Convert to response format
session_responses = [] session_responses = []
@@ -98,7 +96,7 @@ async def list_my_sessions(
session_responses.append(session_response) session_responses.append(session_response)
logger.info( logger.info(
f"User {current_user.id} listed {len(session_responses)} active sessions" "User %s listed %s active sessions", current_user.id, len(session_responses)
) )
return SessionListResponse( return SessionListResponse(
@@ -106,9 +104,7 @@ async def list_my_sessions(
) )
except Exception as e: except Exception as e:
logger.error( logger.exception("Error listing sessions for user %s: %s", current_user.id, e)
f"Error listing sessions for user {current_user.id}: {e!s}", exc_info=True
)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve sessions", detail="Failed to retrieve sessions",
@@ -161,8 +157,10 @@ async def revoke_session(
# Verify session belongs to current user # Verify session belongs to current user
if str(session.user_id) != str(current_user.id): if str(session.user_id) != str(current_user.id):
logger.warning( logger.warning(
f"User {current_user.id} attempted to revoke session {session_id} " "User %s attempted to revoke session %s belonging to user %s",
f"belonging to user {session.user_id}" current_user.id,
session_id,
session.user_id,
) )
raise AuthorizationError( raise AuthorizationError(
message="You can only revoke your own sessions", message="You can only revoke your own sessions",
@@ -173,8 +171,10 @@ async def revoke_session(
await session_service.deactivate(db, session_id=str(session_id)) await session_service.deactivate(db, session_id=str(session_id))
logger.info( logger.info(
f"User {current_user.id} revoked session {session_id} " "User %s revoked session %s (%s)",
f"({session.device_name})" current_user.id,
session_id,
session.device_name,
) )
return MessageResponse( return MessageResponse(
@@ -185,7 +185,7 @@ async def revoke_session(
except (NotFoundError, AuthorizationError): except (NotFoundError, AuthorizationError):
raise raise
except Exception as e: except Exception as e:
logger.error(f"Error revoking session {session_id}: {e!s}", exc_info=True) logger.exception("Error revoking session %s: %s", session_id, e)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to revoke session", detail="Failed to revoke session",
@@ -229,7 +229,7 @@ async def cleanup_expired_sessions(
) )
logger.info( logger.info(
f"User {current_user.id} cleaned up {deleted_count} expired sessions" "User %s cleaned up %s expired sessions", current_user.id, deleted_count
) )
return MessageResponse( return MessageResponse(
@@ -237,9 +237,8 @@ async def cleanup_expired_sessions(
) )
except Exception as e: except Exception as e:
logger.error( logger.exception(
f"Error cleaning up sessions for user {current_user.id}: {e!s}", "Error cleaning up sessions for user %s: %s", current_user.id, e
exc_info=True,
) )
await db.rollback() await db.rollback()
raise HTTPException( raise HTTPException(

View File

@@ -1,5 +1,5 @@
""" """
User management endpoints for CRUD operations. User management endpoints for database operations.
""" """
import logging import logging
@@ -90,7 +90,7 @@ async def list_users(
return PaginatedResponse(data=users, pagination=pagination_meta) return PaginatedResponse(data=users, pagination=pagination_meta)
except Exception as e: except Exception as e:
logger.error(f"Error listing users: {e!s}", exc_info=True) logger.exception("Error listing users: %s", e)
raise raise
@@ -143,15 +143,13 @@ async def update_current_user(
updated_user = await user_service.update_user( updated_user = await user_service.update_user(
db, user=current_user, obj_in=user_update db, user=current_user, obj_in=user_update
) )
logger.info(f"User {current_user.id} updated their profile") logger.info("User %s updated their profile", current_user.id)
return updated_user return updated_user
except ValueError as e: except ValueError as e:
logger.error(f"Error updating user {current_user.id}: {e!s}") logger.error("Error updating user %s: %s", current_user.id, e)
raise raise
except Exception as e: except Exception as e:
logger.error( logger.exception("Unexpected error updating user %s: %s", current_user.id, e)
f"Unexpected error updating user {current_user.id}: {e!s}", exc_info=True
)
raise raise
@@ -184,7 +182,9 @@ async def get_user_by_id(
# Check permissions # Check permissions
if str(user_id) != str(current_user.id) and not current_user.is_superuser: if str(user_id) != str(current_user.id) and not current_user.is_superuser:
logger.warning( logger.warning(
f"User {current_user.id} attempted to access user {user_id} without permission" "User %s attempted to access user %s without permission",
current_user.id,
user_id,
) )
raise AuthorizationError( raise AuthorizationError(
message="Not enough permissions to view this user", message="Not enough permissions to view this user",
@@ -229,7 +229,9 @@ async def update_user(
if not is_own_profile and not current_user.is_superuser: if not is_own_profile and not current_user.is_superuser:
logger.warning( logger.warning(
f"User {current_user.id} attempted to update user {user_id} without permission" "User %s attempted to update user %s without permission",
current_user.id,
user_id,
) )
raise AuthorizationError( raise AuthorizationError(
message="Not enough permissions to update this user", message="Not enough permissions to update this user",
@@ -241,13 +243,13 @@ async def update_user(
try: try:
updated_user = await user_service.update_user(db, user=user, obj_in=user_update) updated_user = await user_service.update_user(db, user=user, obj_in=user_update)
logger.info(f"User {user_id} updated by {current_user.id}") logger.info("User %s updated by %s", user_id, current_user.id)
return updated_user return updated_user
except ValueError as e: except ValueError as e:
logger.error(f"Error updating user {user_id}: {e!s}") logger.error("Error updating user %s: %s", user_id, e)
raise raise
except Exception as e: except Exception as e:
logger.error(f"Unexpected error updating user {user_id}: {e!s}", exc_info=True) logger.exception("Unexpected error updating user %s: %s", user_id, e)
raise raise
@@ -287,19 +289,19 @@ async def change_current_user_password(
) )
if success: if success:
logger.info(f"User {current_user.id} changed their password") logger.info("User %s changed their password", current_user.id)
return MessageResponse( return MessageResponse(
success=True, message="Password changed successfully" success=True, message="Password changed successfully"
) )
except AuthenticationError as e: except AuthenticationError as e:
logger.warning( logger.warning(
f"Failed password change attempt for user {current_user.id}: {e!s}" "Failed password change attempt for user %s: %s", current_user.id, e
) )
raise AuthorizationError( raise AuthorizationError(
message=str(e), error_code=ErrorCode.INVALID_CREDENTIALS message=str(e), error_code=ErrorCode.INVALID_CREDENTIALS
) )
except Exception as e: except Exception as e:
logger.error(f"Error changing password for user {current_user.id}: {e!s}") logger.error("Error changing password for user %s: %s", current_user.id, e)
raise raise
@@ -343,13 +345,13 @@ async def delete_user(
try: try:
# Use soft delete instead of hard delete # Use soft delete instead of hard delete
await user_service.soft_delete_user(db, str(user_id)) await user_service.soft_delete_user(db, str(user_id))
logger.info(f"User {user_id} soft-deleted by {current_user.id}") logger.info("User %s soft-deleted by %s", user_id, current_user.id)
return MessageResponse( return MessageResponse(
success=True, message=f"User {user_id} deleted successfully" success=True, message=f"User {user_id} deleted successfully"
) )
except ValueError as e: except ValueError as e:
logger.error(f"Error deleting user {user_id}: {e!s}") logger.error("Error deleting user %s: %s", user_id, e)
raise raise
except Exception as e: except Exception as e:
logger.error(f"Unexpected error deleting user {user_id}: {e!s}", exc_info=True) logger.exception("Unexpected error deleting user %s: %s", user_id, e)
raise raise

View File

@@ -1,23 +1,21 @@
import asyncio import asyncio
import logging
import uuid import uuid
from datetime import UTC, datetime, timedelta from datetime import UTC, datetime, timedelta
from functools import partial from functools import partial
from typing import Any from typing import Any
from jose import JWTError, jwt import bcrypt
from passlib.context import CryptContext import jwt
from jwt.exceptions import (
ExpiredSignatureError,
InvalidTokenError,
MissingRequiredClaimError,
)
from pydantic import ValidationError from pydantic import ValidationError
from app.core.config import settings from app.core.config import settings
from app.schemas.users import TokenData, TokenPayload from app.schemas.users import TokenData, TokenPayload
# Suppress passlib bcrypt warnings about ident
logging.getLogger("passlib").setLevel(logging.ERROR)
# Password hashing context
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
# Custom exceptions for auth # Custom exceptions for auth
class AuthError(Exception): class AuthError(Exception):
@@ -37,13 +35,16 @@ class TokenMissingClaimError(AuthError):
def verify_password(plain_password: str, hashed_password: str) -> bool: def verify_password(plain_password: str, hashed_password: str) -> bool:
"""Verify a password against a hash.""" """Verify a password against a bcrypt hash."""
return pwd_context.verify(plain_password, hashed_password) return bcrypt.checkpw(
plain_password.encode("utf-8"), hashed_password.encode("utf-8")
)
def get_password_hash(password: str) -> str: def get_password_hash(password: str) -> str:
"""Generate a password hash.""" """Generate a bcrypt password hash."""
return pwd_context.hash(password) salt = bcrypt.gensalt()
return bcrypt.hashpw(password.encode("utf-8"), salt).decode("utf-8")
async def verify_password_async(plain_password: str, hashed_password: str) -> bool: async def verify_password_async(plain_password: str, hashed_password: str) -> bool:
@@ -60,9 +61,9 @@ async def verify_password_async(plain_password: str, hashed_password: str) -> bo
Returns: Returns:
True if password matches, False otherwise True if password matches, False otherwise
""" """
loop = asyncio.get_event_loop() loop = asyncio.get_running_loop()
return await loop.run_in_executor( return await loop.run_in_executor(
None, partial(pwd_context.verify, plain_password, hashed_password) None, partial(verify_password, plain_password, hashed_password)
) )
@@ -80,8 +81,8 @@ async def get_password_hash_async(password: str) -> str:
Returns: Returns:
Hashed password string Hashed password string
""" """
loop = asyncio.get_event_loop() loop = asyncio.get_running_loop()
return await loop.run_in_executor(None, pwd_context.hash, password) return await loop.run_in_executor(None, get_password_hash, password)
def create_access_token( def create_access_token(
@@ -121,11 +122,7 @@ def create_access_token(
to_encode.update(claims) to_encode.update(claims)
# Create the JWT # Create the JWT
encoded_jwt = jwt.encode( return jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM
)
return encoded_jwt
def create_refresh_token( def create_refresh_token(
@@ -154,11 +151,7 @@ def create_refresh_token(
"type": "refresh", "type": "refresh",
} }
encoded_jwt = jwt.encode( return jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM
)
return encoded_jwt
def decode_token(token: str, verify_type: str | None = None) -> TokenPayload: def decode_token(token: str, verify_type: str | None = None) -> TokenPayload:
@@ -198,7 +191,7 @@ def decode_token(token: str, verify_type: str | None = None) -> TokenPayload:
# Reject weak or unexpected algorithms # Reject weak or unexpected algorithms
# NOTE: These are defensive checks that provide defense-in-depth. # NOTE: These are defensive checks that provide defense-in-depth.
# The python-jose library rejects these tokens BEFORE we reach here, # PyJWT rejects these tokens BEFORE we reach here,
# but we keep these checks in case the library changes or is misconfigured. # but we keep these checks in case the library changes or is misconfigured.
# Coverage: Marked as pragma since library catches first (see tests/core/test_auth_security.py) # Coverage: Marked as pragma since library catches first (see tests/core/test_auth_security.py)
if token_algorithm == "NONE": # pragma: no cover if token_algorithm == "NONE": # pragma: no cover
@@ -219,10 +212,11 @@ def decode_token(token: str, verify_type: str | None = None) -> TokenPayload:
token_data = TokenPayload(**payload) token_data = TokenPayload(**payload)
return token_data return token_data
except JWTError as e: except ExpiredSignatureError:
# Check if the error is due to an expired token raise TokenExpiredError("Token has expired")
if "expired" in str(e).lower(): except MissingRequiredClaimError as e:
raise TokenExpiredError("Token has expired") raise TokenMissingClaimError(f"Token missing required claim: {e}")
except InvalidTokenError:
raise TokenInvalidError("Invalid authentication token") raise TokenInvalidError("Invalid authentication token")
except ValidationError: except ValidationError:
raise TokenInvalidError("Invalid token payload") raise TokenInvalidError("Invalid token payload")

View File

@@ -128,8 +128,8 @@ async def async_transaction_scope() -> AsyncGenerator[AsyncSession, None]:
Usage: Usage:
async with async_transaction_scope() as db: async with async_transaction_scope() as db:
user = await user_crud.create(db, obj_in=user_create) user = await user_repo.create(db, obj_in=user_create)
profile = await profile_crud.create(db, obj_in=profile_create) profile = await profile_repo.create(db, obj_in=profile_create)
# Both operations committed together # Both operations committed together
""" """
async with SessionLocal() as session: async with SessionLocal() as session:
@@ -139,7 +139,7 @@ async def async_transaction_scope() -> AsyncGenerator[AsyncSession, None]:
logger.debug("Async transaction committed successfully") logger.debug("Async transaction committed successfully")
except Exception as e: except Exception as e:
await session.rollback() await session.rollback()
logger.error(f"Async transaction failed, rolling back: {e!s}") logger.error("Async transaction failed, rolling back: %s", e)
raise raise
finally: finally:
await session.close() await session.close()
@@ -155,7 +155,7 @@ async def check_async_database_health() -> bool:
await db.execute(text("SELECT 1")) await db.execute(text("SELECT 1"))
return True return True
except Exception as e: except Exception as e:
logger.error(f"Async database health check failed: {e!s}") logger.error("Async database health check failed: %s", e)
return False return False

View File

@@ -143,8 +143,11 @@ async def api_exception_handler(request: Request, exc: APIException) -> JSONResp
Returns a standardized error response with error code and message. Returns a standardized error response with error code and message.
""" """
logger.warning( logger.warning(
f"API exception: {exc.error_code} - {exc.message} " "API exception: %s - %s (status: %s, path: %s)",
f"(status: {exc.status_code}, path: {request.url.path})" exc.error_code,
exc.message,
exc.status_code,
request.url.path,
) )
error_response = ErrorResponse( error_response = ErrorResponse(
@@ -186,7 +189,9 @@ async def validation_exception_handler(
) )
) )
logger.warning(f"Validation error: {len(errors)} errors (path: {request.url.path})") logger.warning(
"Validation error: %s errors (path: %s)", len(errors), request.url.path
)
error_response = ErrorResponse(errors=errors) error_response = ErrorResponse(errors=errors)
@@ -218,7 +223,10 @@ async def http_exception_handler(request: Request, exc: HTTPException) -> JSONRe
) )
logger.warning( logger.warning(
f"HTTP exception: {exc.status_code} - {exc.detail} (path: {request.url.path})" "HTTP exception: %s - %s (path: %s)",
exc.status_code,
exc.detail,
request.url.path,
) )
error_response = ErrorResponse( error_response = ErrorResponse(
@@ -239,10 +247,11 @@ async def unhandled_exception_handler(request: Request, exc: Exception) -> JSONR
Logs the full exception and returns a generic error response to avoid Logs the full exception and returns a generic error response to avoid
leaking sensitive information in production. leaking sensitive information in production.
""" """
logger.error( logger.exception(
f"Unhandled exception: {type(exc).__name__} - {exc!s} " "Unhandled exception: %s - %s (path: %s)",
f"(path: {request.url.path})", type(exc).__name__,
exc_info=True, exc,
request.url.path,
) )
# In production, don't expose internal error details # In production, don't expose internal error details

View File

@@ -19,7 +19,7 @@ from app.core.database import SessionLocal, engine
from app.models.organization import Organization from app.models.organization import Organization
from app.models.user import User from app.models.user import User
from app.models.user_organization import UserOrganization from app.models.user_organization import UserOrganization
from app.repositories.user import user_repo as user_crud from app.repositories.user import user_repo as user_repo
from app.schemas.users import UserCreate from app.schemas.users import UserCreate
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -44,16 +44,17 @@ async def init_db() -> User | None:
if not settings.FIRST_SUPERUSER_EMAIL or not settings.FIRST_SUPERUSER_PASSWORD: if not settings.FIRST_SUPERUSER_EMAIL or not settings.FIRST_SUPERUSER_PASSWORD:
logger.warning( logger.warning(
"First superuser credentials not configured in settings. " "First superuser credentials not configured in settings. "
f"Using defaults: {superuser_email}" "Using defaults: %s",
superuser_email,
) )
async with SessionLocal() as session: async with SessionLocal() as session:
try: try:
# Check if superuser already exists # Check if superuser already exists
existing_user = await user_crud.get_by_email(session, email=superuser_email) existing_user = await user_repo.get_by_email(session, email=superuser_email)
if existing_user: if existing_user:
logger.info(f"Superuser already exists: {existing_user.email}") logger.info("Superuser already exists: %s", existing_user.email)
return existing_user return existing_user
# Create superuser if doesn't exist # Create superuser if doesn't exist
@@ -65,11 +66,11 @@ async def init_db() -> User | None:
is_superuser=True, is_superuser=True,
) )
user = await user_crud.create(session, obj_in=user_in) user = await user_repo.create(session, obj_in=user_in)
await session.commit() await session.commit()
await session.refresh(user) await session.refresh(user)
logger.info(f"Created first superuser: {user.email}") logger.info("Created first superuser: %s", user.email)
# Create demo data if in demo mode # Create demo data if in demo mode
if settings.DEMO_MODE: if settings.DEMO_MODE:
@@ -79,7 +80,7 @@ async def init_db() -> User | None:
except Exception as e: except Exception as e:
await session.rollback() await session.rollback()
logger.error(f"Error initializing database: {e}") logger.error("Error initializing database: %s", e)
raise raise
@@ -92,7 +93,7 @@ async def load_demo_data(session):
"""Load demo data from JSON file.""" """Load demo data from JSON file."""
demo_data_path = Path(__file__).parent / "core" / "demo_data.json" demo_data_path = Path(__file__).parent / "core" / "demo_data.json"
if not demo_data_path.exists(): if not demo_data_path.exists():
logger.warning(f"Demo data file not found: {demo_data_path}") logger.warning("Demo data file not found: %s", demo_data_path)
return return
try: try:
@@ -119,7 +120,7 @@ async def load_demo_data(session):
session.add(org) session.add(org)
await session.flush() # Flush to get ID await session.flush() # Flush to get ID
org_map[org.slug] = org org_map[org.slug] = org
logger.info(f"Created demo organization: {org.name}") logger.info("Created demo organization: %s", org.name)
else: else:
# We can't easily get the ORM object from raw SQL result for map without querying again or mapping # We can't easily get the ORM object from raw SQL result for map without querying again or mapping
# So let's just query it properly if we need it for relationships # So let's just query it properly if we need it for relationships
@@ -135,7 +136,7 @@ async def load_demo_data(session):
# Create Users # Create Users
for user_data in data.get("users", []): for user_data in data.get("users", []):
existing_user = await user_crud.get_by_email( existing_user = await user_repo.get_by_email(
session, email=user_data["email"] session, email=user_data["email"]
) )
if not existing_user: if not existing_user:
@@ -148,7 +149,7 @@ async def load_demo_data(session):
is_superuser=user_data["is_superuser"], is_superuser=user_data["is_superuser"],
is_active=user_data.get("is_active", True), is_active=user_data.get("is_active", True),
) )
user = await user_crud.create(session, obj_in=user_in) user = await user_repo.create(session, obj_in=user_in)
# Randomize created_at for demo data (last 30 days) # Randomize created_at for demo data (last 30 days)
# This makes the charts look more realistic # This makes the charts look more realistic
@@ -174,7 +175,10 @@ async def load_demo_data(session):
) )
logger.info( logger.info(
f"Created demo user: {user.email} (created {days_ago} days ago, active={user_data.get('is_active', True)})" "Created demo user: %s (created %s days ago, active=%s)",
user.email,
days_ago,
user_data.get("is_active", True),
) )
# Add to organization if specified # Add to organization if specified
@@ -187,15 +191,15 @@ async def load_demo_data(session):
user_id=user.id, organization_id=org.id, role=role user_id=user.id, organization_id=org.id, role=role
) )
session.add(member) session.add(member)
logger.info(f"Added {user.email} to {org.name} as {role}") logger.info("Added %s to %s as %s", user.email, org.name, role)
else: else:
logger.info(f"Demo user already exists: {existing_user.email}") logger.info("Demo user already exists: %s", existing_user.email)
await session.commit() await session.commit()
logger.info("Demo data loaded successfully") logger.info("Demo data loaded successfully")
except Exception as e: except Exception as e:
logger.error(f"Error loading demo data: {e}") logger.error("Error loading demo data: %s", e)
raise raise

View File

@@ -1,7 +1,7 @@
import logging import logging
import os import os
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from datetime import datetime from datetime import UTC, datetime
from typing import Any from typing import Any
from apscheduler.schedulers.asyncio import AsyncIOScheduler from apscheduler.schedulers.asyncio import AsyncIOScheduler
@@ -16,7 +16,7 @@ from slowapi.util import get_remote_address
from app.api.main import api_router from app.api.main import api_router
from app.api.routes.oauth_provider import wellknown_router as oauth_wellknown_router from app.api.routes.oauth_provider import wellknown_router as oauth_wellknown_router
from app.core.config import settings from app.core.config import settings
from app.core.database import check_database_health from app.core.database import check_database_health, close_async_db
from app.core.exceptions import ( from app.core.exceptions import (
APIException, APIException,
api_exception_handler, api_exception_handler,
@@ -72,6 +72,7 @@ async def lifespan(app: FastAPI):
if os.getenv("IS_TEST", "False") != "True": if os.getenv("IS_TEST", "False") != "True":
scheduler.shutdown() scheduler.shutdown()
logger.info("Scheduled jobs stopped") logger.info("Scheduled jobs stopped")
await close_async_db()
logger.info("Starting app!!!") logger.info("Starting app!!!")
@@ -294,7 +295,7 @@ async def health_check() -> JSONResponse:
""" """
health_status: dict[str, Any] = { health_status: dict[str, Any] = {
"status": "healthy", "status": "healthy",
"timestamp": datetime.utcnow().isoformat() + "Z", "timestamp": datetime.now(UTC).isoformat().replace("+00:00", "Z"),
"version": settings.VERSION, "version": settings.VERSION,
"environment": settings.ENVIRONMENT, "environment": settings.ENVIRONMENT,
"checks": {}, "checks": {},
@@ -319,7 +320,7 @@ async def health_check() -> JSONResponse:
"message": f"Database connection failed: {e!s}", "message": f"Database connection failed: {e!s}",
} }
response_status = status.HTTP_503_SERVICE_UNAVAILABLE response_status = status.HTTP_503_SERVICE_UNAVAILABLE
logger.error(f"Health check failed - database error: {e}") logger.error("Health check failed - database error: %s", e)
return JSONResponse(status_code=response_status, content=health_status) return JSONResponse(status_code=response_status, content=health_status)

View File

@@ -1,6 +1,6 @@
# app/repositories/base.py # app/repositories/base.py
""" """
Base repository class for async CRUD operations using SQLAlchemy 2.0 async patterns. Base repository class for async database operations using SQLAlchemy 2.0 async patterns.
Provides reusable create, read, update, and delete operations for all models. Provides reusable create, read, update, and delete operations for all models.
""" """
@@ -68,7 +68,7 @@ class BaseRepository[
else: else:
uuid_obj = uuid.UUID(str(id)) uuid_obj = uuid.UUID(str(id))
except (ValueError, AttributeError, TypeError) as e: except (ValueError, AttributeError, TypeError) as e:
logger.warning(f"Invalid UUID format: {id} - {e!s}") logger.warning("Invalid UUID format: %s - %s", id, e)
return None return None
try: try:
@@ -81,7 +81,9 @@ class BaseRepository[
result = await db.execute(query) result = await db.execute(query)
return result.scalar_one_or_none() return result.scalar_one_or_none()
except Exception as e: except Exception as e:
logger.error(f"Error retrieving {self.model.__name__} with id {id}: {e!s}") logger.error(
"Error retrieving %s with id %s: %s", self.model.__name__, id, e
)
raise raise
async def get_multi( async def get_multi(
@@ -113,7 +115,7 @@ class BaseRepository[
return list(result.scalars().all()) return list(result.scalars().all())
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error retrieving multiple {self.model.__name__} records: {e!s}" "Error retrieving multiple %s records: %s", self.model.__name__, e
) )
raise raise
@@ -138,22 +140,24 @@ class BaseRepository[
error_msg = str(e.orig) if hasattr(e, "orig") else str(e) error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
if "unique" in error_msg.lower() or "duplicate" in error_msg.lower(): if "unique" in error_msg.lower() or "duplicate" in error_msg.lower():
logger.warning( logger.warning(
f"Duplicate entry attempted for {self.model.__name__}: {error_msg}" "Duplicate entry attempted for %s: %s",
self.model.__name__,
error_msg,
) )
raise DuplicateEntryError( raise DuplicateEntryError(
f"A {self.model.__name__} with this data already exists" f"A {self.model.__name__} with this data already exists"
) )
logger.error(f"Integrity error creating {self.model.__name__}: {error_msg}") logger.error(
"Integrity error creating %s: %s", self.model.__name__, error_msg
)
raise IntegrityConstraintError(f"Database integrity error: {error_msg}") raise IntegrityConstraintError(f"Database integrity error: {error_msg}")
except (OperationalError, DataError) as e: # pragma: no cover except (OperationalError, DataError) as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error(f"Database error creating {self.model.__name__}: {e!s}") logger.error("Database error creating %s: %s", self.model.__name__, e)
raise IntegrityConstraintError(f"Database operation failed: {e!s}") raise IntegrityConstraintError(f"Database operation failed: {e!s}")
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error( logger.exception("Unexpected error creating %s: %s", self.model.__name__, e)
f"Unexpected error creating {self.model.__name__}: {e!s}", exc_info=True
)
raise raise
async def update( async def update(
@@ -184,22 +188,24 @@ class BaseRepository[
error_msg = str(e.orig) if hasattr(e, "orig") else str(e) error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
if "unique" in error_msg.lower() or "duplicate" in error_msg.lower(): if "unique" in error_msg.lower() or "duplicate" in error_msg.lower():
logger.warning( logger.warning(
f"Duplicate entry attempted for {self.model.__name__}: {error_msg}" "Duplicate entry attempted for %s: %s",
self.model.__name__,
error_msg,
) )
raise DuplicateEntryError( raise DuplicateEntryError(
f"A {self.model.__name__} with this data already exists" f"A {self.model.__name__} with this data already exists"
) )
logger.error(f"Integrity error updating {self.model.__name__}: {error_msg}") logger.error(
"Integrity error updating %s: %s", self.model.__name__, error_msg
)
raise IntegrityConstraintError(f"Database integrity error: {error_msg}") raise IntegrityConstraintError(f"Database integrity error: {error_msg}")
except (OperationalError, DataError) as e: except (OperationalError, DataError) as e:
await db.rollback() await db.rollback()
logger.error(f"Database error updating {self.model.__name__}: {e!s}") logger.error("Database error updating %s: %s", self.model.__name__, e)
raise IntegrityConstraintError(f"Database operation failed: {e!s}") raise IntegrityConstraintError(f"Database operation failed: {e!s}")
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error( logger.exception("Unexpected error updating %s: %s", self.model.__name__, e)
f"Unexpected error updating {self.model.__name__}: {e!s}", exc_info=True
)
raise raise
async def remove(self, db: AsyncSession, *, id: str) -> ModelType | None: async def remove(self, db: AsyncSession, *, id: str) -> ModelType | None:
@@ -210,7 +216,7 @@ class BaseRepository[
else: else:
uuid_obj = uuid.UUID(str(id)) uuid_obj = uuid.UUID(str(id))
except (ValueError, AttributeError, TypeError) as e: except (ValueError, AttributeError, TypeError) as e:
logger.warning(f"Invalid UUID format for deletion: {id} - {e!s}") logger.warning("Invalid UUID format for deletion: %s - %s", id, e)
return None return None
try: try:
@@ -221,7 +227,7 @@ class BaseRepository[
if obj is None: if obj is None:
logger.warning( logger.warning(
f"{self.model.__name__} with id {id} not found for deletion" "%s with id %s not found for deletion", self.model.__name__, id
) )
return None return None
@@ -231,15 +237,16 @@ class BaseRepository[
except IntegrityError as e: except IntegrityError as e:
await db.rollback() await db.rollback()
error_msg = str(e.orig) if hasattr(e, "orig") else str(e) error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
logger.error(f"Integrity error deleting {self.model.__name__}: {error_msg}") logger.error(
"Integrity error deleting %s: %s", self.model.__name__, error_msg
)
raise IntegrityConstraintError( raise IntegrityConstraintError(
f"Cannot delete {self.model.__name__}: referenced by other records" f"Cannot delete {self.model.__name__}: referenced by other records"
) )
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error( logger.exception(
f"Error deleting {self.model.__name__} with id {id}: {e!s}", "Error deleting %s with id %s: %s", self.model.__name__, id, e
exc_info=True,
) )
raise raise
@@ -298,7 +305,7 @@ class BaseRepository[
return items, total return items, total
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error( logger.error(
f"Error retrieving paginated {self.model.__name__} records: {e!s}" "Error retrieving paginated %s records: %s", self.model.__name__, e
) )
raise raise
@@ -308,7 +315,7 @@ class BaseRepository[
result = await db.execute(select(func.count(self.model.id))) result = await db.execute(select(func.count(self.model.id)))
return result.scalar_one() return result.scalar_one()
except Exception as e: except Exception as e:
logger.error(f"Error counting {self.model.__name__} records: {e!s}") logger.error("Error counting %s records: %s", self.model.__name__, e)
raise raise
async def exists(self, db: AsyncSession, id: str) -> bool: async def exists(self, db: AsyncSession, id: str) -> bool:
@@ -330,7 +337,7 @@ class BaseRepository[
else: else:
uuid_obj = uuid.UUID(str(id)) uuid_obj = uuid.UUID(str(id))
except (ValueError, AttributeError, TypeError) as e: except (ValueError, AttributeError, TypeError) as e:
logger.warning(f"Invalid UUID format for soft deletion: {id} - {e!s}") logger.warning("Invalid UUID format for soft deletion: %s - %s", id, e)
return None return None
try: try:
@@ -341,12 +348,12 @@ class BaseRepository[
if obj is None: if obj is None:
logger.warning( logger.warning(
f"{self.model.__name__} with id {id} not found for soft deletion" "%s with id %s not found for soft deletion", self.model.__name__, id
) )
return None return None
if not hasattr(self.model, "deleted_at"): if not hasattr(self.model, "deleted_at"):
logger.error(f"{self.model.__name__} does not support soft deletes") logger.error("%s does not support soft deletes", self.model.__name__)
raise InvalidInputError( raise InvalidInputError(
f"{self.model.__name__} does not have a deleted_at column" f"{self.model.__name__} does not have a deleted_at column"
) )
@@ -358,9 +365,8 @@ class BaseRepository[
return obj return obj
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error( logger.exception(
f"Error soft deleting {self.model.__name__} with id {id}: {e!s}", "Error soft deleting %s with id %s: %s", self.model.__name__, id, e
exc_info=True,
) )
raise raise
@@ -376,7 +382,7 @@ class BaseRepository[
else: else:
uuid_obj = uuid.UUID(str(id)) uuid_obj = uuid.UUID(str(id))
except (ValueError, AttributeError, TypeError) as e: except (ValueError, AttributeError, TypeError) as e:
logger.warning(f"Invalid UUID format for restoration: {id} - {e!s}") logger.warning("Invalid UUID format for restoration: %s - %s", id, e)
return None return None
try: try:
@@ -388,14 +394,16 @@ class BaseRepository[
) )
obj = result.scalar_one_or_none() obj = result.scalar_one_or_none()
else: else:
logger.error(f"{self.model.__name__} does not support soft deletes") logger.error("%s does not support soft deletes", self.model.__name__)
raise InvalidInputError( raise InvalidInputError(
f"{self.model.__name__} does not have a deleted_at column" f"{self.model.__name__} does not have a deleted_at column"
) )
if obj is None: if obj is None:
logger.warning( logger.warning(
f"Soft-deleted {self.model.__name__} with id {id} not found for restoration" "Soft-deleted %s with id %s not found for restoration",
self.model.__name__,
id,
) )
return None return None
@@ -406,8 +414,7 @@ class BaseRepository[
return obj return obj
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error( logger.exception(
f"Error restoring {self.model.__name__} with id {id}: {e!s}", "Error restoring %s with id %s: %s", self.model.__name__, id, e
exc_info=True,
) )
raise raise

View File

@@ -1,5 +1,5 @@
# app/repositories/oauth_account.py # app/repositories/oauth_account.py
"""Repository for OAuthAccount model async CRUD operations.""" """Repository for OAuthAccount model async database operations."""
import logging import logging
from datetime import datetime from datetime import datetime
@@ -50,7 +50,10 @@ class OAuthAccountRepository(
return result.scalar_one_or_none() return result.scalar_one_or_none()
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error( logger.error(
f"Error getting OAuth account for {provider}:{provider_user_id}: {e!s}" "Error getting OAuth account for %s:%s: %s",
provider,
provider_user_id,
e,
) )
raise raise
@@ -76,7 +79,7 @@ class OAuthAccountRepository(
return result.scalar_one_or_none() return result.scalar_one_or_none()
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error( logger.error(
f"Error getting OAuth account for {provider} email {email}: {e!s}" "Error getting OAuth account for %s email %s: %s", provider, email, e
) )
raise raise
@@ -97,7 +100,7 @@ class OAuthAccountRepository(
) )
return list(result.scalars().all()) return list(result.scalars().all())
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error(f"Error getting OAuth accounts for user {user_id}: {e!s}") logger.error("Error getting OAuth accounts for user %s: %s", user_id, e)
raise raise
async def get_user_account_by_provider( async def get_user_account_by_provider(
@@ -122,7 +125,10 @@ class OAuthAccountRepository(
return result.scalar_one_or_none() return result.scalar_one_or_none()
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error( logger.error(
f"Error getting OAuth account for user {user_id}, provider {provider}: {e!s}" "Error getting OAuth account for user %s, provider %s: %s",
user_id,
provider,
e,
) )
raise raise
@@ -145,7 +151,9 @@ class OAuthAccountRepository(
await db.refresh(db_obj) await db.refresh(db_obj)
logger.info( logger.info(
f"OAuth account created: {obj_in.provider} linked to user {obj_in.user_id}" "OAuth account created: %s linked to user %s",
obj_in.provider,
obj_in.user_id,
) )
return db_obj return db_obj
except IntegrityError as e: # pragma: no cover except IntegrityError as e: # pragma: no cover
@@ -153,16 +161,18 @@ class OAuthAccountRepository(
error_msg = str(e.orig) if hasattr(e, "orig") else str(e) error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
if "uq_oauth_provider_user" in error_msg.lower(): if "uq_oauth_provider_user" in error_msg.lower():
logger.warning( logger.warning(
f"OAuth account already exists: {obj_in.provider}:{obj_in.provider_user_id}" "OAuth account already exists: %s:%s",
obj_in.provider,
obj_in.provider_user_id,
) )
raise DuplicateEntryError( raise DuplicateEntryError(
f"This {obj_in.provider} account is already linked to another user" f"This {obj_in.provider} account is already linked to another user"
) )
logger.error(f"Integrity error creating OAuth account: {error_msg}") logger.error("Integrity error creating OAuth account: %s", error_msg)
raise DuplicateEntryError(f"Failed to create OAuth account: {error_msg}") raise DuplicateEntryError(f"Failed to create OAuth account: {error_msg}")
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error(f"Error creating OAuth account: {e!s}", exc_info=True) logger.exception("Error creating OAuth account: %s", e)
raise raise
async def delete_account( async def delete_account(
@@ -189,18 +199,20 @@ class OAuthAccountRepository(
deleted = result.rowcount > 0 deleted = result.rowcount > 0
if deleted: if deleted:
logger.info( logger.info(
f"OAuth account deleted: {provider} unlinked from user {user_id}" "OAuth account deleted: %s unlinked from user %s", provider, user_id
) )
else: else:
logger.warning( logger.warning(
f"OAuth account not found for deletion: {provider} for user {user_id}" "OAuth account not found for deletion: %s for user %s",
provider,
user_id,
) )
return deleted return deleted
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error( logger.error(
f"Error deleting OAuth account {provider} for user {user_id}: {e!s}" "Error deleting OAuth account %s for user %s: %s", provider, user_id, e
) )
raise raise
@@ -229,7 +241,7 @@ class OAuthAccountRepository(
return account return account
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error(f"Error updating OAuth tokens: {e!s}") logger.error("Error updating OAuth tokens: %s", e)
raise raise

View File

@@ -1,5 +1,5 @@
# app/repositories/oauth_client.py # app/repositories/oauth_client.py
"""Repository for OAuthClient model async CRUD operations.""" """Repository for OAuthClient model async database operations."""
import logging import logging
import secrets import secrets
@@ -42,7 +42,7 @@ class OAuthClientRepository(
) )
return result.scalar_one_or_none() return result.scalar_one_or_none()
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error(f"Error getting OAuth client {client_id}: {e!s}") logger.error("Error getting OAuth client %s: %s", client_id, e)
raise raise
async def create_client( async def create_client(
@@ -80,17 +80,17 @@ class OAuthClientRepository(
await db.refresh(db_obj) await db.refresh(db_obj)
logger.info( logger.info(
f"OAuth client created: {obj_in.client_name} ({client_id[:8]}...)" "OAuth client created: %s (%s...)", obj_in.client_name, client_id[:8]
) )
return db_obj, client_secret return db_obj, client_secret
except IntegrityError as e: # pragma: no cover except IntegrityError as e: # pragma: no cover
await db.rollback() await db.rollback()
error_msg = str(e.orig) if hasattr(e, "orig") else str(e) error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
logger.error(f"Error creating OAuth client: {error_msg}") logger.error("Error creating OAuth client: %s", error_msg)
raise DuplicateEntryError(f"Failed to create OAuth client: {error_msg}") raise DuplicateEntryError(f"Failed to create OAuth client: {error_msg}")
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error(f"Error creating OAuth client: {e!s}", exc_info=True) logger.exception("Error creating OAuth client: %s", e)
raise raise
async def deactivate_client( async def deactivate_client(
@@ -107,11 +107,11 @@ class OAuthClientRepository(
await db.commit() await db.commit()
await db.refresh(client) await db.refresh(client)
logger.info(f"OAuth client deactivated: {client.client_name}") logger.info("OAuth client deactivated: %s", client.client_name)
return client return client
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error(f"Error deactivating OAuth client {client_id}: {e!s}") logger.error("Error deactivating OAuth client %s: %s", client_id, e)
raise raise
async def validate_redirect_uri( async def validate_redirect_uri(
@@ -125,7 +125,7 @@ class OAuthClientRepository(
return redirect_uri in (client.redirect_uris or []) return redirect_uri in (client.redirect_uris or [])
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error(f"Error validating redirect URI: {e!s}") logger.error("Error validating redirect URI: %s", e)
return False return False
async def verify_client_secret( async def verify_client_secret(
@@ -158,7 +158,7 @@ class OAuthClientRepository(
secret_hash = hashlib.sha256(client_secret.encode()).hexdigest() secret_hash = hashlib.sha256(client_secret.encode()).hexdigest()
return secrets.compare_digest(stored_hash, secret_hash) return secrets.compare_digest(stored_hash, secret_hash)
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error(f"Error verifying client secret: {e!s}") logger.error("Error verifying client secret: %s", e)
return False return False
async def get_all_clients( async def get_all_clients(
@@ -173,7 +173,7 @@ class OAuthClientRepository(
result = await db.execute(query) result = await db.execute(query)
return list(result.scalars().all()) return list(result.scalars().all())
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
logger.error(f"Error getting all OAuth clients: {e!s}") logger.error("Error getting all OAuth clients: %s", e)
raise raise
async def delete_client(self, db: AsyncSession, *, client_id: str) -> bool: async def delete_client(self, db: AsyncSession, *, client_id: str) -> bool:
@@ -186,14 +186,14 @@ class OAuthClientRepository(
deleted = result.rowcount > 0 deleted = result.rowcount > 0
if deleted: if deleted:
logger.info(f"OAuth client deleted: {client_id}") logger.info("OAuth client deleted: %s", client_id)
else: else:
logger.warning(f"OAuth client not found for deletion: {client_id}") logger.warning("OAuth client not found for deletion: %s", client_id)
return deleted return deleted
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error(f"Error deleting OAuth client {client_id}: {e!s}") logger.error("Error deleting OAuth client %s: %s", client_id, e)
raise raise

View File

@@ -1,5 +1,5 @@
# app/repositories/oauth_state.py # app/repositories/oauth_state.py
"""Repository for OAuthState model async CRUD operations.""" """Repository for OAuthState model async database operations."""
import logging import logging
from datetime import UTC, datetime from datetime import UTC, datetime
@@ -42,16 +42,16 @@ class OAuthStateRepository(BaseRepository[OAuthState, OAuthStateCreate, EmptySch
await db.commit() await db.commit()
await db.refresh(db_obj) await db.refresh(db_obj)
logger.debug(f"OAuth state created for {obj_in.provider}") logger.debug("OAuth state created for %s", obj_in.provider)
return db_obj return db_obj
except IntegrityError as e: # pragma: no cover except IntegrityError as e: # pragma: no cover
await db.rollback() await db.rollback()
error_msg = str(e.orig) if hasattr(e, "orig") else str(e) error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
logger.error(f"OAuth state collision: {error_msg}") logger.error("OAuth state collision: %s", error_msg)
raise DuplicateEntryError("Failed to create OAuth state, please retry") raise DuplicateEntryError("Failed to create OAuth state, please retry")
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error(f"Error creating OAuth state: {e!s}", exc_info=True) logger.exception("Error creating OAuth state: %s", e)
raise raise
async def get_and_consume_state( async def get_and_consume_state(
@@ -65,7 +65,7 @@ class OAuthStateRepository(BaseRepository[OAuthState, OAuthStateCreate, EmptySch
db_obj = result.scalar_one_or_none() db_obj = result.scalar_one_or_none()
if db_obj is None: if db_obj is None:
logger.warning(f"OAuth state not found: {state[:8]}...") logger.warning("OAuth state not found: %s...", state[:8])
return None return None
now = datetime.now(UTC) now = datetime.now(UTC)
@@ -74,7 +74,7 @@ class OAuthStateRepository(BaseRepository[OAuthState, OAuthStateCreate, EmptySch
expires_at = expires_at.replace(tzinfo=UTC) expires_at = expires_at.replace(tzinfo=UTC)
if expires_at < now: if expires_at < now:
logger.warning(f"OAuth state expired: {state[:8]}...") logger.warning("OAuth state expired: %s...", state[:8])
await db.delete(db_obj) await db.delete(db_obj)
await db.commit() await db.commit()
return None return None
@@ -82,11 +82,11 @@ class OAuthStateRepository(BaseRepository[OAuthState, OAuthStateCreate, EmptySch
await db.delete(db_obj) await db.delete(db_obj)
await db.commit() await db.commit()
logger.debug(f"OAuth state consumed: {state[:8]}...") logger.debug("OAuth state consumed: %s...", state[:8])
return db_obj return db_obj
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error(f"Error consuming OAuth state: {e!s}") logger.error("Error consuming OAuth state: %s", e)
raise raise
async def cleanup_expired(self, db: AsyncSession) -> int: async def cleanup_expired(self, db: AsyncSession) -> int:
@@ -100,12 +100,12 @@ class OAuthStateRepository(BaseRepository[OAuthState, OAuthStateCreate, EmptySch
count = result.rowcount count = result.rowcount
if count > 0: if count > 0:
logger.info(f"Cleaned up {count} expired OAuth states") logger.info("Cleaned up %s expired OAuth states", count)
return count return count
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
await db.rollback() await db.rollback()
logger.error(f"Error cleaning up expired OAuth states: {e!s}") logger.error("Error cleaning up expired OAuth states: %s", e)
raise raise

View File

@@ -1,5 +1,5 @@
# app/repositories/organization.py # app/repositories/organization.py
"""Repository for Organization model async CRUD operations using SQLAlchemy 2.0 patterns.""" """Repository for Organization model async database operations using SQLAlchemy 2.0 patterns."""
import logging import logging
from typing import Any from typing import Any
@@ -35,7 +35,7 @@ class OrganizationRepository(
) )
return result.scalar_one_or_none() return result.scalar_one_or_none()
except Exception as e: except Exception as e:
logger.error(f"Error getting organization by slug {slug}: {e!s}") logger.error("Error getting organization by slug %s: %s", slug, e)
raise raise
async def create( async def create(
@@ -62,17 +62,15 @@ class OrganizationRepository(
or "unique" in error_msg.lower() or "unique" in error_msg.lower()
or "duplicate" in error_msg.lower() or "duplicate" in error_msg.lower()
): ):
logger.warning(f"Duplicate slug attempted: {obj_in.slug}") logger.warning("Duplicate slug attempted: %s", obj_in.slug)
raise DuplicateEntryError( raise DuplicateEntryError(
f"Organization with slug '{obj_in.slug}' already exists" f"Organization with slug '{obj_in.slug}' already exists"
) )
logger.error(f"Integrity error creating organization: {error_msg}") logger.error("Integrity error creating organization: %s", error_msg)
raise IntegrityConstraintError(f"Database integrity error: {error_msg}") raise IntegrityConstraintError(f"Database integrity error: {error_msg}")
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error( logger.exception("Unexpected error creating organization: %s", e)
f"Unexpected error creating organization: {e!s}", exc_info=True
)
raise raise
async def get_multi_with_filters( async def get_multi_with_filters(
@@ -117,7 +115,7 @@ class OrganizationRepository(
return organizations, total return organizations, total
except Exception as e: except Exception as e:
logger.error(f"Error getting organizations with filters: {e!s}") logger.error("Error getting organizations with filters: %s", e)
raise raise
async def get_member_count(self, db: AsyncSession, *, organization_id: UUID) -> int: async def get_member_count(self, db: AsyncSession, *, organization_id: UUID) -> int:
@@ -134,7 +132,7 @@ class OrganizationRepository(
return result.scalar_one() or 0 return result.scalar_one() or 0
except Exception as e: except Exception as e:
logger.error( logger.error(
f"Error getting member count for organization {organization_id}: {e!s}" "Error getting member count for organization %s: %s", organization_id, e
) )
raise raise
@@ -207,9 +205,7 @@ class OrganizationRepository(
return orgs_with_counts, total return orgs_with_counts, total
except Exception as e: except Exception as e:
logger.error( logger.exception("Error getting organizations with member counts: %s", e)
f"Error getting organizations with member counts: {e!s}", exc_info=True
)
raise raise
async def add_user( async def add_user(
@@ -259,11 +255,11 @@ class OrganizationRepository(
return user_org return user_org
except IntegrityError as e: except IntegrityError as e:
await db.rollback() await db.rollback()
logger.error(f"Integrity error adding user to organization: {e!s}") logger.error("Integrity error adding user to organization: %s", e)
raise IntegrityConstraintError("Failed to add user to organization") raise IntegrityConstraintError("Failed to add user to organization")
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error adding user to organization: {e!s}", exc_info=True) logger.exception("Error adding user to organization: %s", e)
raise raise
async def remove_user( async def remove_user(
@@ -289,7 +285,7 @@ class OrganizationRepository(
return True return True
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error removing user from organization: {e!s}", exc_info=True) logger.exception("Error removing user from organization: %s", e)
raise raise
async def update_user_role( async def update_user_role(
@@ -324,7 +320,7 @@ class OrganizationRepository(
return user_org return user_org
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error updating user role: {e!s}", exc_info=True) logger.exception("Error updating user role: %s", e)
raise raise
async def get_organization_members( async def get_organization_members(
@@ -384,7 +380,7 @@ class OrganizationRepository(
return members, total return members, total
except Exception as e: except Exception as e:
logger.error(f"Error getting organization members: {e!s}") logger.error("Error getting organization members: %s", e)
raise raise
async def get_user_organizations( async def get_user_organizations(
@@ -407,7 +403,7 @@ class OrganizationRepository(
result = await db.execute(query) result = await db.execute(query)
return list(result.scalars().all()) return list(result.scalars().all())
except Exception as e: except Exception as e:
logger.error(f"Error getting user organizations: {e!s}") logger.error("Error getting user organizations: %s", e)
raise raise
async def get_user_organizations_with_details( async def get_user_organizations_with_details(
@@ -456,9 +452,7 @@ class OrganizationRepository(
] ]
except Exception as e: except Exception as e:
logger.error( logger.exception("Error getting user organizations with details: %s", e)
f"Error getting user organizations with details: {e!s}", exc_info=True
)
raise raise
async def get_user_role_in_org( async def get_user_role_in_org(
@@ -479,7 +473,7 @@ class OrganizationRepository(
return user_org.role if user_org else None # pyright: ignore[reportReturnType] return user_org.role if user_org else None # pyright: ignore[reportReturnType]
except Exception as e: except Exception as e:
logger.error(f"Error getting user role in org: {e!s}") logger.error("Error getting user role in org: %s", e)
raise raise
async def is_user_org_owner( async def is_user_org_owner(

View File

@@ -1,5 +1,5 @@
# app/repositories/session.py # app/repositories/session.py
"""Repository for UserSession model async CRUD operations using SQLAlchemy 2.0 patterns.""" """Repository for UserSession model async database operations using SQLAlchemy 2.0 patterns."""
import logging import logging
import uuid import uuid
@@ -29,7 +29,7 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
) )
return result.scalar_one_or_none() return result.scalar_one_or_none()
except Exception as e: except Exception as e:
logger.error(f"Error getting session by JTI {jti}: {e!s}") logger.error("Error getting session by JTI %s: %s", jti, e)
raise raise
async def get_active_by_jti( async def get_active_by_jti(
@@ -47,7 +47,7 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
) )
return result.scalar_one_or_none() return result.scalar_one_or_none()
except Exception as e: except Exception as e:
logger.error(f"Error getting active session by JTI {jti}: {e!s}") logger.error("Error getting active session by JTI %s: %s", jti, e)
raise raise
async def get_user_sessions( async def get_user_sessions(
@@ -74,7 +74,7 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
result = await db.execute(query) result = await db.execute(query)
return list(result.scalars().all()) return list(result.scalars().all())
except Exception as e: except Exception as e:
logger.error(f"Error getting sessions for user {user_id}: {e!s}") logger.error("Error getting sessions for user %s: %s", user_id, e)
raise raise
async def create_session( async def create_session(
@@ -100,14 +100,16 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
await db.refresh(db_obj) await db.refresh(db_obj)
logger.info( logger.info(
f"Session created for user {obj_in.user_id} from {obj_in.device_name} " "Session created for user %s from %s (IP: %s)",
f"(IP: {obj_in.ip_address})" obj_in.user_id,
obj_in.device_name,
obj_in.ip_address,
) )
return db_obj return db_obj
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error creating session: {e!s}", exc_info=True) logger.exception("Error creating session: %s", e)
raise IntegrityConstraintError(f"Failed to create session: {e!s}") raise IntegrityConstraintError(f"Failed to create session: {e!s}")
async def deactivate( async def deactivate(
@@ -117,7 +119,7 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
try: try:
session = await self.get(db, id=session_id) session = await self.get(db, id=session_id)
if not session: if not session:
logger.warning(f"Session {session_id} not found for deactivation") logger.warning("Session %s not found for deactivation", session_id)
return None return None
session.is_active = False session.is_active = False
@@ -126,14 +128,16 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
await db.refresh(session) await db.refresh(session)
logger.info( logger.info(
f"Session {session_id} deactivated for user {session.user_id} " "Session %s deactivated for user %s (%s)",
f"({session.device_name})" session_id,
session.user_id,
session.device_name,
) )
return session return session
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error deactivating session {session_id}: {e!s}") logger.error("Error deactivating session %s: %s", session_id, e)
raise raise
async def deactivate_all_user_sessions( async def deactivate_all_user_sessions(
@@ -154,12 +158,12 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
count = result.rowcount count = result.rowcount
logger.info(f"Deactivated {count} sessions for user {user_id}") logger.info("Deactivated %s sessions for user %s", count, user_id)
return count return count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error deactivating all sessions for user {user_id}: {e!s}") logger.error("Error deactivating all sessions for user %s: %s", user_id, e)
raise raise
async def update_last_used( async def update_last_used(
@@ -174,7 +178,7 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
return session return session
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error updating last_used for session {session.id}: {e!s}") logger.error("Error updating last_used for session %s: %s", session.id, e)
raise raise
async def update_refresh_token( async def update_refresh_token(
@@ -197,7 +201,7 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error( logger.error(
f"Error updating refresh token for session {session.id}: {e!s}" "Error updating refresh token for session %s: %s", session.id, e
) )
raise raise
@@ -221,12 +225,12 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
count = result.rowcount count = result.rowcount
if count > 0: if count > 0:
logger.info(f"Cleaned up {count} expired sessions using bulk DELETE") logger.info("Cleaned up %s expired sessions using bulk DELETE", count)
return count return count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error cleaning up expired sessions: {e!s}") logger.error("Error cleaning up expired sessions: %s", e)
raise raise
async def cleanup_expired_for_user(self, db: AsyncSession, *, user_id: str) -> int: async def cleanup_expired_for_user(self, db: AsyncSession, *, user_id: str) -> int:
@@ -235,7 +239,7 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
try: try:
uuid_obj = uuid.UUID(user_id) uuid_obj = uuid.UUID(user_id)
except (ValueError, AttributeError): except (ValueError, AttributeError):
logger.error(f"Invalid UUID format: {user_id}") logger.error("Invalid UUID format: %s", user_id)
raise InvalidInputError(f"Invalid user ID format: {user_id}") raise InvalidInputError(f"Invalid user ID format: {user_id}")
now = datetime.now(UTC) now = datetime.now(UTC)
@@ -255,14 +259,16 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
if count > 0: if count > 0:
logger.info( logger.info(
f"Cleaned up {count} expired sessions for user {user_id} using bulk DELETE" "Cleaned up %s expired sessions for user %s using bulk DELETE",
count,
user_id,
) )
return count return count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error( logger.error(
f"Error cleaning up expired sessions for user {user_id}: {e!s}" "Error cleaning up expired sessions for user %s: %s", user_id, e
) )
raise raise
@@ -278,7 +284,7 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
) )
return result.scalar_one() return result.scalar_one()
except Exception as e: except Exception as e:
logger.error(f"Error counting sessions for user {user_id}: {e!s}") logger.error("Error counting sessions for user %s: %s", user_id, e)
raise raise
async def get_all_sessions( async def get_all_sessions(
@@ -319,7 +325,7 @@ class SessionRepository(BaseRepository[UserSession, SessionCreate, SessionUpdate
return sessions, total return sessions, total
except Exception as e: except Exception as e:
logger.error(f"Error getting all sessions: {e!s}", exc_info=True) logger.exception("Error getting all sessions: %s", e)
raise raise

View File

@@ -1,5 +1,5 @@
# app/repositories/user.py # app/repositories/user.py
"""Repository for User model async CRUD operations using SQLAlchemy 2.0 patterns.""" """Repository for User model async database operations using SQLAlchemy 2.0 patterns."""
import logging import logging
from datetime import UTC, datetime from datetime import UTC, datetime
@@ -28,7 +28,7 @@ class UserRepository(BaseRepository[User, UserCreate, UserUpdate]):
result = await db.execute(select(User).where(User.email == email)) result = await db.execute(select(User).where(User.email == email))
return result.scalar_one_or_none() return result.scalar_one_or_none()
except Exception as e: except Exception as e:
logger.error(f"Error getting user by email {email}: {e!s}") logger.error("Error getting user by email %s: %s", email, e)
raise raise
async def create(self, db: AsyncSession, *, obj_in: UserCreate) -> User: async def create(self, db: AsyncSession, *, obj_in: UserCreate) -> User:
@@ -57,15 +57,15 @@ class UserRepository(BaseRepository[User, UserCreate, UserUpdate]):
await db.rollback() await db.rollback()
error_msg = str(e.orig) if hasattr(e, "orig") else str(e) error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
if "email" in error_msg.lower(): if "email" in error_msg.lower():
logger.warning(f"Duplicate email attempted: {obj_in.email}") logger.warning("Duplicate email attempted: %s", obj_in.email)
raise DuplicateEntryError( raise DuplicateEntryError(
f"User with email {obj_in.email} already exists" f"User with email {obj_in.email} already exists"
) )
logger.error(f"Integrity error creating user: {error_msg}") logger.error("Integrity error creating user: %s", error_msg)
raise DuplicateEntryError(f"Database integrity error: {error_msg}") raise DuplicateEntryError(f"Database integrity error: {error_msg}")
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Unexpected error creating user: {e!s}", exc_info=True) logger.exception("Unexpected error creating user: %s", e)
raise raise
async def create_oauth_user( async def create_oauth_user(
@@ -93,13 +93,13 @@ class UserRepository(BaseRepository[User, UserCreate, UserUpdate]):
await db.rollback() await db.rollback()
error_msg = str(e.orig) if hasattr(e, "orig") else str(e) error_msg = str(e.orig) if hasattr(e, "orig") else str(e)
if "email" in error_msg.lower(): if "email" in error_msg.lower():
logger.warning(f"Duplicate email attempted: {email}") logger.warning("Duplicate email attempted: %s", email)
raise DuplicateEntryError(f"User with email {email} already exists") raise DuplicateEntryError(f"User with email {email} already exists")
logger.error(f"Integrity error creating OAuth user: {error_msg}") logger.error("Integrity error creating OAuth user: %s", error_msg)
raise DuplicateEntryError(f"Database integrity error: {error_msg}") raise DuplicateEntryError(f"Database integrity error: {error_msg}")
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Unexpected error creating OAuth user: {e!s}", exc_info=True) logger.exception("Unexpected error creating OAuth user: %s", e)
raise raise
async def update( async def update(
@@ -184,7 +184,7 @@ class UserRepository(BaseRepository[User, UserCreate, UserUpdate]):
return users, total return users, total
except Exception as e: except Exception as e:
logger.error(f"Error retrieving paginated users: {e!s}") logger.error("Error retrieving paginated users: %s", e)
raise raise
async def bulk_update_status( async def bulk_update_status(
@@ -206,12 +206,14 @@ class UserRepository(BaseRepository[User, UserCreate, UserUpdate]):
await db.commit() await db.commit()
updated_count = result.rowcount updated_count = result.rowcount
logger.info(f"Bulk updated {updated_count} users to is_active={is_active}") logger.info(
"Bulk updated %s users to is_active=%s", updated_count, is_active
)
return updated_count return updated_count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error bulk updating user status: {e!s}", exc_info=True) logger.exception("Error bulk updating user status: %s", e)
raise raise
async def bulk_soft_delete( async def bulk_soft_delete(
@@ -246,12 +248,12 @@ class UserRepository(BaseRepository[User, UserCreate, UserUpdate]):
await db.commit() await db.commit()
deleted_count = result.rowcount deleted_count = result.rowcount
logger.info(f"Bulk soft deleted {deleted_count} users") logger.info("Bulk soft deleted %s users", deleted_count)
return deleted_count return deleted_count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"Error bulk deleting users: {e!s}", exc_info=True) logger.exception("Error bulk deleting users: %s", e)
raise raise
def is_active(self, user: User) -> bool: def is_active(self, user: User) -> bool:

View File

@@ -85,7 +85,7 @@ class AuthService:
# Delegate creation (hashing + commit) to the repository # Delegate creation (hashing + commit) to the repository
user = await user_repo.create(db, obj_in=user_data) user = await user_repo.create(db, obj_in=user_data)
logger.info(f"User created successfully: {user.email}") logger.info("User created successfully: %s", user.email)
return user return user
except (AuthenticationError, DuplicateError): except (AuthenticationError, DuplicateError):
@@ -94,7 +94,7 @@ class AuthService:
except DuplicateEntryError as e: except DuplicateEntryError as e:
raise DuplicateError(str(e)) raise DuplicateError(str(e))
except Exception as e: except Exception as e:
logger.error(f"Error creating user: {e!s}", exc_info=True) logger.exception("Error creating user: %s", e)
raise AuthenticationError(f"Failed to create user: {e!s}") raise AuthenticationError(f"Failed to create user: {e!s}")
@staticmethod @staticmethod
@@ -166,7 +166,7 @@ class AuthService:
return AuthService.create_tokens(user) return AuthService.create_tokens(user)
except (TokenExpiredError, TokenInvalidError) as e: except (TokenExpiredError, TokenInvalidError) as e:
logger.warning(f"Token refresh failed: {e!s}") logger.warning("Token refresh failed: %s", e)
raise raise
@staticmethod @staticmethod
@@ -201,7 +201,7 @@ class AuthService:
new_hash = await get_password_hash_async(new_password) new_hash = await get_password_hash_async(new_password)
await user_repo.update_password(db, user=user, password_hash=new_hash) await user_repo.update_password(db, user=user, password_hash=new_hash)
logger.info(f"Password changed successfully for user {user_id}") logger.info("Password changed successfully for user %s", user_id)
return True return True
except AuthenticationError: except AuthenticationError:
@@ -210,9 +210,7 @@ class AuthService:
except Exception as e: except Exception as e:
# Rollback on any database errors # Rollback on any database errors
await db.rollback() await db.rollback()
logger.error( logger.exception("Error changing password for user %s: %s", user_id, e)
f"Error changing password for user {user_id}: {e!s}", exc_info=True
)
raise AuthenticationError(f"Failed to change password: {e!s}") raise AuthenticationError(f"Failed to change password: {e!s}")
@staticmethod @staticmethod
@@ -241,5 +239,5 @@ class AuthService:
new_hash = await get_password_hash_async(new_password) new_hash = await get_password_hash_async(new_password)
user = await user_repo.update_password(db, user=user, password_hash=new_hash) user = await user_repo.update_password(db, user=user, password_hash=new_hash)
logger.info(f"Password reset successfully for {email}") logger.info("Password reset successfully for %s", email)
return user return user

View File

@@ -58,8 +58,8 @@ class ConsoleEmailBackend(EmailBackend):
logger.info("=" * 80) logger.info("=" * 80)
logger.info("EMAIL SENT (Console Backend)") logger.info("EMAIL SENT (Console Backend)")
logger.info("=" * 80) logger.info("=" * 80)
logger.info(f"To: {', '.join(to)}") logger.info("To: %s", ", ".join(to))
logger.info(f"Subject: {subject}") logger.info("Subject: %s", subject)
logger.info("-" * 80) logger.info("-" * 80)
if text_content: if text_content:
logger.info("Plain Text Content:") logger.info("Plain Text Content:")
@@ -199,7 +199,7 @@ The {settings.PROJECT_NAME} Team
text_content=text_content, text_content=text_content,
) )
except Exception as e: except Exception as e:
logger.error(f"Failed to send password reset email to {to_email}: {e!s}") logger.error("Failed to send password reset email to %s: %s", to_email, e)
return False return False
async def send_email_verification( async def send_email_verification(
@@ -287,7 +287,7 @@ The {settings.PROJECT_NAME} Team
text_content=text_content, text_content=text_content,
) )
except Exception as e: except Exception as e:
logger.error(f"Failed to send verification email to {to_email}: {e!s}") logger.error("Failed to send verification email to %s: %s", to_email, e)
return False return False

View File

@@ -25,8 +25,8 @@ from datetime import UTC, datetime, timedelta
from typing import Any from typing import Any
from uuid import UUID from uuid import UUID
from jose import JWTError, jwt import jwt
from jose.exceptions import ExpiredSignatureError from jwt.exceptions import ExpiredSignatureError, InvalidTokenError
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings from app.core.config import settings
@@ -139,7 +139,7 @@ def verify_pkce(code_verifier: str, code_challenge: str, method: str) -> bool:
if method != "S256": if method != "S256":
# SECURITY: Reject any method other than S256 # SECURITY: Reject any method other than S256
# 'plain' method provides no security against code interception attacks # 'plain' method provides no security against code interception attacks
logger.warning(f"PKCE verification rejected for unsupported method: {method}") logger.warning("PKCE verification rejected for unsupported method: %s", method)
return False return False
# SHA-256 hash, then base64url encode (RFC 7636 Section 4.2) # SHA-256 hash, then base64url encode (RFC 7636 Section 4.2)
@@ -257,7 +257,9 @@ def validate_scopes(client: OAuthClient, requested_scopes: list[str]) -> list[st
# Warn if some scopes were filtered out # Warn if some scopes were filtered out
invalid = requested - allowed invalid = requested - allowed
if invalid: if invalid:
logger.warning(f"Client {client.client_id} requested invalid scopes: {invalid}") logger.warning(
"Client %s requested invalid scopes: %s", client.client_id, invalid
)
return list(valid) return list(valid)
@@ -320,7 +322,9 @@ async def create_authorization_code(
) )
logger.info( logger.info(
f"Created authorization code for user {user.id} and client {client.client_id}" "Created authorization code for user %s and client %s",
user.id,
client.client_id,
) )
return code return code
@@ -369,7 +373,8 @@ async def exchange_authorization_code(
if existing_code and existing_code.used: if existing_code and existing_code.used:
# Code reuse is a security incident - revoke all tokens for this grant # Code reuse is a security incident - revoke all tokens for this grant
logger.warning( logger.warning(
f"Authorization code reuse detected for client {existing_code.client_id}" "Authorization code reuse detected for client %s",
existing_code.client_id,
) )
await revoke_tokens_for_user_client( await revoke_tokens_for_user_client(
db, UUID(str(existing_code.user_id)), str(existing_code.client_id) db, UUID(str(existing_code.user_id)), str(existing_code.client_id)
@@ -527,7 +532,7 @@ async def create_tokens(
ip_address=ip_address, ip_address=ip_address,
) )
logger.info(f"Issued tokens for user {user.id} to client {client.client_id}") logger.info("Issued tokens for user %s to client %s", user.id, client.client_id)
return { return {
"access_token": access_token, "access_token": access_token,
@@ -580,7 +585,7 @@ async def refresh_tokens(
if token_record.revoked: if token_record.revoked:
# Token reuse after revocation - security incident # Token reuse after revocation - security incident
logger.warning( logger.warning(
f"Revoked refresh token reuse detected for client {token_record.client_id}" "Revoked refresh token reuse detected for client %s", token_record.client_id
) )
raise InvalidGrantError("Refresh token has been revoked") raise InvalidGrantError("Refresh token has been revoked")
@@ -672,7 +677,7 @@ async def revoke_token(
raise InvalidClientError("Token was not issued to this client") raise InvalidClientError("Token was not issued to this client")
await oauth_provider_token_repo.revoke(db, token=refresh_record) await oauth_provider_token_repo.revoke(db, token=refresh_record)
logger.info(f"Revoked refresh token {refresh_record.jti[:8]}...") logger.info("Revoked refresh token %s...", refresh_record.jti[:8])
return True return True
# Try as access token (JWT) # Try as access token (JWT)
@@ -696,10 +701,10 @@ async def revoke_token(
raise InvalidClientError("Token was not issued to this client") raise InvalidClientError("Token was not issued to this client")
await oauth_provider_token_repo.revoke(db, token=refresh_record) await oauth_provider_token_repo.revoke(db, token=refresh_record)
logger.info( logger.info(
f"Revoked refresh token via access token JTI {jti[:8]}..." "Revoked refresh token via access token JTI %s...", jti[:8]
) )
return True return True
except JWTError: except InvalidTokenError:
pass pass
except Exception: # noqa: S110 - Intentional: invalid JWT not an error except Exception: # noqa: S110 - Intentional: invalid JWT not an error
pass pass
@@ -731,7 +736,7 @@ async def revoke_tokens_for_user_client(
if count > 0: if count > 0:
logger.warning( logger.warning(
f"Revoked {count} tokens for user {user_id} and client {client_id}" "Revoked %s tokens for user %s and client %s", count, user_id, client_id
) )
return count return count
@@ -753,7 +758,7 @@ async def revoke_all_user_tokens(db: AsyncSession, user_id: UUID) -> int:
count = await oauth_provider_token_repo.revoke_all_for_user(db, user_id=user_id) count = await oauth_provider_token_repo.revoke_all_for_user(db, user_id=user_id)
if count > 0: if count > 0:
logger.info(f"Revoked {count} OAuth provider tokens for user {user_id}") logger.info("Revoked %s OAuth provider tokens for user %s", count, user_id)
return count return count
@@ -822,7 +827,7 @@ async def introspect_token(
} }
except ExpiredSignatureError: except ExpiredSignatureError:
return {"active": False} return {"active": False}
except JWTError: except InvalidTokenError:
pass pass
except Exception: # noqa: S110 - Intentional: invalid JWT falls through to refresh token check except Exception: # noqa: S110 - Intentional: invalid JWT falls through to refresh token check
pass pass

View File

@@ -219,7 +219,7 @@ class OAuthService:
**auth_params, **auth_params,
) )
logger.info(f"OAuth authorization URL created for {provider}") logger.info("OAuth authorization URL created for %s", provider)
return url, state return url, state
@staticmethod @staticmethod
@@ -254,8 +254,9 @@ class OAuthService:
# This prevents authorization code injection attacks (RFC 6749 Section 10.6) # This prevents authorization code injection attacks (RFC 6749 Section 10.6)
if state_record.redirect_uri != redirect_uri: if state_record.redirect_uri != redirect_uri:
logger.warning( logger.warning(
f"OAuth redirect_uri mismatch: expected {state_record.redirect_uri}, " "OAuth redirect_uri mismatch: expected %s, got %s",
f"got {redirect_uri}" state_record.redirect_uri,
redirect_uri,
) )
raise AuthenticationError("Redirect URI mismatch") raise AuthenticationError("Redirect URI mismatch")
@@ -299,7 +300,7 @@ class OAuthService:
except AuthenticationError: except AuthenticationError:
raise raise
except Exception as e: except Exception as e:
logger.error(f"OAuth token exchange failed: {e!s}") logger.error("OAuth token exchange failed: %s", e)
raise AuthenticationError("Failed to exchange authorization code") raise AuthenticationError("Failed to exchange authorization code")
# Get user info from provider # Get user info from provider
@@ -312,7 +313,7 @@ class OAuthService:
client, provider, config, access_token client, provider, config, access_token
) )
except Exception as e: except Exception as e:
logger.error(f"Failed to get user info: {e!s}") logger.error("Failed to get user info: %s", e)
raise AuthenticationError( raise AuthenticationError(
"Failed to get user information from provider" "Failed to get user information from provider"
) )
@@ -353,7 +354,7 @@ class OAuthService:
+ timedelta(seconds=token.get("expires_in", 3600)), + timedelta(seconds=token.get("expires_in", 3600)),
) )
logger.info(f"OAuth login successful for {user.email} via {provider}") logger.info("OAuth login successful for %s via %s", user.email, provider)
elif state_record.user_id: elif state_record.user_id:
# Account linking flow (user is already logged in) # Account linking flow (user is already logged in)
@@ -387,7 +388,7 @@ class OAuthService:
) )
await oauth_account.create_account(db, obj_in=oauth_create) await oauth_account.create_account(db, obj_in=oauth_create)
logger.info(f"OAuth account linked: {provider} -> {user.email}") logger.info("OAuth account linked: %s -> %s", provider, user.email)
else: else:
# New OAuth login - check for existing user by email # New OAuth login - check for existing user by email
@@ -409,7 +410,9 @@ class OAuthService:
if existing_provider: if existing_provider:
# This shouldn't happen if we got here, but safety check # This shouldn't happen if we got here, but safety check
logger.warning( logger.warning(
f"OAuth account already linked (race condition?): {provider} -> {user.email}" "OAuth account already linked (race condition?): %s -> %s",
provider,
user.email,
) )
else: else:
# Create OAuth account link # Create OAuth account link
@@ -427,7 +430,9 @@ class OAuthService:
) )
await oauth_account.create_account(db, obj_in=oauth_create) await oauth_account.create_account(db, obj_in=oauth_create)
logger.info(f"OAuth auto-linked by email: {provider} -> {user.email}") logger.info(
"OAuth auto-linked by email: %s -> %s", provider, user.email
)
else: else:
# Create new user # Create new user
@@ -447,7 +452,7 @@ class OAuthService:
) )
is_new_user = True is_new_user = True
logger.info(f"New user created via OAuth: {user.email} ({provider})") logger.info("New user created via OAuth: %s (%s)", user.email, provider)
# Generate JWT tokens # Generate JWT tokens
claims = { claims = {
@@ -532,8 +537,9 @@ class OAuthService:
AuthenticationError: If verification fails AuthenticationError: If verification fails
""" """
import httpx import httpx
from jose import jwt as jose_jwt import jwt as pyjwt
from jose.exceptions import JWTError from jwt.algorithms import RSAAlgorithm
from jwt.exceptions import InvalidTokenError
try: try:
# Fetch Google's public keys (JWKS) # Fetch Google's public keys (JWKS)
@@ -547,24 +553,27 @@ class OAuthService:
jwks = jwks_response.json() jwks = jwks_response.json()
# Get the key ID from the token header # Get the key ID from the token header
unverified_header = jose_jwt.get_unverified_header(id_token) unverified_header = pyjwt.get_unverified_header(id_token)
kid = unverified_header.get("kid") kid = unverified_header.get("kid")
if not kid: if not kid:
raise AuthenticationError("ID token missing key ID (kid)") raise AuthenticationError("ID token missing key ID (kid)")
# Find the matching public key # Find the matching public key
public_key = None jwk_data = None
for key in jwks.get("keys", []): for key in jwks.get("keys", []):
if key.get("kid") == kid: if key.get("kid") == kid:
public_key = key jwk_data = key
break break
if not public_key: if not jwk_data:
raise AuthenticationError("ID token signed with unknown key") raise AuthenticationError("ID token signed with unknown key")
# Convert JWK to a public key object for PyJWT
public_key = RSAAlgorithm.from_jwk(jwk_data)
# Verify the token signature and decode claims # Verify the token signature and decode claims
# jose library will verify signature against the JWK # PyJWT will verify signature against the RSA public key
payload = jose_jwt.decode( payload = pyjwt.decode(
id_token, id_token,
public_key, public_key,
algorithms=["RS256"], # Google uses RS256 algorithms=["RS256"], # Google uses RS256
@@ -583,23 +592,24 @@ class OAuthService:
token_nonce = payload.get("nonce") token_nonce = payload.get("nonce")
if token_nonce != expected_nonce: if token_nonce != expected_nonce:
logger.warning( logger.warning(
f"OAuth ID token nonce mismatch: expected {expected_nonce}, " "OAuth ID token nonce mismatch: expected %s, got %s",
f"got {token_nonce}" expected_nonce,
token_nonce,
) )
raise AuthenticationError("Invalid ID token nonce") raise AuthenticationError("Invalid ID token nonce")
logger.debug("Google ID token verified successfully") logger.debug("Google ID token verified successfully")
return payload return payload
except JWTError as e: except InvalidTokenError as e:
logger.warning(f"Google ID token verification failed: {e}") logger.warning("Google ID token verification failed: %s", e)
raise AuthenticationError("Invalid ID token signature") raise AuthenticationError("Invalid ID token signature")
except httpx.HTTPError as e: except httpx.HTTPError as e:
logger.error(f"Failed to fetch Google JWKS: {e}") logger.error("Failed to fetch Google JWKS: %s", e)
# If we can't verify the ID token, fail closed for security # If we can't verify the ID token, fail closed for security
raise AuthenticationError("Failed to verify ID token") raise AuthenticationError("Failed to verify ID token")
except Exception as e: except Exception as e:
logger.error(f"Unexpected error verifying Google ID token: {e}") logger.error("Unexpected error verifying Google ID token: %s", e)
raise AuthenticationError("ID token verification error") raise AuthenticationError("ID token verification error")
@staticmethod @staticmethod
@@ -701,7 +711,7 @@ class OAuthService:
if not deleted: if not deleted:
raise AuthenticationError(f"No {provider} account found to unlink") raise AuthenticationError(f"No {provider} account found to unlink")
logger.info(f"OAuth provider unlinked: {provider} from {user.email}") logger.info("OAuth provider unlinked: %s from %s", provider, user.email)
return True return True
@staticmethod @staticmethod

View File

@@ -8,7 +8,7 @@ import logging
from datetime import UTC, datetime from datetime import UTC, datetime
from app.core.database import SessionLocal from app.core.database import SessionLocal
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -32,15 +32,15 @@ async def cleanup_expired_sessions(keep_days: int = 30) -> int:
async with SessionLocal() as db: async with SessionLocal() as db:
try: try:
# Use CRUD method to cleanup # Use repository method to cleanup
count = await session_crud.cleanup_expired(db, keep_days=keep_days) count = await session_repo.cleanup_expired(db, keep_days=keep_days)
logger.info(f"Session cleanup complete: {count} sessions deleted") logger.info("Session cleanup complete: %s sessions deleted", count)
return count return count
except Exception as e: except Exception as e:
logger.error(f"Error during session cleanup: {e!s}", exc_info=True) logger.exception("Error during session cleanup: %s", e)
return 0 return 0
@@ -79,10 +79,10 @@ async def get_session_statistics() -> dict:
"expired": expired_sessions, "expired": expired_sessions,
} }
logger.info(f"Session statistics: {stats}") logger.info("Session statistics: %s", stats)
return stats return stats
except Exception as e: except Exception as e:
logger.error(f"Error getting session statistics: {e!s}", exc_info=True) logger.exception("Error getting session statistics: %s", e)
return {} return {}

View File

@@ -79,12 +79,13 @@ This FastAPI backend application follows a **clean layered architecture** patter
### Authentication & Security ### Authentication & Security
- **python-jose**: JWT token generation and validation - **PyJWT**: JWT token generation and validation
- Cryptographic signing - Cryptographic signing (HS256, RS256)
- Token expiration handling - Token expiration handling
- Claims validation - Claims validation
- JWK support for Google ID token verification
- **passlib + bcrypt**: Password hashing - **bcrypt**: Password hashing
- Industry-standard bcrypt algorithm - Industry-standard bcrypt algorithm
- Configurable cost factor - Configurable cost factor
- Salt generation - Salt generation
@@ -117,7 +118,8 @@ backend/
│ ├── api/ # API layer │ ├── api/ # API layer
│ │ ├── dependencies/ # Dependency injection │ │ ├── dependencies/ # Dependency injection
│ │ │ ├── auth.py # Authentication dependencies │ │ │ ├── auth.py # Authentication dependencies
│ │ │ ├── permissions.py # Authorization dependencies │ │ │ ├── permissions.py # Authorization dependencies
│ │ │ └── services.py # Service singleton injection
│ │ ├── routes/ # API endpoints │ │ ├── routes/ # API endpoints
│ │ │ ├── auth.py # Authentication routes │ │ │ ├── auth.py # Authentication routes
│ │ │ ├── users.py # User management routes │ │ │ ├── users.py # User management routes
@@ -131,13 +133,14 @@ backend/
│ │ ├── config.py # Application configuration │ │ ├── config.py # Application configuration
│ │ ├── database.py # Database connection │ │ ├── database.py # Database connection
│ │ ├── exceptions.py # Custom exception classes │ │ ├── exceptions.py # Custom exception classes
│ │ ├── repository_exceptions.py # Repository-level exception hierarchy
│ │ └── middleware.py # Custom middleware │ │ └── middleware.py # Custom middleware
│ │ │ │
│ ├── crud/ # Database operations │ ├── repositories/ # Data access layer
│ │ ├── base.py # Generic CRUD base class │ │ ├── base.py # Generic repository base class
│ │ ├── user.py # User CRUD operations │ │ ├── user.py # User repository
│ │ ├── session.py # Session CRUD operations │ │ ├── session.py # Session repository
│ │ └── organization.py # Organization CRUD │ │ └── organization.py # Organization repository
│ │ │ │
│ ├── models/ # SQLAlchemy models │ ├── models/ # SQLAlchemy models
│ │ ├── base.py # Base model with mixins │ │ ├── base.py # Base model with mixins
@@ -153,8 +156,11 @@ backend/
│ │ ├── sessions.py # Session schemas │ │ ├── sessions.py # Session schemas
│ │ └── organizations.py # Organization schemas │ │ └── organizations.py # Organization schemas
│ │ │ │
│ ├── services/ # Business logic │ ├── services/ # Business logic layer
│ │ ├── auth_service.py # Authentication service │ │ ├── auth_service.py # Authentication service
│ │ ├── user_service.py # User management service
│ │ ├── session_service.py # Session management service
│ │ ├── organization_service.py # Organization service
│ │ ├── email_service.py # Email service │ │ ├── email_service.py # Email service
│ │ └── session_cleanup.py # Background cleanup │ │ └── session_cleanup.py # Background cleanup
│ │ │ │
@@ -168,20 +174,25 @@ backend/
├── tests/ # Test suite ├── tests/ # Test suite
│ ├── api/ # Integration tests │ ├── api/ # Integration tests
│ ├── crud/ # CRUD tests │ ├── repositories/ # Repository unit tests
│ ├── services/ # Service unit tests
│ ├── models/ # Model tests │ ├── models/ # Model tests
│ ├── services/ # Service tests
│ └── conftest.py # Test configuration │ └── conftest.py # Test configuration
├── docs/ # Documentation ├── docs/ # Documentation
│ ├── ARCHITECTURE.md # This file │ ├── ARCHITECTURE.md # This file
│ ├── CODING_STANDARDS.md # Coding standards │ ├── CODING_STANDARDS.md # Coding standards
│ ├── COMMON_PITFALLS.md # Common mistakes to avoid
│ ├── E2E_TESTING.md # E2E testing guide
│ └── FEATURE_EXAMPLE.md # Feature implementation guide │ └── FEATURE_EXAMPLE.md # Feature implementation guide
├── requirements.txt # Python dependencies ├── pyproject.toml # Dependencies, tool configs (Ruff, pytest, coverage, Pyright)
├── pytest.ini # Pytest configuration ├── uv.lock # Locked dependency versions (commit to git)
├── .coveragerc # Coverage configuration ├── Makefile # Development commands (quality, security, testing)
└── alembic.ini # Alembic configuration ├── .pre-commit-config.yaml # Pre-commit hook configuration
├── .secrets.baseline # detect-secrets baseline (known false positives)
├── alembic.ini # Alembic configuration
└── migrate.py # Migration helper script
``` ```
## Layered Architecture ## Layered Architecture
@@ -214,11 +225,11 @@ The application follows a strict 5-layer architecture:
└──────────────────────────┬──────────────────────────────────┘ └──────────────────────────┬──────────────────────────────────┘
│ calls │ calls
┌──────────────────────────▼──────────────────────────────────┐ ┌──────────────────────────▼──────────────────────────────────┐
CRUD Layer (crud/) Repository Layer (repositories/)
│ - Database operations │ │ - Database operations │
│ - Query building │ │ - Query building │
│ - Transaction management │ - Custom repository exceptions
│ - Error handling │ - No business logic
└──────────────────────────┬──────────────────────────────────┘ └──────────────────────────┬──────────────────────────────────┘
│ uses │ uses
┌──────────────────────────▼──────────────────────────────────┐ ┌──────────────────────────▼──────────────────────────────────┐
@@ -262,7 +273,7 @@ async def get_current_user_info(
**Rules**: **Rules**:
- Should NOT contain business logic - Should NOT contain business logic
- Should NOT directly perform database operations (use CRUD or services) - Should NOT directly call repositories (use services injected via `dependencies/services.py`)
- Must validate all input via Pydantic schemas - Must validate all input via Pydantic schemas
- Must specify response models - Must specify response models
- Should apply appropriate rate limits - Should apply appropriate rate limits
@@ -279,9 +290,9 @@ async def get_current_user_info(
**Example**: **Example**:
```python ```python
def get_current_user( async def get_current_user(
token: str = Depends(oauth2_scheme), token: str = Depends(oauth2_scheme),
db: Session = Depends(get_db) db: AsyncSession = Depends(get_db)
) -> User: ) -> User:
""" """
Extract and validate user from JWT token. Extract and validate user from JWT token.
@@ -295,7 +306,7 @@ def get_current_user(
except Exception: except Exception:
raise AuthenticationError("Invalid authentication credentials") raise AuthenticationError("Invalid authentication credentials")
user = user_crud.get(db, id=user_id) user = await user_repo.get(db, id=user_id)
if not user: if not user:
raise AuthenticationError("User not found") raise AuthenticationError("User not found")
@@ -313,7 +324,7 @@ def get_current_user(
**Responsibility**: Implement complex business logic **Responsibility**: Implement complex business logic
**Key Functions**: **Key Functions**:
- Orchestrate multiple CRUD operations - Orchestrate multiple repository operations
- Implement business rules - Implement business rules
- Handle external service integration - Handle external service integration
- Coordinate transactions - Coordinate transactions
@@ -323,9 +334,9 @@ def get_current_user(
class AuthService: class AuthService:
"""Authentication service with business logic.""" """Authentication service with business logic."""
def login( async def login(
self, self,
db: Session, db: AsyncSession,
email: str, email: str,
password: str, password: str,
request: Request request: Request
@@ -339,8 +350,8 @@ class AuthService:
3. Generate tokens 3. Generate tokens
4. Return tokens and user info 4. Return tokens and user info
""" """
# Validate credentials # Validate credentials via repository
user = user_crud.get_by_email(db, email=email) user = await user_repo.get_by_email(db, email=email)
if not user or not verify_password(password, user.hashed_password): if not user or not verify_password(password, user.hashed_password):
raise AuthenticationError("Invalid credentials") raise AuthenticationError("Invalid credentials")
@@ -350,11 +361,10 @@ class AuthService:
# Extract device info # Extract device info
device_info = extract_device_info(request) device_info = extract_device_info(request)
# Create session # Create session via repository
session = session_crud.create_session( session = await session_repo.create(
db, db,
user_id=user.id, obj_in=SessionCreate(user_id=user.id, **device_info)
device_info=device_info
) )
# Generate tokens # Generate tokens
@@ -373,75 +383,60 @@ class AuthService:
**Rules**: **Rules**:
- Contains business logic, not just data operations - Contains business logic, not just data operations
- Can call multiple CRUD operations - Can call multiple repository operations
- Should handle complex workflows - Should handle complex workflows
- Must maintain data consistency - Must maintain data consistency
- Should use transactions when needed - Should use transactions when needed
#### 4. CRUD Layer (`app/crud/`) #### 4. Repository Layer (`app/repositories/`)
**Responsibility**: Database operations and queries **Responsibility**: Database operations and queries — no business logic
**Key Functions**: **Key Functions**:
- Create, read, update, delete operations - Create, read, update, delete operations
- Build database queries - Build database queries
- Handle database errors - Raise custom repository exceptions (`DuplicateEntryError`, `IntegrityConstraintError`)
- Manage soft deletes - Manage soft deletes
- Implement pagination and filtering - Implement pagination and filtering
**Example**: **Example**:
```python ```python
class CRUDSession(CRUDBase[UserSession, SessionCreate, SessionUpdate]): class SessionRepository(RepositoryBase[UserSession, SessionCreate, SessionUpdate]):
"""CRUD operations for user sessions.""" """Repository for user sessions — database operations only."""
def get_by_jti(self, db: Session, jti: UUID) -> Optional[UserSession]: async def get_by_jti(self, db: AsyncSession, *, jti: str) -> UserSession | None:
"""Get session by refresh token JTI.""" """Get session by refresh token JTI."""
try: result = await db.execute(
return ( select(UserSession).where(UserSession.refresh_token_jti == jti)
db.query(UserSession) )
.filter(UserSession.refresh_token_jti == jti) return result.scalar_one_or_none()
.first()
)
except Exception as e:
logger.error(f"Error getting session by JTI: {str(e)}")
return None
def get_active_by_jti( async def deactivate(self, db: AsyncSession, *, session_id: UUID) -> bool:
self,
db: Session,
jti: UUID
) -> Optional[UserSession]:
"""Get active session by refresh token JTI."""
session = self.get_by_jti(db, jti=jti)
if session and session.is_active and not session.is_expired:
return session
return None
def deactivate(self, db: Session, session_id: UUID) -> bool:
"""Deactivate a session (logout).""" """Deactivate a session (logout)."""
try: try:
session = self.get(db, id=session_id) session = await self.get(db, id=session_id)
if not session: if not session:
return False return False
session.is_active = False session.is_active = False
db.commit() await db.commit()
logger.info(f"Session {session_id} deactivated") logger.info(f"Session {session_id} deactivated")
return True return True
except Exception as e: except Exception as e:
db.rollback() await db.rollback()
logger.error(f"Error deactivating session: {str(e)}") logger.error(f"Error deactivating session: {str(e)}")
return False return False
``` ```
**Rules**: **Rules**:
- Should NOT contain business logic - Should NOT contain business logic
- Must handle database exceptions - Must raise custom repository exceptions (not raw `ValueError`/`IntegrityError`)
- Must use parameterized queries (SQLAlchemy does this) - Must use async SQLAlchemy 2.0 `select()` API (never `db.query()`)
- Should log all database errors - Should log all database errors
- Must rollback on errors - Must rollback on errors
- Should use soft deletes when possible - Should use soft deletes when possible
- **Never imported directly by routes** — always called through services
#### 5. Data Layer (`app/models/` + `app/schemas/`) #### 5. Data Layer (`app/models/` + `app/schemas/`)
@@ -546,51 +541,23 @@ SessionLocal = sessionmaker(
#### Dependency Injection Pattern #### Dependency Injection Pattern
```python ```python
def get_db() -> Generator[Session, None, None]: async def get_db() -> AsyncGenerator[AsyncSession, None]:
""" """
Database session dependency for FastAPI routes. Async database session dependency for FastAPI routes.
Automatically commits on success, rolls back on error. The session is passed to service methods; commit/rollback is
managed inside service or repository methods.
""" """
db = SessionLocal() async with AsyncSessionLocal() as db:
try:
yield db yield db
finally:
db.close()
# Usage in routes # Usage in routes — always through a service, never direct repository
@router.get("/users") @router.get("/users")
def list_users(db: Session = Depends(get_db)): async def list_users(
return user_crud.get_multi(db) user_service: UserService = Depends(get_user_service),
``` db: AsyncSession = Depends(get_db),
):
#### Context Manager Pattern return await user_service.get_users(db)
```python
@contextmanager
def transaction_scope() -> Generator[Session, None, None]:
"""
Context manager for database transactions.
Use for complex operations requiring multiple steps.
Automatically commits on success, rolls back on error.
"""
db = SessionLocal()
try:
yield db
db.commit()
except Exception:
db.rollback()
raise
finally:
db.close()
# Usage in services
def complex_operation():
with transaction_scope() as db:
user = user_crud.create(db, obj_in=user_data)
session = session_crud.create(db, session_data)
return user, session
``` ```
### Model Mixins ### Model Mixins
@@ -782,22 +749,15 @@ def get_profile(
```python ```python
@router.delete("/sessions/{session_id}") @router.delete("/sessions/{session_id}")
def revoke_session( async def revoke_session(
session_id: UUID, session_id: UUID,
current_user: User = Depends(get_current_user), current_user: User = Depends(get_current_user),
db: Session = Depends(get_db) session_service: SessionService = Depends(get_session_service),
db: AsyncSession = Depends(get_db),
): ):
"""Users can only revoke their own sessions.""" """Users can only revoke their own sessions."""
session = session_crud.get(db, id=session_id) # SessionService verifies ownership and raises NotFoundError / AuthorizationError
await session_service.revoke_session(db, session_id=session_id, user_id=current_user.id)
if not session:
raise NotFoundError("Session not found")
# Check ownership
if session.user_id != current_user.id:
raise AuthorizationError("You can only revoke your own sessions")
session_crud.deactivate(db, session_id=session_id)
return MessageResponse(success=True, message="Session revoked") return MessageResponse(success=True, message="Session revoked")
``` ```
@@ -1061,23 +1021,27 @@ from app.services.session_cleanup import cleanup_expired_sessions
scheduler = AsyncIOScheduler() scheduler = AsyncIOScheduler()
@app.on_event("startup") @asynccontextmanager
async def startup_event(): async def lifespan(app: FastAPI):
"""Start background jobs on application startup.""" """Application lifespan context manager."""
if not settings.IS_TEST: # Don't run in tests # Startup
if os.getenv("IS_TEST", "False") != "True":
scheduler.add_job( scheduler.add_job(
cleanup_expired_sessions, cleanup_expired_sessions,
"cron", "cron",
hour=2, # Run at 2 AM daily hour=2, # Run at 2 AM daily
id="cleanup_expired_sessions" id="cleanup_expired_sessions",
replace_existing=True,
) )
scheduler.start() scheduler.start()
logger.info("Background jobs started") logger.info("Background jobs started")
@app.on_event("shutdown") yield
async def shutdown_event():
"""Stop background jobs on application shutdown.""" # Shutdown
scheduler.shutdown() if os.getenv("IS_TEST", "False") != "True":
scheduler.shutdown()
await close_async_db() # Dispose database engine connections
``` ```
### Job Implementation ### Job Implementation
@@ -1092,8 +1056,8 @@ async def cleanup_expired_sessions():
Runs daily at 2 AM. Removes sessions expired for more than 30 days. Runs daily at 2 AM. Removes sessions expired for more than 30 days.
""" """
try: try:
with transaction_scope() as db: async with AsyncSessionLocal() as db:
count = session_crud.cleanup_expired(db, keep_days=30) count = await session_repo.cleanup_expired(db, keep_days=30)
logger.info(f"Cleaned up {count} expired sessions") logger.info(f"Cleaned up {count} expired sessions")
except Exception as e: except Exception as e:
logger.error(f"Error cleaning up sessions: {str(e)}", exc_info=True) logger.error(f"Error cleaning up sessions: {str(e)}", exc_info=True)
@@ -1110,7 +1074,7 @@ async def cleanup_expired_sessions():
│Integration │ ← API endpoint tests │Integration │ ← API endpoint tests
│ Tests │ │ Tests │
├─────────────┤ ├─────────────┤
│ Unit │ ← CRUD, services, utilities │ Unit │ ← repositories, services, utilities
│ Tests │ │ Tests │
└─────────────┘ └─────────────┘
``` ```
@@ -1205,6 +1169,8 @@ app.add_middleware(
## Performance Considerations ## Performance Considerations
> 📖 For the full benchmarking guide (how to run, read results, write new benchmarks, and manage baselines), see **[BENCHMARKS.md](BENCHMARKS.md)**.
### Database Connection Pooling ### Database Connection Pooling
- Pool size: 20 connections - Pool size: 20 connections

311
backend/docs/BENCHMARKS.md Normal file
View File

@@ -0,0 +1,311 @@
# Performance Benchmarks Guide
Automated performance benchmarking infrastructure using **pytest-benchmark** to detect latency regressions in critical API endpoints.
## Table of Contents
- [Why Benchmark?](#why-benchmark)
- [Quick Start](#quick-start)
- [How It Works](#how-it-works)
- [Understanding Results](#understanding-results)
- [Test Organization](#test-organization)
- [Writing Benchmark Tests](#writing-benchmark-tests)
- [Baseline Management](#baseline-management)
- [CI/CD Integration](#cicd-integration)
- [Troubleshooting](#troubleshooting)
---
## Why Benchmark?
Performance regressions are silent bugs — they don't break tests or cause errors, but they degrade the user experience over time. Common causes include:
- **Unintended N+1 queries** after adding a relationship
- **Heavier serialization** after adding new fields to a response model
- **Middleware overhead** from new security headers or logging
- **Dependency upgrades** that introduce slower code paths
Without automated benchmarks, these regressions go unnoticed until users complain. Performance benchmarks serve as an **early warning system** — they measure endpoint latency on every run and flag significant deviations from an established baseline.
### What benchmarks give you
| Benefit | Description |
|---------|-------------|
| **Regression detection** | Automatically flags when an endpoint becomes significantly slower |
| **Baseline tracking** | Stores known-good performance numbers for comparison |
| **Confidence in refactors** | Verify that code changes don't degrade response times |
| **Visibility** | Makes performance a first-class, measurable quality attribute |
---
## Quick Start
```bash
# Run benchmarks (no comparison, just see current numbers)
make benchmark
# Save current results as the baseline
make benchmark-save
# Run benchmarks and compare against the saved baseline
make benchmark-check
```
---
## How It Works
The benchmarking system has three layers:
### 1. pytest-benchmark integration
[pytest-benchmark](https://pytest-benchmark.readthedocs.io/) is a pytest plugin that provides a `benchmark` fixture. It handles:
- **Calibration**: Automatically determines how many iterations to run for statistical significance
- **Timing**: Uses `time.perf_counter` for high-resolution measurements
- **Statistics**: Computes min, max, mean, median, standard deviation, IQR, and outlier detection
- **Comparison**: Compares current results against saved baselines and flags regressions
### 2. Benchmark types
The test suite includes two categories of performance tests:
| Type | How it works | Examples |
|------|-------------|----------|
| **pytest-benchmark tests** | Uses the `benchmark` fixture for precise, multi-round timing | `test_health_endpoint_performance`, `test_openapi_schema_performance`, `test_password_hashing_performance`, `test_password_verification_performance`, `test_access_token_creation_performance`, `test_refresh_token_creation_performance`, `test_token_decode_performance` |
| **Manual latency tests** | Uses `time.perf_counter` with explicit thresholds (for async endpoints that pytest-benchmark doesn't support natively) | `test_login_latency`, `test_get_current_user_latency`, `test_register_latency`, `test_token_refresh_latency`, `test_sessions_list_latency`, `test_user_profile_update_latency` |
### 3. Regression detection
When running `make benchmark-check`, the system:
1. Runs all benchmark tests
2. Compares results against the saved baseline (`.benchmarks/` directory)
3. **Fails the build** if any test's mean time increases by more than **200%** relative to the baseline (i.e., becomes more than 3× slower)
The `200%` threshold in `--benchmark-compare-fail=mean:200%` means "fail if the mean increased by more than 200% relative to the baseline." This is deliberately generous to avoid false positives from normal run-to-run variance while still catching real regressions.
---
## Understanding Results
A typical benchmark output looks like this:
```
--------------------------------------------------------------------------------------- benchmark: 2 tests --------------------------------------------------------------------------------------
Name (time in ms) Min Max Mean StdDev Median IQR Outliers OPS Rounds Iterations
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
test_health_endpoint_performance 0.9841 (1.0) 1.5513 (1.0) 1.1390 (1.0) 0.1098 (1.0) 1.1151 (1.0) 0.1672 (1.0) 39;2 877.9666 (1.0) 133 1
test_openapi_schema_performance 1.6523 (1.68) 2.0892 (1.35) 1.7843 (1.57) 0.1553 (1.41) 1.7200 (1.54) 0.1727 (1.03) 2;0 560.4471 (0.64) 10 1
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
```
### Column reference
| Column | Meaning |
|--------|---------|
| **Min** | Fastest single execution |
| **Max** | Slowest single execution |
| **Mean** | Average across all rounds — the primary metric for regression detection |
| **StdDev** | How much results vary between rounds (lower = more stable) |
| **Median** | Middle value, less sensitive to outliers than mean |
| **IQR** | Interquartile range — spread of the middle 50% of results |
| **Outliers** | Format `A;B` — A = rounds more than 1 StdDev from the mean, B = rounds more than 1.5 IQR outside the quartiles |
| **OPS** | Operations per second (`1 / Mean`) |
| **Rounds** | How many times the test was executed (auto-calibrated) |
| **Iterations** | Iterations per round (usually 1 for ms-scale tests) |
### The ratio numbers `(1.0)`, `(1.68)`, etc.
These show how each test compares **to the best result in that column**. The fastest test is always `(1.0)`, and others show their relative factor. For example, `(1.68)` means "1.68× slower than the fastest."
### Color coding
- **Green**: The fastest (best) value in each column
- **Red**: The slowest (worst) value in each column
This is a **relative ranking within the current run** — red does NOT mean the test failed or that performance is bad. It simply highlights which endpoint is the slower one in the group.
### What's "normal"?
For this project's current endpoints:
| Test | Expected range | Why |
|------|---------------|-----|
| `GET /health` | ~1–1.5ms | Minimal logic, mocked DB check |
| `GET /api/v1/openapi.json` | ~1.5–2.5ms | Serializes entire API schema |
| `get_password_hash` | ~200ms | CPU-bound bcrypt hashing |
| `verify_password` | ~200ms | CPU-bound bcrypt verification |
| `create_access_token` | ~17–20µs | JWT encoding with HMAC-SHA256 |
| `create_refresh_token` | ~17–20µs | JWT encoding with HMAC-SHA256 |
| `decode_token` | ~20–25µs | JWT decoding and claim validation |
| `POST /api/v1/auth/login` | < 500ms threshold | Includes bcrypt password verification |
| `POST /api/v1/auth/register` | < 500ms threshold | Includes bcrypt password hashing |
| `POST /api/v1/auth/refresh` | < 200ms threshold | Token rotation + DB session update |
| `GET /api/v1/users/me` | < 200ms threshold | DB lookup + token validation |
| `GET /api/v1/sessions/me` | < 200ms threshold | Session list query + token validation |
| `PATCH /api/v1/users/me` | < 200ms threshold | DB update + token validation |
---
## Test Organization
```
backend/tests/
├── benchmarks/
│ └── test_endpoint_performance.py # All performance benchmark tests
backend/.benchmarks/ # Saved baselines (auto-generated)
└── Linux-CPython-3.12-64bit/
└── 0001_baseline.json # Platform-specific baseline file
```
### Test markers
All benchmark tests use the `@pytest.mark.benchmark` marker. The `--benchmark-only` flag ensures that only tests using the `benchmark` fixture are executed during benchmark runs, while manual latency tests (async) are skipped.
---
## Writing Benchmark Tests
### Stateless endpoint (using pytest-benchmark fixture)
```python
import pytest
from fastapi.testclient import TestClient
def test_my_endpoint_performance(sync_client, benchmark):
"""Benchmark: GET /my-endpoint should respond within acceptable latency."""
result = benchmark(sync_client.get, "/my-endpoint")
assert result.status_code == 200
```
The `benchmark` fixture handles all timing, calibration, and statistics automatically. Just pass it the callable and arguments.
### Async / DB-dependent endpoint (manual timing)
For async endpoints that require database access, use manual timing with an explicit threshold:
```python
import time
import pytest
MAX_RESPONSE_MS = 300
@pytest.mark.asyncio
async def test_my_async_endpoint_latency(client, setup_fixture):
"""Performance: endpoint must respond under threshold."""
iterations = 5
total_ms = 0.0
for _ in range(iterations):
start = time.perf_counter()
response = await client.get("/api/v1/my-endpoint")
elapsed_ms = (time.perf_counter() - start) * 1000
total_ms += elapsed_ms
assert response.status_code == 200
mean_ms = total_ms / iterations
assert mean_ms < MAX_RESPONSE_MS, (
f"Latency regression: {mean_ms:.1f}ms exceeds {MAX_RESPONSE_MS}ms threshold"
)
```
### Guidelines for new benchmarks
1. **Benchmark critical paths** — endpoints users hit frequently or where latency matters most
2. **Mock external dependencies** for stateless tests to isolate endpoint overhead
3. **Set generous thresholds** for manual tests — account for CI variability
4. **Keep benchmarks fast** — they run on every check, so avoid heavy setup
---
## Baseline Management
### Saving a baseline
```bash
make benchmark-save
```
This runs all benchmarks and saves results to `.benchmarks/<platform>/0001_baseline.json`. The baseline captures:
- Mean, min, max, median, stddev for each test
- Machine info (CPU, OS, Python version)
- Timestamp
### Comparing against baseline
```bash
make benchmark-check
```
If no baseline exists, this command automatically creates one and prints a warning. On subsequent runs, it compares current results against the saved baseline.
### When to update the baseline
- **After intentional performance changes** (e.g., you optimized an endpoint — save the new, faster baseline)
- **After infrastructure changes** (e.g., new CI runner, different hardware)
- **After adding new benchmark tests** (the new tests need a baseline entry)
```bash
# Update the baseline after intentional changes
make benchmark-save
```
### Version control
The `.benchmarks/` directory can be committed to version control so that CI pipelines can compare against a known-good baseline. However, since benchmark results are machine-specific, you may prefer to generate baselines in CI rather than committing local results.
---
## CI/CD Integration
Add benchmark checking to your CI pipeline to catch regressions on every PR:
```yaml
# Example GitHub Actions step
- name: Performance regression check
run: |
cd backend
make benchmark-save # Create baseline from main branch
# ... apply PR changes ...
make benchmark-check # Compare PR against baseline
```
A more robust approach:
1. Save the baseline on the `main` branch after each merge
2. On PR branches, run `make benchmark-check` against the `main` baseline
3. The pipeline fails if any endpoint regresses beyond the 200% threshold
---
## Troubleshooting
### "No benchmark baseline found" warning
```
⚠️ No benchmark baseline found. Run 'make benchmark-save' first to create one.
```
This means no baseline file exists yet. The command will auto-create one. Future runs of `make benchmark-check` will compare against it.
### Machine info mismatch warning
```
WARNING: benchmark machine_info is different
```
This is expected when comparing baselines generated on a different machine or OS. The comparison still works, but absolute numbers may differ. Re-save the baseline on the current machine if needed.
### High variance (large StdDev)
If StdDev is high relative to the Mean, results may be unreliable. Common causes:
- System under load during benchmark run
- Garbage collection interference
- Thermal throttling
Try running benchmarks on an idle system or increasing `min_rounds` in `pyproject.toml`.
### Only 7 of 13 tests run
The async tests (`test_login_latency`, `test_get_current_user_latency`, `test_register_latency`, `test_token_refresh_latency`, `test_sessions_list_latency`, `test_user_profile_update_latency`) are skipped during `--benchmark-only` runs because they don't use the `benchmark` fixture. They run as part of the normal test suite (`make test`) with manual threshold assertions.

View File

@@ -75,15 +75,14 @@ def create_user(db: Session, user_in: UserCreate) -> User:
### 4. Code Formatting ### 4. Code Formatting
Use automated formatters: Use automated formatters:
- **Black**: Code formatting - **Ruff**: Code formatting and linting (replaces Black, isort, flake8)
- **isort**: Import sorting - **pyright**: Static type checking
- **flake8**: Linting
Run before committing: Run before committing (or use `make validate`):
```bash ```bash
black app tests uv run ruff format app tests
isort app tests uv run ruff check app tests
flake8 app tests uv run pyright app
``` ```
## Code Organization ## Code Organization
@@ -94,19 +93,17 @@ Follow the 5-layer architecture strictly:
``` ```
API Layer (routes/) API Layer (routes/)
↓ calls ↓ calls (via service injected from dependencies/services.py)
Dependencies (dependencies/)
↓ injects
Service Layer (services/) Service Layer (services/)
↓ calls ↓ calls
CRUD Layer (crud/) Repository Layer (repositories/)
↓ uses ↓ uses
Models & Schemas (models/, schemas/) Models & Schemas (models/, schemas/)
``` ```
**Rules:** **Rules:**
- Routes should NOT directly call CRUD operations (use services when business logic is needed) - Routes must NEVER import repositories directly — always use a service
- CRUD operations should NOT contain business logic - Services call repositories; repositories contain only database operations
- Models should NOT import from higher layers - Models should NOT import from higher layers
- Each layer should only depend on the layer directly below it - Each layer should only depend on the layer directly below it
@@ -125,7 +122,7 @@ from sqlalchemy.orm import Session
# 3. Local application imports # 3. Local application imports
from app.api.dependencies.auth import get_current_user from app.api.dependencies.auth import get_current_user
from app.crud import user_crud from app.api.dependencies.services import get_user_service
from app.models.user import User from app.models.user import User
from app.schemas.users import UserResponse, UserCreate from app.schemas.users import UserResponse, UserCreate
``` ```
@@ -217,7 +214,7 @@ if not user:
### Error Handling Pattern ### Error Handling Pattern
Always follow this pattern in CRUD operations (Async version): Always follow this pattern in repository operations (Async version):
```python ```python
from sqlalchemy.exc import IntegrityError, OperationalError, DataError from sqlalchemy.exc import IntegrityError, OperationalError, DataError
@@ -430,7 +427,7 @@ backend/app/alembic/versions/
## Database Operations ## Database Operations
### Async CRUD Pattern ### Async Repository Pattern
**IMPORTANT**: This application uses **async SQLAlchemy** with modern patterns for better performance and testability. **IMPORTANT**: This application uses **async SQLAlchemy** with modern patterns for better performance and testability.
@@ -442,19 +439,19 @@ backend/app/alembic/versions/
4. **Testability**: Easy to mock and test 4. **Testability**: Easy to mock and test
5. **Consistent Ordering**: Always order queries for pagination 5. **Consistent Ordering**: Always order queries for pagination
### Use the Async CRUD Base Class ### Use the Async Repository Base Class
Always inherit from `CRUDBase` for database operations: Always inherit from `RepositoryBase` for database operations:
```python ```python
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select from sqlalchemy import select
from app.crud.base import CRUDBase from app.repositories.base import RepositoryBase
from app.models.user import User from app.models.user import User
from app.schemas.users import UserCreate, UserUpdate from app.schemas.users import UserCreate, UserUpdate
class CRUDUser(CRUDBase[User, UserCreate, UserUpdate]): class UserRepository(RepositoryBase[User, UserCreate, UserUpdate]):
"""CRUD operations for User model.""" """Repository for User model — database operations only."""
async def get_by_email( async def get_by_email(
self, self,
@@ -467,7 +464,7 @@ class CRUDUser(CRUDBase[User, UserCreate, UserUpdate]):
) )
return result.scalar_one_or_none() return result.scalar_one_or_none()
user_crud = CRUDUser(User) user_repo = UserRepository(User)
``` ```
**Key Points:** **Key Points:**
@@ -476,6 +473,7 @@ user_crud = CRUDUser(User)
- Use `await db.execute()` for queries - Use `await db.execute()` for queries
- Use `.scalar_one_or_none()` instead of `.first()` - Use `.scalar_one_or_none()` instead of `.first()`
- Use `T | None` instead of `Optional[T]` - Use `T | None` instead of `Optional[T]`
- Repository instances are used internally by services — never import them in routes
### Modern SQLAlchemy Patterns ### Modern SQLAlchemy Patterns
@@ -563,13 +561,13 @@ async def create_user(
The database session is automatically managed by FastAPI. The database session is automatically managed by FastAPI.
Commit on success, rollback on error. Commit on success, rollback on error.
""" """
return await user_crud.create(db, obj_in=user_in) return await user_service.create_user(db, obj_in=user_in)
``` ```
**Key Points:** **Key Points:**
- Route functions must be `async def` - Route functions must be `async def`
- Database parameter is `AsyncSession` - Database parameter is `AsyncSession`
- Always `await` CRUD operations - Always `await` repository operations
#### In Services (Multiple Operations) #### In Services (Multiple Operations)
@@ -582,12 +580,11 @@ async def complex_operation(
""" """
Perform multiple database operations atomically. Perform multiple database operations atomically.
The session automatically commits on success or rolls back on error. Services call repositories; commit/rollback is handled inside
each repository method.
""" """
user = await user_crud.create(db, obj_in=user_data) user = await user_repo.create(db, obj_in=user_data)
session = await session_crud.create(db, obj_in=session_data) session = await session_repo.create(db, obj_in=session_data)
# Commit is handled by the route's dependency
return user, session return user, session
``` ```
@@ -597,10 +594,10 @@ Prefer soft deletes over hard deletes for audit trails:
```python ```python
# Good - Soft delete (sets deleted_at) # Good - Soft delete (sets deleted_at)
await user_crud.soft_delete(db, id=user_id) await user_repo.soft_delete(db, id=user_id)
# Acceptable only when required - Hard delete # Acceptable only when required - Hard delete
user_crud.remove(db, id=user_id) await user_repo.remove(db, id=user_id)
``` ```
### Query Patterns ### Query Patterns
@@ -740,9 +737,10 @@ Always implement pagination for list endpoints:
from app.schemas.common import PaginationParams, PaginatedResponse from app.schemas.common import PaginationParams, PaginatedResponse
@router.get("/users", response_model=PaginatedResponse[UserResponse]) @router.get("/users", response_model=PaginatedResponse[UserResponse])
def list_users( async def list_users(
pagination: PaginationParams = Depends(), pagination: PaginationParams = Depends(),
db: Session = Depends(get_db) user_service: UserService = Depends(get_user_service),
db: AsyncSession = Depends(get_db),
): ):
""" """
List all users with pagination. List all users with pagination.
@@ -750,10 +748,8 @@ def list_users(
Default page size: 20 Default page size: 20
Maximum page size: 100 Maximum page size: 100
""" """
users, total = user_crud.get_multi_with_total( users, total = await user_service.get_users(
db, db, skip=pagination.offset, limit=pagination.limit
skip=pagination.offset,
limit=pagination.limit
) )
return PaginatedResponse(data=users, pagination=pagination.create_meta(total)) return PaginatedResponse(data=users, pagination=pagination.create_meta(total))
``` ```
@@ -816,19 +812,17 @@ def admin_route(
pass pass
# Check ownership # Check ownership
def delete_resource( async def delete_resource(
resource_id: UUID, resource_id: UUID,
current_user: User = Depends(get_current_user), current_user: User = Depends(get_current_user),
db: Session = Depends(get_db) resource_service: ResourceService = Depends(get_resource_service),
db: AsyncSession = Depends(get_db),
): ):
resource = resource_crud.get(db, id=resource_id) # Service handles ownership check and raises appropriate errors
if not resource: await resource_service.delete_resource(
raise NotFoundError("Resource not found") db, resource_id=resource_id, user_id=current_user.id,
is_superuser=current_user.is_superuser,
if resource.user_id != current_user.id and not current_user.is_superuser: )
raise AuthorizationError("You can only delete your own resources")
resource_crud.remove(db, id=resource_id)
``` ```
### Input Validation ### Input Validation
@@ -862,9 +856,9 @@ tests/
├── api/ # Integration tests ├── api/ # Integration tests
│ ├── test_users.py │ ├── test_users.py
│ └── test_auth.py │ └── test_auth.py
├── crud/ # Unit tests for CRUD ├── repositories/ # Unit tests for repositories
├── models/ # Model tests ├── services/ # Unit tests for services
└── services/ # Service tests └── models/ # Model tests
``` ```
### Async Testing with pytest-asyncio ### Async Testing with pytest-asyncio
@@ -927,7 +921,7 @@ async def test_user(db_session: AsyncSession) -> User:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_user(db_session: AsyncSession, test_user: User): async def test_get_user(db_session: AsyncSession, test_user: User):
"""Test retrieving a user by ID.""" """Test retrieving a user by ID."""
user = await user_crud.get(db_session, id=test_user.id) user = await user_repo.get(db_session, id=test_user.id)
assert user is not None assert user is not None
assert user.email == test_user.email assert user.email == test_user.email
``` ```

View File

@@ -334,14 +334,14 @@ def login(request: Request, credentials: OAuth2PasswordRequestForm):
# ❌ WRONG - Returns password hash! # ❌ WRONG - Returns password hash!
@router.get("/users/{user_id}") @router.get("/users/{user_id}")
def get_user(user_id: UUID, db: Session = Depends(get_db)) -> User: def get_user(user_id: UUID, db: Session = Depends(get_db)) -> User:
return user_crud.get(db, id=user_id) # Returns ORM model with ALL fields! return user_repo.get(db, id=user_id) # Returns ORM model with ALL fields!
``` ```
```python ```python
# ✅ CORRECT - Use response schema # ✅ CORRECT - Use response schema
@router.get("/users/{user_id}", response_model=UserResponse) @router.get("/users/{user_id}", response_model=UserResponse)
def get_user(user_id: UUID, db: Session = Depends(get_db)): def get_user(user_id: UUID, db: Session = Depends(get_db)):
user = user_crud.get(db, id=user_id) user = user_repo.get(db, id=user_id)
if not user: if not user:
raise HTTPException(status_code=404, detail="User not found") raise HTTPException(status_code=404, detail="User not found")
return user # Pydantic filters to only UserResponse fields return user # Pydantic filters to only UserResponse fields
@@ -506,8 +506,8 @@ def revoke_session(
current_user: User = Depends(get_current_user), current_user: User = Depends(get_current_user),
db: Session = Depends(get_db) db: Session = Depends(get_db)
): ):
session = session_crud.get(db, id=session_id) session = session_repo.get(db, id=session_id)
session_crud.deactivate(db, session_id=session_id) session_repo.deactivate(db, session_id=session_id)
# BUG: User can revoke ANYONE'S session! # BUG: User can revoke ANYONE'S session!
return {"message": "Session revoked"} return {"message": "Session revoked"}
``` ```
@@ -520,7 +520,7 @@ def revoke_session(
current_user: User = Depends(get_current_user), current_user: User = Depends(get_current_user),
db: Session = Depends(get_db) db: Session = Depends(get_db)
): ):
session = session_crud.get(db, id=session_id) session = session_repo.get(db, id=session_id)
if not session: if not session:
raise NotFoundError("Session not found") raise NotFoundError("Session not found")
@@ -529,7 +529,7 @@ def revoke_session(
if session.user_id != current_user.id: if session.user_id != current_user.id:
raise AuthorizationError("You can only revoke your own sessions") raise AuthorizationError("You can only revoke your own sessions")
session_crud.deactivate(db, session_id=session_id) session_repo.deactivate(db, session_id=session_id)
return {"message": "Session revoked"} return {"message": "Session revoked"}
``` ```
@@ -616,7 +616,43 @@ def create_user(
return user return user
``` ```
**Rule**: Add type hints to ALL functions. Use `mypy` to enforce type checking. **Rule**: Add type hints to ALL functions. Use `pyright` to enforce type checking (`make type-check`).
---
---
### ❌ PITFALL #19: Importing Repositories Directly in Routes
**Issue**: Routes should never call repositories directly. The layered architecture requires all business operations to go through the service layer.
```python
# ❌ WRONG - Route bypasses service layer
from app.repositories.session import session_repo
@router.get("/sessions/me")
async def list_sessions(
current_user: User = Depends(get_current_active_user),
db: AsyncSession = Depends(get_db),
):
return await session_repo.get_user_sessions(db, user_id=current_user.id)
```
```python
# ✅ CORRECT - Route calls service injected via dependency
from app.api.dependencies.services import get_session_service
from app.services.session_service import SessionService
@router.get("/sessions/me")
async def list_sessions(
current_user: User = Depends(get_current_active_user),
session_service: SessionService = Depends(get_session_service),
db: AsyncSession = Depends(get_db),
):
return await session_service.get_user_sessions(db, user_id=current_user.id)
```
**Rule**: Routes import from `app.api.dependencies.services`, never from `app.repositories.*`. Services are the only callers of repositories.
--- ---
@@ -649,6 +685,11 @@ Use this checklist to catch issues before code review:
- [ ] Resource ownership verification - [ ] Resource ownership verification
- [ ] CORS configured (no wildcards in production) - [ ] CORS configured (no wildcards in production)
### Architecture
- [ ] Routes never import repositories directly (only services)
- [ ] Services call repositories; repositories call database only
- [ ] New service registered in `app/api/dependencies/services.py`
### Python ### Python
- [ ] Use `==` not `is` for value comparison - [ ] Use `==` not `is` for value comparison
- [ ] No mutable default arguments - [ ] No mutable default arguments
@@ -661,21 +702,18 @@ Use this checklist to catch issues before code review:
### Pre-commit Checks ### Pre-commit Checks
Add these to your development workflow: Add these to your development workflow (or use `make validate`):
```bash ```bash
# Format code # Format + lint (Ruff replaces Black, isort, flake8)
black app tests uv run ruff format app tests
isort app tests uv run ruff check app tests
# Type checking # Type checking
mypy app --strict uv run pyright app
# Linting
flake8 app tests
# Run tests # Run tests
pytest --cov=app --cov-report=term-missing IS_TEST=True uv run pytest --cov=app --cov-report=term-missing
# Check coverage (should be 80%+) # Check coverage (should be 80%+)
coverage report --fail-under=80 coverage report --fail-under=80
@@ -693,6 +731,6 @@ Add new entries when:
--- ---
**Last Updated**: 2025-10-31 **Last Updated**: 2026-02-28
**Issues Cataloged**: 18 common pitfalls **Issues Cataloged**: 19 common pitfalls
**Remember**: This document exists because these issues HAVE occurred. Don't skip it. **Remember**: This document exists because these issues HAVE occurred. Don't skip it.

View File

@@ -99,7 +99,7 @@ backend/tests/
│ └── test_database_workflows.py # PostgreSQL workflow tests │ └── test_database_workflows.py # PostgreSQL workflow tests
├── api/ # Integration tests (SQLite, fast) ├── api/ # Integration tests (SQLite, fast)
├── crud/ # Unit tests ├── repositories/ # Repository unit tests
└── conftest.py # Standard fixtures └── conftest.py # Standard fixtures
``` ```

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
#!/bin/bash #!/bin/sh
set -e set -e
echo "Starting Backend" echo "Starting Backend"

View File

@@ -20,43 +20,36 @@ dependencies = [
"uvicorn>=0.34.0", "uvicorn>=0.34.0",
"pydantic>=2.10.6", "pydantic>=2.10.6",
"pydantic-settings>=2.2.1", "pydantic-settings>=2.2.1",
"python-multipart>=0.0.19", "python-multipart>=0.0.22",
"fastapi-utils==0.8.0", "fastapi-utils==0.8.0",
# Database # Database
"sqlalchemy>=2.0.29", "sqlalchemy>=2.0.29",
"alembic>=1.14.1", "alembic>=1.14.1",
"psycopg2-binary>=2.9.9", "psycopg2-binary>=2.9.9",
"asyncpg>=0.29.0", "asyncpg>=0.29.0",
"aiosqlite==0.21.0", "aiosqlite==0.21.0",
# Environment configuration # Environment configuration
"python-dotenv>=1.0.1", "python-dotenv>=1.0.1",
# API utilities # API utilities
"email-validator>=2.1.0.post1", "email-validator>=2.1.0.post1",
"ujson>=5.9.0", "ujson>=5.9.0",
# CORS and security # CORS and security
"starlette>=0.40.0", "starlette>=0.40.0",
"starlette-csrf>=1.4.5", "starlette-csrf>=1.4.5",
"slowapi>=0.1.9", "slowapi>=0.1.9",
# Utilities # Utilities
"httpx>=0.27.0", "httpx>=0.27.0",
"tenacity>=8.2.3", "tenacity>=8.2.3",
"pytz>=2024.1", "pytz>=2024.1",
"pillow>=10.3.0", "pillow>=12.1.1",
"apscheduler==3.11.0", "apscheduler==3.11.0",
# Security and authentication
# Security and authentication (pinned for reproducibility) "PyJWT>=2.9.0",
"python-jose==3.4.0",
"passlib==1.7.4",
"bcrypt==4.2.1", "bcrypt==4.2.1",
"cryptography==44.0.1", "cryptography>=46.0.5",
# OAuth authentication # OAuth authentication
"authlib>=1.3.0", "authlib>=1.6.6",
"urllib3>=2.6.3",
] ]
# Development dependencies # Development dependencies
@@ -73,6 +66,17 @@ dev = [
# Development tools # Development tools
"ruff>=0.8.0", # All-in-one: linting, formatting, import sorting "ruff>=0.8.0", # All-in-one: linting, formatting, import sorting
"pyright>=1.1.390", # Type checking "pyright>=1.1.390", # Type checking
# Security auditing
"pip-audit>=2.7.0", # Dependency vulnerability scanning (PyPA/OSV)
"pip-licenses>=4.0.0", # License compliance checking
"detect-secrets>=1.5.0", # Hardcoded secrets detection
# Performance benchmarking
"pytest-benchmark>=4.0.0", # Performance regression detection
# Pre-commit hooks
"pre-commit>=4.0.0", # Git pre-commit hook framework
] ]
# E2E testing with real PostgreSQL (requires Docker) # E2E testing with real PostgreSQL (requires Docker)
@@ -131,6 +135,8 @@ select = [
"RUF", # Ruff-specific "RUF", # Ruff-specific
"ASYNC", # flake8-async "ASYNC", # flake8-async
"S", # flake8-bandit (security) "S", # flake8-bandit (security)
"G", # flake8-logging-format (logging best practices)
"T20", # flake8-print (no print statements in production code)
] ]
# Ignore specific rules # Ignore specific rules
@@ -154,11 +160,13 @@ unfixable = []
[tool.ruff.lint.per-file-ignores] [tool.ruff.lint.per-file-ignores]
"app/alembic/env.py" = ["E402", "F403", "F405"] # Alembic requires specific import order "app/alembic/env.py" = ["E402", "F403", "F405"] # Alembic requires specific import order
"app/alembic/versions/*.py" = ["E402"] # Migration files have specific structure "app/alembic/versions/*.py" = ["E402"] # Migration files have specific structure
"tests/**/*.py" = ["S101", "N806", "B017", "N817", "S110", "ASYNC251", "RUF043"] # pytest: asserts, CamelCase fixtures, blind exceptions, try-pass patterns, and async test helpers are intentional "tests/**/*.py" = ["S101", "N806", "B017", "N817", "ASYNC251", "RUF043", "T20"] # pytest: asserts, CamelCase fixtures, blind exceptions, async test helpers, and print for debugging are intentional
"app/models/__init__.py" = ["F401"] # __init__ files re-export modules "app/models/__init__.py" = ["F401"] # __init__ files re-export modules
"app/models/base.py" = ["F401"] # Re-exports Base for use by other models "app/models/base.py" = ["F401"] # Re-exports Base for use by other models
"app/utils/test_utils.py" = ["N806"] # SQLAlchemy session factories use CamelCase convention "app/utils/test_utils.py" = ["N806"] # SQLAlchemy session factories use CamelCase convention
"app/main.py" = ["N806"] # Constants use UPPER_CASE convention "app/main.py" = ["N806"] # Constants use UPPER_CASE convention
"app/init_db.py" = ["T20"] # CLI script uses print for user-facing output
"migrate.py" = ["T20"] # CLI script uses print for user-facing output
# ============================================================================ # ============================================================================
# Ruff Import Sorting (isort replacement) # Ruff Import Sorting (isort replacement)
@@ -201,12 +209,15 @@ addopts = [
"--cov=app", "--cov=app",
"--cov-report=term-missing", "--cov-report=term-missing",
"--cov-report=html", "--cov-report=html",
"--ignore=tests/benchmarks", # benchmarks are incompatible with xdist; run via 'make benchmark'
"-p", "no:benchmark", # disable pytest-benchmark plugin during normal runs (conflicts with xdist)
] ]
markers = [ markers = [
"sqlite: marks tests that should run on SQLite (mocked).", "sqlite: marks tests that should run on SQLite (mocked).",
"postgres: marks tests that require a real PostgreSQL database.", "postgres: marks tests that require a real PostgreSQL database.",
"e2e: marks end-to-end tests requiring Docker containers.", "e2e: marks end-to-end tests requiring Docker containers.",
"schemathesis: marks Schemathesis-generated API tests.", "schemathesis: marks Schemathesis-generated API tests.",
"benchmark: marks performance benchmark tests.",
] ]
asyncio_default_fixture_loop_scope = "function" asyncio_default_fixture_loop_scope = "function"

View File

@@ -13,7 +13,7 @@ import pytest
from httpx import AsyncClient from httpx import AsyncClient
from app.models.user import User from app.models.user import User
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
class TestRevokedSessionSecurity: class TestRevokedSessionSecurity:
@@ -117,7 +117,7 @@ class TestRevokedSessionSecurity:
async with SessionLocal() as session: async with SessionLocal() as session:
# Find and delete the session # Find and delete the session
db_session = await session_crud.get_by_jti(session, jti=jti) db_session = await session_repo.get_by_jti(session, jti=jti)
if db_session: if db_session:
await session.delete(db_session) await session.delete(db_session)
await session.commit() await session.commit()

View File

@@ -13,7 +13,7 @@ from httpx import AsyncClient
from app.models.organization import Organization from app.models.organization import Organization
from app.models.user import User from app.models.user import User
from app.repositories.user import user_repo as user_crud from app.repositories.user import user_repo as user_repo
class TestInactiveUserBlocking: class TestInactiveUserBlocking:
@@ -50,7 +50,7 @@ class TestInactiveUserBlocking:
# Step 2: Admin deactivates the user # Step 2: Admin deactivates the user
async with SessionLocal() as session: async with SessionLocal() as session:
user = await user_crud.get(session, id=async_test_user.id) user = await user_repo.get(session, id=async_test_user.id)
user.is_active = False user.is_active = False
await session.commit() await session.commit()
@@ -80,7 +80,7 @@ class TestInactiveUserBlocking:
# Deactivate user # Deactivate user
async with SessionLocal() as session: async with SessionLocal() as session:
user = await user_crud.get(session, id=async_test_user.id) user = await user_repo.get(session, id=async_test_user.id)
user.is_active = False user.is_active = False
await session.commit() await session.commit()

View File

@@ -39,7 +39,7 @@ async def async_test_user2(async_test_db):
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
from app.repositories.user import user_repo as user_crud from app.repositories.user import user_repo as user_repo
from app.schemas.users import UserCreate from app.schemas.users import UserCreate
user_data = UserCreate( user_data = UserCreate(
@@ -48,7 +48,7 @@ async def async_test_user2(async_test_db):
first_name="Test", first_name="Test",
last_name="User2", last_name="User2",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
await session.commit() await session.commit()
await session.refresh(user) await session.refresh(user)
return user return user
@@ -191,9 +191,9 @@ class TestRevokeSession:
# Verify session is deactivated # Verify session is deactivated
async with SessionLocal() as session: async with SessionLocal() as session:
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
revoked_session = await session_crud.get(session, id=str(session_id)) revoked_session = await session_repo.get(session, id=str(session_id))
assert revoked_session.is_active is False assert revoked_session.is_active is False
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -267,8 +267,8 @@ class TestCleanupExpiredSessions:
"""Test successfully cleaning up expired sessions.""" """Test successfully cleaning up expired sessions."""
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
# Create expired and active sessions using CRUD to avoid greenlet issues # Create expired and active sessions using repository to avoid greenlet issues
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
from app.schemas.sessions import SessionCreate from app.schemas.sessions import SessionCreate
async with SessionLocal() as db: async with SessionLocal() as db:
@@ -282,7 +282,7 @@ class TestCleanupExpiredSessions:
expires_at=datetime.now(UTC) - timedelta(days=1), expires_at=datetime.now(UTC) - timedelta(days=1),
last_used_at=datetime.now(UTC) - timedelta(days=2), last_used_at=datetime.now(UTC) - timedelta(days=2),
) )
e1 = await session_crud.create_session(db, obj_in=e1_data) e1 = await session_repo.create_session(db, obj_in=e1_data)
e1.is_active = False e1.is_active = False
db.add(e1) db.add(e1)
@@ -296,7 +296,7 @@ class TestCleanupExpiredSessions:
expires_at=datetime.now(UTC) - timedelta(hours=1), expires_at=datetime.now(UTC) - timedelta(hours=1),
last_used_at=datetime.now(UTC) - timedelta(hours=2), last_used_at=datetime.now(UTC) - timedelta(hours=2),
) )
e2 = await session_crud.create_session(db, obj_in=e2_data) e2 = await session_repo.create_session(db, obj_in=e2_data)
e2.is_active = False e2.is_active = False
db.add(e2) db.add(e2)
@@ -310,7 +310,7 @@ class TestCleanupExpiredSessions:
expires_at=datetime.now(UTC) + timedelta(days=7), expires_at=datetime.now(UTC) + timedelta(days=7),
last_used_at=datetime.now(UTC), last_used_at=datetime.now(UTC),
) )
await session_crud.create_session(db, obj_in=a1_data) await session_repo.create_session(db, obj_in=a1_data)
await db.commit() await db.commit()
# Cleanup expired sessions # Cleanup expired sessions
@@ -333,8 +333,8 @@ class TestCleanupExpiredSessions:
"""Test cleanup when no sessions are expired.""" """Test cleanup when no sessions are expired."""
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
# Create only active sessions using CRUD # Create only active sessions using repository
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
from app.schemas.sessions import SessionCreate from app.schemas.sessions import SessionCreate
async with SessionLocal() as db: async with SessionLocal() as db:
@@ -347,7 +347,7 @@ class TestCleanupExpiredSessions:
expires_at=datetime.now(UTC) + timedelta(days=7), expires_at=datetime.now(UTC) + timedelta(days=7),
last_used_at=datetime.now(UTC), last_used_at=datetime.now(UTC),
) )
await session_crud.create_session(db, obj_in=a1_data) await session_repo.create_session(db, obj_in=a1_data)
await db.commit() await db.commit()
response = await client.delete( response = await client.delete(
@@ -384,7 +384,7 @@ class TestSessionsAdditionalCases:
# Create multiple sessions # Create multiple sessions
async with SessionLocal() as session: async with SessionLocal() as session:
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
from app.schemas.sessions import SessionCreate from app.schemas.sessions import SessionCreate
for i in range(5): for i in range(5):
@@ -397,7 +397,7 @@ class TestSessionsAdditionalCases:
expires_at=datetime.now(UTC) + timedelta(days=7), expires_at=datetime.now(UTC) + timedelta(days=7),
last_used_at=datetime.now(UTC), last_used_at=datetime.now(UTC),
) )
await session_crud.create_session(session, obj_in=session_data) await session_repo.create_session(session, obj_in=session_data)
await session.commit() await session.commit()
response = await client.get( response = await client.get(
@@ -431,7 +431,7 @@ class TestSessionsAdditionalCases:
"""Test cleanup with mix of active/inactive and expired/not-expired sessions.""" """Test cleanup with mix of active/inactive and expired/not-expired sessions."""
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
from app.schemas.sessions import SessionCreate from app.schemas.sessions import SessionCreate
async with SessionLocal() as db: async with SessionLocal() as db:
@@ -445,7 +445,7 @@ class TestSessionsAdditionalCases:
expires_at=datetime.now(UTC) - timedelta(days=1), expires_at=datetime.now(UTC) - timedelta(days=1),
last_used_at=datetime.now(UTC) - timedelta(days=2), last_used_at=datetime.now(UTC) - timedelta(days=2),
) )
e1 = await session_crud.create_session(db, obj_in=e1_data) e1 = await session_repo.create_session(db, obj_in=e1_data)
e1.is_active = False e1.is_active = False
db.add(e1) db.add(e1)
@@ -459,7 +459,7 @@ class TestSessionsAdditionalCases:
expires_at=datetime.now(UTC) - timedelta(hours=1), expires_at=datetime.now(UTC) - timedelta(hours=1),
last_used_at=datetime.now(UTC) - timedelta(hours=2), last_used_at=datetime.now(UTC) - timedelta(hours=2),
) )
await session_crud.create_session(db, obj_in=e2_data) await session_repo.create_session(db, obj_in=e2_data)
await db.commit() await db.commit()
@@ -530,7 +530,7 @@ class TestSessionExceptionHandlers:
from app.repositories import session as session_module from app.repositories import session as session_module
# First create a session to revoke # First create a session to revoke
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
from app.schemas.sessions import SessionCreate from app.schemas.sessions import SessionCreate
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
@@ -545,7 +545,7 @@ class TestSessionExceptionHandlers:
last_used_at=datetime.now(UTC), last_used_at=datetime.now(UTC),
expires_at=datetime.now(UTC) + timedelta(days=60), expires_at=datetime.now(UTC) + timedelta(days=60),
) )
user_session = await session_crud.create_session(db, obj_in=session_in) user_session = await session_repo.create_session(db, obj_in=session_in)
session_id = user_session.id session_id = user_session.id
# Mock the deactivate method to raise an exception # Mock the deactivate method to raise an exception

View File

@@ -157,7 +157,7 @@ class TestListUsers:
response = await client.get("/api/v1/users") response = await client.get("/api/v1/users")
assert response.status_code == status.HTTP_401_UNAUTHORIZED assert response.status_code == status.HTTP_401_UNAUTHORIZED
# Note: Removed test_list_users_unexpected_error because mocking at CRUD level # Note: Removed test_list_users_unexpected_error because mocking at repository level
# causes the exception to be raised before FastAPI can handle it properly # causes the exception to be raised before FastAPI can handle it properly

View File

View File

@@ -0,0 +1,327 @@
"""
Performance Benchmark Tests.
These tests establish baseline performance metrics for critical API endpoints
and core operations, detecting regressions when response times degrade.
Usage:
make benchmark # Run benchmarks and save baseline
make benchmark-check # Run benchmarks and compare against saved baseline
Baselines are stored in .benchmarks/ and should be committed to version control
so CI can detect performance regressions across commits.
"""
import time
import uuid
from unittest.mock import patch
import pytest
import pytest_asyncio
from fastapi.testclient import TestClient
from app.core.auth import (
create_access_token,
create_refresh_token,
decode_token,
get_password_hash,
verify_password,
)
from app.main import app
# Tag every test in this module so the benchmark suite can be selected or
# excluded as a group (e.g. `pytest -m benchmark`).
pytestmark = [pytest.mark.benchmark]
# Pre-computed hash for sync benchmarks (avoids hashing in every iteration)
# so verification benchmarks measure only `verify_password` itself.
_BENCH_PASSWORD = "BenchPass123!"
_BENCH_HASH = get_password_hash(_BENCH_PASSWORD)
# =============================================================================
# Fixtures
# =============================================================================
@pytest.fixture
def sync_client():
    """Yield a TestClient with the database health check stubbed out.

    Patching ``app.main.check_database_health`` lets the stateless
    endpoints (health, OpenAPI schema) be benchmarked without a database.
    """
    with patch("app.main.check_database_health") as health_stub:
        health_stub.return_value = True
        yield TestClient(app)
# =============================================================================
# Stateless Endpoint Benchmarks (no DB required)
# =============================================================================
def test_health_endpoint_performance(sync_client, benchmark):
    """Benchmark: GET /health should respond within acceptable latency."""
    response = benchmark(sync_client.get, "/health")
    # The benchmark fixture returns the last call's result; sanity-check it.
    assert response.status_code == 200
def test_openapi_schema_performance(sync_client, benchmark):
    """Benchmark: OpenAPI schema generation should not regress."""
    response = benchmark(sync_client.get, "/api/v1/openapi.json")
    assert response.status_code == 200
# =============================================================================
# Core Crypto & Token Benchmarks (no DB required)
#
# These benchmark the CPU-intensive operations that underpin auth:
# password hashing, verification, and JWT creation/decoding.
# =============================================================================
def test_password_hashing_performance(benchmark):
    """Benchmark: bcrypt password hashing (CPU-bound, ~100ms expected)."""
    hashed = benchmark(get_password_hash, _BENCH_PASSWORD)
    # bcrypt output carries the "$2b$" algorithm prefix.
    assert hashed.startswith("$2b$")
def test_password_verification_performance(benchmark):
    """Benchmark: bcrypt verification against a pre-computed hash."""
    matched = benchmark(verify_password, _BENCH_PASSWORD, _BENCH_HASH)
    assert matched is True
def test_access_token_creation_performance(benchmark):
    """Benchmark: JWT access token generation."""
    subject = str(uuid.uuid4())
    token = benchmark(create_access_token, subject)
    assert isinstance(token, str)
    assert len(token) > 0
def test_refresh_token_creation_performance(benchmark):
    """Benchmark: JWT refresh token generation."""
    subject = str(uuid.uuid4())
    token = benchmark(create_refresh_token, subject)
    assert isinstance(token, str)
    assert len(token) > 0
def test_token_decode_performance(benchmark):
    """Benchmark: JWT token decoding and validation."""
    subject = str(uuid.uuid4())
    # Create the token outside the benchmark so only decoding is measured.
    token = create_access_token(subject)
    payload = benchmark(decode_token, token, "access")
    assert payload.sub == subject
# =============================================================================
# Database-dependent Endpoint Benchmarks (async, manual timing)
#
# pytest-benchmark does not support async functions natively. These tests
# measure latency manually and assert against a maximum threshold (in ms)
# to catch performance regressions.
# =============================================================================
# Per-endpoint mean-latency ceilings, in milliseconds. The async tests below
# fail when the measured mean exceeds these, flagging a regression.
# NOTE(review): values look like generous upper bounds to avoid CI flakiness
# rather than tight SLOs — confirm before tightening.
MAX_LOGIN_MS = 500
MAX_GET_USER_MS = 200
MAX_REGISTER_MS = 500
MAX_TOKEN_REFRESH_MS = 200
MAX_SESSIONS_LIST_MS = 200
MAX_USER_UPDATE_MS = 200
@pytest_asyncio.fixture
async def bench_user(async_test_db):
    """Persist and return a dedicated user for the latency benchmarks."""
    from app.models.user import User

    _engine, session_factory = async_test_db
    async with session_factory() as db:
        benchmark_user = User(
            id=uuid.uuid4(),
            email="bench@example.com",
            password_hash=get_password_hash("BenchPass123!"),
            first_name="Bench",
            last_name="User",
            is_active=True,
            is_superuser=False,
        )
        db.add(benchmark_user)
        await db.commit()
        # Refresh so DB-generated fields are populated on the returned object.
        await db.refresh(benchmark_user)
        return benchmark_user
@pytest_asyncio.fixture
async def bench_token(client, bench_user):
    """Log in as the benchmark user and return an access token."""
    response = await client.post(
        "/api/v1/auth/login",
        json={"email": "bench@example.com", "password": "BenchPass123!"},
    )
    assert response.status_code == 200, f"Login failed: {response.text}"
    body = response.json()
    return body["access_token"]
@pytest_asyncio.fixture
async def bench_refresh_token(client, bench_user):
    """Log in as the benchmark user and return a refresh token."""
    response = await client.post(
        "/api/v1/auth/login",
        json={"email": "bench@example.com", "password": "BenchPass123!"},
    )
    assert response.status_code == 200, f"Login failed: {response.text}"
    body = response.json()
    return body["refresh_token"]
@pytest.mark.asyncio
async def test_login_latency(client, bench_user):
    """Performance: POST /api/v1/auth/login must respond under threshold."""
    samples: list[float] = []
    for _ in range(5):
        start = time.perf_counter()
        response = await client.post(
            "/api/v1/auth/login",
            json={"email": "bench@example.com", "password": "BenchPass123!"},
        )
        samples.append((time.perf_counter() - start) * 1000)
        assert response.status_code == 200
    mean_ms = sum(samples) / len(samples)
    print(f"\n Login mean latency: {mean_ms:.1f}ms (threshold: {MAX_LOGIN_MS}ms)")
    assert mean_ms < MAX_LOGIN_MS, (
        f"Login latency regression: {mean_ms:.1f}ms exceeds {MAX_LOGIN_MS}ms threshold"
    )
@pytest.mark.asyncio
async def test_get_current_user_latency(client, bench_token):
    """Performance: GET /api/v1/users/me must respond under threshold."""
    samples: list[float] = []
    auth_headers = {"Authorization": f"Bearer {bench_token}"}
    for _ in range(10):
        start = time.perf_counter()
        response = await client.get("/api/v1/users/me", headers=auth_headers)
        samples.append((time.perf_counter() - start) * 1000)
        assert response.status_code == 200
    mean_ms = sum(samples) / len(samples)
    print(
        f"\n Get user mean latency: {mean_ms:.1f}ms (threshold: {MAX_GET_USER_MS}ms)"
    )
    assert mean_ms < MAX_GET_USER_MS, (
        f"Get user latency regression: {mean_ms:.1f}ms exceeds {MAX_GET_USER_MS}ms threshold"
    )
@pytest.mark.asyncio
async def test_register_latency(client):
    """Performance: POST /api/v1/auth/register must respond under threshold."""
    samples: list[float] = []
    # Unique email per iteration so each registration succeeds.
    for i in range(3):
        start = time.perf_counter()
        response = await client.post(
            "/api/v1/auth/register",
            json={
                "email": f"benchreg{i}@example.com",
                "password": "BenchRegPass123!",
                "first_name": "Bench",
                "last_name": "Register",
            },
        )
        samples.append((time.perf_counter() - start) * 1000)
        assert response.status_code == 201, f"Register failed: {response.text}"
    mean_ms = sum(samples) / len(samples)
    print(
        f"\n Register mean latency: {mean_ms:.1f}ms (threshold: {MAX_REGISTER_MS}ms)"
    )
    assert mean_ms < MAX_REGISTER_MS, (
        f"Register latency regression: {mean_ms:.1f}ms exceeds {MAX_REGISTER_MS}ms threshold"
    )
@pytest.mark.asyncio
async def test_token_refresh_latency(client, bench_refresh_token):
    """Performance: POST /api/v1/auth/refresh must respond under threshold."""
    current_token = bench_refresh_token
    samples: list[float] = []
    for _ in range(5):
        start = time.perf_counter()
        response = await client.post(
            "/api/v1/auth/refresh",
            json={"refresh_token": current_token},
        )
        samples.append((time.perf_counter() - start) * 1000)
        assert response.status_code == 200, f"Refresh failed: {response.text}"
        # Refresh rotates tokens; carry the new one into the next iteration.
        current_token = response.json()["refresh_token"]
    mean_ms = sum(samples) / len(samples)
    print(
        f"\n Token refresh mean latency: {mean_ms:.1f}ms (threshold: {MAX_TOKEN_REFRESH_MS}ms)"
    )
    assert mean_ms < MAX_TOKEN_REFRESH_MS, (
        f"Token refresh latency regression: {mean_ms:.1f}ms exceeds {MAX_TOKEN_REFRESH_MS}ms threshold"
    )
@pytest.mark.asyncio
async def test_sessions_list_latency(client, bench_token):
    """Performance: GET /api/v1/sessions/me must respond under threshold."""
    samples: list[float] = []
    auth_headers = {"Authorization": f"Bearer {bench_token}"}
    for _ in range(10):
        start = time.perf_counter()
        response = await client.get("/api/v1/sessions/me", headers=auth_headers)
        samples.append((time.perf_counter() - start) * 1000)
        assert response.status_code == 200
    mean_ms = sum(samples) / len(samples)
    print(
        f"\n Sessions list mean latency: {mean_ms:.1f}ms (threshold: {MAX_SESSIONS_LIST_MS}ms)"
    )
    assert mean_ms < MAX_SESSIONS_LIST_MS, (
        f"Sessions list latency regression: {mean_ms:.1f}ms exceeds {MAX_SESSIONS_LIST_MS}ms threshold"
    )
@pytest.mark.asyncio
async def test_user_profile_update_latency(client, bench_token):
    """Performance: PATCH /api/v1/users/me must respond under threshold."""
    samples: list[float] = []
    auth_headers = {"Authorization": f"Bearer {bench_token}"}
    # Distinct first_name per iteration so every PATCH is a real change.
    for i in range(5):
        start = time.perf_counter()
        response = await client.patch(
            "/api/v1/users/me",
            headers=auth_headers,
            json={"first_name": f"Bench{i}"},
        )
        samples.append((time.perf_counter() - start) * 1000)
        assert response.status_code == 200, f"Update failed: {response.text}"
    mean_ms = sum(samples) / len(samples)
    print(
        f"\n User update mean latency: {mean_ms:.1f}ms (threshold: {MAX_USER_UPDATE_MS}ms)"
    )
    assert mean_ms < MAX_USER_UPDATE_MS, (
        f"User update latency regression: {mean_ms:.1f}ms exceeds {MAX_USER_UPDATE_MS}ms threshold"
    )

View File

@@ -2,8 +2,8 @@
import uuid import uuid
from datetime import UTC, datetime, timedelta from datetime import UTC, datetime, timedelta
import jwt
import pytest import pytest
from jose import jwt
from app.core.auth import ( from app.core.auth import (
TokenExpiredError, TokenExpiredError,
@@ -215,6 +215,7 @@ class TestTokenDecoding:
payload = { payload = {
"sub": 123, # sub should be a string, not an integer "sub": 123, # sub should be a string, not an integer
"exp": int((now + timedelta(minutes=30)).timestamp()), "exp": int((now + timedelta(minutes=30)).timestamp()),
"iat": int(now.timestamp()),
} }
token = jwt.encode(payload, settings.SECRET_KEY, algorithm=settings.ALGORITHM) token = jwt.encode(payload, settings.SECRET_KEY, algorithm=settings.ALGORITHM)

View File

@@ -9,8 +9,8 @@ Critical security tests covering:
These tests cover critical security vulnerabilities that could be exploited. These tests cover critical security vulnerabilities that could be exploited.
""" """
import jwt
import pytest import pytest
from jose import jwt
from app.core.auth import TokenInvalidError, create_access_token, decode_token from app.core.auth import TokenInvalidError, create_access_token, decode_token
from app.core.config import settings from app.core.config import settings
@@ -38,8 +38,8 @@ class TestJWTAlgorithmSecurityAttacks:
Attacker creates a token with "alg: none" to bypass signature verification. Attacker creates a token with "alg: none" to bypass signature verification.
NOTE: Lines 209 and 212 in auth.py are DEFENSIVE CODE that's never reached NOTE: Lines 209 and 212 in auth.py are DEFENSIVE CODE that's never reached
because python-jose library rejects "none" algorithm tokens BEFORE we get there. because PyJWT rejects "none" algorithm tokens BEFORE we get there.
This is good for security! The library throws JWTError which becomes TokenInvalidError. This is good for security! The library throws InvalidTokenError which becomes TokenInvalidError.
This test verifies the overall protection works, even though our defensive This test verifies the overall protection works, even though our defensive
checks at lines 209-212 don't execute because the library catches it first. checks at lines 209-212 don't execute because the library catches it first.
@@ -108,36 +108,33 @@ class TestJWTAlgorithmSecurityAttacks:
Test that tokens with wrong algorithm are rejected. Test that tokens with wrong algorithm are rejected.
Attack Scenario: Attack Scenario:
Attacker changes algorithm from HS256 to RS256, attempting to use Attacker changes the "alg" header to RS256 while keeping an HMAC
the public key as the HMAC secret. This could allow token forgery. signature, attempting algorithm confusion to forge tokens.
Reference: https://www.nccgroup.com/us/about-us/newsroom-and-events/blog/2019/january/jwt-algorithm-confusion/ Reference: https://www.nccgroup.com/us/about-us/newsroom-and-events/blog/2019/january/jwt-algorithm-confusion/
NOTE: Like the "none" algorithm test, python-jose library catches this
before our defensive checks at line 212. This is good for security!
""" """
import base64
import json
import time import time
now = int(time.time()) now = int(time.time())
# Create a valid payload
payload = {"sub": "user123", "exp": now + 3600, "iat": now, "type": "access"} payload = {"sub": "user123", "exp": now + 3600, "iat": now, "type": "access"}
# Encode with wrong algorithm (RS256 instead of HS256) # Hand-craft a token claiming RS256 in the header — PyJWT cannot encode
# This simulates an attacker trying algorithm substitution # RS256 with an HMAC key, so we craft the header manually (same technique
wrong_algorithm = "RS256" if settings.ALGORITHM == "HS256" else "HS256" # as the "alg: none" tests) to produce a token that actually reaches decode_token.
header = {"alg": "RS256", "typ": "JWT"}
header_encoded = (
base64.urlsafe_b64encode(json.dumps(header).encode()).decode().rstrip("=")
)
payload_encoded = (
base64.urlsafe_b64encode(json.dumps(payload).encode()).decode().rstrip("=")
)
# Attach a fake signature to form a complete (but invalid) JWT
malicious_token = f"{header_encoded}.{payload_encoded}.fakesignature"
try: with pytest.raises(TokenInvalidError):
malicious_token = jwt.encode( decode_token(malicious_token)
payload, settings.SECRET_KEY, algorithm=wrong_algorithm
)
# Should reject the token (library catches mismatch)
with pytest.raises(TokenInvalidError):
decode_token(malicious_token)
except Exception:
# If encoding fails, that's also acceptable (library protection)
pass
def test_reject_hs384_when_hs256_expected(self): def test_reject_hs384_when_hs256_expected(self):
""" """
@@ -151,17 +148,11 @@ class TestJWTAlgorithmSecurityAttacks:
payload = {"sub": "user123", "exp": now + 3600, "iat": now, "type": "access"} payload = {"sub": "user123", "exp": now + 3600, "iat": now, "type": "access"}
# Create token with HS384 instead of HS256 # Create token with HS384 instead of HS256 (HMAC key works with HS384)
try: malicious_token = jwt.encode(payload, settings.SECRET_KEY, algorithm="HS384")
malicious_token = jwt.encode(
payload, settings.SECRET_KEY, algorithm="HS384"
)
with pytest.raises(TokenInvalidError): with pytest.raises(TokenInvalidError):
decode_token(malicious_token) decode_token(malicious_token)
except Exception:
# If encoding fails, that's also fine
pass
def test_valid_token_with_correct_algorithm_accepted(self): def test_valid_token_with_correct_algorithm_accepted(self):
""" """

View File

@@ -46,7 +46,7 @@ async def login_user(client, email: str, password: str = "SecurePassword123!"):
async def create_superuser(e2e_db_session, email: str, password: str): async def create_superuser(e2e_db_session, email: str, password: str):
"""Create a superuser directly in the database.""" """Create a superuser directly in the database."""
from app.repositories.user import user_repo as user_crud from app.repositories.user import user_repo as user_repo
from app.schemas.users import UserCreate from app.schemas.users import UserCreate
user_in = UserCreate( user_in = UserCreate(
@@ -56,7 +56,7 @@ async def create_superuser(e2e_db_session, email: str, password: str):
last_name="User", last_name="User",
is_superuser=True, is_superuser=True,
) )
user = await user_crud.create(e2e_db_session, obj_in=user_in) user = await user_repo.create(e2e_db_session, obj_in=user_in)
return user return user

View File

@@ -27,13 +27,16 @@ except ImportError:
pytestmark = [ pytestmark = [
pytest.mark.e2e, pytest.mark.e2e,
pytest.mark.schemathesis, pytest.mark.schemathesis,
pytest.mark.skipif(
not SCHEMATHESIS_AVAILABLE,
reason="schemathesis not installed - run: make install-e2e",
),
] ]
if not SCHEMATHESIS_AVAILABLE:
def test_schemathesis_compatibility():
"""Gracefully handle missing schemathesis dependency."""
pytest.skip("schemathesis not installed - run: make install-e2e")
if SCHEMATHESIS_AVAILABLE: if SCHEMATHESIS_AVAILABLE:
from app.main import app from app.main import app

View File

@@ -46,7 +46,7 @@ async def register_and_login(client, email: str, password: str = "SecurePassword
async def create_superuser_and_login(client, db_session): async def create_superuser_and_login(client, db_session):
"""Helper to create a superuser directly in DB and login.""" """Helper to create a superuser directly in DB and login."""
from app.repositories.user import user_repo as user_crud from app.repositories.user import user_repo as user_repo
from app.schemas.users import UserCreate from app.schemas.users import UserCreate
email = f"admin-{uuid4().hex[:8]}@example.com" email = f"admin-{uuid4().hex[:8]}@example.com"
@@ -60,7 +60,7 @@ async def create_superuser_and_login(client, db_session):
last_name="User", last_name="User",
is_superuser=True, is_superuser=True,
) )
await user_crud.create(db_session, obj_in=user_in) await user_repo.create(db_session, obj_in=user_in)
# Login # Login
login_resp = await client.post( login_resp = await client.post(

View File

@@ -1,6 +1,6 @@
# tests/crud/test_base.py # tests/repositories/test_base.py
""" """
Comprehensive tests for CRUDBase class covering all error paths and edge cases. Comprehensive tests for BaseRepository class covering all error paths and edge cases.
""" """
from datetime import UTC from datetime import UTC
@@ -16,11 +16,11 @@ from app.core.repository_exceptions import (
IntegrityConstraintError, IntegrityConstraintError,
InvalidInputError, InvalidInputError,
) )
from app.repositories.user import user_repo as user_crud from app.repositories.user import user_repo as user_repo
from app.schemas.users import UserCreate, UserUpdate from app.schemas.users import UserCreate, UserUpdate
class TestCRUDBaseGet: class TestRepositoryBaseGet:
"""Tests for get method covering UUID validation and options.""" """Tests for get method covering UUID validation and options."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -29,7 +29,7 @@ class TestCRUDBaseGet:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.get(session, id="invalid-uuid") result = await user_repo.get(session, id="invalid-uuid")
assert result is None assert result is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -38,7 +38,7 @@ class TestCRUDBaseGet:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.get(session, id=12345) # int instead of UUID result = await user_repo.get(session, id=12345) # int instead of UUID
assert result is None assert result is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -48,7 +48,7 @@ class TestCRUDBaseGet:
async with SessionLocal() as session: async with SessionLocal() as session:
# Pass UUID object directly # Pass UUID object directly
result = await user_crud.get(session, id=async_test_user.id) result = await user_repo.get(session, id=async_test_user.id)
assert result is not None assert result is not None
assert result.id == async_test_user.id assert result.id == async_test_user.id
@@ -60,7 +60,7 @@ class TestCRUDBaseGet:
async with SessionLocal() as session: async with SessionLocal() as session:
# Test that options parameter is accepted and doesn't error # Test that options parameter is accepted and doesn't error
# We pass an empty list which still tests the code path # We pass an empty list which still tests the code path
result = await user_crud.get( result = await user_repo.get(
session, id=str(async_test_user.id), options=[] session, id=str(async_test_user.id), options=[]
) )
assert result is not None assert result is not None
@@ -74,10 +74,10 @@ class TestCRUDBaseGet:
# Mock execute to raise an exception # Mock execute to raise an exception
with patch.object(session, "execute", side_effect=Exception("DB error")): with patch.object(session, "execute", side_effect=Exception("DB error")):
with pytest.raises(Exception, match="DB error"): with pytest.raises(Exception, match="DB error"):
await user_crud.get(session, id=str(uuid4())) await user_repo.get(session, id=str(uuid4()))
class TestCRUDBaseGetMulti: class TestRepositoryBaseGetMulti:
"""Tests for get_multi method covering pagination validation and options.""" """Tests for get_multi method covering pagination validation and options."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -87,7 +87,7 @@ class TestCRUDBaseGetMulti:
async with SessionLocal() as session: async with SessionLocal() as session:
with pytest.raises(InvalidInputError, match="skip must be non-negative"): with pytest.raises(InvalidInputError, match="skip must be non-negative"):
await user_crud.get_multi(session, skip=-1) await user_repo.get_multi(session, skip=-1)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_negative_limit(self, async_test_db): async def test_get_multi_negative_limit(self, async_test_db):
@@ -96,7 +96,7 @@ class TestCRUDBaseGetMulti:
async with SessionLocal() as session: async with SessionLocal() as session:
with pytest.raises(InvalidInputError, match="limit must be non-negative"): with pytest.raises(InvalidInputError, match="limit must be non-negative"):
await user_crud.get_multi(session, limit=-1) await user_repo.get_multi(session, limit=-1)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_limit_too_large(self, async_test_db): async def test_get_multi_limit_too_large(self, async_test_db):
@@ -105,7 +105,7 @@ class TestCRUDBaseGetMulti:
async with SessionLocal() as session: async with SessionLocal() as session:
with pytest.raises(InvalidInputError, match="Maximum limit is 1000"): with pytest.raises(InvalidInputError, match="Maximum limit is 1000"):
await user_crud.get_multi(session, limit=1001) await user_repo.get_multi(session, limit=1001)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_with_options(self, async_test_db, async_test_user): async def test_get_multi_with_options(self, async_test_db, async_test_user):
@@ -114,7 +114,7 @@ class TestCRUDBaseGetMulti:
async with SessionLocal() as session: async with SessionLocal() as session:
# Test that options parameter is accepted # Test that options parameter is accepted
results = await user_crud.get_multi(session, skip=0, limit=10, options=[]) results = await user_repo.get_multi(session, skip=0, limit=10, options=[])
assert isinstance(results, list) assert isinstance(results, list)
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -125,10 +125,10 @@ class TestCRUDBaseGetMulti:
async with SessionLocal() as session: async with SessionLocal() as session:
with patch.object(session, "execute", side_effect=Exception("DB error")): with patch.object(session, "execute", side_effect=Exception("DB error")):
with pytest.raises(Exception, match="DB error"): with pytest.raises(Exception, match="DB error"):
await user_crud.get_multi(session) await user_repo.get_multi(session)
class TestCRUDBaseCreate: class TestRepositoryBaseCreate:
"""Tests for create method covering various error conditions.""" """Tests for create method covering various error conditions."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -146,7 +146,7 @@ class TestCRUDBaseCreate:
) )
with pytest.raises(DuplicateEntryError, match="already exists"): with pytest.raises(DuplicateEntryError, match="already exists"):
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_integrity_error_non_duplicate(self, async_test_db): async def test_create_integrity_error_non_duplicate(self, async_test_db):
@@ -173,11 +173,11 @@ class TestCRUDBaseCreate:
with pytest.raises( with pytest.raises(
DuplicateEntryError, match="Database integrity error" DuplicateEntryError, match="Database integrity error"
): ):
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_operational_error(self, async_test_db): async def test_create_operational_error(self, async_test_db):
"""Test create with OperationalError (user CRUD catches as generic Exception).""" """Test create with OperationalError (user repository catches as generic Exception)."""
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
@@ -195,13 +195,13 @@ class TestCRUDBaseCreate:
last_name="User", last_name="User",
) )
# User CRUD catches this as generic Exception and re-raises # User repository catches this as generic Exception and re-raises
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_data_error(self, async_test_db): async def test_create_data_error(self, async_test_db):
"""Test create with DataError (user CRUD catches as generic Exception).""" """Test create with DataError (user repository catches as generic Exception)."""
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
@@ -217,9 +217,9 @@ class TestCRUDBaseCreate:
last_name="User", last_name="User",
) )
# User CRUD catches this as generic Exception and re-raises # User repository catches this as generic Exception and re-raises
with pytest.raises(DataError): with pytest.raises(DataError):
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_unexpected_error(self, async_test_db): async def test_create_unexpected_error(self, async_test_db):
@@ -238,10 +238,10 @@ class TestCRUDBaseCreate:
) )
with pytest.raises(RuntimeError, match="Unexpected error"): with pytest.raises(RuntimeError, match="Unexpected error"):
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
class TestCRUDBaseUpdate: class TestRepositoryBaseUpdate:
"""Tests for update method covering error conditions.""" """Tests for update method covering error conditions."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -251,7 +251,7 @@ class TestCRUDBaseUpdate:
# Create another user # Create another user
async with SessionLocal() as session: async with SessionLocal() as session:
from app.repositories.user import user_repo as user_crud from app.repositories.user import user_repo as user_repo
user2_data = UserCreate( user2_data = UserCreate(
email="user2@example.com", email="user2@example.com",
@@ -259,12 +259,12 @@ class TestCRUDBaseUpdate:
first_name="User", first_name="User",
last_name="Two", last_name="Two",
) )
user2 = await user_crud.create(session, obj_in=user2_data) user2 = await user_repo.create(session, obj_in=user2_data)
await session.commit() await session.commit()
# Try to update user2 with user1's email # Try to update user2 with user1's email
async with SessionLocal() as session: async with SessionLocal() as session:
user2_obj = await user_crud.get(session, id=str(user2.id)) user2_obj = await user_repo.get(session, id=str(user2.id))
with patch.object( with patch.object(
session, session,
@@ -276,7 +276,7 @@ class TestCRUDBaseUpdate:
update_data = UserUpdate(email=async_test_user.email) update_data = UserUpdate(email=async_test_user.email)
with pytest.raises(DuplicateEntryError, match="already exists"): with pytest.raises(DuplicateEntryError, match="already exists"):
await user_crud.update( await user_repo.update(
session, db_obj=user2_obj, obj_in=update_data session, db_obj=user2_obj, obj_in=update_data
) )
@@ -286,10 +286,10 @@ class TestCRUDBaseUpdate:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
# Update with dict (tests lines 164-165) # Update with dict (tests lines 164-165)
updated = await user_crud.update( updated = await user_repo.update(
session, db_obj=user, obj_in={"first_name": "UpdatedName"} session, db_obj=user, obj_in={"first_name": "UpdatedName"}
) )
assert updated.first_name == "UpdatedName" assert updated.first_name == "UpdatedName"
@@ -300,7 +300,7 @@ class TestCRUDBaseUpdate:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
with patch.object( with patch.object(
session, session,
@@ -312,7 +312,7 @@ class TestCRUDBaseUpdate:
with pytest.raises( with pytest.raises(
IntegrityConstraintError, match="Database integrity error" IntegrityConstraintError, match="Database integrity error"
): ):
await user_crud.update( await user_repo.update(
session, db_obj=user, obj_in={"first_name": "Test"} session, db_obj=user, obj_in={"first_name": "Test"}
) )
@@ -322,7 +322,7 @@ class TestCRUDBaseUpdate:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
with patch.object( with patch.object(
session, session,
@@ -334,7 +334,7 @@ class TestCRUDBaseUpdate:
with pytest.raises( with pytest.raises(
IntegrityConstraintError, match="Database operation failed" IntegrityConstraintError, match="Database operation failed"
): ):
await user_crud.update( await user_repo.update(
session, db_obj=user, obj_in={"first_name": "Test"} session, db_obj=user, obj_in={"first_name": "Test"}
) )
@@ -344,18 +344,18 @@ class TestCRUDBaseUpdate:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
with patch.object( with patch.object(
session, "commit", side_effect=RuntimeError("Unexpected") session, "commit", side_effect=RuntimeError("Unexpected")
): ):
with pytest.raises(RuntimeError): with pytest.raises(RuntimeError):
await user_crud.update( await user_repo.update(
session, db_obj=user, obj_in={"first_name": "Test"} session, db_obj=user, obj_in={"first_name": "Test"}
) )
class TestCRUDBaseRemove: class TestRepositoryBaseRemove:
"""Tests for remove method covering UUID validation and error conditions.""" """Tests for remove method covering UUID validation and error conditions."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -364,7 +364,7 @@ class TestCRUDBaseRemove:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.remove(session, id="invalid-uuid") result = await user_repo.remove(session, id="invalid-uuid")
assert result is None assert result is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -380,13 +380,13 @@ class TestCRUDBaseRemove:
first_name="To", first_name="To",
last_name="Delete", last_name="Delete",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_id = user.id user_id = user.id
await session.commit() await session.commit()
# Delete with UUID object # Delete with UUID object
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.remove(session, id=user_id) # UUID object result = await user_repo.remove(session, id=user_id) # UUID object
assert result is not None assert result is not None
assert result.id == user_id assert result.id == user_id
@@ -396,7 +396,7 @@ class TestCRUDBaseRemove:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.remove(session, id=str(uuid4())) result = await user_repo.remove(session, id=str(uuid4()))
assert result is None assert result is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -417,7 +417,7 @@ class TestCRUDBaseRemove:
IntegrityConstraintError, IntegrityConstraintError,
match="Cannot delete.*referenced by other records", match="Cannot delete.*referenced by other records",
): ):
await user_crud.remove(session, id=str(async_test_user.id)) await user_repo.remove(session, id=str(async_test_user.id))
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_remove_unexpected_error(self, async_test_db, async_test_user): async def test_remove_unexpected_error(self, async_test_db, async_test_user):
@@ -429,10 +429,10 @@ class TestCRUDBaseRemove:
session, "commit", side_effect=RuntimeError("Unexpected") session, "commit", side_effect=RuntimeError("Unexpected")
): ):
with pytest.raises(RuntimeError): with pytest.raises(RuntimeError):
await user_crud.remove(session, id=str(async_test_user.id)) await user_repo.remove(session, id=str(async_test_user.id))
class TestCRUDBaseGetMultiWithTotal: class TestRepositoryBaseGetMultiWithTotal:
"""Tests for get_multi_with_total method covering pagination, filtering, sorting.""" """Tests for get_multi_with_total method covering pagination, filtering, sorting."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -441,7 +441,7 @@ class TestCRUDBaseGetMultiWithTotal:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
items, total = await user_crud.get_multi_with_total( items, total = await user_repo.get_multi_with_total(
session, skip=0, limit=10 session, skip=0, limit=10
) )
assert isinstance(items, list) assert isinstance(items, list)
@@ -455,7 +455,7 @@ class TestCRUDBaseGetMultiWithTotal:
async with SessionLocal() as session: async with SessionLocal() as session:
with pytest.raises(InvalidInputError, match="skip must be non-negative"): with pytest.raises(InvalidInputError, match="skip must be non-negative"):
await user_crud.get_multi_with_total(session, skip=-1) await user_repo.get_multi_with_total(session, skip=-1)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_with_total_negative_limit(self, async_test_db): async def test_get_multi_with_total_negative_limit(self, async_test_db):
@@ -464,7 +464,7 @@ class TestCRUDBaseGetMultiWithTotal:
async with SessionLocal() as session: async with SessionLocal() as session:
with pytest.raises(InvalidInputError, match="limit must be non-negative"): with pytest.raises(InvalidInputError, match="limit must be non-negative"):
await user_crud.get_multi_with_total(session, limit=-1) await user_repo.get_multi_with_total(session, limit=-1)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_with_total_limit_too_large(self, async_test_db): async def test_get_multi_with_total_limit_too_large(self, async_test_db):
@@ -473,7 +473,7 @@ class TestCRUDBaseGetMultiWithTotal:
async with SessionLocal() as session: async with SessionLocal() as session:
with pytest.raises(InvalidInputError, match="Maximum limit is 1000"): with pytest.raises(InvalidInputError, match="Maximum limit is 1000"):
await user_crud.get_multi_with_total(session, limit=1001) await user_repo.get_multi_with_total(session, limit=1001)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_with_total_with_filters( async def test_get_multi_with_total_with_filters(
@@ -484,7 +484,7 @@ class TestCRUDBaseGetMultiWithTotal:
async with SessionLocal() as session: async with SessionLocal() as session:
filters = {"email": async_test_user.email} filters = {"email": async_test_user.email}
items, total = await user_crud.get_multi_with_total( items, total = await user_repo.get_multi_with_total(
session, filters=filters session, filters=filters
) )
assert total == 1 assert total == 1
@@ -512,12 +512,12 @@ class TestCRUDBaseGetMultiWithTotal:
first_name="ZZZ", first_name="ZZZ",
last_name="User", last_name="User",
) )
await user_crud.create(session, obj_in=user_data1) await user_repo.create(session, obj_in=user_data1)
await user_crud.create(session, obj_in=user_data2) await user_repo.create(session, obj_in=user_data2)
await session.commit() await session.commit()
async with SessionLocal() as session: async with SessionLocal() as session:
items, total = await user_crud.get_multi_with_total( items, total = await user_repo.get_multi_with_total(
session, sort_by="email", sort_order="asc" session, sort_by="email", sort_order="asc"
) )
assert total >= 3 assert total >= 3
@@ -545,12 +545,12 @@ class TestCRUDBaseGetMultiWithTotal:
first_name="CCC", first_name="CCC",
last_name="User", last_name="User",
) )
await user_crud.create(session, obj_in=user_data1) await user_repo.create(session, obj_in=user_data1)
await user_crud.create(session, obj_in=user_data2) await user_repo.create(session, obj_in=user_data2)
await session.commit() await session.commit()
async with SessionLocal() as session: async with SessionLocal() as session:
items, _total = await user_crud.get_multi_with_total( items, _total = await user_repo.get_multi_with_total(
session, sort_by="email", sort_order="desc", limit=1 session, sort_by="email", sort_order="desc", limit=1
) )
assert len(items) == 1 assert len(items) == 1
@@ -570,19 +570,19 @@ class TestCRUDBaseGetMultiWithTotal:
first_name=f"User{i}", first_name=f"User{i}",
last_name="Test", last_name="Test",
) )
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
await session.commit() await session.commit()
async with SessionLocal() as session: async with SessionLocal() as session:
# Get first page # Get first page
items1, total = await user_crud.get_multi_with_total( items1, total = await user_repo.get_multi_with_total(
session, skip=0, limit=2 session, skip=0, limit=2
) )
assert len(items1) == 2 assert len(items1) == 2
assert total >= 3 assert total >= 3
# Get second page # Get second page
items2, total2 = await user_crud.get_multi_with_total( items2, total2 = await user_repo.get_multi_with_total(
session, skip=2, limit=2 session, skip=2, limit=2
) )
assert len(items2) >= 1 assert len(items2) >= 1
@@ -594,7 +594,7 @@ class TestCRUDBaseGetMultiWithTotal:
assert ids1.isdisjoint(ids2) assert ids1.isdisjoint(ids2)
class TestCRUDBaseCount: class TestRepositoryBaseCount:
"""Tests for count method.""" """Tests for count method."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -603,7 +603,7 @@ class TestCRUDBaseCount:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
count = await user_crud.count(session) count = await user_repo.count(session)
assert isinstance(count, int) assert isinstance(count, int)
assert count >= 1 # At least the test user assert count >= 1 # At least the test user
@@ -614,7 +614,7 @@ class TestCRUDBaseCount:
# Create additional users # Create additional users
async with SessionLocal() as session: async with SessionLocal() as session:
initial_count = await user_crud.count(session) initial_count = await user_repo.count(session)
user_data1 = UserCreate( user_data1 = UserCreate(
email="count1@example.com", email="count1@example.com",
@@ -628,12 +628,12 @@ class TestCRUDBaseCount:
first_name="Count", first_name="Count",
last_name="Two", last_name="Two",
) )
await user_crud.create(session, obj_in=user_data1) await user_repo.create(session, obj_in=user_data1)
await user_crud.create(session, obj_in=user_data2) await user_repo.create(session, obj_in=user_data2)
await session.commit() await session.commit()
async with SessionLocal() as session: async with SessionLocal() as session:
new_count = await user_crud.count(session) new_count = await user_repo.count(session)
assert new_count == initial_count + 2 assert new_count == initial_count + 2
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -644,10 +644,10 @@ class TestCRUDBaseCount:
async with SessionLocal() as session: async with SessionLocal() as session:
with patch.object(session, "execute", side_effect=Exception("DB error")): with patch.object(session, "execute", side_effect=Exception("DB error")):
with pytest.raises(Exception, match="DB error"): with pytest.raises(Exception, match="DB error"):
await user_crud.count(session) await user_repo.count(session)
class TestCRUDBaseExists: class TestRepositoryBaseExists:
"""Tests for exists method.""" """Tests for exists method."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -656,7 +656,7 @@ class TestCRUDBaseExists:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.exists(session, id=str(async_test_user.id)) result = await user_repo.exists(session, id=str(async_test_user.id))
assert result is True assert result is True
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -665,7 +665,7 @@ class TestCRUDBaseExists:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.exists(session, id=str(uuid4())) result = await user_repo.exists(session, id=str(uuid4()))
assert result is False assert result is False
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -674,11 +674,11 @@ class TestCRUDBaseExists:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.exists(session, id="invalid-uuid") result = await user_repo.exists(session, id="invalid-uuid")
assert result is False assert result is False
class TestCRUDBaseSoftDelete: class TestRepositoryBaseSoftDelete:
"""Tests for soft_delete method.""" """Tests for soft_delete method."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -694,13 +694,13 @@ class TestCRUDBaseSoftDelete:
first_name="Soft", first_name="Soft",
last_name="Delete", last_name="Delete",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_id = user.id user_id = user.id
await session.commit() await session.commit()
# Soft delete the user # Soft delete the user
async with SessionLocal() as session: async with SessionLocal() as session:
deleted = await user_crud.soft_delete(session, id=str(user_id)) deleted = await user_repo.soft_delete(session, id=str(user_id))
assert deleted is not None assert deleted is not None
assert deleted.deleted_at is not None assert deleted.deleted_at is not None
@@ -710,7 +710,7 @@ class TestCRUDBaseSoftDelete:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.soft_delete(session, id="invalid-uuid") result = await user_repo.soft_delete(session, id="invalid-uuid")
assert result is None assert result is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -719,7 +719,7 @@ class TestCRUDBaseSoftDelete:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.soft_delete(session, id=str(uuid4())) result = await user_repo.soft_delete(session, id=str(uuid4()))
assert result is None assert result is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -735,18 +735,18 @@ class TestCRUDBaseSoftDelete:
first_name="Soft", first_name="Soft",
last_name="Delete2", last_name="Delete2",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_id = user.id user_id = user.id
await session.commit() await session.commit()
# Soft delete with UUID object # Soft delete with UUID object
async with SessionLocal() as session: async with SessionLocal() as session:
deleted = await user_crud.soft_delete(session, id=user_id) # UUID object deleted = await user_repo.soft_delete(session, id=user_id) # UUID object
assert deleted is not None assert deleted is not None
assert deleted.deleted_at is not None assert deleted.deleted_at is not None
class TestCRUDBaseRestore: class TestRepositoryBaseRestore:
"""Tests for restore method.""" """Tests for restore method."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -762,16 +762,16 @@ class TestCRUDBaseRestore:
first_name="Restore", first_name="Restore",
last_name="Test", last_name="Test",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_id = user.id user_id = user.id
await session.commit() await session.commit()
async with SessionLocal() as session: async with SessionLocal() as session:
await user_crud.soft_delete(session, id=str(user_id)) await user_repo.soft_delete(session, id=str(user_id))
# Restore the user # Restore the user
async with SessionLocal() as session: async with SessionLocal() as session:
restored = await user_crud.restore(session, id=str(user_id)) restored = await user_repo.restore(session, id=str(user_id))
assert restored is not None assert restored is not None
assert restored.deleted_at is None assert restored.deleted_at is None
@@ -781,7 +781,7 @@ class TestCRUDBaseRestore:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.restore(session, id="invalid-uuid") result = await user_repo.restore(session, id="invalid-uuid")
assert result is None assert result is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -790,7 +790,7 @@ class TestCRUDBaseRestore:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
result = await user_crud.restore(session, id=str(uuid4())) result = await user_repo.restore(session, id=str(uuid4()))
assert result is None assert result is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -800,7 +800,7 @@ class TestCRUDBaseRestore:
async with SessionLocal() as session: async with SessionLocal() as session:
# Try to restore a user that's not deleted # Try to restore a user that's not deleted
result = await user_crud.restore(session, id=str(async_test_user.id)) result = await user_repo.restore(session, id=str(async_test_user.id))
assert result is None assert result is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -816,21 +816,21 @@ class TestCRUDBaseRestore:
first_name="Restore", first_name="Restore",
last_name="Test2", last_name="Test2",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_id = user.id user_id = user.id
await session.commit() await session.commit()
async with SessionLocal() as session: async with SessionLocal() as session:
await user_crud.soft_delete(session, id=str(user_id)) await user_repo.soft_delete(session, id=str(user_id))
# Restore with UUID object # Restore with UUID object
async with SessionLocal() as session: async with SessionLocal() as session:
restored = await user_crud.restore(session, id=user_id) # UUID object restored = await user_repo.restore(session, id=user_id) # UUID object
assert restored is not None assert restored is not None
assert restored.deleted_at is None assert restored.deleted_at is None
class TestCRUDBasePaginationValidation: class TestRepositoryBasePaginationValidation:
"""Tests for pagination parameter validation (covers lines 254-260).""" """Tests for pagination parameter validation (covers lines 254-260)."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -840,7 +840,7 @@ class TestCRUDBasePaginationValidation:
async with SessionLocal() as session: async with SessionLocal() as session:
with pytest.raises(InvalidInputError, match="skip must be non-negative"): with pytest.raises(InvalidInputError, match="skip must be non-negative"):
await user_crud.get_multi_with_total(session, skip=-1, limit=10) await user_repo.get_multi_with_total(session, skip=-1, limit=10)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_with_total_negative_limit(self, async_test_db): async def test_get_multi_with_total_negative_limit(self, async_test_db):
@@ -849,7 +849,7 @@ class TestCRUDBasePaginationValidation:
async with SessionLocal() as session: async with SessionLocal() as session:
with pytest.raises(InvalidInputError, match="limit must be non-negative"): with pytest.raises(InvalidInputError, match="limit must be non-negative"):
await user_crud.get_multi_with_total(session, skip=0, limit=-1) await user_repo.get_multi_with_total(session, skip=0, limit=-1)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_with_total_limit_too_large(self, async_test_db): async def test_get_multi_with_total_limit_too_large(self, async_test_db):
@@ -858,7 +858,7 @@ class TestCRUDBasePaginationValidation:
async with SessionLocal() as session: async with SessionLocal() as session:
with pytest.raises(InvalidInputError, match="Maximum limit is 1000"): with pytest.raises(InvalidInputError, match="Maximum limit is 1000"):
await user_crud.get_multi_with_total(session, skip=0, limit=1001) await user_repo.get_multi_with_total(session, skip=0, limit=1001)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_with_total_with_filters( async def test_get_multi_with_total_with_filters(
@@ -868,7 +868,7 @@ class TestCRUDBasePaginationValidation:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
users, total = await user_crud.get_multi_with_total( users, total = await user_repo.get_multi_with_total(
session, skip=0, limit=10, filters={"is_active": True} session, skip=0, limit=10, filters={"is_active": True}
) )
assert isinstance(users, list) assert isinstance(users, list)
@@ -880,7 +880,7 @@ class TestCRUDBasePaginationValidation:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
users, _total = await user_crud.get_multi_with_total( users, _total = await user_repo.get_multi_with_total(
session, skip=0, limit=10, sort_by="created_at", sort_order="desc" session, skip=0, limit=10, sort_by="created_at", sort_order="desc"
) )
assert isinstance(users, list) assert isinstance(users, list)
@@ -891,13 +891,13 @@ class TestCRUDBasePaginationValidation:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
users, _total = await user_crud.get_multi_with_total( users, _total = await user_repo.get_multi_with_total(
session, skip=0, limit=10, sort_by="created_at", sort_order="asc" session, skip=0, limit=10, sort_by="created_at", sort_order="asc"
) )
assert isinstance(users, list) assert isinstance(users, list)
class TestCRUDBaseModelsWithoutSoftDelete: class TestRepositoryBaseModelsWithoutSoftDelete:
""" """
Test soft_delete and restore on models without deleted_at column. Test soft_delete and restore on models without deleted_at column.
Covers lines 342-343, 383-384 - error handling for unsupported models. Covers lines 342-343, 383-384 - error handling for unsupported models.
@@ -912,7 +912,7 @@ class TestCRUDBaseModelsWithoutSoftDelete:
# Create an organization (which doesn't have deleted_at) # Create an organization (which doesn't have deleted_at)
from app.models.organization import Organization from app.models.organization import Organization
from app.repositories.organization import organization_repo as org_crud from app.repositories.organization import organization_repo as org_repo
async with SessionLocal() as session: async with SessionLocal() as session:
org = Organization(name="Test Org", slug="test-org") org = Organization(name="Test Org", slug="test-org")
@@ -925,7 +925,7 @@ class TestCRUDBaseModelsWithoutSoftDelete:
with pytest.raises( with pytest.raises(
InvalidInputError, match="does not have a deleted_at column" InvalidInputError, match="does not have a deleted_at column"
): ):
await org_crud.soft_delete(session, id=str(org_id)) await org_repo.soft_delete(session, id=str(org_id))
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_restore_model_without_deleted_at(self, async_test_db): async def test_restore_model_without_deleted_at(self, async_test_db):
@@ -934,7 +934,7 @@ class TestCRUDBaseModelsWithoutSoftDelete:
# Create an organization (which doesn't have deleted_at) # Create an organization (which doesn't have deleted_at)
from app.models.organization import Organization from app.models.organization import Organization
from app.repositories.organization import organization_repo as org_crud from app.repositories.organization import organization_repo as org_repo
async with SessionLocal() as session: async with SessionLocal() as session:
org = Organization(name="Restore Test", slug="restore-test") org = Organization(name="Restore Test", slug="restore-test")
@@ -947,10 +947,10 @@ class TestCRUDBaseModelsWithoutSoftDelete:
with pytest.raises( with pytest.raises(
InvalidInputError, match="does not have a deleted_at column" InvalidInputError, match="does not have a deleted_at column"
): ):
await org_crud.restore(session, id=str(org_id)) await org_repo.restore(session, id=str(org_id))
class TestCRUDBaseEagerLoadingWithRealOptions: class TestRepositoryBaseEagerLoadingWithRealOptions:
""" """
Test eager loading with actual SQLAlchemy load options. Test eager loading with actual SQLAlchemy load options.
Covers lines 77-78, 119-120 - options loop execution. Covers lines 77-78, 119-120 - options loop execution.
@@ -967,7 +967,7 @@ class TestCRUDBaseEagerLoadingWithRealOptions:
# Create a session for the user # Create a session for the user
from app.models.user_session import UserSession from app.models.user_session import UserSession
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
async with SessionLocal() as session: async with SessionLocal() as session:
user_session = UserSession( user_session = UserSession(
@@ -985,7 +985,7 @@ class TestCRUDBaseEagerLoadingWithRealOptions:
# Get session with eager loading of user relationship # Get session with eager loading of user relationship
async with SessionLocal() as session: async with SessionLocal() as session:
result = await session_crud.get( result = await session_repo.get(
session, session,
id=str(session_id), id=str(session_id),
options=[joinedload(UserSession.user)], # Real option, not empty list options=[joinedload(UserSession.user)], # Real option, not empty list
@@ -1006,7 +1006,7 @@ class TestCRUDBaseEagerLoadingWithRealOptions:
# Create multiple sessions for the user # Create multiple sessions for the user
from app.models.user_session import UserSession from app.models.user_session import UserSession
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
async with SessionLocal() as session: async with SessionLocal() as session:
for i in range(3): for i in range(3):
@@ -1024,7 +1024,7 @@ class TestCRUDBaseEagerLoadingWithRealOptions:
# Get sessions with eager loading # Get sessions with eager loading
async with SessionLocal() as session: async with SessionLocal() as session:
results = await session_crud.get_multi( results = await session_repo.get_multi(
session, session,
skip=0, skip=0,
limit=10, limit=10,

View File

@@ -1,6 +1,6 @@
# tests/crud/test_base_db_failures.py # tests/repositories/test_base_db_failures.py
""" """
Comprehensive tests for base CRUD database failure scenarios. Comprehensive tests for base repository database failure scenarios.
Tests exception handling, rollbacks, and error messages. Tests exception handling, rollbacks, and error messages.
""" """
@@ -11,16 +11,16 @@ import pytest
from sqlalchemy.exc import DataError, OperationalError from sqlalchemy.exc import DataError, OperationalError
from app.core.repository_exceptions import IntegrityConstraintError from app.core.repository_exceptions import IntegrityConstraintError
from app.repositories.user import user_repo as user_crud from app.repositories.user import user_repo as user_repo
from app.schemas.users import UserCreate from app.schemas.users import UserCreate
class TestBaseCRUDCreateFailures: class TestBaseRepositoryCreateFailures:
"""Test base CRUD create method exception handling.""" """Test base repository create method exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_operational_error_triggers_rollback(self, async_test_db): async def test_create_operational_error_triggers_rollback(self, async_test_db):
"""Test that OperationalError triggers rollback (User CRUD catches as Exception).""" """Test that OperationalError triggers rollback (User repository catches as Exception)."""
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
@@ -41,16 +41,16 @@ class TestBaseCRUDCreateFailures:
last_name="User", last_name="User",
) )
# User CRUD catches this as generic Exception and re-raises # User repository catches this as generic Exception and re-raises
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
# Verify rollback was called # Verify rollback was called
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_data_error_triggers_rollback(self, async_test_db): async def test_create_data_error_triggers_rollback(self, async_test_db):
"""Test that DataError triggers rollback (User CRUD catches as Exception).""" """Test that DataError triggers rollback (User repository catches as Exception)."""
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
@@ -69,9 +69,9 @@ class TestBaseCRUDCreateFailures:
last_name="User", last_name="User",
) )
# User CRUD catches this as generic Exception and re-raises # User repository catches this as generic Exception and re-raises
with pytest.raises(DataError): with pytest.raises(DataError):
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
@@ -97,13 +97,13 @@ class TestBaseCRUDCreateFailures:
) )
with pytest.raises(RuntimeError, match="Unexpected database error"): with pytest.raises(RuntimeError, match="Unexpected database error"):
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestBaseCRUDUpdateFailures: class TestBaseRepositoryUpdateFailures:
"""Test base CRUD update method exception handling.""" """Test base repository update method exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_update_operational_error(self, async_test_db, async_test_user): async def test_update_operational_error(self, async_test_db, async_test_user):
@@ -111,7 +111,7 @@ class TestBaseCRUDUpdateFailures:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
async def mock_commit(): async def mock_commit():
raise OperationalError("Connection timeout", {}, Exception("Timeout")) raise OperationalError("Connection timeout", {}, Exception("Timeout"))
@@ -123,7 +123,7 @@ class TestBaseCRUDUpdateFailures:
with pytest.raises( with pytest.raises(
IntegrityConstraintError, match="Database operation failed" IntegrityConstraintError, match="Database operation failed"
): ):
await user_crud.update( await user_repo.update(
session, db_obj=user, obj_in={"first_name": "Updated"} session, db_obj=user, obj_in={"first_name": "Updated"}
) )
@@ -135,7 +135,7 @@ class TestBaseCRUDUpdateFailures:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
async def mock_commit(): async def mock_commit():
raise DataError("Invalid data", {}, Exception("Data type mismatch")) raise DataError("Invalid data", {}, Exception("Data type mismatch"))
@@ -147,7 +147,7 @@ class TestBaseCRUDUpdateFailures:
with pytest.raises( with pytest.raises(
IntegrityConstraintError, match="Database operation failed" IntegrityConstraintError, match="Database operation failed"
): ):
await user_crud.update( await user_repo.update(
session, db_obj=user, obj_in={"first_name": "Updated"} session, db_obj=user, obj_in={"first_name": "Updated"}
) )
@@ -159,7 +159,7 @@ class TestBaseCRUDUpdateFailures:
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
async with SessionLocal() as session: async with SessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
async def mock_commit(): async def mock_commit():
raise KeyError("Unexpected error") raise KeyError("Unexpected error")
@@ -169,15 +169,15 @@ class TestBaseCRUDUpdateFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(KeyError): with pytest.raises(KeyError):
await user_crud.update( await user_repo.update(
session, db_obj=user, obj_in={"first_name": "Updated"} session, db_obj=user, obj_in={"first_name": "Updated"}
) )
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestBaseCRUDRemoveFailures: class TestBaseRepositoryRemoveFailures:
"""Test base CRUD remove method exception handling.""" """Test base repository remove method exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_remove_unexpected_error_triggers_rollback( async def test_remove_unexpected_error_triggers_rollback(
@@ -196,12 +196,12 @@ class TestBaseCRUDRemoveFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(RuntimeError, match="Database write failed"): with pytest.raises(RuntimeError, match="Database write failed"):
await user_crud.remove(session, id=str(async_test_user.id)) await user_repo.remove(session, id=str(async_test_user.id))
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestBaseCRUDGetMultiWithTotalFailures: class TestBaseRepositoryGetMultiWithTotalFailures:
"""Test get_multi_with_total exception handling.""" """Test get_multi_with_total exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -217,10 +217,10 @@ class TestBaseCRUDGetMultiWithTotalFailures:
with patch.object(session, "execute", side_effect=mock_execute): with patch.object(session, "execute", side_effect=mock_execute):
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await user_crud.get_multi_with_total(session, skip=0, limit=10) await user_repo.get_multi_with_total(session, skip=0, limit=10)
class TestBaseCRUDCountFailures: class TestBaseRepositoryCountFailures:
"""Test count method exception handling.""" """Test count method exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -235,10 +235,10 @@ class TestBaseCRUDCountFailures:
with patch.object(session, "execute", side_effect=mock_execute): with patch.object(session, "execute", side_effect=mock_execute):
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await user_crud.count(session) await user_repo.count(session)
class TestBaseCRUDSoftDeleteFailures: class TestBaseRepositorySoftDeleteFailures:
"""Test soft_delete method exception handling.""" """Test soft_delete method exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -258,12 +258,12 @@ class TestBaseCRUDSoftDeleteFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(RuntimeError, match="Soft delete failed"): with pytest.raises(RuntimeError, match="Soft delete failed"):
await user_crud.soft_delete(session, id=str(async_test_user.id)) await user_repo.soft_delete(session, id=str(async_test_user.id))
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestBaseCRUDRestoreFailures: class TestBaseRepositoryRestoreFailures:
"""Test restore method exception handling.""" """Test restore method exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -279,12 +279,12 @@ class TestBaseCRUDRestoreFailures:
first_name="Restore", first_name="Restore",
last_name="Test", last_name="Test",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_id = user.id user_id = user.id
await session.commit() await session.commit()
async with SessionLocal() as session: async with SessionLocal() as session:
await user_crud.soft_delete(session, id=str(user_id)) await user_repo.soft_delete(session, id=str(user_id))
# Now test restore failure # Now test restore failure
async with SessionLocal() as session: async with SessionLocal() as session:
@@ -297,12 +297,12 @@ class TestBaseCRUDRestoreFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(RuntimeError, match="Restore failed"): with pytest.raises(RuntimeError, match="Restore failed"):
await user_crud.restore(session, id=str(user_id)) await user_repo.restore(session, id=str(user_id))
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestBaseCRUDGetFailures: class TestBaseRepositoryGetFailures:
"""Test get method exception handling.""" """Test get method exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -317,10 +317,10 @@ class TestBaseCRUDGetFailures:
with patch.object(session, "execute", side_effect=mock_execute): with patch.object(session, "execute", side_effect=mock_execute):
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await user_crud.get(session, id=str(uuid4())) await user_repo.get(session, id=str(uuid4()))
class TestBaseCRUDGetMultiFailures: class TestBaseRepositoryGetMultiFailures:
"""Test get_multi method exception handling.""" """Test get_multi method exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -335,4 +335,4 @@ class TestBaseCRUDGetMultiFailures:
with patch.object(session, "execute", side_effect=mock_execute): with patch.object(session, "execute", side_effect=mock_execute):
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await user_crud.get_multi(session, skip=0, limit=10) await user_repo.get_multi(session, skip=0, limit=10)

View File

@@ -1,6 +1,6 @@
# tests/crud/test_oauth.py # tests/repositories/test_oauth.py
""" """
Comprehensive tests for OAuth CRUD operations. Comprehensive tests for OAuth repository operations.
""" """
from datetime import UTC, datetime, timedelta from datetime import UTC, datetime, timedelta
@@ -14,8 +14,8 @@ from app.repositories.oauth_state import oauth_state_repo as oauth_state
from app.schemas.oauth import OAuthAccountCreate, OAuthClientCreate, OAuthStateCreate from app.schemas.oauth import OAuthAccountCreate, OAuthClientCreate, OAuthStateCreate
class TestOAuthAccountCRUD: class TestOAuthAccountRepository:
"""Tests for OAuth account CRUD operations.""" """Tests for OAuth account repository operations."""
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_account(self, async_test_db, async_test_user): async def test_create_account(self, async_test_db, async_test_user):
@@ -269,8 +269,8 @@ class TestOAuthAccountCRUD:
assert updated.refresh_token == "new_refresh_token" assert updated.refresh_token == "new_refresh_token"
class TestOAuthStateCRUD: class TestOAuthStateRepository:
"""Tests for OAuth state CRUD operations.""" """Tests for OAuth state repository operations."""
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_state(self, async_test_db): async def test_create_state(self, async_test_db):
@@ -376,8 +376,8 @@ class TestOAuthStateCRUD:
assert result is not None assert result is not None
class TestOAuthClientCRUD: class TestOAuthClientRepository:
"""Tests for OAuth client CRUD operations (provider mode).""" """Tests for OAuth client repository operations (provider mode)."""
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_public_client(self, async_test_db): async def test_create_public_client(self, async_test_db):

View File

@@ -1,6 +1,6 @@
# tests/crud/test_organization_async.py # tests/repositories/test_organization_async.py
""" """
Comprehensive tests for async organization CRUD operations. Comprehensive tests for async organization repository operations.
""" """
from unittest.mock import AsyncMock, MagicMock, patch from unittest.mock import AsyncMock, MagicMock, patch
@@ -12,7 +12,7 @@ from sqlalchemy import select
from app.core.repository_exceptions import DuplicateEntryError, IntegrityConstraintError from app.core.repository_exceptions import DuplicateEntryError, IntegrityConstraintError
from app.models.organization import Organization from app.models.organization import Organization
from app.models.user_organization import OrganizationRole, UserOrganization from app.models.user_organization import OrganizationRole, UserOrganization
from app.repositories.organization import organization_repo as organization_crud from app.repositories.organization import organization_repo as organization_repo
from app.schemas.organizations import OrganizationCreate from app.schemas.organizations import OrganizationCreate
@@ -35,7 +35,7 @@ class TestGetBySlug:
# Get by slug # Get by slug
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await organization_crud.get_by_slug(session, slug="test-org") result = await organization_repo.get_by_slug(session, slug="test-org")
assert result is not None assert result is not None
assert result.id == org_id assert result.id == org_id
assert result.slug == "test-org" assert result.slug == "test-org"
@@ -46,7 +46,7 @@ class TestGetBySlug:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await organization_crud.get_by_slug(session, slug="nonexistent") result = await organization_repo.get_by_slug(session, slug="nonexistent")
assert result is None assert result is None
@@ -55,7 +55,7 @@ class TestCreate:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_success(self, async_test_db): async def test_create_success(self, async_test_db):
"""Test successfully creating an organization_crud.""" """Test successfully creating an organization_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
@@ -66,7 +66,7 @@ class TestCreate:
is_active=True, is_active=True,
settings={"key": "value"}, settings={"key": "value"},
) )
result = await organization_crud.create(session, obj_in=org_in) result = await organization_repo.create(session, obj_in=org_in)
assert result.name == "New Org" assert result.name == "New Org"
assert result.slug == "new-org" assert result.slug == "new-org"
@@ -89,7 +89,7 @@ class TestCreate:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
org_in = OrganizationCreate(name="Org 2", slug="duplicate-slug") org_in = OrganizationCreate(name="Org 2", slug="duplicate-slug")
with pytest.raises(DuplicateEntryError, match="already exists"): with pytest.raises(DuplicateEntryError, match="already exists"):
await organization_crud.create(session, obj_in=org_in) await organization_repo.create(session, obj_in=org_in)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_without_settings(self, async_test_db): async def test_create_without_settings(self, async_test_db):
@@ -98,7 +98,7 @@ class TestCreate:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
org_in = OrganizationCreate(name="No Settings Org", slug="no-settings") org_in = OrganizationCreate(name="No Settings Org", slug="no-settings")
result = await organization_crud.create(session, obj_in=org_in) result = await organization_repo.create(session, obj_in=org_in)
assert result.settings == {} assert result.settings == {}
@@ -119,7 +119,7 @@ class TestGetMultiWithFilters:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
orgs, total = await organization_crud.get_multi_with_filters(session) orgs, total = await organization_repo.get_multi_with_filters(session)
assert total == 5 assert total == 5
assert len(orgs) == 5 assert len(orgs) == 5
@@ -135,7 +135,7 @@ class TestGetMultiWithFilters:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
orgs, total = await organization_crud.get_multi_with_filters( orgs, total = await organization_repo.get_multi_with_filters(
session, is_active=True session, is_active=True
) )
assert total == 1 assert total == 1
@@ -157,7 +157,7 @@ class TestGetMultiWithFilters:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
orgs, total = await organization_crud.get_multi_with_filters( orgs, total = await organization_repo.get_multi_with_filters(
session, search="tech" session, search="tech"
) )
assert total == 1 assert total == 1
@@ -175,7 +175,7 @@ class TestGetMultiWithFilters:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
orgs, total = await organization_crud.get_multi_with_filters( orgs, total = await organization_repo.get_multi_with_filters(
session, skip=2, limit=3 session, skip=2, limit=3
) )
assert total == 10 assert total == 10
@@ -193,7 +193,7 @@ class TestGetMultiWithFilters:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
orgs, _total = await organization_crud.get_multi_with_filters( orgs, _total = await organization_repo.get_multi_with_filters(
session, sort_by="name", sort_order="asc" session, sort_by="name", sort_order="asc"
) )
assert orgs[0].name == "A Org" assert orgs[0].name == "A Org"
@@ -205,7 +205,7 @@ class TestGetMemberCount:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_member_count_success(self, async_test_db, async_test_user): async def test_get_member_count_success(self, async_test_db, async_test_user):
"""Test getting member count for organization_crud.""" """Test getting member count for organization_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
@@ -225,7 +225,7 @@ class TestGetMemberCount:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await organization_crud.get_member_count( count = await organization_repo.get_member_count(
session, organization_id=org_id session, organization_id=org_id
) )
assert count == 1 assert count == 1
@@ -242,7 +242,7 @@ class TestGetMemberCount:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await organization_crud.get_member_count( count = await organization_repo.get_member_count(
session, organization_id=org_id session, organization_id=org_id
) )
assert count == 0 assert count == 0
@@ -253,7 +253,7 @@ class TestAddUser:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_add_user_success(self, async_test_db, async_test_user): async def test_add_user_success(self, async_test_db, async_test_user):
"""Test successfully adding a user to organization_crud.""" """Test successfully adding a user to organization_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
@@ -263,7 +263,7 @@ class TestAddUser:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await organization_crud.add_user( result = await organization_repo.add_user(
session, session,
organization_id=org_id, organization_id=org_id,
user_id=async_test_user.id, user_id=async_test_user.id,
@@ -297,7 +297,7 @@ class TestAddUser:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
with pytest.raises(DuplicateEntryError, match="already a member"): with pytest.raises(DuplicateEntryError, match="already a member"):
await organization_crud.add_user( await organization_repo.add_user(
session, organization_id=org_id, user_id=async_test_user.id session, organization_id=org_id, user_id=async_test_user.id
) )
@@ -322,7 +322,7 @@ class TestAddUser:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await organization_crud.add_user( result = await organization_repo.add_user(
session, session,
organization_id=org_id, organization_id=org_id,
user_id=async_test_user.id, user_id=async_test_user.id,
@@ -338,7 +338,7 @@ class TestRemoveUser:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_remove_user_success(self, async_test_db, async_test_user): async def test_remove_user_success(self, async_test_db, async_test_user):
"""Test successfully removing a user from organization_crud.""" """Test successfully removing a user from organization_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
@@ -357,7 +357,7 @@ class TestRemoveUser:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await organization_crud.remove_user( result = await organization_repo.remove_user(
session, organization_id=org_id, user_id=async_test_user.id session, organization_id=org_id, user_id=async_test_user.id
) )
@@ -385,7 +385,7 @@ class TestRemoveUser:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await organization_crud.remove_user( result = await organization_repo.remove_user(
session, organization_id=org_id, user_id=uuid4() session, organization_id=org_id, user_id=uuid4()
) )
@@ -416,7 +416,7 @@ class TestUpdateUserRole:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await organization_crud.update_user_role( result = await organization_repo.update_user_role(
session, session,
organization_id=org_id, organization_id=org_id,
user_id=async_test_user.id, user_id=async_test_user.id,
@@ -439,7 +439,7 @@ class TestUpdateUserRole:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await organization_crud.update_user_role( result = await organization_repo.update_user_role(
session, session,
organization_id=org_id, organization_id=org_id,
user_id=uuid4(), user_id=uuid4(),
@@ -475,7 +475,7 @@ class TestGetOrganizationMembers:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
members, total = await organization_crud.get_organization_members( members, total = await organization_repo.get_organization_members(
session, organization_id=org_id session, organization_id=org_id
) )
@@ -508,7 +508,7 @@ class TestGetOrganizationMembers:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
members, total = await organization_crud.get_organization_members( members, total = await organization_repo.get_organization_members(
session, organization_id=org_id, skip=0, limit=10 session, organization_id=org_id, skip=0, limit=10
) )
@@ -539,7 +539,7 @@ class TestGetUserOrganizations:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
orgs = await organization_crud.get_user_organizations( orgs = await organization_repo.get_user_organizations(
session, user_id=async_test_user.id session, user_id=async_test_user.id
) )
@@ -575,7 +575,7 @@ class TestGetUserOrganizations:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
orgs = await organization_crud.get_user_organizations( orgs = await organization_repo.get_user_organizations(
session, user_id=async_test_user.id, is_active=True session, user_id=async_test_user.id, is_active=True
) )
@@ -588,7 +588,7 @@ class TestGetUserRole:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_user_role_in_org_success(self, async_test_db, async_test_user): async def test_get_user_role_in_org_success(self, async_test_db, async_test_user):
"""Test getting user role in organization_crud.""" """Test getting user role in organization_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
@@ -607,7 +607,7 @@ class TestGetUserRole:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
role = await organization_crud.get_user_role_in_org( role = await organization_repo.get_user_role_in_org(
session, user_id=async_test_user.id, organization_id=org_id session, user_id=async_test_user.id, organization_id=org_id
) )
@@ -625,7 +625,7 @@ class TestGetUserRole:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
role = await organization_crud.get_user_role_in_org( role = await organization_repo.get_user_role_in_org(
session, user_id=uuid4(), organization_id=org_id session, user_id=uuid4(), organization_id=org_id
) )
@@ -656,7 +656,7 @@ class TestIsUserOrgOwner:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
is_owner = await organization_crud.is_user_org_owner( is_owner = await organization_repo.is_user_org_owner(
session, user_id=async_test_user.id, organization_id=org_id session, user_id=async_test_user.id, organization_id=org_id
) )
@@ -683,7 +683,7 @@ class TestIsUserOrgOwner:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
is_owner = await organization_crud.is_user_org_owner( is_owner = await organization_repo.is_user_org_owner(
session, user_id=async_test_user.id, organization_id=org_id session, user_id=async_test_user.id, organization_id=org_id
) )
@@ -720,7 +720,7 @@ class TestGetMultiWithMemberCounts:
( (
orgs_with_counts, orgs_with_counts,
total, total,
) = await organization_crud.get_multi_with_member_counts(session) ) = await organization_repo.get_multi_with_member_counts(session)
assert total == 2 assert total == 2
assert len(orgs_with_counts) == 2 assert len(orgs_with_counts) == 2
@@ -745,7 +745,7 @@ class TestGetMultiWithMemberCounts:
( (
orgs_with_counts, orgs_with_counts,
total, total,
) = await organization_crud.get_multi_with_member_counts( ) = await organization_repo.get_multi_with_member_counts(
session, is_active=True session, is_active=True
) )
@@ -767,7 +767,7 @@ class TestGetMultiWithMemberCounts:
( (
orgs_with_counts, orgs_with_counts,
total, total,
) = await organization_crud.get_multi_with_member_counts( ) = await organization_repo.get_multi_with_member_counts(
session, search="tech" session, search="tech"
) )
@@ -801,7 +801,7 @@ class TestGetUserOrganizationsWithDetails:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
orgs_with_details = ( orgs_with_details = (
await organization_crud.get_user_organizations_with_details( await organization_repo.get_user_organizations_with_details(
session, user_id=async_test_user.id session, user_id=async_test_user.id
) )
) )
@@ -841,7 +841,7 @@ class TestGetUserOrganizationsWithDetails:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
orgs_with_details = ( orgs_with_details = (
await organization_crud.get_user_organizations_with_details( await organization_repo.get_user_organizations_with_details(
session, user_id=async_test_user.id, is_active=True session, user_id=async_test_user.id, is_active=True
) )
) )
@@ -874,7 +874,7 @@ class TestIsUserOrgAdmin:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
is_admin = await organization_crud.is_user_org_admin( is_admin = await organization_repo.is_user_org_admin(
session, user_id=async_test_user.id, organization_id=org_id session, user_id=async_test_user.id, organization_id=org_id
) )
@@ -901,7 +901,7 @@ class TestIsUserOrgAdmin:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
is_admin = await organization_crud.is_user_org_admin( is_admin = await organization_repo.is_user_org_admin(
session, user_id=async_test_user.id, organization_id=org_id session, user_id=async_test_user.id, organization_id=org_id
) )
@@ -928,7 +928,7 @@ class TestIsUserOrgAdmin:
org_id = org.id org_id = org.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
is_admin = await organization_crud.is_user_org_admin( is_admin = await organization_repo.is_user_org_admin(
session, user_id=async_test_user.id, organization_id=org_id session, user_id=async_test_user.id, organization_id=org_id
) )
@@ -937,7 +937,7 @@ class TestIsUserOrgAdmin:
class TestOrganizationExceptionHandlers: class TestOrganizationExceptionHandlers:
""" """
Test exception handlers in organization CRUD methods. Test exception handlers in organization repository methods.
Uses mocks to trigger database errors and verify proper error handling. Uses mocks to trigger database errors and verify proper error handling.
Covers lines: 33-35, 57-62, 114-116, 130-132, 207-209, 258-260, 291-294, 326-329, 385-387, 409-411, 466-468, 491-493 Covers lines: 33-35, 57-62, 114-116, 130-132, 207-209, 258-260, 291-294, 326-329, 385-387, 409-411, 466-468, 491-493
""" """
@@ -952,7 +952,7 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("Database connection lost") session, "execute", side_effect=Exception("Database connection lost")
): ):
with pytest.raises(Exception, match="Database connection lost"): with pytest.raises(Exception, match="Database connection lost"):
await organization_crud.get_by_slug(session, slug="test-slug") await organization_repo.get_by_slug(session, slug="test-slug")
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_integrity_error_non_slug(self, async_test_db): async def test_create_integrity_error_non_slug(self, async_test_db):
@@ -976,7 +976,7 @@ class TestOrganizationExceptionHandlers:
with pytest.raises( with pytest.raises(
IntegrityConstraintError, match="Database integrity error" IntegrityConstraintError, match="Database integrity error"
): ):
await organization_crud.create(session, obj_in=org_in) await organization_repo.create(session, obj_in=org_in)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_unexpected_error(self, async_test_db): async def test_create_unexpected_error(self, async_test_db):
@@ -990,7 +990,7 @@ class TestOrganizationExceptionHandlers:
with patch.object(session, "rollback", new_callable=AsyncMock): with patch.object(session, "rollback", new_callable=AsyncMock):
org_in = OrganizationCreate(name="Test", slug="test") org_in = OrganizationCreate(name="Test", slug="test")
with pytest.raises(RuntimeError, match="Unexpected error"): with pytest.raises(RuntimeError, match="Unexpected error"):
await organization_crud.create(session, obj_in=org_in) await organization_repo.create(session, obj_in=org_in)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_multi_with_filters_database_error(self, async_test_db): async def test_get_multi_with_filters_database_error(self, async_test_db):
@@ -1002,7 +1002,7 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("Query timeout") session, "execute", side_effect=Exception("Query timeout")
): ):
with pytest.raises(Exception, match="Query timeout"): with pytest.raises(Exception, match="Query timeout"):
await organization_crud.get_multi_with_filters(session) await organization_repo.get_multi_with_filters(session)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_member_count_database_error(self, async_test_db): async def test_get_member_count_database_error(self, async_test_db):
@@ -1016,7 +1016,7 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("Count query failed") session, "execute", side_effect=Exception("Count query failed")
): ):
with pytest.raises(Exception, match="Count query failed"): with pytest.raises(Exception, match="Count query failed"):
await organization_crud.get_member_count( await organization_repo.get_member_count(
session, organization_id=uuid4() session, organization_id=uuid4()
) )
@@ -1030,7 +1030,7 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("Complex query failed") session, "execute", side_effect=Exception("Complex query failed")
): ):
with pytest.raises(Exception, match="Complex query failed"): with pytest.raises(Exception, match="Complex query failed"):
await organization_crud.get_multi_with_member_counts(session) await organization_repo.get_multi_with_member_counts(session)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_add_user_integrity_error(self, async_test_db, async_test_user): async def test_add_user_integrity_error(self, async_test_db, async_test_user):
@@ -1064,7 +1064,7 @@ class TestOrganizationExceptionHandlers:
IntegrityConstraintError, IntegrityConstraintError,
match="Failed to add user to organization", match="Failed to add user to organization",
): ):
await organization_crud.add_user( await organization_repo.add_user(
session, session,
organization_id=org_id, organization_id=org_id,
user_id=async_test_user.id, user_id=async_test_user.id,
@@ -1082,7 +1082,7 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("Delete failed") session, "execute", side_effect=Exception("Delete failed")
): ):
with pytest.raises(Exception, match="Delete failed"): with pytest.raises(Exception, match="Delete failed"):
await organization_crud.remove_user( await organization_repo.remove_user(
session, organization_id=uuid4(), user_id=async_test_user.id session, organization_id=uuid4(), user_id=async_test_user.id
) )
@@ -1100,7 +1100,7 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("Update failed") session, "execute", side_effect=Exception("Update failed")
): ):
with pytest.raises(Exception, match="Update failed"): with pytest.raises(Exception, match="Update failed"):
await organization_crud.update_user_role( await organization_repo.update_user_role(
session, session,
organization_id=uuid4(), organization_id=uuid4(),
user_id=async_test_user.id, user_id=async_test_user.id,
@@ -1119,7 +1119,7 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("Members query failed") session, "execute", side_effect=Exception("Members query failed")
): ):
with pytest.raises(Exception, match="Members query failed"): with pytest.raises(Exception, match="Members query failed"):
await organization_crud.get_organization_members( await organization_repo.get_organization_members(
session, organization_id=uuid4() session, organization_id=uuid4()
) )
@@ -1135,7 +1135,7 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("User orgs query failed") session, "execute", side_effect=Exception("User orgs query failed")
): ):
with pytest.raises(Exception, match="User orgs query failed"): with pytest.raises(Exception, match="User orgs query failed"):
await organization_crud.get_user_organizations( await organization_repo.get_user_organizations(
session, user_id=async_test_user.id session, user_id=async_test_user.id
) )
@@ -1151,7 +1151,7 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("Details query failed") session, "execute", side_effect=Exception("Details query failed")
): ):
with pytest.raises(Exception, match="Details query failed"): with pytest.raises(Exception, match="Details query failed"):
await organization_crud.get_user_organizations_with_details( await organization_repo.get_user_organizations_with_details(
session, user_id=async_test_user.id session, user_id=async_test_user.id
) )
@@ -1169,6 +1169,6 @@ class TestOrganizationExceptionHandlers:
session, "execute", side_effect=Exception("Role query failed") session, "execute", side_effect=Exception("Role query failed")
): ):
with pytest.raises(Exception, match="Role query failed"): with pytest.raises(Exception, match="Role query failed"):
await organization_crud.get_user_role_in_org( await organization_repo.get_user_role_in_org(
session, user_id=async_test_user.id, organization_id=uuid4() session, user_id=async_test_user.id, organization_id=uuid4()
) )

View File

@@ -1,6 +1,6 @@
# tests/crud/test_session_async.py # tests/repositories/test_session_async.py
""" """
Comprehensive tests for async session CRUD operations. Comprehensive tests for async session repository operations.
""" """
from datetime import UTC, datetime, timedelta from datetime import UTC, datetime, timedelta
@@ -10,7 +10,7 @@ import pytest
from app.core.repository_exceptions import InvalidInputError from app.core.repository_exceptions import InvalidInputError
from app.models.user_session import UserSession from app.models.user_session import UserSession
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
from app.schemas.sessions import SessionCreate from app.schemas.sessions import SessionCreate
@@ -37,7 +37,7 @@ class TestGetByJti:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await session_crud.get_by_jti(session, jti="test_jti_123") result = await session_repo.get_by_jti(session, jti="test_jti_123")
assert result is not None assert result is not None
assert result.refresh_token_jti == "test_jti_123" assert result.refresh_token_jti == "test_jti_123"
@@ -47,7 +47,7 @@ class TestGetByJti:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await session_crud.get_by_jti(session, jti="nonexistent") result = await session_repo.get_by_jti(session, jti="nonexistent")
assert result is None assert result is None
@@ -74,7 +74,7 @@ class TestGetActiveByJti:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await session_crud.get_active_by_jti(session, jti="active_jti") result = await session_repo.get_active_by_jti(session, jti="active_jti")
assert result is not None assert result is not None
assert result.is_active is True assert result.is_active is True
@@ -98,7 +98,7 @@ class TestGetActiveByJti:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await session_crud.get_active_by_jti(session, jti="inactive_jti") result = await session_repo.get_active_by_jti(session, jti="inactive_jti")
assert result is None assert result is None
@@ -135,7 +135,7 @@ class TestGetUserSessions:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
results = await session_crud.get_user_sessions( results = await session_repo.get_user_sessions(
session, user_id=str(async_test_user.id), active_only=True session, user_id=str(async_test_user.id), active_only=True
) )
assert len(results) == 1 assert len(results) == 1
@@ -162,7 +162,7 @@ class TestGetUserSessions:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
results = await session_crud.get_user_sessions( results = await session_repo.get_user_sessions(
session, user_id=str(async_test_user.id), active_only=False session, user_id=str(async_test_user.id), active_only=False
) )
assert len(results) == 3 assert len(results) == 3
@@ -173,7 +173,7 @@ class TestCreateSession:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_session_success(self, async_test_db, async_test_user): async def test_create_session_success(self, async_test_db, async_test_user):
"""Test successfully creating a session_crud.""" """Test successfully creating a session_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
@@ -189,7 +189,7 @@ class TestCreateSession:
location_city="San Francisco", location_city="San Francisco",
location_country="USA", location_country="USA",
) )
result = await session_crud.create_session(session, obj_in=session_data) result = await session_repo.create_session(session, obj_in=session_data)
assert result.user_id == async_test_user.id assert result.user_id == async_test_user.id
assert result.refresh_token_jti == "new_jti" assert result.refresh_token_jti == "new_jti"
@@ -202,7 +202,7 @@ class TestDeactivate:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_deactivate_success(self, async_test_db, async_test_user): async def test_deactivate_success(self, async_test_db, async_test_user):
"""Test successfully deactivating a session_crud.""" """Test successfully deactivating a session_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
@@ -221,7 +221,7 @@ class TestDeactivate:
session_id = user_session.id session_id = user_session.id
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await session_crud.deactivate(session, session_id=str(session_id)) result = await session_repo.deactivate(session, session_id=str(session_id))
assert result is not None assert result is not None
assert result.is_active is False assert result.is_active is False
@@ -231,7 +231,7 @@ class TestDeactivate:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await session_crud.deactivate(session, session_id=str(uuid4())) result = await session_repo.deactivate(session, session_id=str(uuid4()))
assert result is None assert result is None
@@ -262,7 +262,7 @@ class TestDeactivateAllUserSessions:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await session_crud.deactivate_all_user_sessions( count = await session_repo.deactivate_all_user_sessions(
session, user_id=str(async_test_user.id) session, user_id=str(async_test_user.id)
) )
assert count == 2 assert count == 2
@@ -292,7 +292,7 @@ class TestUpdateLastUsed:
await session.refresh(user_session) await session.refresh(user_session)
old_time = user_session.last_used_at old_time = user_session.last_used_at
result = await session_crud.update_last_used(session, session=user_session) result = await session_repo.update_last_used(session, session=user_session)
assert result.last_used_at > old_time assert result.last_used_at > old_time
@@ -321,7 +321,7 @@ class TestGetUserSessionCount:
await session.commit() await session.commit()
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await session_crud.get_user_session_count( count = await session_repo.get_user_session_count(
session, user_id=str(async_test_user.id) session, user_id=str(async_test_user.id)
) )
assert count == 3 assert count == 3
@@ -332,7 +332,7 @@ class TestGetUserSessionCount:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await session_crud.get_user_session_count( count = await session_repo.get_user_session_count(
session, user_id=str(uuid4()) session, user_id=str(uuid4())
) )
assert count == 0 assert count == 0
@@ -364,7 +364,7 @@ class TestUpdateRefreshToken:
new_jti = "new_jti_123" new_jti = "new_jti_123"
new_expires = datetime.now(UTC) + timedelta(days=14) new_expires = datetime.now(UTC) + timedelta(days=14)
result = await session_crud.update_refresh_token( result = await session_repo.update_refresh_token(
session, session,
session=user_session, session=user_session,
new_jti=new_jti, new_jti=new_jti,
@@ -410,7 +410,7 @@ class TestCleanupExpired:
# Cleanup # Cleanup
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await session_crud.cleanup_expired(session, keep_days=30) count = await session_repo.cleanup_expired(session, keep_days=30)
assert count == 1 assert count == 1
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -436,7 +436,7 @@ class TestCleanupExpired:
# Cleanup # Cleanup
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await session_crud.cleanup_expired(session, keep_days=30) count = await session_repo.cleanup_expired(session, keep_days=30)
assert count == 0 # Should not delete recent sessions assert count == 0 # Should not delete recent sessions
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -462,7 +462,7 @@ class TestCleanupExpired:
# Cleanup # Cleanup
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await session_crud.cleanup_expired(session, keep_days=30) count = await session_repo.cleanup_expired(session, keep_days=30)
assert count == 0 # Should not delete active sessions assert count == 0 # Should not delete active sessions
@@ -493,7 +493,7 @@ class TestCleanupExpiredForUser:
# Cleanup for user # Cleanup for user
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await session_crud.cleanup_expired_for_user( count = await session_repo.cleanup_expired_for_user(
session, user_id=str(async_test_user.id) session, user_id=str(async_test_user.id)
) )
assert count == 1 assert count == 1
@@ -505,7 +505,7 @@ class TestCleanupExpiredForUser:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
with pytest.raises(InvalidInputError, match="Invalid user ID format"): with pytest.raises(InvalidInputError, match="Invalid user ID format"):
await session_crud.cleanup_expired_for_user( await session_repo.cleanup_expired_for_user(
session, user_id="not-a-valid-uuid" session, user_id="not-a-valid-uuid"
) )
@@ -533,7 +533,7 @@ class TestCleanupExpiredForUser:
# Cleanup # Cleanup
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await session_crud.cleanup_expired_for_user( count = await session_repo.cleanup_expired_for_user(
session, user_id=str(async_test_user.id) session, user_id=str(async_test_user.id)
) )
assert count == 0 # Should not delete active sessions assert count == 0 # Should not delete active sessions
@@ -565,7 +565,7 @@ class TestGetUserSessionsWithUser:
# Get with user relationship # Get with user relationship
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
results = await session_crud.get_user_sessions( results = await session_repo.get_user_sessions(
session, user_id=str(async_test_user.id), with_user=True session, user_id=str(async_test_user.id), with_user=True
) )
assert len(results) >= 1 assert len(results) >= 1

View File

@@ -1,6 +1,6 @@
# tests/crud/test_session_db_failures.py # tests/repositories/test_session_db_failures.py
""" """
Comprehensive tests for session CRUD database failure scenarios. Comprehensive tests for session repository database failure scenarios.
""" """
from datetime import UTC, datetime, timedelta from datetime import UTC, datetime, timedelta
@@ -12,11 +12,11 @@ from sqlalchemy.exc import OperationalError
from app.core.repository_exceptions import IntegrityConstraintError from app.core.repository_exceptions import IntegrityConstraintError
from app.models.user_session import UserSession from app.models.user_session import UserSession
from app.repositories.session import session_repo as session_crud from app.repositories.session import session_repo as session_repo
from app.schemas.sessions import SessionCreate from app.schemas.sessions import SessionCreate
class TestSessionCRUDGetByJtiFailures: class TestSessionRepositoryGetByJtiFailures:
"""Test get_by_jti exception handling.""" """Test get_by_jti exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -31,10 +31,10 @@ class TestSessionCRUDGetByJtiFailures:
with patch.object(session, "execute", side_effect=mock_execute): with patch.object(session, "execute", side_effect=mock_execute):
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.get_by_jti(session, jti="test_jti") await session_repo.get_by_jti(session, jti="test_jti")
class TestSessionCRUDGetActiveByJtiFailures: class TestSessionRepositoryGetActiveByJtiFailures:
"""Test get_active_by_jti exception handling.""" """Test get_active_by_jti exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -49,10 +49,10 @@ class TestSessionCRUDGetActiveByJtiFailures:
with patch.object(session, "execute", side_effect=mock_execute): with patch.object(session, "execute", side_effect=mock_execute):
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.get_active_by_jti(session, jti="test_jti") await session_repo.get_active_by_jti(session, jti="test_jti")
class TestSessionCRUDGetUserSessionsFailures: class TestSessionRepositoryGetUserSessionsFailures:
"""Test get_user_sessions exception handling.""" """Test get_user_sessions exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -69,12 +69,12 @@ class TestSessionCRUDGetUserSessionsFailures:
with patch.object(session, "execute", side_effect=mock_execute): with patch.object(session, "execute", side_effect=mock_execute):
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.get_user_sessions( await session_repo.get_user_sessions(
session, user_id=str(async_test_user.id) session, user_id=str(async_test_user.id)
) )
class TestSessionCRUDCreateSessionFailures: class TestSessionRepositoryCreateSessionFailures:
"""Test create_session exception handling.""" """Test create_session exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -106,7 +106,7 @@ class TestSessionCRUDCreateSessionFailures:
with pytest.raises( with pytest.raises(
IntegrityConstraintError, match="Failed to create session" IntegrityConstraintError, match="Failed to create session"
): ):
await session_crud.create_session(session, obj_in=session_data) await session_repo.create_session(session, obj_in=session_data)
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
@@ -139,12 +139,12 @@ class TestSessionCRUDCreateSessionFailures:
with pytest.raises( with pytest.raises(
IntegrityConstraintError, match="Failed to create session" IntegrityConstraintError, match="Failed to create session"
): ):
await session_crud.create_session(session, obj_in=session_data) await session_repo.create_session(session, obj_in=session_data)
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestSessionCRUDDeactivateFailures: class TestSessionRepositoryDeactivateFailures:
"""Test deactivate exception handling.""" """Test deactivate exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -182,14 +182,14 @@ class TestSessionCRUDDeactivateFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.deactivate( await session_repo.deactivate(
session, session_id=str(session_id) session, session_id=str(session_id)
) )
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestSessionCRUDDeactivateAllFailures: class TestSessionRepositoryDeactivateAllFailures:
"""Test deactivate_all_user_sessions exception handling.""" """Test deactivate_all_user_sessions exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -209,14 +209,14 @@ class TestSessionCRUDDeactivateAllFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.deactivate_all_user_sessions( await session_repo.deactivate_all_user_sessions(
session, user_id=str(async_test_user.id) session, user_id=str(async_test_user.id)
) )
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestSessionCRUDUpdateLastUsedFailures: class TestSessionRepositoryUpdateLastUsedFailures:
"""Test update_last_used exception handling.""" """Test update_last_used exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -259,12 +259,12 @@ class TestSessionCRUDUpdateLastUsedFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.update_last_used(session, session=sess) await session_repo.update_last_used(session, session=sess)
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestSessionCRUDUpdateRefreshTokenFailures: class TestSessionRepositoryUpdateRefreshTokenFailures:
"""Test update_refresh_token exception handling.""" """Test update_refresh_token exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -307,7 +307,7 @@ class TestSessionCRUDUpdateRefreshTokenFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.update_refresh_token( await session_repo.update_refresh_token(
session, session,
session=sess, session=sess,
new_jti=str(uuid4()), new_jti=str(uuid4()),
@@ -317,7 +317,7 @@ class TestSessionCRUDUpdateRefreshTokenFailures:
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestSessionCRUDCleanupExpiredFailures: class TestSessionRepositoryCleanupExpiredFailures:
"""Test cleanup_expired exception handling.""" """Test cleanup_expired exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -337,12 +337,12 @@ class TestSessionCRUDCleanupExpiredFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.cleanup_expired(session, keep_days=30) await session_repo.cleanup_expired(session, keep_days=30)
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestSessionCRUDCleanupExpiredForUserFailures: class TestSessionRepositoryCleanupExpiredForUserFailures:
"""Test cleanup_expired_for_user exception handling.""" """Test cleanup_expired_for_user exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -362,14 +362,14 @@ class TestSessionCRUDCleanupExpiredForUserFailures:
session, "rollback", new_callable=AsyncMock session, "rollback", new_callable=AsyncMock
) as mock_rollback: ) as mock_rollback:
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.cleanup_expired_for_user( await session_repo.cleanup_expired_for_user(
session, user_id=str(async_test_user.id) session, user_id=str(async_test_user.id)
) )
mock_rollback.assert_called_once() mock_rollback.assert_called_once()
class TestSessionCRUDGetUserSessionCountFailures: class TestSessionRepositoryGetUserSessionCountFailures:
"""Test get_user_session_count exception handling.""" """Test get_user_session_count exception handling."""
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -386,6 +386,6 @@ class TestSessionCRUDGetUserSessionCountFailures:
with patch.object(session, "execute", side_effect=mock_execute): with patch.object(session, "execute", side_effect=mock_execute):
with pytest.raises(OperationalError): with pytest.raises(OperationalError):
await session_crud.get_user_session_count( await session_repo.get_user_session_count(
session, user_id=str(async_test_user.id) session, user_id=str(async_test_user.id)
) )

View File

@@ -1,12 +1,12 @@
# tests/crud/test_user_async.py # tests/repositories/test_user_async.py
""" """
Comprehensive tests for async user CRUD operations. Comprehensive tests for async user repository operations.
""" """
import pytest import pytest
from app.core.repository_exceptions import DuplicateEntryError, InvalidInputError from app.core.repository_exceptions import DuplicateEntryError, InvalidInputError
from app.repositories.user import user_repo as user_crud from app.repositories.user import user_repo as user_repo
from app.schemas.users import UserCreate, UserUpdate from app.schemas.users import UserCreate, UserUpdate
@@ -19,7 +19,7 @@ class TestGetByEmail:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await user_crud.get_by_email(session, email=async_test_user.email) result = await user_repo.get_by_email(session, email=async_test_user.email)
assert result is not None assert result is not None
assert result.email == async_test_user.email assert result.email == async_test_user.email
assert result.id == async_test_user.id assert result.id == async_test_user.id
@@ -30,7 +30,7 @@ class TestGetByEmail:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
result = await user_crud.get_by_email( result = await user_repo.get_by_email(
session, email="nonexistent@example.com" session, email="nonexistent@example.com"
) )
assert result is None assert result is None
@@ -41,7 +41,7 @@ class TestCreate:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_user_success(self, async_test_db): async def test_create_user_success(self, async_test_db):
"""Test successfully creating a user_crud.""" """Test successfully creating a user_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
@@ -52,7 +52,7 @@ class TestCreate:
last_name="User", last_name="User",
phone_number="+1234567890", phone_number="+1234567890",
) )
result = await user_crud.create(session, obj_in=user_data) result = await user_repo.create(session, obj_in=user_data)
assert result.email == "newuser@example.com" assert result.email == "newuser@example.com"
assert result.first_name == "New" assert result.first_name == "New"
@@ -76,7 +76,7 @@ class TestCreate:
last_name="User", last_name="User",
is_superuser=True, is_superuser=True,
) )
result = await user_crud.create(session, obj_in=user_data) result = await user_repo.create(session, obj_in=user_data)
assert result.is_superuser is True assert result.is_superuser is True
assert result.email == "superuser@example.com" assert result.email == "superuser@example.com"
@@ -95,7 +95,7 @@ class TestCreate:
) )
with pytest.raises(DuplicateEntryError) as exc_info: with pytest.raises(DuplicateEntryError) as exc_info:
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
assert "already exists" in str(exc_info.value).lower() assert "already exists" in str(exc_info.value).lower()
@@ -110,12 +110,12 @@ class TestUpdate:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
# Get fresh copy of user # Get fresh copy of user
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
update_data = UserUpdate( update_data = UserUpdate(
first_name="Updated", last_name="Name", phone_number="+9876543210" first_name="Updated", last_name="Name", phone_number="+9876543210"
) )
result = await user_crud.update(session, db_obj=user, obj_in=update_data) result = await user_repo.update(session, db_obj=user, obj_in=update_data)
assert result.first_name == "Updated" assert result.first_name == "Updated"
assert result.last_name == "Name" assert result.last_name == "Name"
@@ -134,16 +134,16 @@ class TestUpdate:
first_name="Pass", first_name="Pass",
last_name="Test", last_name="Test",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_id = user.id user_id = user.id
old_password_hash = user.password_hash old_password_hash = user.password_hash
# Update the password # Update the password
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
user = await user_crud.get(session, id=str(user_id)) user = await user_repo.get(session, id=str(user_id))
update_data = UserUpdate(password="NewDifferentPassword123!") update_data = UserUpdate(password="NewDifferentPassword123!")
result = await user_crud.update(session, db_obj=user, obj_in=update_data) result = await user_repo.update(session, db_obj=user, obj_in=update_data)
await session.refresh(result) await session.refresh(result)
assert result.password_hash != old_password_hash assert result.password_hash != old_password_hash
@@ -158,10 +158,10 @@ class TestUpdate:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
update_dict = {"first_name": "DictUpdate"} update_dict = {"first_name": "DictUpdate"}
result = await user_crud.update(session, db_obj=user, obj_in=update_dict) result = await user_repo.update(session, db_obj=user, obj_in=update_dict)
assert result.first_name == "DictUpdate" assert result.first_name == "DictUpdate"
@@ -175,7 +175,7 @@ class TestGetMultiWithTotal:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
users, total = await user_crud.get_multi_with_total( users, total = await user_repo.get_multi_with_total(
session, skip=0, limit=10 session, skip=0, limit=10
) )
assert total >= 1 assert total >= 1
@@ -196,10 +196,10 @@ class TestGetMultiWithTotal:
first_name=f"User{i}", first_name=f"User{i}",
last_name="Test", last_name="Test",
) )
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
users, _total = await user_crud.get_multi_with_total( users, _total = await user_repo.get_multi_with_total(
session, skip=0, limit=10, sort_by="email", sort_order="asc" session, skip=0, limit=10, sort_by="email", sort_order="asc"
) )
@@ -222,10 +222,10 @@ class TestGetMultiWithTotal:
first_name=f"User{i}", first_name=f"User{i}",
last_name="Test", last_name="Test",
) )
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
users, _total = await user_crud.get_multi_with_total( users, _total = await user_repo.get_multi_with_total(
session, skip=0, limit=10, sort_by="email", sort_order="desc" session, skip=0, limit=10, sort_by="email", sort_order="desc"
) )
@@ -247,7 +247,7 @@ class TestGetMultiWithTotal:
first_name="Active", first_name="Active",
last_name="User", last_name="User",
) )
await user_crud.create(session, obj_in=active_user) await user_repo.create(session, obj_in=active_user)
inactive_user = UserCreate( inactive_user = UserCreate(
email="inactive@example.com", email="inactive@example.com",
@@ -255,15 +255,15 @@ class TestGetMultiWithTotal:
first_name="Inactive", first_name="Inactive",
last_name="User", last_name="User",
) )
created_inactive = await user_crud.create(session, obj_in=inactive_user) created_inactive = await user_repo.create(session, obj_in=inactive_user)
# Deactivate the user # Deactivate the user
await user_crud.update( await user_repo.update(
session, db_obj=created_inactive, obj_in={"is_active": False} session, db_obj=created_inactive, obj_in={"is_active": False}
) )
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
users, _total = await user_crud.get_multi_with_total( users, _total = await user_repo.get_multi_with_total(
session, skip=0, limit=100, filters={"is_active": True} session, skip=0, limit=100, filters={"is_active": True}
) )
@@ -283,10 +283,10 @@ class TestGetMultiWithTotal:
first_name="Searchable", first_name="Searchable",
last_name="UserName", last_name="UserName",
) )
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
users, total = await user_crud.get_multi_with_total( users, total = await user_repo.get_multi_with_total(
session, skip=0, limit=100, search="Searchable" session, skip=0, limit=100, search="Searchable"
) )
@@ -307,16 +307,16 @@ class TestGetMultiWithTotal:
first_name=f"Page{i}", first_name=f"Page{i}",
last_name="User", last_name="User",
) )
await user_crud.create(session, obj_in=user_data) await user_repo.create(session, obj_in=user_data)
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
# Get first page # Get first page
users_page1, total = await user_crud.get_multi_with_total( users_page1, total = await user_repo.get_multi_with_total(
session, skip=0, limit=2 session, skip=0, limit=2
) )
# Get second page # Get second page
users_page2, total2 = await user_crud.get_multi_with_total( users_page2, total2 = await user_repo.get_multi_with_total(
session, skip=2, limit=2 session, skip=2, limit=2
) )
@@ -332,7 +332,7 @@ class TestGetMultiWithTotal:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
with pytest.raises(InvalidInputError) as exc_info: with pytest.raises(InvalidInputError) as exc_info:
await user_crud.get_multi_with_total(session, skip=-1, limit=10) await user_repo.get_multi_with_total(session, skip=-1, limit=10)
assert "skip must be non-negative" in str(exc_info.value) assert "skip must be non-negative" in str(exc_info.value)
@@ -343,7 +343,7 @@ class TestGetMultiWithTotal:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
with pytest.raises(InvalidInputError) as exc_info: with pytest.raises(InvalidInputError) as exc_info:
await user_crud.get_multi_with_total(session, skip=0, limit=-1) await user_repo.get_multi_with_total(session, skip=0, limit=-1)
assert "limit must be non-negative" in str(exc_info.value) assert "limit must be non-negative" in str(exc_info.value)
@@ -354,7 +354,7 @@ class TestGetMultiWithTotal:
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
with pytest.raises(InvalidInputError) as exc_info: with pytest.raises(InvalidInputError) as exc_info:
await user_crud.get_multi_with_total(session, skip=0, limit=1001) await user_repo.get_multi_with_total(session, skip=0, limit=1001)
assert "Maximum limit is 1000" in str(exc_info.value) assert "Maximum limit is 1000" in str(exc_info.value)
@@ -377,12 +377,12 @@ class TestBulkUpdateStatus:
first_name=f"Bulk{i}", first_name=f"Bulk{i}",
last_name="User", last_name="User",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_ids.append(user.id) user_ids.append(user.id)
# Bulk deactivate # Bulk deactivate
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await user_crud.bulk_update_status( count = await user_repo.bulk_update_status(
session, user_ids=user_ids, is_active=False session, user_ids=user_ids, is_active=False
) )
assert count == 3 assert count == 3
@@ -390,7 +390,7 @@ class TestBulkUpdateStatus:
# Verify all are inactive # Verify all are inactive
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
for user_id in user_ids: for user_id in user_ids:
user = await user_crud.get(session, id=str(user_id)) user = await user_repo.get(session, id=str(user_id))
assert user.is_active is False assert user.is_active is False
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -399,7 +399,7 @@ class TestBulkUpdateStatus:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await user_crud.bulk_update_status( count = await user_repo.bulk_update_status(
session, user_ids=[], is_active=False session, user_ids=[], is_active=False
) )
assert count == 0 assert count == 0
@@ -417,21 +417,21 @@ class TestBulkUpdateStatus:
first_name="Reactivate", first_name="Reactivate",
last_name="User", last_name="User",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
# Deactivate # Deactivate
await user_crud.update(session, db_obj=user, obj_in={"is_active": False}) await user_repo.update(session, db_obj=user, obj_in={"is_active": False})
user_id = user.id user_id = user.id
# Reactivate # Reactivate
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await user_crud.bulk_update_status( count = await user_repo.bulk_update_status(
session, user_ids=[user_id], is_active=True session, user_ids=[user_id], is_active=True
) )
assert count == 1 assert count == 1
# Verify active # Verify active
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
user = await user_crud.get(session, id=str(user_id)) user = await user_repo.get(session, id=str(user_id))
assert user.is_active is True assert user.is_active is True
@@ -453,24 +453,24 @@ class TestBulkSoftDelete:
first_name=f"Delete{i}", first_name=f"Delete{i}",
last_name="User", last_name="User",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_ids.append(user.id) user_ids.append(user.id)
# Bulk delete # Bulk delete
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await user_crud.bulk_soft_delete(session, user_ids=user_ids) count = await user_repo.bulk_soft_delete(session, user_ids=user_ids)
assert count == 3 assert count == 3
# Verify all are soft deleted # Verify all are soft deleted
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
for user_id in user_ids: for user_id in user_ids:
user = await user_crud.get(session, id=str(user_id)) user = await user_repo.get(session, id=str(user_id))
assert user.deleted_at is not None assert user.deleted_at is not None
assert user.is_active is False assert user.is_active is False
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_bulk_soft_delete_with_exclusion(self, async_test_db): async def test_bulk_soft_delete_with_exclusion(self, async_test_db):
"""Test bulk soft delete with excluded user_crud.""" """Test bulk soft delete with excluded user_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
# Create multiple users # Create multiple users
@@ -483,20 +483,20 @@ class TestBulkSoftDelete:
first_name=f"Exclude{i}", first_name=f"Exclude{i}",
last_name="User", last_name="User",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_ids.append(user.id) user_ids.append(user.id)
# Bulk delete, excluding first user # Bulk delete, excluding first user
exclude_id = user_ids[0] exclude_id = user_ids[0]
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await user_crud.bulk_soft_delete( count = await user_repo.bulk_soft_delete(
session, user_ids=user_ids, exclude_user_id=exclude_id session, user_ids=user_ids, exclude_user_id=exclude_id
) )
assert count == 2 # Only 2 deleted assert count == 2 # Only 2 deleted
# Verify excluded user is NOT deleted # Verify excluded user is NOT deleted
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
excluded_user = await user_crud.get(session, id=str(exclude_id)) excluded_user = await user_repo.get(session, id=str(exclude_id))
assert excluded_user.deleted_at is None assert excluded_user.deleted_at is None
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -505,7 +505,7 @@ class TestBulkSoftDelete:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await user_crud.bulk_soft_delete(session, user_ids=[]) count = await user_repo.bulk_soft_delete(session, user_ids=[])
assert count == 0 assert count == 0
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -521,12 +521,12 @@ class TestBulkSoftDelete:
first_name="Only", first_name="Only",
last_name="User", last_name="User",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_id = user.id user_id = user.id
# Try to delete but exclude # Try to delete but exclude
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await user_crud.bulk_soft_delete( count = await user_repo.bulk_soft_delete(
session, user_ids=[user_id], exclude_user_id=user_id session, user_ids=[user_id], exclude_user_id=user_id
) )
assert count == 0 assert count == 0
@@ -544,15 +544,15 @@ class TestBulkSoftDelete:
first_name="PreDeleted", first_name="PreDeleted",
last_name="User", last_name="User",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
user_id = user.id user_id = user.id
# First deletion # First deletion
await user_crud.bulk_soft_delete(session, user_ids=[user_id]) await user_repo.bulk_soft_delete(session, user_ids=[user_id])
# Try to delete again # Try to delete again
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
count = await user_crud.bulk_soft_delete(session, user_ids=[user_id]) count = await user_repo.bulk_soft_delete(session, user_ids=[user_id])
assert count == 0 # Already deleted assert count == 0 # Already deleted
@@ -561,16 +561,16 @@ class TestUtilityMethods:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_is_active_true(self, async_test_db, async_test_user): async def test_is_active_true(self, async_test_db, async_test_user):
"""Test is_active returns True for active user_crud.""" """Test is_active returns True for active user_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
assert user_crud.is_active(user) is True assert user_repo.is_active(user) is True
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_is_active_false(self, async_test_db): async def test_is_active_false(self, async_test_db):
"""Test is_active returns False for inactive user_crud.""" """Test is_active returns False for inactive user_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
@@ -580,10 +580,10 @@ class TestUtilityMethods:
first_name="Inactive", first_name="Inactive",
last_name="User", last_name="User",
) )
user = await user_crud.create(session, obj_in=user_data) user = await user_repo.create(session, obj_in=user_data)
await user_crud.update(session, db_obj=user, obj_in={"is_active": False}) await user_repo.update(session, db_obj=user, obj_in={"is_active": False})
assert user_crud.is_active(user) is False assert user_repo.is_active(user) is False
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_is_superuser_true(self, async_test_db, async_test_superuser): async def test_is_superuser_true(self, async_test_db, async_test_superuser):
@@ -591,22 +591,22 @@ class TestUtilityMethods:
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_superuser.id)) user = await user_repo.get(session, id=str(async_test_superuser.id))
assert user_crud.is_superuser(user) is True assert user_repo.is_superuser(user) is True
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_is_superuser_false(self, async_test_db, async_test_user): async def test_is_superuser_false(self, async_test_db, async_test_user):
"""Test is_superuser returns False for regular user_crud.""" """Test is_superuser returns False for regular user_repo."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
async with AsyncTestingSessionLocal() as session: async with AsyncTestingSessionLocal() as session:
user = await user_crud.get(session, id=str(async_test_user.id)) user = await user_repo.get(session, id=str(async_test_user.id))
assert user_crud.is_superuser(user) is False assert user_repo.is_superuser(user) is False
class TestUserExceptionHandlers: class TestUserExceptionHandlers:
""" """
Test exception handlers in user CRUD methods. Test exception handlers in user repository methods.
Covers lines: 30-32, 205-208, 257-260 Covers lines: 30-32, 205-208, 257-260
""" """
@@ -622,7 +622,7 @@ class TestUserExceptionHandlers:
session, "execute", side_effect=Exception("Database query failed") session, "execute", side_effect=Exception("Database query failed")
): ):
with pytest.raises(Exception, match="Database query failed"): with pytest.raises(Exception, match="Database query failed"):
await user_crud.get_by_email(session, email="test@example.com") await user_repo.get_by_email(session, email="test@example.com")
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_bulk_update_status_database_error( async def test_bulk_update_status_database_error(
@@ -640,7 +640,7 @@ class TestUserExceptionHandlers:
): ):
with patch.object(session, "rollback", new_callable=AsyncMock): with patch.object(session, "rollback", new_callable=AsyncMock):
with pytest.raises(Exception, match="Bulk update failed"): with pytest.raises(Exception, match="Bulk update failed"):
await user_crud.bulk_update_status( await user_repo.bulk_update_status(
session, user_ids=[async_test_user.id], is_active=False session, user_ids=[async_test_user.id], is_active=False
) )
@@ -660,6 +660,6 @@ class TestUserExceptionHandlers:
): ):
with patch.object(session, "rollback", new_callable=AsyncMock): with patch.object(session, "rollback", new_callable=AsyncMock):
with pytest.raises(Exception, match="Bulk delete failed"): with pytest.raises(Exception, match="Bulk delete failed"):
await user_crud.bulk_soft_delete( await user_repo.bulk_soft_delete(
session, user_ids=[async_test_user.id] session, user_ids=[async_test_user.id]
) )

View File

@@ -206,13 +206,13 @@ class TestCleanupExpiredSessions:
"""Test cleanup returns 0 on database errors (doesn't crash).""" """Test cleanup returns 0 on database errors (doesn't crash)."""
_test_engine, AsyncTestingSessionLocal = async_test_db _test_engine, AsyncTestingSessionLocal = async_test_db
# Mock session_crud.cleanup_expired to raise error # Mock session_repo.cleanup_expired to raise error
with patch( with patch(
"app.services.session_cleanup.SessionLocal", "app.services.session_cleanup.SessionLocal",
return_value=AsyncTestingSessionLocal(), return_value=AsyncTestingSessionLocal(),
): ):
with patch( with patch(
"app.services.session_cleanup.session_crud.cleanup_expired" "app.services.session_cleanup.session_repo.cleanup_expired"
) as mock_cleanup: ) as mock_cleanup:
mock_cleanup.side_effect = Exception("Database connection lost") mock_cleanup.side_effect = Exception("Database connection lost")

View File

@@ -91,9 +91,9 @@ class TestInitDb:
"""Test that init_db handles database errors gracefully.""" """Test that init_db handles database errors gracefully."""
_test_engine, SessionLocal = async_test_db _test_engine, SessionLocal = async_test_db
# Mock user_crud.get_by_email to raise an exception # Mock user_repo.get_by_email to raise an exception
with patch( with patch(
"app.init_db.user_crud.get_by_email", "app.init_db.user_repo.get_by_email",
side_effect=Exception("Database error"), side_effect=Exception("Database error"),
): ):
with patch("app.init_db.SessionLocal", SessionLocal): with patch("app.init_db.SessionLocal", SessionLocal):

691
backend/uv.lock generated
View File

@@ -120,14 +120,14 @@ wheels = [
[[package]] [[package]]
name = "authlib" name = "authlib"
version = "1.6.5" version = "1.6.8"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "cryptography" }, { name = "cryptography" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/cd/3f/1d3bbd0bf23bdd99276d4def22f29c27a914067b4cf66f753ff9b8bbd0f3/authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b", size = 164553, upload-time = "2025-10-02T13:36:09.489Z" } sdist = { url = "https://files.pythonhosted.org/packages/6b/6c/c88eac87468c607f88bc24df1f3b31445ee6fc9ba123b09e666adf687cd9/authlib-1.6.8.tar.gz", hash = "sha256:41ae180a17cf672bc784e4a518e5c82687f1fe1e98b0cafaeda80c8e4ab2d1cb", size = 165074, upload-time = "2026-02-14T04:02:17.941Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/f8/aa/5082412d1ee302e9e7d80b6949bc4d2a8fa1149aaab610c5fc24709605d6/authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a", size = 243608, upload-time = "2025-10-02T13:36:07.637Z" }, { url = "https://files.pythonhosted.org/packages/9b/73/f7084bf12755113cd535ae586782ff3a6e710bfbe6a0d13d1c2f81ffbbfa/authlib-1.6.8-py2.py3-none-any.whl", hash = "sha256:97286fd7a15e6cfefc32771c8ef9c54f0ed58028f1322de6a2a7c969c3817888", size = 244116, upload-time = "2026-02-14T04:02:15.579Z" },
] ]
[[package]] [[package]]
@@ -160,6 +160,33 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/76/b9/d51d34e6cd6d887adddb28a8680a1d34235cc45b9d6e238ce39b98199ca0/bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c", size = 153078, upload-time = "2024-11-19T20:08:01.436Z" }, { url = "https://files.pythonhosted.org/packages/76/b9/d51d34e6cd6d887adddb28a8680a1d34235cc45b9d6e238ce39b98199ca0/bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c", size = 153078, upload-time = "2024-11-19T20:08:01.436Z" },
] ]
[[package]]
name = "boolean-py"
version = "5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c4/cf/85379f13b76f3a69bca86b60237978af17d6aa0bc5998978c3b8cf05abb2/boolean_py-5.0.tar.gz", hash = "sha256:60cbc4bad079753721d32649545505362c754e121570ada4658b852a3a318d95", size = 37047, upload-time = "2025-04-03T10:39:49.734Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/ca/78d423b324b8d77900030fa59c4aa9054261ef0925631cd2501dd015b7b7/boolean_py-5.0-py3-none-any.whl", hash = "sha256:ef28a70bd43115208441b53a045d1549e2f0ec6e3d08a9d142cbc41c1938e8d9", size = 26577, upload-time = "2025-04-03T10:39:48.449Z" },
]
[[package]]
name = "cachecontrol"
version = "0.14.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "msgpack" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2d/f6/c972b32d80760fb79d6b9eeb0b3010a46b89c0b23cf6329417ff7886cd22/cachecontrol-0.14.4.tar.gz", hash = "sha256:e6220afafa4c22a47dd0badb319f84475d79108100d04e26e8542ef7d3ab05a1", size = 16150, upload-time = "2025-11-14T04:32:13.138Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ef/79/c45f2d53efe6ada1110cf6f9fca095e4ff47a0454444aefdde6ac4789179/cachecontrol-0.14.4-py3-none-any.whl", hash = "sha256:b7ac014ff72ee199b5f8af1de29d60239954f223e948196fa3d84adaffc71d2b", size = 22247, upload-time = "2025-11-14T04:32:11.733Z" },
]
[package.optional-dependencies]
filecache = [
{ name = "filelock" },
]
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2025.10.5" version = "2025.10.5"
@@ -226,6 +253,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
] ]
[[package]]
name = "cfgv"
version = "3.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" },
]
[[package]] [[package]]
name = "charset-normalizer" name = "charset-normalizer"
version = "3.4.4" version = "3.4.4"
@@ -380,37 +416,80 @@ wheels = [
[[package]] [[package]]
name = "cryptography" name = "cryptography"
version = "44.0.1" version = "46.0.5"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/c7/67/545c79fe50f7af51dbad56d16b23fe33f63ee6a5d956b3cb68ea110cbe64/cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14", size = 710819, upload-time = "2025-02-11T15:50:58.39Z" } sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/72/27/5e3524053b4c8889da65cf7814a9d0d8514a05194a25e1e34f46852ee6eb/cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009", size = 6642022, upload-time = "2025-02-11T15:49:32.752Z" }, { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
{ url = "https://files.pythonhosted.org/packages/34/b9/4d1fa8d73ae6ec350012f89c3abfbff19fc95fe5420cf972e12a8d182986/cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f", size = 3943865, upload-time = "2025-02-11T15:49:36.659Z" }, { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
{ url = "https://files.pythonhosted.org/packages/6e/57/371a9f3f3a4500807b5fcd29fec77f418ba27ffc629d88597d0d1049696e/cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2", size = 4162562, upload-time = "2025-02-11T15:49:39.541Z" }, { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
{ url = "https://files.pythonhosted.org/packages/c5/1d/5b77815e7d9cf1e3166988647f336f87d5634a5ccecec2ffbe08ef8dd481/cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911", size = 3951923, upload-time = "2025-02-11T15:49:42.461Z" }, { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
{ url = "https://files.pythonhosted.org/packages/28/01/604508cd34a4024467cd4105887cf27da128cba3edd435b54e2395064bfb/cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69", size = 3685194, upload-time = "2025-02-11T15:49:45.226Z" }, { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
{ url = "https://files.pythonhosted.org/packages/c6/3d/d3c55d4f1d24580a236a6753902ef6d8aafd04da942a1ee9efb9dc8fd0cb/cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026", size = 4187790, upload-time = "2025-02-11T15:49:48.215Z" }, { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
{ url = "https://files.pythonhosted.org/packages/ea/a6/44d63950c8588bfa8594fd234d3d46e93c3841b8e84a066649c566afb972/cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd", size = 3951343, upload-time = "2025-02-11T15:49:50.313Z" }, { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
{ url = "https://files.pythonhosted.org/packages/c1/17/f5282661b57301204cbf188254c1a0267dbd8b18f76337f0a7ce1038888c/cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0", size = 4187127, upload-time = "2025-02-11T15:49:52.051Z" }, { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
{ url = "https://files.pythonhosted.org/packages/f3/68/abbae29ed4f9d96596687f3ceea8e233f65c9645fbbec68adb7c756bb85a/cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf", size = 4070666, upload-time = "2025-02-11T15:49:56.56Z" }, { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
{ url = "https://files.pythonhosted.org/packages/0f/10/cf91691064a9e0a88ae27e31779200b1505d3aee877dbe1e4e0d73b4f155/cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864", size = 4288811, upload-time = "2025-02-11T15:49:59.248Z" }, { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
{ url = "https://files.pythonhosted.org/packages/38/78/74ea9eb547d13c34e984e07ec8a473eb55b19c1451fe7fc8077c6a4b0548/cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a", size = 2771882, upload-time = "2025-02-11T15:50:01.478Z" }, { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
{ url = "https://files.pythonhosted.org/packages/cf/6c/3907271ee485679e15c9f5e93eac6aa318f859b0aed8d369afd636fafa87/cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00", size = 3206989, upload-time = "2025-02-11T15:50:03.312Z" }, { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
{ url = "https://files.pythonhosted.org/packages/9f/f1/676e69c56a9be9fd1bffa9bc3492366901f6e1f8f4079428b05f1414e65c/cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008", size = 6643714, upload-time = "2025-02-11T15:50:05.555Z" }, { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
{ url = "https://files.pythonhosted.org/packages/ba/9f/1775600eb69e72d8f9931a104120f2667107a0ee478f6ad4fe4001559345/cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862", size = 3943269, upload-time = "2025-02-11T15:50:08.54Z" }, { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
{ url = "https://files.pythonhosted.org/packages/25/ba/e00d5ad6b58183829615be7f11f55a7b6baa5a06910faabdc9961527ba44/cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3", size = 4166461, upload-time = "2025-02-11T15:50:11.419Z" }, { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" },
{ url = "https://files.pythonhosted.org/packages/b3/45/690a02c748d719a95ab08b6e4decb9d81e0ec1bac510358f61624c86e8a3/cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7", size = 3950314, upload-time = "2025-02-11T15:50:14.181Z" }, { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" },
{ url = "https://files.pythonhosted.org/packages/e6/50/bf8d090911347f9b75adc20f6f6569ed6ca9b9bff552e6e390f53c2a1233/cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a", size = 3686675, upload-time = "2025-02-11T15:50:16.3Z" }, { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" },
{ url = "https://files.pythonhosted.org/packages/e1/e7/cfb18011821cc5f9b21efb3f94f3241e3a658d267a3bf3a0f45543858ed8/cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c", size = 4190429, upload-time = "2025-02-11T15:50:19.302Z" }, { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" },
{ url = "https://files.pythonhosted.org/packages/07/ef/77c74d94a8bfc1a8a47b3cafe54af3db537f081742ee7a8a9bd982b62774/cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62", size = 3950039, upload-time = "2025-02-11T15:50:22.257Z" }, { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" },
{ url = "https://files.pythonhosted.org/packages/6d/b9/8be0ff57c4592382b77406269b1e15650c9f1a167f9e34941b8515b97159/cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41", size = 4189713, upload-time = "2025-02-11T15:50:24.261Z" }, { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" },
{ url = "https://files.pythonhosted.org/packages/78/e1/4b6ac5f4100545513b0847a4d276fe3c7ce0eacfa73e3b5ebd31776816ee/cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b", size = 4071193, upload-time = "2025-02-11T15:50:26.18Z" }, { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" },
{ url = "https://files.pythonhosted.org/packages/3d/cb/afff48ceaed15531eab70445abe500f07f8f96af2bb35d98af6bfa89ebd4/cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7", size = 4289566, upload-time = "2025-02-11T15:50:28.221Z" }, { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" },
{ url = "https://files.pythonhosted.org/packages/30/6f/4eca9e2e0f13ae459acd1ca7d9f0257ab86e68f44304847610afcb813dc9/cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9", size = 2772371, upload-time = "2025-02-11T15:50:29.997Z" }, { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" },
{ url = "https://files.pythonhosted.org/packages/d2/05/5533d30f53f10239616a357f080892026db2d550a40c393d0a8a7af834a9/cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f", size = 3207303, upload-time = "2025-02-11T15:50:32.258Z" }, { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" },
{ url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" },
{ url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" },
{ url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" },
{ url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" },
{ url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
{ url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
{ url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
{ url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
{ url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
{ url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
{ url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
{ url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
{ url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
{ url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
{ url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
{ url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
{ url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
]
[[package]]
name = "cyclonedx-python-lib"
version = "11.6.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "license-expression" },
{ name = "packageurl-python" },
{ name = "py-serializable" },
{ name = "sortedcontainers" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/89/ed/54ecfa25fc145c58bf4f98090f7b6ffe5188d0759248c57dde44427ea239/cyclonedx_python_lib-11.6.0.tar.gz", hash = "sha256:7fb85a4371fa3a203e5be577ac22b7e9a7157f8b0058b7448731474d6dea7bf0", size = 1408147, upload-time = "2025-12-02T12:28:46.446Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/1b/534ad8a5e0f9470522811a8e5a9bc5d328fb7738ba29faf357467a4ef6d0/cyclonedx_python_lib-11.6.0-py3-none-any.whl", hash = "sha256:94f4aae97db42a452134dafdddcfab9745324198201c4777ed131e64c8380759", size = 511157, upload-time = "2025-12-02T12:28:44.158Z" },
]
[[package]]
name = "defusedxml"
version = "0.7.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" },
] ]
[[package]] [[package]]
@@ -425,6 +504,28 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f", size = 11298, upload-time = "2025-10-30T08:19:00.758Z" }, { url = "https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f", size = 11298, upload-time = "2025-10-30T08:19:00.758Z" },
] ]
[[package]]
name = "detect-secrets"
version = "1.5.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyyaml" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/69/67/382a863fff94eae5a0cf05542179169a1c49a4c8784a9480621e2066ca7d/detect_secrets-1.5.0.tar.gz", hash = "sha256:6bb46dcc553c10df51475641bb30fd69d25645cc12339e46c824c1e0c388898a", size = 97351, upload-time = "2024-05-06T17:46:19.721Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4e/5e/4f5fe4b89fde1dc3ed0eb51bd4ce4c0bca406246673d370ea2ad0c58d747/detect_secrets-1.5.0-py3-none-any.whl", hash = "sha256:e24e7b9b5a35048c313e983f76c4bd09dad89f045ff059e354f9943bf45aa060", size = 120341, upload-time = "2024-05-06T17:46:16.628Z" },
]
[[package]]
name = "distlib"
version = "0.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
]
[[package]] [[package]]
name = "dnspython" name = "dnspython"
version = "2.8.0" version = "2.8.0"
@@ -448,18 +549,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" },
] ]
[[package]]
name = "ecdsa"
version = "0.19.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" },
]
[[package]] [[package]]
name = "email-validator" name = "email-validator"
version = "2.3.0" version = "2.3.0"
@@ -498,13 +587,12 @@ dependencies = [
{ name = "fastapi" }, { name = "fastapi" },
{ name = "fastapi-utils" }, { name = "fastapi-utils" },
{ name = "httpx" }, { name = "httpx" },
{ name = "passlib" },
{ name = "pillow" }, { name = "pillow" },
{ name = "psycopg2-binary" }, { name = "psycopg2-binary" },
{ name = "pydantic" }, { name = "pydantic" },
{ name = "pydantic-settings" }, { name = "pydantic-settings" },
{ name = "pyjwt" },
{ name = "python-dotenv" }, { name = "python-dotenv" },
{ name = "python-jose" },
{ name = "python-multipart" }, { name = "python-multipart" },
{ name = "pytz" }, { name = "pytz" },
{ name = "slowapi" }, { name = "slowapi" },
@@ -513,15 +601,21 @@ dependencies = [
{ name = "starlette-csrf" }, { name = "starlette-csrf" },
{ name = "tenacity" }, { name = "tenacity" },
{ name = "ujson" }, { name = "ujson" },
{ name = "urllib3" },
{ name = "uvicorn" }, { name = "uvicorn" },
] ]
[package.optional-dependencies] [package.optional-dependencies]
dev = [ dev = [
{ name = "detect-secrets" },
{ name = "freezegun" }, { name = "freezegun" },
{ name = "pip-audit" },
{ name = "pip-licenses" },
{ name = "pre-commit" },
{ name = "pyright" }, { name = "pyright" },
{ name = "pytest" }, { name = "pytest" },
{ name = "pytest-asyncio" }, { name = "pytest-asyncio" },
{ name = "pytest-benchmark" },
{ name = "pytest-cov" }, { name = "pytest-cov" },
{ name = "pytest-xdist" }, { name = "pytest-xdist" },
{ name = "requests" }, { name = "requests" },
@@ -538,27 +632,31 @@ requires-dist = [
{ name = "alembic", specifier = ">=1.14.1" }, { name = "alembic", specifier = ">=1.14.1" },
{ name = "apscheduler", specifier = "==3.11.0" }, { name = "apscheduler", specifier = "==3.11.0" },
{ name = "asyncpg", specifier = ">=0.29.0" }, { name = "asyncpg", specifier = ">=0.29.0" },
{ name = "authlib", specifier = ">=1.3.0" }, { name = "authlib", specifier = ">=1.6.6" },
{ name = "bcrypt", specifier = "==4.2.1" }, { name = "bcrypt", specifier = "==4.2.1" },
{ name = "cryptography", specifier = "==44.0.1" }, { name = "cryptography", specifier = ">=46.0.5" },
{ name = "detect-secrets", marker = "extra == 'dev'", specifier = ">=1.5.0" },
{ name = "email-validator", specifier = ">=2.1.0.post1" }, { name = "email-validator", specifier = ">=2.1.0.post1" },
{ name = "fastapi", specifier = ">=0.115.8" }, { name = "fastapi", specifier = ">=0.115.8" },
{ name = "fastapi-utils", specifier = "==0.8.0" }, { name = "fastapi-utils", specifier = "==0.8.0" },
{ name = "freezegun", marker = "extra == 'dev'", specifier = "~=1.5.1" }, { name = "freezegun", marker = "extra == 'dev'", specifier = "~=1.5.1" },
{ name = "httpx", specifier = ">=0.27.0" }, { name = "httpx", specifier = ">=0.27.0" },
{ name = "passlib", specifier = "==1.7.4" }, { name = "pillow", specifier = ">=12.1.1" },
{ name = "pillow", specifier = ">=10.3.0" }, { name = "pip-audit", marker = "extra == 'dev'", specifier = ">=2.7.0" },
{ name = "pip-licenses", marker = "extra == 'dev'", specifier = ">=4.0.0" },
{ name = "pre-commit", marker = "extra == 'dev'", specifier = ">=4.0.0" },
{ name = "psycopg2-binary", specifier = ">=2.9.9" }, { name = "psycopg2-binary", specifier = ">=2.9.9" },
{ name = "pydantic", specifier = ">=2.10.6" }, { name = "pydantic", specifier = ">=2.10.6" },
{ name = "pydantic-settings", specifier = ">=2.2.1" }, { name = "pydantic-settings", specifier = ">=2.2.1" },
{ name = "pyjwt", specifier = ">=2.9.0" },
{ name = "pyright", marker = "extra == 'dev'", specifier = ">=1.1.390" }, { name = "pyright", marker = "extra == 'dev'", specifier = ">=1.1.390" },
{ name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" },
{ name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.5" }, { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.5" },
{ name = "pytest-benchmark", marker = "extra == 'dev'", specifier = ">=4.0.0" },
{ name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.1.0" }, { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.1.0" },
{ name = "pytest-xdist", marker = "extra == 'dev'", specifier = ">=3.8.0" }, { name = "pytest-xdist", marker = "extra == 'dev'", specifier = ">=3.8.0" },
{ name = "python-dotenv", specifier = ">=1.0.1" }, { name = "python-dotenv", specifier = ">=1.0.1" },
{ name = "python-jose", specifier = "==3.4.0" }, { name = "python-multipart", specifier = ">=0.0.22" },
{ name = "python-multipart", specifier = ">=0.0.19" },
{ name = "pytz", specifier = ">=2024.1" }, { name = "pytz", specifier = ">=2024.1" },
{ name = "requests", marker = "extra == 'dev'", specifier = ">=2.32.0" }, { name = "requests", marker = "extra == 'dev'", specifier = ">=2.32.0" },
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.8.0" }, { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.8.0" },
@@ -570,6 +668,7 @@ requires-dist = [
{ name = "tenacity", specifier = ">=8.2.3" }, { name = "tenacity", specifier = ">=8.2.3" },
{ name = "testcontainers", extras = ["postgres"], marker = "extra == 'e2e'", specifier = ">=4.0.0" }, { name = "testcontainers", extras = ["postgres"], marker = "extra == 'e2e'", specifier = ">=4.0.0" },
{ name = "ujson", specifier = ">=5.9.0" }, { name = "ujson", specifier = ">=5.9.0" },
{ name = "urllib3", specifier = ">=2.6.3" },
{ name = "uvicorn", specifier = ">=0.34.0" }, { name = "uvicorn", specifier = ">=0.34.0" },
] ]
provides-extras = ["dev", "e2e"] provides-extras = ["dev", "e2e"]
@@ -603,6 +702,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/43/8b/cef8cfed7ed77d52fc772b1c7b966ba019a3f50b65a2b3625a0f3b7f6f53/fastapi_utils-0.8.0-py3-none-any.whl", hash = "sha256:6c4d507a76bab9a016cee0c4fa3a4638c636b2b2689e39c62254b1b2e4e81825", size = 18495, upload-time = "2024-11-11T08:30:01.914Z" }, { url = "https://files.pythonhosted.org/packages/43/8b/cef8cfed7ed77d52fc772b1c7b966ba019a3f50b65a2b3625a0f3b7f6f53/fastapi_utils-0.8.0-py3-none-any.whl", hash = "sha256:6c4d507a76bab9a016cee0c4fa3a4638c636b2b2689e39c62254b1b2e4e81825", size = 18495, upload-time = "2024-11-11T08:30:01.914Z" },
] ]
[[package]]
name = "filelock"
version = "3.24.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" },
]
[[package]] [[package]]
name = "fqdn" name = "fqdn"
version = "1.5.1" version = "1.5.1"
@@ -756,6 +864,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/17/44/635a8d2add845c9a2d99a93a379df77f7e70829f0a1d7d5a6998b61f9d01/hypothesis_jsonschema-0.23.1-py3-none-any.whl", hash = "sha256:a4d74d9516dd2784fbbae82e009f62486c9104ac6f4e3397091d98a1d5ee94a2", size = 29200, upload-time = "2024-02-28T20:33:48.744Z" }, { url = "https://files.pythonhosted.org/packages/17/44/635a8d2add845c9a2d99a93a379df77f7e70829f0a1d7d5a6998b61f9d01/hypothesis_jsonschema-0.23.1-py3-none-any.whl", hash = "sha256:a4d74d9516dd2784fbbae82e009f62486c9104ac6f4e3397091d98a1d5ee94a2", size = 29200, upload-time = "2024-02-28T20:33:48.744Z" },
] ]
[[package]]
name = "identify"
version = "2.6.16"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" },
]
[[package]] [[package]]
name = "idna" name = "idna"
version = "3.11" version = "3.11"
@@ -855,6 +972,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2a/93/2d896b5fd3d79b4cadd8882c06650e66d003f465c9d12c488d92853dff78/junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732", size = 7130, upload-time = "2020-02-22T20:41:37.661Z" }, { url = "https://files.pythonhosted.org/packages/2a/93/2d896b5fd3d79b4cadd8882c06650e66d003f465c9d12c488d92853dff78/junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732", size = 7130, upload-time = "2020-02-22T20:41:37.661Z" },
] ]
[[package]]
name = "license-expression"
version = "30.4.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "boolean-py" },
]
sdist = { url = "https://files.pythonhosted.org/packages/40/71/d89bb0e71b1415453980fd32315f2a037aad9f7f70f695c7cec7035feb13/license_expression-30.4.4.tar.gz", hash = "sha256:73448f0aacd8d0808895bdc4b2c8e01a8d67646e4188f887375398c761f340fd", size = 186402, upload-time = "2025-07-22T11:13:32.17Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/af/40/791891d4c0c4dab4c5e187c17261cedc26285fd41541577f900470a45a4d/license_expression-30.4.4-py3-none-any.whl", hash = "sha256:421788fdcadb41f049d2dc934ce666626265aeccefddd25e162a26f23bcbf8a4", size = 120615, upload-time = "2025-07-22T11:13:31.217Z" },
]
[[package]] [[package]]
name = "limits" name = "limits"
version = "5.6.0" version = "5.6.0"
@@ -965,6 +1094,50 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
] ]
[[package]]
name = "msgpack"
version = "1.1.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" },
{ url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" },
{ url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" },
{ url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" },
{ url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" },
{ url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" },
{ url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" },
{ url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" },
{ url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" },
{ url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" },
{ url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" },
{ url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" },
{ url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" },
{ url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" },
{ url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" },
{ url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" },
{ url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" },
{ url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" },
{ url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" },
{ url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" },
{ url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" },
{ url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" },
{ url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" },
{ url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" },
{ url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" },
{ url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" },
{ url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" },
{ url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" },
{ url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" },
{ url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" },
{ url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" },
{ url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" },
{ url = "https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" },
{ url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" },
{ url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" },
{ url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" },
]
[[package]] [[package]]
name = "nodeenv" name = "nodeenv"
version = "1.10.0" version = "1.10.0"
@@ -974,6 +1147,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" },
] ]
[[package]]
name = "packageurl-python"
version = "0.17.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f5/d6/3b5a4e3cfaef7a53869a26ceb034d1ff5e5c27c814ce77260a96d50ab7bb/packageurl_python-0.17.6.tar.gz", hash = "sha256:1252ce3a102372ca6f86eb968e16f9014c4ba511c5c37d95a7f023e2ca6e5c25", size = 50618, upload-time = "2025-11-24T15:20:17.998Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b1/2f/c7277b7615a93f51b5fbc1eacfc1b75e8103370e786fd8ce2abf6e5c04ab/packageurl_python-0.17.6-py3-none-any.whl", hash = "sha256:31a85c2717bc41dd818f3c62908685ff9eebcb68588213745b14a6ee9e7df7c9", size = 36776, upload-time = "2025-11-24T15:20:16.962Z" },
]
[[package]] [[package]]
name = "packaging" name = "packaging"
version = "25.0" version = "25.0"
@@ -984,81 +1166,148 @@ wheels = [
] ]
[[package]] [[package]]
name = "passlib" name = "pillow"
version = "1.7.4" version = "12.1.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b6/06/9da9ee59a67fae7761aab3ccc84fa4f3f33f125b370f1ccdb915bf967c11/passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04", size = 689844, upload-time = "2020-10-08T19:00:52.121Z" } sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/3b/a4/ab6b7589382ca3df236e03faa71deac88cae040af60c071a78d254a62172/passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1", size = 525554, upload-time = "2020-10-08T19:00:49.856Z" }, { url = "https://files.pythonhosted.org/packages/07/d3/8df65da0d4df36b094351dce696f2989bec731d4f10e743b1c5f4da4d3bf/pillow-12.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052", size = 5262803, upload-time = "2026-02-11T04:20:47.653Z" },
{ url = "https://files.pythonhosted.org/packages/d6/71/5026395b290ff404b836e636f51d7297e6c83beceaa87c592718747e670f/pillow-12.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984", size = 4657601, upload-time = "2026-02-11T04:20:49.328Z" },
{ url = "https://files.pythonhosted.org/packages/b1/2e/1001613d941c67442f745aff0f7cc66dd8df9a9c084eb497e6a543ee6f7e/pillow-12.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79", size = 6234995, upload-time = "2026-02-11T04:20:51.032Z" },
{ url = "https://files.pythonhosted.org/packages/07/26/246ab11455b2549b9233dbd44d358d033a2f780fa9007b61a913c5b2d24e/pillow-12.1.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aee2810642b2898bb187ced9b349e95d2a7272930796e022efaf12e99dccd293", size = 8045012, upload-time = "2026-02-11T04:20:52.882Z" },
{ url = "https://files.pythonhosted.org/packages/b2/8b/07587069c27be7535ac1fe33874e32de118fbd34e2a73b7f83436a88368c/pillow-12.1.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a0b1cd6232e2b618adcc54d9882e4e662a089d5768cd188f7c245b4c8c44a397", size = 6349638, upload-time = "2026-02-11T04:20:54.444Z" },
{ url = "https://files.pythonhosted.org/packages/ff/79/6df7b2ee763d619cda2fb4fea498e5f79d984dae304d45a8999b80d6cf5c/pillow-12.1.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7aac39bcf8d4770d089588a2e1dd111cbaa42df5a94be3114222057d68336bd0", size = 7041540, upload-time = "2026-02-11T04:20:55.97Z" },
{ url = "https://files.pythonhosted.org/packages/2c/5e/2ba19e7e7236d7529f4d873bdaf317a318896bac289abebd4bb00ef247f0/pillow-12.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ab174cd7d29a62dd139c44bf74b698039328f45cb03b4596c43473a46656b2f3", size = 6462613, upload-time = "2026-02-11T04:20:57.542Z" },
{ url = "https://files.pythonhosted.org/packages/03/03/31216ec124bb5c3dacd74ce8efff4cc7f52643653bad4825f8f08c697743/pillow-12.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:339ffdcb7cbeaa08221cd401d517d4b1fe7a9ed5d400e4a8039719238620ca35", size = 7166745, upload-time = "2026-02-11T04:20:59.196Z" },
{ url = "https://files.pythonhosted.org/packages/1f/e7/7c4552d80052337eb28653b617eafdef39adfb137c49dd7e831b8dc13bc5/pillow-12.1.1-cp312-cp312-win32.whl", hash = "sha256:5d1f9575a12bed9e9eedd9a4972834b08c97a352bd17955ccdebfeca5913fa0a", size = 6328823, upload-time = "2026-02-11T04:21:01.385Z" },
{ url = "https://files.pythonhosted.org/packages/3d/17/688626d192d7261bbbf98846fc98995726bddc2c945344b65bec3a29d731/pillow-12.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:21329ec8c96c6e979cd0dfd29406c40c1d52521a90544463057d2aaa937d66a6", size = 7033367, upload-time = "2026-02-11T04:21:03.536Z" },
{ url = "https://files.pythonhosted.org/packages/ed/fe/a0ef1f73f939b0eca03ee2c108d0043a87468664770612602c63266a43c4/pillow-12.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:af9a332e572978f0218686636610555ae3defd1633597be015ed50289a03c523", size = 2453811, upload-time = "2026-02-11T04:21:05.116Z" },
{ url = "https://files.pythonhosted.org/packages/d5/11/6db24d4bd7685583caeae54b7009584e38da3c3d4488ed4cd25b439de486/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d242e8ac078781f1de88bf823d70c1a9b3c7950a44cdf4b7c012e22ccbcd8e4e", size = 4062689, upload-time = "2026-02-11T04:21:06.804Z" },
{ url = "https://files.pythonhosted.org/packages/33/c0/ce6d3b1fe190f0021203e0d9b5b99e57843e345f15f9ef22fcd43842fd21/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:02f84dfad02693676692746df05b89cf25597560db2857363a208e393429f5e9", size = 4138535, upload-time = "2026-02-11T04:21:08.452Z" },
{ url = "https://files.pythonhosted.org/packages/a0/c6/d5eb6a4fb32a3f9c21a8c7613ec706534ea1cf9f4b3663e99f0d83f6fca8/pillow-12.1.1-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e65498daf4b583091ccbb2556c7000abf0f3349fcd57ef7adc9a84a394ed29f6", size = 3601364, upload-time = "2026-02-11T04:21:10.194Z" },
{ url = "https://files.pythonhosted.org/packages/14/a1/16c4b823838ba4c9c52c0e6bbda903a3fe5a1bdbf1b8eb4fff7156f3e318/pillow-12.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c6db3b84c87d48d0088943bf33440e0c42370b99b1c2a7989216f7b42eede60", size = 5262561, upload-time = "2026-02-11T04:21:11.742Z" },
{ url = "https://files.pythonhosted.org/packages/bb/ad/ad9dc98ff24f485008aa5cdedaf1a219876f6f6c42a4626c08bc4e80b120/pillow-12.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b7e5304e34942bf62e15184219a7b5ad4ff7f3bb5cca4d984f37df1a0e1aee2", size = 4657460, upload-time = "2026-02-11T04:21:13.786Z" },
{ url = "https://files.pythonhosted.org/packages/9e/1b/f1a4ea9a895b5732152789326202a82464d5254759fbacae4deea3069334/pillow-12.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5bddd742a44b7e6b1e773ab5db102bd7a94c32555ba656e76d319d19c3850", size = 6232698, upload-time = "2026-02-11T04:21:15.949Z" },
{ url = "https://files.pythonhosted.org/packages/95/f4/86f51b8745070daf21fd2e5b1fe0eb35d4db9ca26e6d58366562fb56a743/pillow-12.1.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc44ef1f3de4f45b50ccf9136999d71abb99dca7706bc75d222ed350b9fd2289", size = 8041706, upload-time = "2026-02-11T04:21:17.723Z" },
{ url = "https://files.pythonhosted.org/packages/29/9b/d6ecd956bb1266dd1045e995cce9b8d77759e740953a1c9aad9502a0461e/pillow-12.1.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a8eb7ed8d4198bccbd07058416eeec51686b498e784eda166395a23eb99138e", size = 6346621, upload-time = "2026-02-11T04:21:19.547Z" },
{ url = "https://files.pythonhosted.org/packages/71/24/538bff45bde96535d7d998c6fed1a751c75ac7c53c37c90dc2601b243893/pillow-12.1.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47b94983da0c642de92ced1702c5b6c292a84bd3a8e1d1702ff923f183594717", size = 7038069, upload-time = "2026-02-11T04:21:21.378Z" },
{ url = "https://files.pythonhosted.org/packages/94/0e/58cb1a6bc48f746bc4cb3adb8cabff73e2742c92b3bf7a220b7cf69b9177/pillow-12.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:518a48c2aab7ce596d3bf79d0e275661b846e86e4d0e7dec34712c30fe07f02a", size = 6460040, upload-time = "2026-02-11T04:21:23.148Z" },
{ url = "https://files.pythonhosted.org/packages/6c/57/9045cb3ff11eeb6c1adce3b2d60d7d299d7b273a2e6c8381a524abfdc474/pillow-12.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a550ae29b95c6dc13cf69e2c9dc5747f814c54eeb2e32d683e5e93af56caa029", size = 7164523, upload-time = "2026-02-11T04:21:25.01Z" },
{ url = "https://files.pythonhosted.org/packages/73/f2/9be9cb99f2175f0d4dbadd6616ce1bf068ee54a28277ea1bf1fbf729c250/pillow-12.1.1-cp313-cp313-win32.whl", hash = "sha256:a003d7422449f6d1e3a34e3dd4110c22148336918ddbfc6a32581cd54b2e0b2b", size = 6332552, upload-time = "2026-02-11T04:21:27.238Z" },
{ url = "https://files.pythonhosted.org/packages/3f/eb/b0834ad8b583d7d9d42b80becff092082a1c3c156bb582590fcc973f1c7c/pillow-12.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:344cf1e3dab3be4b1fa08e449323d98a2a3f819ad20f4b22e77a0ede31f0faa1", size = 7040108, upload-time = "2026-02-11T04:21:29.462Z" },
{ url = "https://files.pythonhosted.org/packages/d5/7d/fc09634e2aabdd0feabaff4a32f4a7d97789223e7c2042fd805ea4b4d2c2/pillow-12.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:5c0dd1636633e7e6a0afe7bf6a51a14992b7f8e60de5789018ebbdfae55b040a", size = 2453712, upload-time = "2026-02-11T04:21:31.072Z" },
{ url = "https://files.pythonhosted.org/packages/19/2a/b9d62794fc8a0dd14c1943df68347badbd5511103e0d04c035ffe5cf2255/pillow-12.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0330d233c1a0ead844fc097a7d16c0abff4c12e856c0b325f231820fee1f39da", size = 5264880, upload-time = "2026-02-11T04:21:32.865Z" },
{ url = "https://files.pythonhosted.org/packages/26/9d/e03d857d1347fa5ed9247e123fcd2a97b6220e15e9cb73ca0a8d91702c6e/pillow-12.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dae5f21afb91322f2ff791895ddd8889e5e947ff59f71b46041c8ce6db790bc", size = 4660616, upload-time = "2026-02-11T04:21:34.97Z" },
{ url = "https://files.pythonhosted.org/packages/f7/ec/8a6d22afd02570d30954e043f09c32772bfe143ba9285e2fdb11284952cd/pillow-12.1.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e0c664be47252947d870ac0d327fea7e63985a08794758aa8af5b6cb6ec0c9c", size = 6269008, upload-time = "2026-02-11T04:21:36.623Z" },
{ url = "https://files.pythonhosted.org/packages/3d/1d/6d875422c9f28a4a361f495a5f68d9de4a66941dc2c619103ca335fa6446/pillow-12.1.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:691ab2ac363b8217f7d31b3497108fb1f50faab2f75dfb03284ec2f217e87bf8", size = 8073226, upload-time = "2026-02-11T04:21:38.585Z" },
{ url = "https://files.pythonhosted.org/packages/a1/cd/134b0b6ee5eda6dc09e25e24b40fdafe11a520bc725c1d0bbaa5e00bf95b/pillow-12.1.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9e8064fb1cc019296958595f6db671fba95209e3ceb0c4734c9baf97de04b20", size = 6380136, upload-time = "2026-02-11T04:21:40.562Z" },
{ url = "https://files.pythonhosted.org/packages/7a/a9/7628f013f18f001c1b98d8fffe3452f306a70dc6aba7d931019e0492f45e/pillow-12.1.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:472a8d7ded663e6162dafdf20015c486a7009483ca671cece7a9279b512fcb13", size = 7067129, upload-time = "2026-02-11T04:21:42.521Z" },
{ url = "https://files.pythonhosted.org/packages/1e/f8/66ab30a2193b277785601e82ee2d49f68ea575d9637e5e234faaa98efa4c/pillow-12.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:89b54027a766529136a06cfebeecb3a04900397a3590fd252160b888479517bf", size = 6491807, upload-time = "2026-02-11T04:21:44.22Z" },
{ url = "https://files.pythonhosted.org/packages/da/0b/a877a6627dc8318fdb84e357c5e1a758c0941ab1ddffdafd231983788579/pillow-12.1.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:86172b0831b82ce4f7877f280055892b31179e1576aa00d0df3bb1bbf8c3e524", size = 7190954, upload-time = "2026-02-11T04:21:46.114Z" },
{ url = "https://files.pythonhosted.org/packages/83/43/6f732ff85743cf746b1361b91665d9f5155e1483817f693f8d57ea93147f/pillow-12.1.1-cp313-cp313t-win32.whl", hash = "sha256:44ce27545b6efcf0fdbdceb31c9a5bdea9333e664cda58a7e674bb74608b3986", size = 6336441, upload-time = "2026-02-11T04:21:48.22Z" },
{ url = "https://files.pythonhosted.org/packages/3b/44/e865ef3986611bb75bfabdf94a590016ea327833f434558801122979cd0e/pillow-12.1.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a285e3eb7a5a45a2ff504e31f4a8d1b12ef62e84e5411c6804a42197c1cf586c", size = 7045383, upload-time = "2026-02-11T04:21:50.015Z" },
{ url = "https://files.pythonhosted.org/packages/a8/c6/f4fb24268d0c6908b9f04143697ea18b0379490cb74ba9e8d41b898bd005/pillow-12.1.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cc7d296b5ea4d29e6570dabeaed58d31c3fea35a633a69679fb03d7664f43fb3", size = 2456104, upload-time = "2026-02-11T04:21:51.633Z" },
{ url = "https://files.pythonhosted.org/packages/03/d0/bebb3ffbf31c5a8e97241476c4cf8b9828954693ce6744b4a2326af3e16b/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af", size = 4062652, upload-time = "2026-02-11T04:21:53.19Z" },
{ url = "https://files.pythonhosted.org/packages/2d/c0/0e16fb0addda4851445c28f8350d8c512f09de27bbb0d6d0bbf8b6709605/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f", size = 4138823, upload-time = "2026-02-11T04:22:03.088Z" },
{ url = "https://files.pythonhosted.org/packages/6b/fb/6170ec655d6f6bb6630a013dd7cf7bc218423d7b5fa9071bf63dc32175ae/pillow-12.1.1-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642", size = 3601143, upload-time = "2026-02-11T04:22:04.909Z" },
{ url = "https://files.pythonhosted.org/packages/59/04/dc5c3f297510ba9a6837cbb318b87dd2b8f73eb41a43cc63767f65cb599c/pillow-12.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd", size = 5266254, upload-time = "2026-02-11T04:22:07.656Z" },
{ url = "https://files.pythonhosted.org/packages/05/30/5db1236b0d6313f03ebf97f5e17cda9ca060f524b2fcc875149a8360b21c/pillow-12.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202", size = 4657499, upload-time = "2026-02-11T04:22:09.613Z" },
{ url = "https://files.pythonhosted.org/packages/6f/18/008d2ca0eb612e81968e8be0bbae5051efba24d52debf930126d7eaacbba/pillow-12.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f", size = 6232137, upload-time = "2026-02-11T04:22:11.434Z" },
{ url = "https://files.pythonhosted.org/packages/70/f1/f14d5b8eeb4b2cd62b9f9f847eb6605f103df89ef619ac68f92f748614ea/pillow-12.1.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f", size = 8042721, upload-time = "2026-02-11T04:22:13.321Z" },
{ url = "https://files.pythonhosted.org/packages/5a/d6/17824509146e4babbdabf04d8171491fa9d776f7061ff6e727522df9bd03/pillow-12.1.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f", size = 6347798, upload-time = "2026-02-11T04:22:15.449Z" },
{ url = "https://files.pythonhosted.org/packages/d1/ee/c85a38a9ab92037a75615aba572c85ea51e605265036e00c5b67dfafbfe2/pillow-12.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e", size = 7039315, upload-time = "2026-02-11T04:22:17.24Z" },
{ url = "https://files.pythonhosted.org/packages/ec/f3/bc8ccc6e08a148290d7523bde4d9a0d6c981db34631390dc6e6ec34cacf6/pillow-12.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0", size = 6462360, upload-time = "2026-02-11T04:22:19.111Z" },
{ url = "https://files.pythonhosted.org/packages/f6/ab/69a42656adb1d0665ab051eec58a41f169ad295cf81ad45406963105408f/pillow-12.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb", size = 7165438, upload-time = "2026-02-11T04:22:21.041Z" },
{ url = "https://files.pythonhosted.org/packages/02/46/81f7aa8941873f0f01d4b55cc543b0a3d03ec2ee30d617a0448bf6bd6dec/pillow-12.1.1-cp314-cp314-win32.whl", hash = "sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f", size = 6431503, upload-time = "2026-02-11T04:22:22.833Z" },
{ url = "https://files.pythonhosted.org/packages/40/72/4c245f7d1044b67affc7f134a09ea619d4895333d35322b775b928180044/pillow-12.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15", size = 7176748, upload-time = "2026-02-11T04:22:24.64Z" },
{ url = "https://files.pythonhosted.org/packages/e4/ad/8a87bdbe038c5c698736e3348af5c2194ffb872ea52f11894c95f9305435/pillow-12.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f", size = 2544314, upload-time = "2026-02-11T04:22:26.685Z" },
{ url = "https://files.pythonhosted.org/packages/6c/9d/efd18493f9de13b87ede7c47e69184b9e859e4427225ea962e32e56a49bc/pillow-12.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8", size = 5268612, upload-time = "2026-02-11T04:22:29.884Z" },
{ url = "https://files.pythonhosted.org/packages/f8/f1/4f42eb2b388eb2ffc660dcb7f7b556c1015c53ebd5f7f754965ef997585b/pillow-12.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9", size = 4660567, upload-time = "2026-02-11T04:22:31.799Z" },
{ url = "https://files.pythonhosted.org/packages/01/54/df6ef130fa43e4b82e32624a7b821a2be1c5653a5fdad8469687a7db4e00/pillow-12.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60", size = 6269951, upload-time = "2026-02-11T04:22:33.921Z" },
{ url = "https://files.pythonhosted.org/packages/a9/48/618752d06cc44bb4aae8ce0cd4e6426871929ed7b46215638088270d9b34/pillow-12.1.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7", size = 8074769, upload-time = "2026-02-11T04:22:35.877Z" },
{ url = "https://files.pythonhosted.org/packages/c3/bd/f1d71eb39a72fa088d938655afba3e00b38018d052752f435838961127d8/pillow-12.1.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f", size = 6381358, upload-time = "2026-02-11T04:22:37.698Z" },
{ url = "https://files.pythonhosted.org/packages/64/ef/c784e20b96674ed36a5af839305f55616f8b4f8aa8eeccf8531a6e312243/pillow-12.1.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586", size = 7068558, upload-time = "2026-02-11T04:22:39.597Z" },
{ url = "https://files.pythonhosted.org/packages/73/cb/8059688b74422ae61278202c4e1ad992e8a2e7375227be0a21c6b87ca8d5/pillow-12.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce", size = 6493028, upload-time = "2026-02-11T04:22:42.73Z" },
{ url = "https://files.pythonhosted.org/packages/c6/da/e3c008ed7d2dd1f905b15949325934510b9d1931e5df999bb15972756818/pillow-12.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8", size = 7191940, upload-time = "2026-02-11T04:22:44.543Z" },
{ url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" },
{ url = "https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" },
{ url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" },
] ]
[[package]] [[package]]
name = "pillow" name = "pip"
version = "12.0.0" version = "26.0.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" } sdist = { url = "https://files.pythonhosted.org/packages/48/83/0d7d4e9efe3344b8e2fe25d93be44f64b65364d3c8d7bc6dc90198d5422e/pip-26.0.1.tar.gz", hash = "sha256:c4037d8a277c89b320abe636d59f91e6d0922d08a05b60e85e53b296613346d8", size = 1812747, upload-time = "2026-02-05T02:20:18.702Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" }, { url = "https://files.pythonhosted.org/packages/de/f0/c81e05b613866b76d2d1066490adf1a3dbc4ee9d9c839961c3fc8a6997af/pip-26.0.1-py3-none-any.whl", hash = "sha256:bdb1b08f4274833d62c1aa29e20907365a2ceb950410df15fc9521bad440122b", size = 1787723, upload-time = "2026-02-05T02:20:16.416Z" },
{ url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" }, ]
{ url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" },
{ url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" }, [[package]]
{ url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" }, name = "pip-api"
{ url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" }, version = "0.0.34"
{ url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334, upload-time = "2025-10-15T18:22:16.375Z" }, source = { registry = "https://pypi.org/simple" }
{ url = "https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162, upload-time = "2025-10-15T18:22:17.996Z" }, dependencies = [
{ url = "https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769, upload-time = "2025-10-15T18:22:19.923Z" }, { name = "pip" },
{ url = "https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107, upload-time = "2025-10-15T18:22:21.644Z" }, ]
{ url = "https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012, upload-time = "2025-10-15T18:22:23.621Z" }, sdist = { url = "https://files.pythonhosted.org/packages/b9/f1/ee85f8c7e82bccf90a3c7aad22863cc6e20057860a1361083cd2adacb92e/pip_api-0.0.34.tar.gz", hash = "sha256:9b75e958f14c5a2614bae415f2adf7eeb54d50a2cfbe7e24fd4826471bac3625", size = 123017, upload-time = "2024-07-09T20:32:30.641Z" }
{ url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493, upload-time = "2025-10-15T18:22:25.758Z" }, wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461, upload-time = "2025-10-15T18:22:27.286Z" }, { url = "https://files.pythonhosted.org/packages/91/f7/ebf5003e1065fd00b4cbef53bf0a65c3d3e1b599b676d5383ccb7a8b88ba/pip_api-0.0.34-py3-none-any.whl", hash = "sha256:8b2d7d7c37f2447373aa2cf8b1f60a2f2b27a84e1e9e0294a3f6ef10eb3ba6bb", size = 120369, upload-time = "2024-07-09T20:32:29.099Z" },
{ url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912, upload-time = "2025-10-15T18:22:28.751Z" }, ]
{ url = "https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132, upload-time = "2025-10-15T18:22:30.641Z" },
{ url = "https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099, upload-time = "2025-10-15T18:22:32.73Z" }, [[package]]
{ url = "https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808, upload-time = "2025-10-15T18:22:34.337Z" }, name = "pip-audit"
{ url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804, upload-time = "2025-10-15T18:22:36.402Z" }, version = "2.10.0"
{ url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553, upload-time = "2025-10-15T18:22:38.066Z" }, source = { registry = "https://pypi.org/simple" }
{ url = "https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729, upload-time = "2025-10-15T18:22:39.769Z" }, dependencies = [
{ url = "https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789, upload-time = "2025-10-15T18:22:41.437Z" }, { name = "cachecontrol", extra = ["filecache"] },
{ url = "https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917, upload-time = "2025-10-15T18:22:43.152Z" }, { name = "cyclonedx-python-lib" },
{ url = "https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391, upload-time = "2025-10-15T18:22:44.753Z" }, { name = "packaging" },
{ url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477, upload-time = "2025-10-15T18:22:46.838Z" }, { name = "pip-api" },
{ url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918, upload-time = "2025-10-15T18:22:48.399Z" }, { name = "pip-requirements-parser" },
{ url = "https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406, upload-time = "2025-10-15T18:22:49.905Z" }, { name = "platformdirs" },
{ url = "https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218, upload-time = "2025-10-15T18:22:51.587Z" }, { name = "requests" },
{ url = "https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564, upload-time = "2025-10-15T18:22:53.215Z" }, { name = "rich" },
{ url = "https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260, upload-time = "2025-10-15T18:22:54.933Z" }, { name = "tomli" },
{ url = "https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248, upload-time = "2025-10-15T18:22:56.605Z" }, { name = "tomli-w" },
{ url = "https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043, upload-time = "2025-10-15T18:22:58.53Z" }, ]
{ url = "https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915, upload-time = "2025-10-15T18:23:00.582Z" }, sdist = { url = "https://files.pythonhosted.org/packages/bd/89/0e999b413facab81c33d118f3ac3739fd02c0622ccf7c4e82e37cebd8447/pip_audit-2.10.0.tar.gz", hash = "sha256:427ea5bf61d1d06b98b1ae29b7feacc00288a2eced52c9c58ceed5253ef6c2a4", size = 53776, upload-time = "2025-12-01T23:42:40.612Z" }
{ url = "https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998, upload-time = "2025-10-15T18:23:02.627Z" }, wheels = [
{ url = "https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201, upload-time = "2025-10-15T18:23:04.709Z" }, { url = "https://files.pythonhosted.org/packages/be/f3/4888f895c02afa085630a3a3329d1b18b998874642ad4c530e9a4d7851fe/pip_audit-2.10.0-py3-none-any.whl", hash = "sha256:16e02093872fac97580303f0848fa3ad64f7ecf600736ea7835a2b24de49613f", size = 61518, upload-time = "2025-12-01T23:42:39.193Z" },
{ url = "https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165, upload-time = "2025-10-15T18:23:06.46Z" }, ]
{ url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834, upload-time = "2025-10-15T18:23:08.194Z" },
{ url = "https://files.pythonhosted.org/packages/54/2a/9a8c6ba2c2c07b71bec92cf63e03370ca5e5f5c5b119b742bcc0cde3f9c5/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9", size = 4045531, upload-time = "2025-10-15T18:23:10.121Z" }, [[package]]
{ url = "https://files.pythonhosted.org/packages/84/54/836fdbf1bfb3d66a59f0189ff0b9f5f666cee09c6188309300df04ad71fa/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2", size = 4120554, upload-time = "2025-10-15T18:23:12.14Z" }, name = "pip-licenses"
{ url = "https://files.pythonhosted.org/packages/0d/cd/16aec9f0da4793e98e6b54778a5fbce4f375c6646fe662e80600b8797379/pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a", size = 3576812, upload-time = "2025-10-15T18:23:13.962Z" }, version = "5.5.1"
{ url = "https://files.pythonhosted.org/packages/f6/b7/13957fda356dc46339298b351cae0d327704986337c3c69bb54628c88155/pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b", size = 5252689, upload-time = "2025-10-15T18:23:15.562Z" }, source = { registry = "https://pypi.org/simple" }
{ url = "https://files.pythonhosted.org/packages/fc/f5/eae31a306341d8f331f43edb2e9122c7661b975433de5e447939ae61c5da/pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad", size = 4650186, upload-time = "2025-10-15T18:23:17.379Z" }, dependencies = [
{ url = "https://files.pythonhosted.org/packages/86/62/2a88339aa40c4c77e79108facbd307d6091e2c0eb5b8d3cf4977cfca2fe6/pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01", size = 6230308, upload-time = "2025-10-15T18:23:18.971Z" }, { name = "prettytable" },
{ url = "https://files.pythonhosted.org/packages/c7/33/5425a8992bcb32d1cb9fa3dd39a89e613d09a22f2c8083b7bf43c455f760/pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c", size = 8039222, upload-time = "2025-10-15T18:23:20.909Z" }, ]
{ url = "https://files.pythonhosted.org/packages/d8/61/3f5d3b35c5728f37953d3eec5b5f3e77111949523bd2dd7f31a851e50690/pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e", size = 6346657, upload-time = "2025-10-15T18:23:23.077Z" }, sdist = { url = "https://files.pythonhosted.org/packages/44/4c/b4be9024dae3b5b3c0a6c58cc1d4a35fffe51c3adb835350cb7dcd43b5cd/pip_licenses-5.5.1.tar.gz", hash = "sha256:7df370e6e5024a3f7449abf8e4321ef868ba9a795698ad24ab6851f3e7fc65a7", size = 49108, upload-time = "2026-01-27T21:46:41.432Z" }
{ url = "https://files.pythonhosted.org/packages/3a/be/ee90a3d79271227e0f0a33c453531efd6ed14b2e708596ba5dd9be948da3/pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e", size = 7038482, upload-time = "2025-10-15T18:23:25.005Z" }, wheels = [
{ url = "https://files.pythonhosted.org/packages/44/34/a16b6a4d1ad727de390e9bd9f19f5f669e079e5826ec0f329010ddea492f/pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9", size = 6461416, upload-time = "2025-10-15T18:23:27.009Z" }, { url = "https://files.pythonhosted.org/packages/a0/a3/0b369cdffef3746157712804f1ded9856c75aa060217ee206f742c74e753/pip_licenses-5.5.1-py3-none-any.whl", hash = "sha256:ed5e229a93760e529cfa7edaec6630b5a2cd3874c1bddb8019e5f18a723fdead", size = 22108, upload-time = "2026-01-27T21:46:39.766Z" },
{ url = "https://files.pythonhosted.org/packages/b6/39/1aa5850d2ade7d7ba9f54e4e4c17077244ff7a2d9e25998c38a29749eb3f/pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab", size = 7131584, upload-time = "2025-10-15T18:23:29.752Z" }, ]
{ url = "https://files.pythonhosted.org/packages/bf/db/4fae862f8fad0167073a7733973bfa955f47e2cac3dc3e3e6257d10fab4a/pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b", size = 6400621, upload-time = "2025-10-15T18:23:32.06Z" },
{ url = "https://files.pythonhosted.org/packages/2b/24/b350c31543fb0107ab2599464d7e28e6f856027aadda995022e695313d94/pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b", size = 7142916, upload-time = "2025-10-15T18:23:34.71Z" }, [[package]]
{ url = "https://files.pythonhosted.org/packages/0f/9b/0ba5a6fd9351793996ef7487c4fdbde8d3f5f75dbedc093bb598648fddf0/pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0", size = 2523836, upload-time = "2025-10-15T18:23:36.967Z" }, name = "pip-requirements-parser"
{ url = "https://files.pythonhosted.org/packages/f5/7a/ceee0840aebc579af529b523d530840338ecf63992395842e54edc805987/pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6", size = 5255092, upload-time = "2025-10-15T18:23:38.573Z" }, version = "32.0.1"
{ url = "https://files.pythonhosted.org/packages/44/76/20776057b4bfd1aef4eeca992ebde0f53a4dce874f3ae693d0ec90a4f79b/pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6", size = 4653158, upload-time = "2025-10-15T18:23:40.238Z" }, source = { registry = "https://pypi.org/simple" }
{ url = "https://files.pythonhosted.org/packages/82/3f/d9ff92ace07be8836b4e7e87e6a4c7a8318d47c2f1463ffcf121fc57d9cb/pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1", size = 6267882, upload-time = "2025-10-15T18:23:42.434Z" }, dependencies = [
{ url = "https://files.pythonhosted.org/packages/9f/7a/4f7ff87f00d3ad33ba21af78bfcd2f032107710baf8280e3722ceec28cda/pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e", size = 8071001, upload-time = "2025-10-15T18:23:44.29Z" }, { name = "packaging" },
{ url = "https://files.pythonhosted.org/packages/75/87/fcea108944a52dad8cca0715ae6247e271eb80459364a98518f1e4f480c1/pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca", size = 6380146, upload-time = "2025-10-15T18:23:46.065Z" }, { name = "pyparsing" },
{ url = "https://files.pythonhosted.org/packages/91/52/0d31b5e571ef5fd111d2978b84603fce26aba1b6092f28e941cb46570745/pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925", size = 7067344, upload-time = "2025-10-15T18:23:47.898Z" }, ]
{ url = "https://files.pythonhosted.org/packages/7b/f4/2dd3d721f875f928d48e83bb30a434dee75a2531bca839bb996bb0aa5a91/pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8", size = 6491864, upload-time = "2025-10-15T18:23:49.607Z" }, sdist = { url = "https://files.pythonhosted.org/packages/5e/2a/63b574101850e7f7b306ddbdb02cb294380d37948140eecd468fae392b54/pip-requirements-parser-32.0.1.tar.gz", hash = "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3", size = 209359, upload-time = "2022-12-21T15:25:22.732Z" }
{ url = "https://files.pythonhosted.org/packages/30/4b/667dfcf3d61fc309ba5a15b141845cece5915e39b99c1ceab0f34bf1d124/pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4", size = 7158911, upload-time = "2025-10-15T18:23:51.351Z" }, wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/2f/16cabcc6426c32218ace36bf0d55955e813f2958afddbf1d391849fee9d1/pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52", size = 6408045, upload-time = "2025-10-15T18:23:53.177Z" }, { url = "https://files.pythonhosted.org/packages/54/d0/d04f1d1e064ac901439699ee097f58688caadea42498ec9c4b4ad2ef84ab/pip_requirements_parser-32.0.1-py3-none-any.whl", hash = "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526", size = 35648, upload-time = "2022-12-21T15:25:21.046Z" },
{ url = "https://files.pythonhosted.org/packages/35/73/e29aa0c9c666cf787628d3f0dcf379f4791fba79f4936d02f8b37165bdf8/pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a", size = 7148282, upload-time = "2025-10-15T18:23:55.316Z" }, ]
{ url = "https://files.pythonhosted.org/packages/c1/70/6b41bdcddf541b437bbb9f47f94d2db5d9ddef6c37ccab8c9107743748a4/pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7", size = 2525630, upload-time = "2025-10-15T18:23:57.149Z" },
[[package]]
name = "platformdirs"
version = "4.9.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1b/04/fea538adf7dbbd6d186f551d595961e564a3b6715bdf276b477460858672/platformdirs-4.9.2.tar.gz", hash = "sha256:9a33809944b9db043ad67ca0db94b14bf452cc6aeaac46a88ea55b26e2e9d291", size = 28394, upload-time = "2026-02-16T03:56:10.574Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/48/31/05e764397056194206169869b50cf2fee4dbbbc71b344705b9c0d878d4d8/platformdirs-4.9.2-py3-none-any.whl", hash = "sha256:9170634f126f8efdae22fb58ae8a0eaa86f38365bc57897a6c4f781d1f5875bd", size = 21168, upload-time = "2026-02-16T03:56:08.891Z" },
] ]
[[package]] [[package]]
@@ -1070,6 +1319,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
] ]
[[package]]
name = "pre-commit"
version = "4.5.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cfgv" },
{ name = "identify" },
{ name = "nodeenv" },
{ name = "pyyaml" },
{ name = "virtualenv" },
]
sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" },
]
[[package]]
name = "prettytable"
version = "3.17.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "wcwidth" },
]
sdist = { url = "https://files.pythonhosted.org/packages/79/45/b0847d88d6cfeb4413566738c8bbf1e1995fad3d42515327ff32cc1eb578/prettytable-3.17.0.tar.gz", hash = "sha256:59f2590776527f3c9e8cf9fe7b66dd215837cca96a9c39567414cbc632e8ddb0", size = 67892, upload-time = "2025-11-14T17:33:20.212Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/8c/83087ebc47ab0396ce092363001fa37c17153119ee282700c0713a195853/prettytable-3.17.0-py3-none-any.whl", hash = "sha256:aad69b294ddbe3e1f95ef8886a060ed1666a0b83018bbf56295f6f226c43d287", size = 34433, upload-time = "2025-11-14T17:33:19.093Z" },
]
[[package]] [[package]]
name = "psutil" name = "psutil"
version = "5.9.8" version = "5.9.8"
@@ -1126,12 +1403,24 @@ wheels = [
] ]
[[package]] [[package]]
name = "pyasn1" name = "py-cpuinfo"
version = "0.4.8" version = "9.0.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", size = 146820, upload-time = "2019-11-16T17:27:38.772Z" } sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716, upload-time = "2022-10-25T20:38:06.303Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/62/1e/a94a8d635fa3ce4cfc7f506003548d0a2447ae76fd5ca53932970fe3053f/pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", size = 77145, upload-time = "2019-11-16T17:27:11.07Z" }, { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335, upload-time = "2022-10-25T20:38:27.636Z" },
]
[[package]]
name = "py-serializable"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "defusedxml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/73/21/d250cfca8ff30c2e5a7447bc13861541126ce9bd4426cd5d0c9f08b5547d/py_serializable-2.1.0.tar.gz", hash = "sha256:9d5db56154a867a9b897c0163b33a793c804c80cee984116d02d49e4578fc103", size = 52368, upload-time = "2025-07-21T09:56:48.07Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9b/bf/7595e817906a29453ba4d99394e781b6fabe55d21f3c15d240f85dd06bb1/py_serializable-2.1.0-py3-none-any.whl", hash = "sha256:b56d5d686b5a03ba4f4db5e769dc32336e142fc3bd4d68a8c25579ebb0a67304", size = 23045, upload-time = "2025-07-21T09:56:46.848Z" },
] ]
[[package]] [[package]]
@@ -1252,6 +1541,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
] ]
[[package]]
name = "pyjwt"
version = "2.11.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5c/5a/b46fa56bf322901eee5b0454a34343cdbdae202cd421775a8ee4e42fd519/pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623", size = 98019, upload-time = "2026-01-30T19:59:55.694Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" },
]
[[package]]
name = "pyparsing"
version = "3.3.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
]
[[package]] [[package]]
name = "pyrate-limiter" name = "pyrate-limiter"
version = "3.9.0" version = "3.9.0"
@@ -1303,6 +1610,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" },
] ]
[[package]]
name = "pytest-benchmark"
version = "5.2.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "py-cpuinfo" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/24/34/9f732b76456d64faffbef6232f1f9dbec7a7c4999ff46282fa418bd1af66/pytest_benchmark-5.2.3.tar.gz", hash = "sha256:deb7317998a23c650fd4ff76e1230066a76cb45dcece0aca5607143c619e7779", size = 341340, upload-time = "2025-11-09T18:48:43.215Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/33/29/e756e715a48959f1c0045342088d7ca9762a2f509b945f362a316e9412b7/pytest_benchmark-5.2.3-py3-none-any.whl", hash = "sha256:bc839726ad20e99aaa0d11a127445457b4219bdb9e80a1afc4b51da7f96b0803", size = 45255, upload-time = "2025-11-09T18:48:39.765Z" },
]
[[package]] [[package]]
name = "pytest-cov" name = "pytest-cov"
version = "7.0.0" version = "7.0.0"
@@ -1355,6 +1675,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
] ]
[[package]]
name = "python-discovery"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "filelock" },
{ name = "platformdirs" },
]
sdist = { url = "https://files.pythonhosted.org/packages/82/bb/93a3e83bdf9322c7e21cafd092e56a4a17c4d8ef4277b6eb01af1a540a6f/python_discovery-1.1.0.tar.gz", hash = "sha256:447941ba1aed8cc2ab7ee3cb91be5fc137c5bdbb05b7e6ea62fbdcb66e50b268", size = 55674, upload-time = "2026-02-26T09:42:49.668Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/06/54/82a6e2ef37f0f23dccac604b9585bdcbd0698604feb64807dcb72853693e/python_discovery-1.1.0-py3-none-any.whl", hash = "sha256:a162893b8809727f54594a99ad2179d2ede4bf953e12d4c7abc3cc9cdbd1437b", size = 30687, upload-time = "2026-02-26T09:42:48.548Z" },
]
[[package]] [[package]]
name = "python-dotenv" name = "python-dotenv"
version = "1.2.1" version = "1.2.1"
@@ -1364,27 +1697,13 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
] ]
[[package]]
name = "python-jose"
version = "3.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "ecdsa" },
{ name = "pyasn1" },
{ name = "rsa" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8e/a0/c49687cf40cb6128ea4e0559855aff92cd5ebd1a60a31c08526818c0e51e/python-jose-3.4.0.tar.gz", hash = "sha256:9a9a40f418ced8ecaf7e3b28d69887ceaa76adad3bcaa6dae0d9e596fec1d680", size = 92145, upload-time = "2025-02-18T17:26:41.985Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/63/b0/2586ea6b6fd57a994ece0b56418cbe93fff0efb85e2c9eb6b0caf24a4e37/python_jose-3.4.0-py2.py3-none-any.whl", hash = "sha256:9c9f616819652d109bd889ecd1e15e9a162b9b94d682534c9c2146092945b78f", size = 34616, upload-time = "2025-02-18T17:26:40.826Z" },
]
[[package]] [[package]]
name = "python-multipart" name = "python-multipart"
version = "0.0.20" version = "0.0.22"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" },
] ]
[[package]] [[package]]
@@ -1602,18 +1921,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/7f/1a65ae870bc9d0576aebb0c501ea5dccf1ae2178fe2821042150ebd2e707/rpds_py-0.29.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2023473f444752f0f82a58dfcbee040d0a1b3d1b3c2ec40e884bd25db6d117d2", size = 225919, upload-time = "2025-11-16T14:50:14.734Z" }, { url = "https://files.pythonhosted.org/packages/c9/7f/1a65ae870bc9d0576aebb0c501ea5dccf1ae2178fe2821042150ebd2e707/rpds_py-0.29.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2023473f444752f0f82a58dfcbee040d0a1b3d1b3c2ec40e884bd25db6d117d2", size = 225919, upload-time = "2025-11-16T14:50:14.734Z" },
] ]
[[package]]
name = "rsa"
version = "4.9.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyasn1" },
]
sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" },
]
[[package]] [[package]]
name = "ruff" name = "ruff"
version = "0.14.4" version = "0.14.4"
@@ -1802,6 +2109,60 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/73/27/c2f24b19dafa197c514abe70eda69bc031c5152c6b1f1e5b20099e2ceedd/testcontainers-4.13.3-py3-none-any.whl", hash = "sha256:063278c4805ffa6dd85e56648a9da3036939e6c0ac1001e851c9276b19b05970", size = 124784, upload-time = "2025-11-14T05:08:46.053Z" }, { url = "https://files.pythonhosted.org/packages/73/27/c2f24b19dafa197c514abe70eda69bc031c5152c6b1f1e5b20099e2ceedd/testcontainers-4.13.3-py3-none-any.whl", hash = "sha256:063278c4805ffa6dd85e56648a9da3036939e6c0ac1001e851c9276b19b05970", size = 124784, upload-time = "2025-11-14T05:08:46.053Z" },
] ]
[[package]]
name = "tomli"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" },
{ url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" },
{ url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" },
{ url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" },
{ url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" },
{ url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" },
{ url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" },
{ url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" },
{ url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" },
{ url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" },
{ url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" },
{ url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" },
{ url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" },
{ url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" },
{ url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" },
{ url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" },
{ url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" },
{ url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" },
{ url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" },
{ url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" },
{ url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" },
{ url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" },
{ url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" },
{ url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" },
{ url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" },
{ url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" },
{ url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" },
{ url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" },
{ url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" },
{ url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" },
{ url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" },
{ url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" },
{ url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" },
{ url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" },
{ url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" },
{ url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" },
{ url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" },
]
[[package]]
name = "tomli-w"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/19/75/241269d1da26b624c0d5e110e8149093c759b7a286138f4efd61a60e75fe/tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021", size = 7184, upload-time = "2025-01-15T12:07:24.262Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/18/c86eb8e0202e32dd3df50d43d7ff9854f8e0603945ff398974c1d91ac1ef/tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90", size = 6675, upload-time = "2025-01-15T12:07:22.074Z" },
]
[[package]] [[package]]
name = "typing-extensions" name = "typing-extensions"
version = "4.15.0" version = "4.15.0"
@@ -1907,11 +2268,11 @@ wheels = [
[[package]] [[package]]
name = "urllib3" name = "urllib3"
version = "2.5.0" version = "2.6.3"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
] ]
[[package]] [[package]]
@@ -1927,6 +2288,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" }, { url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" },
] ]
[[package]]
name = "virtualenv"
version = "21.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "distlib" },
{ name = "filelock" },
{ name = "platformdirs" },
{ name = "python-discovery" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2f/c9/18d4b36606d6091844daa3bd93cf7dc78e6f5da21d9f21d06c221104b684/virtualenv-21.1.0.tar.gz", hash = "sha256:1990a0188c8f16b6b9cf65c9183049007375b26aad415514d377ccacf1e4fb44", size = 5840471, upload-time = "2026-02-27T08:49:29.702Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/55/896b06bf93a49bec0f4ae2a6f1ed12bd05c8860744ac3a70eda041064e4d/virtualenv-21.1.0-py3-none-any.whl", hash = "sha256:164f5e14c5587d170cf98e60378eb91ea35bf037be313811905d3a24ea33cc07", size = 5825072, upload-time = "2026-02-27T08:49:27.516Z" },
]
[[package]]
name = "wcwidth"
version = "0.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, upload-time = "2026-02-06T19:19:40.919Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" },
]
[[package]] [[package]]
name = "webcolors" name = "webcolors"
version = "25.10.0" version = "25.10.0"

View File

@@ -62,7 +62,7 @@ services:
- NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL} - NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}
depends_on: depends_on:
- backend - backend
command: npm run dev command: bun run dev
networks: networks:
- app-network - app-network

2
frontend/.gitignore vendored
View File

@@ -42,5 +42,5 @@ yarn-error.log*
*.tsbuildinfo *.tsbuildinfo
next-env.d.ts next-env.d.ts
# Auto-generated files (regenerate with npm run generate:api) # Auto-generated files (regenerate with bun run generate:api)
/src/mocks/handlers/generated.ts /src/mocks/handlers/generated.ts

View File

@@ -1,16 +1,16 @@
# Stage 1: Dependencies # Stage 1: Dependencies
FROM node:20-alpine AS deps FROM oven/bun:1-alpine AS deps
WORKDIR /app WORKDIR /app
COPY package.json package-lock.json ./ COPY package.json bun.lock ./
RUN npm ci RUN bun install --frozen-lockfile
# Stage 2: Builder # Stage 2: Builder
FROM node:20-alpine AS builder FROM oven/bun:1-alpine AS builder
WORKDIR /app WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules COPY --from=deps /app/node_modules ./node_modules
COPY . . COPY . .
ENV NEXT_TELEMETRY_DISABLED 1 ENV NEXT_TELEMETRY_DISABLED 1
RUN npm run build RUN bun run build
# Stage 3: Runner # Stage 3: Runner
FROM node:20-alpine AS runner FROM node:20-alpine AS runner

View File

@@ -29,7 +29,7 @@ Production-ready Next.js 16 frontend with TypeScript, authentication, admin pane
### Admin Panel ### Admin Panel
- 👥 **User Administration** - CRUD operations, search, filters - 👥 **User Administration** - Full lifecycle operations, search, filters
- 🏢 **Organization Management** - Multi-tenant support with roles - 🏢 **Organization Management** - Multi-tenant support with roles
- 📊 **Dashboard** - Statistics and quick actions - 📊 **Dashboard** - Statistics and quick actions
- 🔍 **Advanced Filtering** - Status, search, pagination - 🔍 **Advanced Filtering** - Status, search, pagination
@@ -47,16 +47,16 @@ Production-ready Next.js 16 frontend with TypeScript, authentication, admin pane
### Prerequisites ### Prerequisites
- Node.js 18+ - Node.js 18+
- npm, yarn, or pnpm - [Bun](https://bun.sh/) (recommended runtime & package manager)
### Installation ### Installation
```bash ```bash
# Install dependencies # Install dependencies
npm install bun install
# Run development server # Run development server
npm run dev bun run dev
``` ```
Open [http://localhost:3000](http://localhost:3000) to view the app. Open [http://localhost:3000](http://localhost:3000) to view the app.
@@ -74,26 +74,26 @@ NEXT_PUBLIC_SITE_URL=http://localhost:3000
```bash ```bash
# Development # Development
npm run dev # Start dev server bun run dev # Start dev server
npm run build # Production build bun run build # Production build
npm run start # Start production server bun run start # Start production server
# Code Quality # Code Quality
npm run lint # Run ESLint bun run lint # Run ESLint
npm run format # Format with Prettier bun run format # Format with Prettier
npm run format:check # Check formatting bun run format:check # Check formatting
npm run type-check # TypeScript type checking bun run type-check # TypeScript type checking
npm run validate # Run all checks (lint + format + type-check) bun run validate # Run all checks (lint + format + type-check)
# Testing # Testing
npm test # Run unit tests bun run test # Run unit tests
npm run test:watch # Watch mode bun run test:watch # Watch mode
npm run test:coverage # Coverage report bun run test:coverage # Coverage report
npm run test:e2e # Run E2E tests bun run test:e2e # Run E2E tests
npm run test:e2e:ui # Playwright UI mode bun run test:e2e:ui # Playwright UI mode
# API Client # API Client
npm run generate:api # Generate TypeScript client from OpenAPI spec bun run generate:api # Generate TypeScript client from OpenAPI spec
``` ```
## Project Structure ## Project Structure
@@ -184,13 +184,13 @@ See [docs/I18N.md](./docs/I18N.md) for complete guide.
```bash ```bash
# Run all tests # Run all tests
npm test bun run test
# Watch mode # Watch mode
npm run test:watch bun run test:watch
# Coverage # Coverage
npm run test:coverage bun run test:coverage
``` ```
**Coverage**: 1,142+ tests covering components, hooks, utilities, and pages. **Coverage**: 1,142+ tests covering components, hooks, utilities, and pages.
@@ -199,13 +199,13 @@ npm run test:coverage
```bash ```bash
# Run E2E tests # Run E2E tests
npm run test:e2e bun run test:e2e
# UI mode (recommended for debugging) # UI mode (recommended for debugging)
npm run test:e2e:ui bun run test:e2e:ui
# Debug mode # Debug mode
npm run test:e2e:debug bun run test:e2e:debug
``` ```
**Coverage**: 178+ tests covering authentication, navigation, admin panel, and user flows. **Coverage**: 178+ tests covering authentication, navigation, admin panel, and user flows.
@@ -247,7 +247,7 @@ npm run test:e2e:debug
1. Follow existing code patterns 1. Follow existing code patterns
2. Write tests for new features 2. Write tests for new features
3. Run `npm run validate` before committing 3. Run `bun run validate` before committing
4. Keep translations in sync (en.json & it.json) 4. Keep translations in sync (en.json & it.json)
## License ## License

2678
frontend/bun.lock Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -35,7 +35,7 @@
```bash ```bash
cd frontend cd frontend
npm run generate:api bun run generate:api
``` ```
This fetches the OpenAPI spec from the backend and generates TypeScript types and API client functions. This fetches the OpenAPI spec from the backend and generates TypeScript types and API client functions.
@@ -894,7 +894,7 @@ apiClient.interceptors.request.use((config) => {
**Solution**: Regenerate API client to sync with backend **Solution**: Regenerate API client to sync with backend
```bash ```bash
npm run generate:api bun run generate:api
``` ```
### 9.4 Stale Data ### 9.4 Stale Data

View File

@@ -1300,7 +1300,7 @@ import Image from 'next/image';
**Bundle Size Monitoring:** **Bundle Size Monitoring:**
```bash ```bash
npm run build && npm run analyze bun run build && bun run analyze
# Use webpack-bundle-analyzer to identify large dependencies # Use webpack-bundle-analyzer to identify large dependencies
``` ```
@@ -1362,8 +1362,8 @@ npm run build && npm run analyze
**Regular Audits:** **Regular Audits:**
```bash ```bash
npm audit bun audit
npm audit fix bun audit fix
``` ```
**Automated Scanning:** **Automated Scanning:**
@@ -1496,11 +1496,11 @@ npm audit fix
FROM node:20-alpine FROM node:20-alpine
WORKDIR /app WORKDIR /app
COPY package*.json ./ COPY package*.json ./
RUN npm ci --only=production RUN bun install --frozen-lockfile --only=production
COPY . . COPY . .
RUN npm run build RUN bun run build
EXPOSE 3000 EXPOSE 3000
CMD ["npm", "start"] CMD ["bun", "start"]
``` ```
### 14.2 Environment Configuration ### 14.2 Environment Configuration
@@ -1536,15 +1536,15 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install dependencies - name: Install dependencies
run: npm ci run: bun install --frozen-lockfile
- name: Run tests - name: Run tests
run: npm test run: bun run test
- name: Run linter - name: Run linter
run: npm run lint run: bun run lint
- name: Type check - name: Type check
run: npm run type-check run: bun run type-check
- name: Build - name: Build
run: npm run build run: bun run build
``` ```
--- ---

View File

@@ -908,16 +908,16 @@ Before committing code, always run:
```bash ```bash
# Type checking # Type checking
npm run type-check bun run type-check
# Linting # Linting
npm run lint bun run lint
# Tests # Tests
npm test bun run test
# Build check # Build check
npm run build bun run build
``` ```
**In browser:** **In browser:**

View File

@@ -59,7 +59,7 @@ cd frontend
echo "NEXT_PUBLIC_DEMO_MODE=true" > .env.local echo "NEXT_PUBLIC_DEMO_MODE=true" > .env.local
# Start frontend only (no backend needed) # Start frontend only (no backend needed)
npm run dev bun run dev
# Open http://localhost:3000 # Open http://localhost:3000
``` ```
@@ -233,7 +233,7 @@ MSW never initializes during Jest tests:
- 97%+ coverage maintained - 97%+ coverage maintained
```bash ```bash
npm test # MSW will NOT interfere bun run test # MSW will NOT interfere
``` ```
### E2E Tests (Playwright) ### E2E Tests (Playwright)
@@ -247,14 +247,14 @@ MSW never initializes during Playwright tests:
- All E2E tests pass unchanged - All E2E tests pass unchanged
```bash ```bash
npm run test:e2e # MSW will NOT interfere bun run test:e2e # MSW will NOT interfere
``` ```
### Manual Testing in Demo Mode ### Manual Testing in Demo Mode
```bash ```bash
# Enable demo mode # Enable demo mode
NEXT_PUBLIC_DEMO_MODE=true npm run dev NEXT_PUBLIC_DEMO_MODE=true bun run dev
# Test flows: # Test flows:
# 1. Open http://localhost:3000 # 1. Open http://localhost:3000
@@ -304,7 +304,7 @@ NEXT_PUBLIC_APP_NAME=My Demo App
```bash ```bash
# netlify.toml # netlify.toml
[build] [build]
command = "npm run build" command = "bun run build"
publish = ".next" publish = ".next"
[build.environment] [build.environment]
@@ -321,10 +321,10 @@ module.exports = {
} }
# Build # Build
NEXT_PUBLIC_DEMO_MODE=true npm run build NEXT_PUBLIC_DEMO_MODE=true bun run build
# Deploy to GitHub Pages # Deploy to GitHub Pages
npm run deploy bun run deploy
``` ```
## Troubleshooting ## Troubleshooting

View File

@@ -1040,7 +1040,7 @@ export default function AdminDashboardPage() {
These examples demonstrate: These examples demonstrate:
1. **Complete CRUD operations** (User Management) 1. **Complete management operations** (User Management)
2. **Real-time data with polling** (Session Management) 2. **Real-time data with polling** (Session Management)
3. **Data visualization** (Admin Dashboard Charts) 3. **Data visualization** (Admin Dashboard Charts)

View File

@@ -9,7 +9,7 @@ MSW (Mock Service Worker) handlers are **automatically generated** from your Ope
``` ```
Backend API Changes Backend API Changes
npm run generate:api bun run generate:api
┌─────────────────────────────────────┐ ┌─────────────────────────────────────┐
│ 1. Fetches OpenAPI spec │ │ 1. Fetches OpenAPI spec │
@@ -30,7 +30,7 @@ src/mocks/handlers/
When you run: When you run:
```bash ```bash
npm run generate:api bun run generate:api
``` ```
The system: The system:
@@ -125,7 +125,7 @@ Overrides are applied FIRST, so they take precedence over generated handlers.
```bash ```bash
# Backend adds new endpoint # Backend adds new endpoint
# 1. Run npm run generate:api # 1. Run bun run generate:api
# 2. Manually add MSW handler # 2. Manually add MSW handler
# 3. Test demo mode # 3. Test demo mode
# 4. Fix bugs # 4. Fix bugs
@@ -136,7 +136,7 @@ Overrides are applied FIRST, so they take precedence over generated handlers.
```bash ```bash
# Backend adds new endpoint # Backend adds new endpoint
npm run generate:api # Done! MSW auto-synced bun run generate:api # Done! MSW auto-synced
``` ```
### ✅ Always In Sync ### ✅ Always In Sync
@@ -202,11 +202,11 @@ frontend/
2. **Regenerate clients:** 2. **Regenerate clients:**
```bash ```bash
cd frontend cd frontend
npm run generate:api bun run generate:api
``` ```
3. **Test demo mode:** 3. **Test demo mode:**
```bash ```bash
NEXT_PUBLIC_DEMO_MODE=true npm run dev NEXT_PUBLIC_DEMO_MODE=true bun run dev
``` ```
4. **Done!** New endpoint automatically works in demo mode 4. **Done!** New endpoint automatically works in demo mode
@@ -286,7 +286,7 @@ The generator (`scripts/generate-msw-handlers.ts`) does:
**Check:** **Check:**
1. Is backend running? (`npm run generate:api` requires backend) 1. Is backend running? (`bun run generate:api` requires backend)
2. Check console for `[MSW]` warnings 2. Check console for `[MSW]` warnings
3. Verify `generated.ts` exists and has your endpoint 3. Verify `generated.ts` exists and has your endpoint
4. Check path parameters match exactly 4. Check path parameters match exactly
@@ -324,7 +324,7 @@ npx tsx scripts/generate-msw-handlers.ts /tmp/openapi.json
### ✅ Do ### ✅ Do
- Run `npm run generate:api` after backend changes - Run `bun run generate:api` after backend changes
- Use `overrides.ts` for complex logic - Use `overrides.ts` for complex logic
- Keep mock data in `data/` files - Keep mock data in `data/` files
- Test demo mode regularly - Test demo mode regularly
@@ -380,7 +380,7 @@ http.get(`${API_BASE_URL}/api/v1/users/me`, async ({ request }) => {
### After (Automated) ### After (Automated)
```bash ```bash
npm run generate:api # Done! All 31+ endpoints handled automatically bun run generate:api # Done! All 31+ endpoints handled automatically
``` ```
**Manual Code: 1500+ lines** **Manual Code: 1500+ lines**
@@ -399,6 +399,6 @@ npm run generate:api # Done! All 31+ endpoints handled automatically
**This template is batteries-included.** **This template is batteries-included.**
Your API client and MSW handlers stay perfectly synchronized with zero manual work. Your API client and MSW handlers stay perfectly synchronized with zero manual work.
Just run `npm run generate:api` and everything updates automatically. Just run `bun run generate:api` and everything updates automatically.
That's the power of OpenAPI + automation! 🚀 That's the power of OpenAPI + automation! 🚀

View File

@@ -526,7 +526,7 @@ interface UserSession {
- Development: `http://localhost:8000/api/v1/openapi.json` - Development: `http://localhost:8000/api/v1/openapi.json`
- Docker: `http://backend:8000/api/v1/openapi.json` - Docker: `http://backend:8000/api/v1/openapi.json`
- Generates TypeScript client in `src/lib/api/generated/` - Generates TypeScript client in `src/lib/api/generated/`
- Runs as npm script: `npm run generate:api` - Runs as script: `bun run generate:api`
- Can be run independently for frontend-only development - Can be run independently for frontend-only development
**Root Script** (`root/scripts/generate-frontend-api.sh`): **Root Script** (`root/scripts/generate-frontend-api.sh`):
@@ -1724,7 +1724,7 @@ Provide 2-3 complete feature implementation walkthroughs, including:
**Dependency Security:** **Dependency Security:**
- Regular dependency updates - Regular dependency updates
- Security audit via `npm audit` - Security audit via `bun audit`
- Automated security scanning (Dependabot, Snyk) - Automated security scanning (Dependabot, Snyk)
### 12.5 SEO ### 12.5 SEO
@@ -1780,7 +1780,7 @@ The frontend template will be considered complete when:
1. **Functionality:** 1. **Functionality:**
- All specified pages are implemented and functional - All specified pages are implemented and functional
- Authentication flow works end-to-end - Authentication flow works end-to-end
- User and organization CRUD operations work - User and organization management operations work
- API integration is complete and reliable - API integration is complete and reliable
2. **Code Quality:** 2. **Code Quality:**

19020
frontend/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -12,7 +12,7 @@
"type-check": "tsc --noEmit", "type-check": "tsc --noEmit",
"format": "prettier --write .", "format": "prettier --write .",
"format:check": "prettier --check .", "format:check": "prettier --check .",
"validate": "npm run lint && npm run format:check && npm run type-check", "validate": "bun run lint && bun run format:check && bun run type-check",
"generate:api": "./scripts/generate-api-client.sh", "generate:api": "./scripts/generate-api-client.sh",
"test": "jest", "test": "jest",
"test:watch": "jest --watch", "test:watch": "jest --watch",
@@ -24,7 +24,7 @@
"dependencies": { "dependencies": {
"@hookform/resolvers": "^5.2.2", "@hookform/resolvers": "^5.2.2",
"@radix-ui/react-alert-dialog": "^1.1.15", "@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-avatar": "^1.1.10", "@radix-ui/react-avatar": "^1.1.11",
"@radix-ui/react-checkbox": "^1.3.3", "@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-dialog": "^1.1.15", "@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16", "@radix-ui/react-dropdown-menu": "^2.1.16",
@@ -32,65 +32,65 @@
"@radix-ui/react-label": "^2.1.8", "@radix-ui/react-label": "^2.1.8",
"@radix-ui/react-popover": "^1.1.15", "@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-select": "^2.2.6", "@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.7", "@radix-ui/react-separator": "^1.1.8",
"@radix-ui/react-slot": "^1.2.4", "@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-tabs": "^1.1.13", "@radix-ui/react-tabs": "^1.1.13",
"@tanstack/react-query": "^5.90.5", "@tanstack/react-query": "^5.90.21",
"@types/react-syntax-highlighter": "^15.5.13", "@types/react-syntax-highlighter": "^15.5.13",
"axios": "^1.13.1", "axios": "^1.13.6",
"class-variance-authority": "^0.7.1", "class-variance-authority": "^0.7.1",
"clsx": "^2.1.1", "clsx": "^2.1.1",
"date-fns": "^4.1.0", "date-fns": "^4.1.0",
"framer-motion": "^12.23.24", "framer-motion": "^12.34.3",
"gray-matter": "^4.0.3", "gray-matter": "^4.0.3",
"lucide-react": "^0.552.0", "lucide-react": "^0.552.0",
"next": "^16", "next": "^16.1.6",
"next-intl": "^4.5.3", "next-intl": "^4.8.3",
"next-themes": "^0.4.6", "next-themes": "^0.4.6",
"react": "^19.0.0", "react": "^19.2.4",
"react-dom": "^19.0.0", "react-dom": "^19.2.4",
"react-hook-form": "^7.66.0", "react-hook-form": "^7.71.2",
"react-markdown": "^10.1.0", "react-markdown": "^10.1.0",
"react-syntax-highlighter": "^16.1.0", "react-syntax-highlighter": "^16.1.1",
"recharts": "^2.15.4", "recharts": "^2.15.4",
"rehype-autolink-headings": "^7.1.0", "rehype-autolink-headings": "^7.1.0",
"rehype-highlight": "^7.0.2", "rehype-highlight": "^7.0.2",
"rehype-slug": "^6.0.0", "rehype-slug": "^6.0.0",
"remark-gfm": "^4.0.1", "remark-gfm": "^4.0.1",
"sonner": "^2.0.7", "sonner": "^2.0.7",
"tailwind-merge": "^3.3.1", "tailwind-merge": "^3.5.0",
"zod": "^3.25.76", "zod": "^3.25.76",
"zustand": "^4.5.7" "zustand": "^4.5.7"
}, },
"devDependencies": { "devDependencies": {
"@hey-api/openapi-ts": "^0.86.11", "@hey-api/openapi-ts": "^0.86.12",
"@next/bundle-analyzer": "^16.0.1", "@next/bundle-analyzer": "^16.1.6",
"@peculiar/webcrypto": "^1.5.0", "@peculiar/webcrypto": "^1.5.0",
"@playwright/test": "^1.56.1", "@playwright/test": "^1.58.2",
"@tailwindcss/postcss": "^4", "@tailwindcss/postcss": "^4.2.1",
"@tanstack/react-query-devtools": "^5.90.2", "@tanstack/react-query-devtools": "^5.91.3",
"@testing-library/jest-dom": "^6.9.1", "@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.0", "@testing-library/react": "^16.3.2",
"@testing-library/user-event": "^14.6.1", "@testing-library/user-event": "^14.6.1",
"@types/jest": "^30.0.0", "@types/jest": "^30.0.0",
"@types/node": "^20", "@types/node": "^20.19.35",
"@types/react": "^19", "@types/react": "^19.2.14",
"@types/react-dom": "^19", "@types/react-dom": "^19.2.3",
"eslint": "^9", "eslint": "^9.39.3",
"eslint-config-next": "^16", "eslint-config-next": "^16.1.6",
"eslint-config-prettier": "^10.1.8", "eslint-config-prettier": "^10.1.8",
"eslint-plugin-jsx-a11y": "^6.10.2", "eslint-plugin-jsx-a11y": "^6.10.2",
"eslint-plugin-react": "^7.37.2", "eslint-plugin-react": "^7.37.5",
"eslint-plugin-react-hooks": "^5.0.0", "eslint-plugin-react-hooks": "^5.2.0",
"jest": "^30.2.0", "jest": "^30.2.0",
"jest-environment-jsdom": "^30.2.0", "jest-environment-jsdom": "^30.2.0",
"lighthouse": "^12.8.2", "lighthouse": "^12.8.2",
"msw": "^2.12.3", "msw": "^2.12.10",
"prettier": "^3.6.2", "prettier": "^3.8.1",
"tailwindcss": "^4", "tailwindcss": "^4.2.1",
"tsx": "^4.20.6", "tsx": "^4.21.0",
"typescript": "^5", "typescript": "^5.9.3",
"typescript-eslint": "^8.15.0", "typescript-eslint": "^8.56.1",
"whatwg-fetch": "^3.6.20" "whatwg-fetch": "^3.6.20"
}, },
"msw": { "msw": {
@@ -98,8 +98,5 @@
"public" "public"
] ]
}, },
"overrides": { "overrides": {}
"glob": "^10.4.1",
"inflight": "npm:lru-cache@^10.0.0"
}
} }

View File

@@ -7,7 +7,7 @@
* - Please do NOT modify this file. * - Please do NOT modify this file.
*/ */
const PACKAGE_VERSION = '2.12.7'; const PACKAGE_VERSION = '2.12.10';
const INTEGRITY_CHECKSUM = '4db4a41e972cec1b64cc569c66952d82'; const INTEGRITY_CHECKSUM = '4db4a41e972cec1b64cc569c66952d82';
const IS_MOCKED_RESPONSE = Symbol('isMockedResponse'); const IS_MOCKED_RESPONSE = Symbol('isMockedResponse');
const activeClientIds = new Set(); const activeClientIds = new Set();

View File

@@ -11,7 +11,7 @@ MSW handlers can drift out of sync with the backend API as it evolves.
Install the package that auto-generates MSW handlers from OpenAPI: Install the package that auto-generates MSW handlers from OpenAPI:
```bash ```bash
npm install --save-dev openapi-msw bun install --save-dev openapi-msw
``` ```
Then create a generation script: Then create a generation script:
@@ -39,9 +39,9 @@ generate();
When you add/change backend endpoints: When you add/change backend endpoints:
1. **Update Backend** → Make API changes 1. **Update Backend** → Make API changes
2. **Generate Frontend Client**`npm run generate:api` 2. **Generate Frontend Client**`bun run generate:api`
3. **Update MSW Handlers** → Edit `src/mocks/handlers/*.ts` 3. **Update MSW Handlers** → Edit `src/mocks/handlers/*.ts`
4. **Test Demo Mode**`NEXT_PUBLIC_DEMO_MODE=true npm run dev` 4. **Test Demo Mode**`NEXT_PUBLIC_DEMO_MODE=true bun run dev`
### Option 3: Automated with Script Hook ### Option 3: Automated with Script Hook
@@ -50,7 +50,7 @@ Add to `package.json`:
```json ```json
{ {
"scripts": { "scripts": {
"generate:api": "./scripts/generate-api-client.sh && npm run sync:msw", "generate:api": "./scripts/generate-api-client.sh && bun run sync:msw",
"sync:msw": "echo '⚠️ Don't forget to update MSW handlers in src/mocks/handlers/'" "sync:msw": "echo '⚠️ Don't forget to update MSW handlers in src/mocks/handlers/'"
} }
} }
@@ -100,7 +100,7 @@ Our MSW handlers currently cover:
To check if MSW is missing handlers: To check if MSW is missing handlers:
1. Start demo mode: `NEXT_PUBLIC_DEMO_MODE=true npm run dev` 1. Start demo mode: `NEXT_PUBLIC_DEMO_MODE=true bun run dev`
2. Open browser console 2. Open browser console
3. Look for `[MSW] Warning: intercepted a request without a matching request handler` 3. Look for `[MSW] Warning: intercepted a request without a matching request handler`
4. Add missing handlers to appropriate file in `src/mocks/handlers/` 4. Add missing handlers to appropriate file in `src/mocks/handlers/`

View File

@@ -152,7 +152,7 @@ type BuildUrlFn = <
url: string; url: string;
}, },
>( >(
options: Pick<TData, 'url'> & Options<TData>, options: TData & Options<TData>,
) => string; ) => string;
export type Client = CoreClient< export type Client = CoreClient<
@@ -195,7 +195,7 @@ export type Options<
RequestOptions<TResponse, ThrowOnError>, RequestOptions<TResponse, ThrowOnError>,
'body' | 'path' | 'query' | 'url' 'body' | 'path' | 'query' | 'url'
> & > &
Omit<TData, 'url'>; ([TData] extends [never] ? unknown : Omit<TData, 'url'>);
export type OptionsLegacyParser< export type OptionsLegacyParser<
TData = unknown, TData = unknown,

View File

@@ -23,6 +23,17 @@ export type Field =
*/ */
key?: string; key?: string;
map?: string; map?: string;
}
| {
/**
* Field name. This is the name we want the user to see and use.
*/
key: string;
/**
* Field mapped name. This is the name we want to use in the request.
* If `in` is omitted, `map` aliases `key` to the transport layer.
*/
map: Slot;
}; };
export interface Fields { export interface Fields {
@@ -42,10 +53,14 @@ const extraPrefixes = Object.entries(extraPrefixesMap);
type KeyMap = Map< type KeyMap = Map<
string, string,
{ | {
in: Slot; in: Slot;
map?: string; map?: string;
} }
| {
in?: never;
map: Slot;
}
>; >;
const buildKeyMap = (fields: FieldsConfig, map?: KeyMap): KeyMap => { const buildKeyMap = (fields: FieldsConfig, map?: KeyMap): KeyMap => {
@@ -61,6 +76,10 @@ const buildKeyMap = (fields: FieldsConfig, map?: KeyMap): KeyMap => {
map: config.map, map: config.map,
}); });
} }
} else if ('key' in config) {
map.set(config.key, {
map: config.map,
});
} else if (config.args) { } else if (config.args) {
buildKeyMap(config.args, map); buildKeyMap(config.args, map);
} }
@@ -112,7 +131,9 @@ export const buildClientParams = (
if (config.key) { if (config.key) {
const field = map.get(config.key)!; const field = map.get(config.key)!;
const name = field.map || config.key; const name = field.map || config.key;
(params[field.in] as Record<string, unknown>)[name] = arg; if (field.in) {
(params[field.in] as Record<string, unknown>)[name] = arg;
}
} else { } else {
params.body = arg; params.body = arg;
} }
@@ -121,8 +142,12 @@ export const buildClientParams = (
const field = map.get(key); const field = map.get(key);
if (field) { if (field) {
const name = field.map || key; if (field.in) {
(params[field.in] as Record<string, unknown>)[name] = value; const name = field.map || key;
(params[field.in] as Record<string, unknown>)[name] = value;
} else {
params[field.map] = value;
}
} else { } else {
const extra = extraPrefixes.find(([prefix]) => const extra = extraPrefixes.find(([prefix]) =>
key.startsWith(prefix), key.startsWith(prefix),
@@ -133,10 +158,8 @@ export const buildClientParams = (
(params[slot] as Record<string, unknown>)[ (params[slot] as Record<string, unknown>)[
key.slice(prefix.length) key.slice(prefix.length)
] = value; ] = value;
} else { } else if ('allowExtra' in config && config.allowExtra) {
for (const [slot, allowed] of Object.entries( for (const [slot, allowed] of Object.entries(config.allowExtra)) {
config.allowExtra ?? {},
)) {
if (allowed) { if (allowed) {
(params[slot as Slot] as Record<string, unknown>)[key] = value; (params[slot as Slot] as Record<string, unknown>)[key] = value;
break; break;

View File

@@ -8,7 +8,7 @@
* *
* For custom handler behavior, use src/mocks/handlers/overrides.ts * For custom handler behavior, use src/mocks/handlers/overrides.ts
* *
* Generated: 2025-11-26T12:21:51.098Z * Generated: 2026-03-01T17:00:19.178Z
*/ */
import { http, HttpResponse, delay } from 'msw'; import { http, HttpResponse, delay } from 'msw';