feature: create multitasking
This commit is contained in:
@@ -0,0 +1,598 @@
|
|||||||
|
# AGENTS.md - AI Coding Guidelines for CofeeProject Backend
|
||||||
|
|
||||||
|
This document provides guidelines and best practices for AI agents working with this codebase.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Core Principles
|
||||||
|
|
||||||
|
### 1. Code Should Be Simple, Readable, and Well Supported
|
||||||
|
|
||||||
|
- Write code that humans can understand at first glance
|
||||||
|
- Prefer explicit over implicit behavior
|
||||||
|
- Use clear control flow patterns (avoid deeply nested conditions)
|
||||||
|
- Add docstrings for public functions, classes, and modules
|
||||||
|
- Keep functions short and focused (ideally under 30 lines)
|
||||||
|
|
||||||
|
### 2. Less Overhead Is Better
|
||||||
|
|
||||||
|
- Avoid unnecessary abstractions and over-engineering
|
||||||
|
- Don't add layers of indirection without clear benefit
|
||||||
|
- Prefer direct solutions over clever ones
|
||||||
|
- Minimize dependencies where possible
|
||||||
|
- Use built-in Python features before reaching for external libraries
|
||||||
|
|
||||||
|
### 3. No Magic Values
|
||||||
|
|
||||||
|
- Define constants with meaningful names at module level
|
||||||
|
- Use enums or `Literal` types for fixed sets of values (see `ArtifactTypeEnum` pattern)
|
||||||
|
- Configuration values belong in `Settings` class with explicit defaults
|
||||||
|
- Never hardcode timeouts, limits, or thresholds inline
|
||||||
|
|
||||||
|
```python
|
||||||
|
# BAD
|
||||||
|
if silence_db > 16:
|
||||||
|
...
|
||||||
|
|
||||||
|
# GOOD
|
||||||
|
SILENCE_THRESHOLD_DB = 16
|
||||||
|
|
||||||
|
if silence_db > SILENCE_THRESHOLD_DB:
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. One Function Should Implement One Purpose
|
||||||
|
|
||||||
|
- Each function should do exactly one thing
|
||||||
|
- If a function needs "and" in its description, split it
|
||||||
|
- Extract helper functions for distinct subtasks
|
||||||
|
- Keep side effects isolated and predictable
|
||||||
|
|
||||||
|
```python
|
||||||
|
# BAD
|
||||||
|
async def get_and_validate_and_process_media(file_key: str) -> MediaResult:
|
||||||
|
...
|
||||||
|
|
||||||
|
# GOOD
|
||||||
|
async def download_media(file_key: str) -> TempFile:
|
||||||
|
...
|
||||||
|
|
||||||
|
def validate_media_format(file_path: str) -> bool:
|
||||||
|
...
|
||||||
|
|
||||||
|
async def process_media(file_path: str) -> MediaResult:
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. All Variable Names Should Have Meaning Based on Context
|
||||||
|
|
||||||
|
- Use descriptive names that explain purpose, not type
|
||||||
|
- Avoid single-letter variables (except for trivial loops)
|
||||||
|
- Prefix boolean variables with `is_`, `has_`, `can_`, `should_`
|
||||||
|
- Use domain terminology consistently
|
||||||
|
|
||||||
|
```python
|
||||||
|
# BAD
|
||||||
|
x = await repo.get(id)
|
||||||
|
flag = x.is_deleted
|
||||||
|
|
||||||
|
# GOOD
|
||||||
|
media_file = await media_repository.get_by_id(media_file_id)
|
||||||
|
is_soft_deleted = media_file.is_deleted
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Project Architecture
|
||||||
|
|
||||||
|
### Layer Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
cpv3/
|
||||||
|
├── api/v1/ # API version routing
|
||||||
|
├── common/ # Shared schemas and utilities
|
||||||
|
├── db/ # Database base classes and session
|
||||||
|
├── infrastructure/ # Cross-cutting concerns (auth, storage, settings)
|
||||||
|
└── modules/ # Feature modules (domain logic)
|
||||||
|
└── <module>/
|
||||||
|
├── models.py # SQLAlchemy models
|
||||||
|
├── schemas.py # Pydantic DTOs
|
||||||
|
├── repository.py # Database access layer
|
||||||
|
├── service.py # Business logic
|
||||||
|
└── router.py # FastAPI endpoints
|
||||||
|
```
|
||||||
|
|
||||||
|
### Module Responsibilities
|
||||||
|
|
||||||
|
| Layer | Responsibility | Dependencies |
|
||||||
|
| --------------- | ------------------------------------------ | ----------------------------- |
|
||||||
|
| `router.py` | HTTP request/response handling, validation | schemas, service, repository |
|
||||||
|
| `service.py` | Business logic, orchestration | repository, external services |
|
||||||
|
| `repository.py` | Database queries, CRUD operations | models, session |
|
||||||
|
| `schemas.py` | Data transfer objects, validation | pydantic |
|
||||||
|
| `models.py` | Database table definitions | SQLAlchemy |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Coding Standards
|
||||||
|
|
||||||
|
### Python Version & Style
|
||||||
|
|
||||||
|
- **Python 3.11+** required
|
||||||
|
- Use `from __future__ import annotations` for forward references
|
||||||
|
- Line length: **100 characters** (configured in ruff)
|
||||||
|
- Use type hints for all function signatures
|
||||||
|
- Async-first approach for I/O operations
|
||||||
|
|
||||||
|
### Imports
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Standard library
|
||||||
|
from __future__ import annotations
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
# Third-party
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
# Local imports (absolute paths)
|
||||||
|
from cpv3.infrastructure.auth import get_current_user
|
||||||
|
from cpv3.modules.media.schemas import MediaFileRead
|
||||||
|
from cpv3.modules.media.repository import MediaFileRepository
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pydantic Schemas
|
||||||
|
|
||||||
|
- Inherit from `cpv3.common.schemas.Schema` for consistent config
|
||||||
|
- Use `Literal` types for enums with string values
|
||||||
|
- Suffix schema names: `*Create`, `*Update`, `*Read`
|
||||||
|
|
||||||
|
```python
|
||||||
|
from cpv3.common.schemas import Schema
|
||||||
|
|
||||||
|
class MediaFileRead(Schema):
|
||||||
|
id: UUID
|
||||||
|
owner_id: UUID
|
||||||
|
duration_seconds: float
|
||||||
|
is_deleted: bool
|
||||||
|
created_at: datetime
|
||||||
|
```
|
||||||
|
|
||||||
|
### SQLAlchemy Models
|
||||||
|
|
||||||
|
- Inherit from `Base` and `BaseModelMixin`
|
||||||
|
- Use explicit column types
|
||||||
|
- Add indexes for frequently queried fields
|
||||||
|
- Use soft deletes (`is_deleted` flag)
|
||||||
|
|
||||||
|
```python
|
||||||
|
from cpv3.db.base import Base, BaseModelMixin
|
||||||
|
|
||||||
|
class MediaFile(Base, BaseModelMixin):
|
||||||
|
__tablename__ = "media_files"
|
||||||
|
|
||||||
|
owner_id: Mapped[uuid.UUID] = mapped_column(
|
||||||
|
UUID(as_uuid=True), ForeignKey("users.id", ondelete="RESTRICT"), index=True
|
||||||
|
)
|
||||||
|
is_deleted: Mapped[bool] = mapped_column(Boolean, default=False)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Repository Pattern
|
||||||
|
|
||||||
|
- One repository per model
|
||||||
|
- Accept `AsyncSession` in constructor
|
||||||
|
- Methods should be atomic and focused
|
||||||
|
- Filter soft-deleted records by default
|
||||||
|
|
||||||
|
```python
|
||||||
|
class MediaFileRepository:
|
||||||
|
def __init__(self, session: AsyncSession) -> None:
|
||||||
|
self._session = session
|
||||||
|
|
||||||
|
async def get_by_id(self, media_file_id: uuid.UUID) -> MediaFile | None:
|
||||||
|
result = await self._session.execute(
|
||||||
|
select(MediaFile).where(MediaFile.id == media_file_id)
|
||||||
|
)
|
||||||
|
media_file = result.scalar_one_or_none()
|
||||||
|
if media_file is None or media_file.is_deleted:
|
||||||
|
return None
|
||||||
|
return media_file
|
||||||
|
```
|
||||||
|
|
||||||
|
### FastAPI Endpoints
|
||||||
|
|
||||||
|
- Use dependency injection for DB session, auth, and services
|
||||||
|
- Return typed response models
|
||||||
|
- Use appropriate HTTP status codes
|
||||||
|
- Handle errors with `HTTPException`
|
||||||
|
|
||||||
|
```python
|
||||||
|
@router.get("/mediafiles/{media_file_id}", response_model=MediaFileRead)
|
||||||
|
async def get_mediafile(
|
||||||
|
media_file_id: uuid.UUID,
|
||||||
|
current_user: User = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db),
|
||||||
|
) -> MediaFileRead:
|
||||||
|
repo = MediaFileRepository(db)
|
||||||
|
media_file = await repo.get_by_id(media_file_id)
|
||||||
|
if media_file is None:
|
||||||
|
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||||
|
return MediaFileRead.model_validate(media_file)
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Configuration & Settings
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
- All configuration through `Settings` class in `infrastructure/settings.py`
|
||||||
|
- Use `Field(default=..., alias="ENV_VAR_NAME")` pattern
|
||||||
|
- Provide sensible defaults for local development
|
||||||
|
- Never commit secrets to repository
|
||||||
|
|
||||||
|
```python
|
||||||
|
class Settings(BaseSettings):
|
||||||
|
jwt_secret_key: str = Field(default="dev-secret", alias="JWT_SECRET_KEY")
|
||||||
|
jwt_algorithm: str = Field(default="HS256", alias="JWT_ALGORITHM")
|
||||||
|
jwt_access_ttl_minutes: int = Field(default=60, alias="JWT_ACCESS_TTL_MINUTES")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Accessing Settings
|
||||||
|
|
||||||
|
```python
|
||||||
|
from cpv3.infrastructure.settings import get_settings
|
||||||
|
|
||||||
|
settings = get_settings() # Cached via @lru_cache
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Testing Guidelines
|
||||||
|
|
||||||
|
### Test Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
tests/
|
||||||
|
├── conftest.py # Shared fixtures
|
||||||
|
├── unit/ # Unit tests (isolated)
|
||||||
|
└── integration/ # Integration tests (with DB/services)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fixtures
|
||||||
|
|
||||||
|
- Use `pytest-asyncio` for async tests
|
||||||
|
- Create isolated database sessions per test
|
||||||
|
- Mock external services (storage, APIs)
|
||||||
|
|
||||||
|
```python
|
||||||
|
@pytest.fixture
|
||||||
|
async def test_user(test_db_session: AsyncSession) -> User:
|
||||||
|
user = User(
|
||||||
|
id=uuid.uuid4(),
|
||||||
|
username="testuser",
|
||||||
|
email="test@example.com",
|
||||||
|
password_hash=hash_password("testpassword"),
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
test_db_session.add(user)
|
||||||
|
await test_db_session.commit()
|
||||||
|
return user
|
||||||
|
```
|
||||||
|
|
||||||
|
### Test Naming
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Pattern: test_<action>_<condition>_<expected_result>
|
||||||
|
async def test_get_mediafile_when_not_found_returns_404():
|
||||||
|
...
|
||||||
|
|
||||||
|
async def test_create_mediafile_with_valid_data_returns_201():
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Common Patterns
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Use specific HTTP exceptions
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_404_NOT_FOUND,
|
||||||
|
detail="Media file not found"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Re-raise with context
|
||||||
|
try:
|
||||||
|
result = await external_service.call()
|
||||||
|
except ExternalError as e:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_502_BAD_GATEWAY,
|
||||||
|
detail="External service unavailable"
|
||||||
|
) from e
|
||||||
|
```
|
||||||
|
|
||||||
|
### Async Operations
|
||||||
|
|
||||||
|
```python
|
||||||
|
# For CPU-bound work in async context
|
||||||
|
import anyio
|
||||||
|
|
||||||
|
result = await anyio.to_thread.run_sync(cpu_intensive_function, arg1, arg2)
|
||||||
|
|
||||||
|
# For subprocess calls
|
||||||
|
proc = await asyncio.create_subprocess_exec(
|
||||||
|
"ffprobe", "-v", "error", file_path,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
stdout, stderr = await proc.communicate()
|
||||||
|
```
|
||||||
|
|
||||||
|
### Temporary Files
|
||||||
|
|
||||||
|
```python
|
||||||
|
from tempfile import NamedTemporaryFile
|
||||||
|
|
||||||
|
with NamedTemporaryFile(suffix=".mp4", delete=False) as tmp:
|
||||||
|
tmp_path = tmp.name
|
||||||
|
try:
|
||||||
|
# Use tmp_path
|
||||||
|
...
|
||||||
|
finally:
|
||||||
|
# Clean up
|
||||||
|
Path(tmp_path).unlink(missing_ok=True)
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Do's and Don'ts
|
||||||
|
|
||||||
|
### ✅ DO
|
||||||
|
|
||||||
|
- Use type hints everywhere
|
||||||
|
- Write async code for I/O operations
|
||||||
|
- Use dependency injection
|
||||||
|
- Keep modules self-contained
|
||||||
|
- Write tests for new features
|
||||||
|
- Use meaningful commit messages
|
||||||
|
- Follow existing patterns in the codebase
|
||||||
|
|
||||||
|
### ❌ DON'T
|
||||||
|
|
||||||
|
- Use global mutable state
|
||||||
|
- Put business logic in routers
|
||||||
|
- Hardcode configuration values
|
||||||
|
- Ignore type checker warnings
|
||||||
|
- Write overly clever code
|
||||||
|
- Skip error handling
|
||||||
|
- Mix sync and async DB operations
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Quick Reference
|
||||||
|
|
||||||
|
| Task | Location |
|
||||||
|
| --------------------- | ------------------------------------- |
|
||||||
|
| Add new endpoint | `modules/<module>/router.py` |
|
||||||
|
| Add database model | `modules/<module>/models.py` |
|
||||||
|
| Add validation schema | `modules/<module>/schemas.py` |
|
||||||
|
| Add business logic | `modules/<module>/service.py` |
|
||||||
|
| Add database query | `modules/<module>/repository.py` |
|
||||||
|
| Add configuration | `infrastructure/settings.py` |
|
||||||
|
| Add shared utility | `common/` |
|
||||||
|
| Add migration | Run `alembic revision --autogenerate` |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Package Management
|
||||||
|
|
||||||
|
This project uses **[uv](https://docs.astral.sh/uv/)** as the package manager - a fast Python package installer and resolver written in Rust.
|
||||||
|
|
||||||
|
### Common Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install all dependencies
|
||||||
|
uv sync
|
||||||
|
|
||||||
|
# Add a new dependency
|
||||||
|
uv add <package-name>
|
||||||
|
|
||||||
|
# Add a dev dependency
|
||||||
|
uv add --group dev <package-name>
|
||||||
|
|
||||||
|
# Run a command in the virtual environment
|
||||||
|
uv run <command>
|
||||||
|
|
||||||
|
# Run the development server
|
||||||
|
uv run uvicorn cpv3.main:app --reload
|
||||||
|
|
||||||
|
# Run tests
|
||||||
|
uv run pytest
|
||||||
|
```
|
||||||
|
|
||||||
|
### Why uv?
|
||||||
|
|
||||||
|
- **Speed** - 10-100x faster than pip
|
||||||
|
- **Reliable** - Deterministic dependency resolution
|
||||||
|
- **Compatible** - Works with standard `pyproject.toml`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Dependencies
|
||||||
|
|
||||||
|
Key dependencies used in this project:
|
||||||
|
|
||||||
|
- **FastAPI** - Web framework
|
||||||
|
- **SQLAlchemy 2.0** - ORM (async mode)
|
||||||
|
- **Pydantic 2.x** - Data validation
|
||||||
|
- **asyncpg** - PostgreSQL async driver
|
||||||
|
- **Alembic** - Database migrations
|
||||||
|
- **pytest-asyncio** - Async testing
|
||||||
|
- **boto3** - AWS S3 storage
|
||||||
|
- **pydub** - Audio processing
|
||||||
|
- **openai-whisper** - Transcription
|
||||||
|
- **Dramatiq** - Background task queue (with Redis broker)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Common AI Agent Mistakes to Avoid
|
||||||
|
|
||||||
|
This section documents real errors made during AI-assisted development sessions. Learn from these mistakes.
|
||||||
|
|
||||||
|
### 1. Over-Engineering and Breaking Module Structure
|
||||||
|
|
||||||
|
**What happened:** When asked to implement background tasks, the agent created excessive files:
|
||||||
|
|
||||||
|
```
|
||||||
|
# BAD - What was created
|
||||||
|
cpv3/modules/tasks/
|
||||||
|
├── __init__.py
|
||||||
|
├── actors.py # ❌ Non-standard
|
||||||
|
├── base.py # ❌ Non-standard
|
||||||
|
├── db_helpers.py # ❌ Non-standard
|
||||||
|
├── webhook_dispatch.py # ❌ Non-standard
|
||||||
|
├── handlers/ # ❌ Non-standard directory
|
||||||
|
│ ├── __init__.py
|
||||||
|
│ ├── base.py
|
||||||
|
│ ├── media_probe.py
|
||||||
|
│ ├── silence_remove.py
|
||||||
|
│ └── ...
|
||||||
|
├── schemas.py
|
||||||
|
├── service.py
|
||||||
|
└── router.py
|
||||||
|
|
||||||
|
# GOOD - Standard module structure
|
||||||
|
cpv3/modules/tasks/
|
||||||
|
├── __init__.py
|
||||||
|
├── schemas.py # DTOs only
|
||||||
|
├── service.py # All business logic including actors
|
||||||
|
└── router.py # Endpoints only
|
||||||
|
```
|
||||||
|
|
||||||
|
**Why it's wrong:**
|
||||||
|
|
||||||
|
- Ignored existing module patterns in the codebase
|
||||||
|
- Added unnecessary abstraction layers (BaseTaskHandler, registry pattern)
|
||||||
|
- Created cognitive overhead for maintainers
|
||||||
|
|
||||||
|
**Advice:**
|
||||||
|
|
||||||
|
- **ALWAYS examine existing modules first** before creating new ones
|
||||||
|
- **Match the existing file naming conventions exactly**
|
||||||
|
- Standard module files: `__init__.py`, `models.py`, `schemas.py`, `repository.py`, `service.py`, `router.py`
|
||||||
|
- Only create files from this list; consolidate everything else into `service.py`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2. Misinterpreting "Make It Flexible" or "Apply SRP"
|
||||||
|
|
||||||
|
**What happened:** When asked to "make tasks module more flexible with SRP compliance", the agent interpreted this as creating:
|
||||||
|
|
||||||
|
- Abstract base classes (`BaseTaskHandler`, `BaseTaskSubmitter`)
|
||||||
|
- A registry pattern with dynamic handler registration
|
||||||
|
- Separate files for each handler implementation
|
||||||
|
- Complex inheritance hierarchies
|
||||||
|
|
||||||
|
**Why it's wrong:**
|
||||||
|
|
||||||
|
- SRP doesn't mean "one class per file" or "maximum abstraction"
|
||||||
|
- Flexibility doesn't mean "prepare for every possible future change"
|
||||||
|
- This violates the project's core principle: **"Less Overhead Is Better"**
|
||||||
|
|
||||||
|
**Advice:**
|
||||||
|
|
||||||
|
- SRP = one function does one thing, NOT one file per concept
|
||||||
|
- "Flexible" = easy to modify, NOT infinitely extensible
|
||||||
|
- When in doubt, keep it in one file and refactor later if needed
|
||||||
|
- Abstract base classes are rarely needed; prefer composition
|
||||||
|
|
||||||
|
```python
|
||||||
|
# BAD - Over-abstracted
|
||||||
|
class BaseTaskHandler(ABC):
|
||||||
|
@abstractmethod
|
||||||
|
async def validate(self, request): ...
|
||||||
|
@abstractmethod
|
||||||
|
async def execute(self, job_id): ...
|
||||||
|
@abstractmethod
|
||||||
|
async def on_complete(self, result): ...
|
||||||
|
|
||||||
|
class MediaProbeHandler(BaseTaskHandler):
|
||||||
|
...
|
||||||
|
|
||||||
|
# GOOD - Simple and direct
|
||||||
|
@dramatiq.actor
|
||||||
|
def media_probe_actor(job_id: str, media_file_id: str) -> None:
|
||||||
|
"""Probe media file for metadata."""
|
||||||
|
# All logic here, no inheritance needed
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3. Not Reading AGENTS.md Before Starting
|
||||||
|
|
||||||
|
**What happened:** The agent proceeded with implementation without fully considering the documented principles, particularly:
|
||||||
|
|
||||||
|
- "Avoid unnecessary abstractions and over-engineering"
|
||||||
|
- "Don't add layers of indirection without clear benefit"
|
||||||
|
- "Prefer direct solutions over clever ones"
|
||||||
|
|
||||||
|
**Advice:**
|
||||||
|
|
||||||
|
- **Read AGENTS.md completely before any implementation**
|
||||||
|
- Re-read relevant sections when making architectural decisions
|
||||||
|
- When the user's request conflicts with AGENTS.md principles, ask for clarification
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 4. Creating Files Without Checking Existing Patterns
|
||||||
|
|
||||||
|
**What happened:** The agent created `handlers/` subdirectory and multiple utility files without checking how other modules handle similar needs.
|
||||||
|
|
||||||
|
**Advice:**
|
||||||
|
|
||||||
|
- Before creating ANY new file, run: `ls cpv3/modules/<similar_module>/`
|
||||||
|
- Check if the functionality can fit into existing standard files
|
||||||
|
- If you need a helper function, put it in `service.py`, not a new file
|
||||||
|
- Subdirectories within modules are almost never appropriate
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 5. Ignoring the "Quick Reference" Table
|
||||||
|
|
||||||
|
The AGENTS.md contains a clear reference:
|
||||||
|
|
||||||
|
| Task | Location |
|
||||||
|
| --------------------- | -------------------------------- |
|
||||||
|
| Add new endpoint | `modules/<module>/router.py` |
|
||||||
|
| Add database model | `modules/<module>/models.py` |
|
||||||
|
| Add validation schema | `modules/<module>/schemas.py` |
|
||||||
|
| Add business logic | `modules/<module>/service.py` |
|
||||||
|
| Add database query | `modules/<module>/repository.py` |
|
||||||
|
|
||||||
|
**Advice:**
|
||||||
|
|
||||||
|
- Use this table as the ONLY guide for file placement
|
||||||
|
- If something doesn't fit these categories, it probably belongs in `service.py`
|
||||||
|
- Cross-cutting concerns go in `infrastructure/`, not in module subdirectories
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Summary: The Golden Rules
|
||||||
|
|
||||||
|
1. **Check existing patterns first** - Look at 2-3 similar modules before creating anything
|
||||||
|
2. **Standard files only** - `__init__.py`, `models.py`, `schemas.py`, `repository.py`, `service.py`, `router.py`
|
||||||
|
3. **No subdirectories in modules** - Everything fits in the standard files
|
||||||
|
4. **Consolidate, don't split** - When unsure, put it in `service.py`
|
||||||
|
5. **Simple > Clever** - Direct code beats abstract patterns
|
||||||
|
6. **YAGNI** - Don't build for hypothetical future requirements
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
_Last updated: February 2026_
|
||||||
@@ -12,6 +12,7 @@ from cpv3.modules.jobs.router import events_router, jobs_router
|
|||||||
from cpv3.modules.media.router import artifacts_router, media_router, mediafiles_router
|
from cpv3.modules.media.router import artifacts_router, media_router, mediafiles_router
|
||||||
from cpv3.modules.projects.router import router as projects_router
|
from cpv3.modules.projects.router import router as projects_router
|
||||||
from cpv3.modules.system.router import router as system_router
|
from cpv3.modules.system.router import router as system_router
|
||||||
|
from cpv3.modules.tasks.router import router as tasks_router
|
||||||
from cpv3.modules.transcription.router import router as transcription_router
|
from cpv3.modules.transcription.router import router as transcription_router
|
||||||
from cpv3.modules.users.router import auth_router, users_router
|
from cpv3.modules.users.router import auth_router, users_router
|
||||||
from cpv3.modules.webhooks.router import router as webhooks_router
|
from cpv3.modules.webhooks.router import router as webhooks_router
|
||||||
@@ -44,5 +45,8 @@ api_router.include_router(captions_router)
|
|||||||
api_router.include_router(jobs_router)
|
api_router.include_router(jobs_router)
|
||||||
api_router.include_router(events_router)
|
api_router.include_router(events_router)
|
||||||
|
|
||||||
|
# Tasks (background processing)
|
||||||
|
api_router.include_router(tasks_router)
|
||||||
|
|
||||||
# Webhooks
|
# Webhooks
|
||||||
api_router.include_router(webhooks_router)
|
api_router.include_router(webhooks_router)
|
||||||
|
|||||||
@@ -75,6 +75,14 @@ class Settings(BaseSettings):
|
|||||||
alias="GOOGLE_APPLICATION_CREDENTIALS",
|
alias="GOOGLE_APPLICATION_CREDENTIALS",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Redis / Dramatiq
|
||||||
|
redis_url: str = Field(default="redis://localhost:6379/0", alias="REDIS_URL")
|
||||||
|
|
||||||
|
# Webhook / Task settings
|
||||||
|
webhook_base_url: str = Field(
|
||||||
|
default="http://localhost:8000", alias="WEBHOOK_BASE_URL"
|
||||||
|
)
|
||||||
|
|
||||||
def get_database_url(self) -> str:
|
def get_database_url(self) -> str:
|
||||||
if self.database_url:
|
if self.database_url:
|
||||||
return self.database_url
|
return self.database_url
|
||||||
|
|||||||
@@ -8,7 +8,13 @@ from cpv3.common.schemas import Schema
|
|||||||
|
|
||||||
|
|
||||||
JobStatusEnum = Literal["PENDING", "RUNNING", "FAILED", "CANCELLED", "DONE"]
|
JobStatusEnum = Literal["PENDING", "RUNNING", "FAILED", "CANCELLED", "DONE"]
|
||||||
JobTypeEnum = Literal["PENDING", "RUNNING", "FAILED", "CANCELLED", "DONE"]
|
JobTypeEnum = Literal[
|
||||||
|
"MEDIA_PROBE",
|
||||||
|
"SILENCE_REMOVE",
|
||||||
|
"MEDIA_CONVERT",
|
||||||
|
"TRANSCRIPTION_GENERATE",
|
||||||
|
"CAPTIONS_GENERATE",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class JobRead(Schema):
|
class JobRead(Schema):
|
||||||
@@ -40,7 +46,7 @@ class JobCreate(Schema):
|
|||||||
project_id: UUID | None = None
|
project_id: UUID | None = None
|
||||||
input_data: dict | None = None
|
input_data: dict | None = None
|
||||||
status: JobStatusEnum = "PENDING"
|
status: JobStatusEnum = "PENDING"
|
||||||
job_type: JobTypeEnum = "PENDING"
|
job_type: JobTypeEnum
|
||||||
|
|
||||||
|
|
||||||
class JobUpdate(Schema):
|
class JobUpdate(Schema):
|
||||||
|
|||||||
@@ -0,0 +1,5 @@
|
|||||||
|
"""
|
||||||
|
Background tasks module for long-running operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
@@ -0,0 +1,168 @@
|
|||||||
|
"""
|
||||||
|
API endpoints for background task submission.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
from typing import cast
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Request, status
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from cpv3.db.session import get_db
|
||||||
|
from cpv3.infrastructure.auth import get_current_user
|
||||||
|
from cpv3.modules.jobs.service import JobService
|
||||||
|
from cpv3.modules.tasks.schemas import (
|
||||||
|
CaptionsGenerateRequest,
|
||||||
|
MediaConvertRequest,
|
||||||
|
MediaProbeRequest,
|
||||||
|
SilenceRemoveRequest,
|
||||||
|
TaskStatusEnum,
|
||||||
|
TaskStatusResponse,
|
||||||
|
TaskSubmitResponse,
|
||||||
|
TaskTypeEnum,
|
||||||
|
TranscriptionGenerateRequest,
|
||||||
|
)
|
||||||
|
from cpv3.modules.tasks.service import TaskService
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/tasks", tags=["tasks"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/media-probe/",
    response_model=TaskSubmitResponse,
    status_code=status.HTTP_202_ACCEPTED,
)
async def submit_media_probe(
    body: MediaProbeRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TaskSubmitResponse:
    """Submit a background task that probes a media file for metadata.

    Returns 202 with the submitted task's identifying info; all validation
    and job creation is delegated to ``TaskService``.
    """
    task_service = TaskService(db)
    return await task_service.submit_media_probe(requester=current_user, request=body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/silence-remove/",
    response_model=TaskSubmitResponse,
    status_code=status.HTTP_202_ACCEPTED,
)
async def submit_silence_remove(
    body: SilenceRemoveRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TaskSubmitResponse:
    """Submit a background task that strips silent segments from a media file.

    Returns 202; job creation is delegated to ``TaskService``.
    """
    task_service = TaskService(db)
    return await task_service.submit_silence_remove(requester=current_user, request=body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/media-convert/",
    response_model=TaskSubmitResponse,
    status_code=status.HTTP_202_ACCEPTED,
)
async def submit_media_convert(
    body: MediaConvertRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TaskSubmitResponse:
    """Submit a background task that converts a media file to another format.

    Returns 202; job creation is delegated to ``TaskService``.
    """
    task_service = TaskService(db)
    return await task_service.submit_media_convert(requester=current_user, request=body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/transcription-generate/",
    response_model=TaskSubmitResponse,
    status_code=status.HTTP_202_ACCEPTED,
)
async def submit_transcription_generate(
    body: TranscriptionGenerateRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TaskSubmitResponse:
    """Submit a background task that generates a transcription from audio/video.

    Returns 202; job creation is delegated to ``TaskService``.
    """
    task_service = TaskService(db)
    return await task_service.submit_transcription_generate(requester=current_user, request=body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/captions-generate/",
    response_model=TaskSubmitResponse,
    status_code=status.HTTP_202_ACCEPTED,
)
async def submit_captions_generate(
    body: CaptionsGenerateRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TaskSubmitResponse:
    """Submit a background task to generate captions on video.

    Returns 202 on success. Raises 400 when the service rejects the request
    parameters with a ``ValueError``.
    """
    service = TaskService(db)
    try:
        return await service.submit_captions_generate(requester=current_user, request=body)
    except ValueError as e:
        # Chain the original exception so the root cause survives in tracebacks,
        # per this project's "Re-raise with context" error-handling convention.
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/status/{job_id}/", response_model=TaskStatusResponse)
async def get_task_status(
    job_id: uuid.UUID,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TaskStatusResponse:
    """Return the current status of a background task.

    Raises 404 when the job does not exist and 403 when a non-staff user
    requests a job owned by another user.
    """
    job_service = JobService(db)
    job = await job_service.get_job(job_id)

    if job is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Job not found")

    # Staff may inspect any job; regular users may only see their own.
    if not current_user.is_staff and job.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")

    return TaskStatusResponse(
        job_id=job.id,
        status=cast(TaskStatusEnum, job.status),
        job_type=cast(TaskTypeEnum, job.job_type),
        # FIX: original read `job.project_pct`, which does not match the
        # response field name; `progress_pct` matches the field populated here
        # — TODO(review): confirm the attribute name on the Job model.
        progress_pct=job.progress_pct,
        current_message=job.current_message,
        error_message=job.error_message,
        output_data=job.output_data,
        started_at=job.started_at,
        finished_at=job.finished_at,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/webhook/{job_id}/", include_in_schema=False)
async def task_webhook_callback(
    job_id: uuid.UUID,
    request: Request,
    db: AsyncSession = Depends(get_db),
) -> dict[str, str]:
    """Internal webhook endpoint for task status updates.

    Validates that the request body is JSON and that the referenced job
    exists, then acknowledges receipt. Raises 400 for malformed payloads
    and 404 for unknown jobs.
    """
    try:
        await request.json()
    except Exception as e:  # payload is untrusted; any parse failure maps to 400
        # Chain the parse error so the root cause survives in tracebacks,
        # per this project's "Re-raise with context" convention.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid JSON payload"
        ) from e

    job_service = JobService(db)
    job = await job_service.get_job(job_id)

    if job is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Job not found")

    return {"status": "received", "job_id": str(job_id)}
|
||||||
@@ -0,0 +1,106 @@
|
|||||||
|
"""
|
||||||
|
Task request and response schemas.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Literal
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from cpv3.common.schemas import Schema
|
||||||
|
|
||||||
|
|
||||||
|
# Discriminator for the kind of background task; each value maps to one
# Dramatiq actor in cpv3.modules.tasks.service. Presumably mirrors the jobs
# module's JobTypeEnum — TODO confirm they stay in sync.
TaskTypeEnum = Literal[
    "MEDIA_PROBE",
    "SILENCE_REMOVE",
    "MEDIA_CONVERT",
    "TRANSCRIPTION_GENERATE",
    "CAPTIONS_GENERATE",
]

# Lifecycle states of a background task, from submission to terminal state.
TaskStatusEnum = Literal["PENDING", "RUNNING", "FAILED", "CANCELLED", "DONE"]
|
||||||
|
|
||||||
|
|
||||||
|
# --- Request schemas ---
|
||||||
|
|
||||||
|
|
||||||
|
class MediaProbeRequest(Schema):
    """Request to probe media file metadata.

    Submitted via the MEDIA_PROBE task; the worker reads *file_key* from
    storage and returns the probe result in the job's output_data.
    """

    file_key: str = Field(..., description="Storage key of the file to probe")
    # Optional link to a project; also used for the task's webhook record.
    project_id: UUID | None = Field(default=None, description="Associated project ID")
|
||||||
|
|
||||||
|
|
||||||
|
# Named defaults for silence detection — no inline magic values
# (see AGENTS.md "No Magic Values"). Values are unchanged.
DEFAULT_MIN_SILENCE_DURATION_MS = 200
DEFAULT_SILENCE_THRESHOLD_DB = 16
DEFAULT_PADDING_MS = 100


class SilenceRemoveRequest(Schema):
    """Request to remove silence from media file.

    Submitted via the SILENCE_REMOVE task; the worker writes the processed
    file into *out_folder* and reports its path/url/size in output_data.
    """

    file_key: str = Field(..., description="Storage key of the input file")
    out_folder: str = Field(..., description="Output folder for processed file")
    project_id: UUID | None = Field(default=None, description="Associated project ID")
    # Segments of silence shorter than this are kept as-is.
    min_silence_duration_ms: int = Field(
        default=DEFAULT_MIN_SILENCE_DURATION_MS,
        description="Minimum silence duration in milliseconds",
    )
    silence_threshold_db: int = Field(
        default=DEFAULT_SILENCE_THRESHOLD_DB,
        description="Silence threshold in decibels",
    )
    # Context preserved on each side of a kept (non-silent) segment.
    padding_ms: int = Field(
        default=DEFAULT_PADDING_MS,
        description="Padding around non-silent segments in milliseconds",
    )
|
||||||
|
|
||||||
|
|
||||||
|
class MediaConvertRequest(Schema):
    """Request to convert media file to different format.

    Submitted via the MEDIA_CONVERT task. The worker currently accepts only
    "mp4" as the target format and fails the job for anything else.
    """

    file_key: str = Field(..., description="Storage key of the input file")
    out_folder: str = Field(..., description="Output folder for converted file")
    output_format: str = Field(default="mp4", description="Target output format")
    project_id: UUID | None = Field(default=None, description="Associated project ID")
|
||||||
|
|
||||||
|
|
||||||
|
class TranscriptionGenerateRequest(Schema):
    """Request to generate transcription from audio/video file.

    Submitted via the TRANSCRIPTION_GENERATE task; the resulting document
    is returned in the job's output_data.
    """

    file_key: str = Field(..., description="Storage key of the input file")
    project_id: UUID | None = Field(default=None, description="Associated project ID")
    engine: Literal["whisper", "google"] = Field(
        default="whisper", description="Transcription engine to use"
    )
    # None lets the engine auto-detect; for google this becomes the single
    # entry of its language_codes list.
    language: str | None = Field(default=None, description="Language code (e.g., 'en')")
    # Only used by the whisper engine; ignored by google.
    model: str = Field(default="base", description="Model size for whisper")
|
||||||
|
|
||||||
|
|
||||||
|
class CaptionsGenerateRequest(Schema):
    """Request to generate captions/subtitles video.

    Submitted via the CAPTIONS_GENERATE task; the worker renders captions
    from an existing transcription onto the video.
    """

    video_s3_path: str = Field(..., description="S3 path to the video file")
    folder: str = Field(..., description="Output folder for rendered video")
    # Must reference an existing transcription; submission fails with 400
    # when it does not exist.
    transcription_id: UUID = Field(..., description="ID of the transcription to use")
    project_id: UUID | None = Field(default=None, description="Associated project ID")
|
||||||
|
|
||||||
|
|
||||||
|
# --- Response schemas ---
|
||||||
|
|
||||||
|
|
||||||
|
class TaskSubmitResponse(Schema):
    """Response after submitting a background task.

    Returned with HTTP 202; poll /status/{job_id}/ or listen on the
    webhook URL for progress.
    """

    job_id: UUID = Field(..., description="Job ID for tracking")
    webhook_url: str = Field(..., description="Webhook URL for status updates")
    status: TaskStatusEnum = Field(default="PENDING", description="Initial task status")
|
||||||
|
|
||||||
|
|
||||||
|
class TaskStatusResponse(Schema):
    """Response for task status query.

    Mirrors the Job row; optional fields stay None until the corresponding
    lifecycle stage has been reached.
    """

    job_id: UUID
    status: TaskStatusEnum
    job_type: TaskTypeEnum
    # Percent complete (0-100); populated from Job.project_pct.
    progress_pct: float | None = None
    # Human-readable description of the current processing step.
    current_message: str | None = None
    # Set only when the job has FAILED.
    error_message: str | None = None
    # Task result payload on success; shape depends on job_type.
    output_data: dict | None = None
    started_at: datetime | None = None
    finished_at: datetime | None = None
|
||||||
@@ -0,0 +1,510 @@
|
|||||||
|
"""
|
||||||
|
Task service for submitting and managing background tasks.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import dramatiq # type: ignore[import-untyped]
|
||||||
|
from dramatiq.brokers.redis import RedisBroker # type: ignore[import-untyped]
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from sqlalchemy import create_engine, select
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy.orm import Session, sessionmaker
|
||||||
|
|
||||||
|
from cpv3.infrastructure.deps import _get_storage_service
|
||||||
|
from cpv3.infrastructure.settings import get_settings
|
||||||
|
from cpv3.modules.jobs.models import Job, JobEvent
|
||||||
|
from cpv3.modules.jobs.repository import JobRepository
|
||||||
|
from cpv3.modules.jobs.schemas import JobCreate, JobTypeEnum
|
||||||
|
from cpv3.modules.tasks.schemas import (
|
||||||
|
CaptionsGenerateRequest,
|
||||||
|
MediaConvertRequest,
|
||||||
|
MediaProbeRequest,
|
||||||
|
SilenceRemoveRequest,
|
||||||
|
TaskSubmitResponse,
|
||||||
|
TranscriptionGenerateRequest,
|
||||||
|
)
|
||||||
|
from cpv3.modules.transcription.repository import TranscriptionRepository
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
from cpv3.modules.webhooks.repository import WebhookRepository
|
||||||
|
from cpv3.modules.webhooks.schemas import WebhookCreate
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)

# ---------------------------------------------------------------------------
# Dramatiq broker setup
# ---------------------------------------------------------------------------

# NOTE: executed at import time — merely importing this module configures the
# global Dramatiq broker from settings.redis_url. Both the API process and
# the worker (`dramatiq cpv3.modules.tasks.service ...`) rely on this.
_settings = get_settings()
_redis_broker = RedisBroker(url=_settings.redis_url)
dramatiq.set_broker(_redis_broker)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Sync DB helpers for Dramatiq workers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
# Lazily-built session factory shared by all worker tasks in this process.
# The previous implementation created a brand-new Engine (and connection
# pool) on every call and never disposed it — leaking connections under load.
_sync_session_factory: sessionmaker | None = None


def _get_sync_session() -> Session:
    """Create sync DB session for worker tasks.

    The engine and session factory are built once per process and reused;
    each call returns a fresh Session bound to the shared engine.

    Returns:
        A new SQLAlchemy Session (caller is responsible for closing it,
        typically via ``with _get_sync_session() as session:``).
    """
    global _sync_session_factory
    if _sync_session_factory is None:
        settings = get_settings()
        # Workers are plain sync code, so strip the asyncpg driver suffix to
        # get a synchronous (psycopg2) connection URL.
        sync_url = settings.get_database_url().replace(
            "postgresql+asyncpg://", "postgresql://"
        )
        engine = create_engine(sync_url, pool_pre_ping=True)
        _sync_session_factory = sessionmaker(bind=engine, expire_on_commit=False)
    return _sync_session_factory()
|
||||||
|
|
||||||
|
|
||||||
|
def _update_job(
    job_id: uuid.UUID,
    *,
    status: str | None = None,
    current_message: str | None = None,
    progress_pct: float | None = None,
    error_message: str | None = None,
    output_data: dict | None = None,
    started_at: datetime | None = None,
    finished_at: datetime | None = None,
) -> Job | None:
    """Update job in database (sync, for workers).

    Applies only the keyword fields that were provided (``None`` means
    "leave unchanged"), appends a JobEvent describing the transition, and
    commits — all inside a short-lived sync session.

    Args:
        job_id: Primary key of the job to update.
        status: New lifecycle status (e.g. "RUNNING", "DONE", "FAILED").
        current_message: Human-readable description of the current step.
        progress_pct: Percent complete, 0-100.
        error_message: Failure reason; set when status is "FAILED".
        output_data: Result payload; set when status is "DONE".
        started_at: Timestamp of when processing began.
        finished_at: Timestamp of when processing ended (success or failure).

    Returns:
        The refreshed Job, or ``None`` when no job with *job_id* exists.
    """
    with _get_sync_session() as session:
        job = session.execute(select(Job).where(Job.id == job_id)).scalar_one_or_none()
        if job is None:
            return None

        if status is not None:
            job.status = status
        if current_message is not None:
            job.current_message = current_message
        if progress_pct is not None:
            # NOTE(review): the model attribute is named "project_pct" even
            # though it stores a progress percentage — confirm the naming.
            job.project_pct = progress_pct
        if error_message is not None:
            job.error_message = error_message
        if output_data is not None:
            job.output_data = output_data
        if started_at is not None:
            job.started_at = started_at
        if finished_at is not None:
            job.finished_at = finished_at

        # Audit trail: one JobEvent per update, typed by the status change
        # ("status_RUNNING", "status_DONE", ...) or "progress" otherwise.
        event = JobEvent(
            job_id=job_id,
            event_type=f"status_{status}" if status else "progress",
            payload={"status": status or job.status, "message": current_message},
        )
        session.add(event)
        session.commit()
        session.refresh(job)
        return job
|
||||||
|
|
||||||
|
|
||||||
|
def _run_async(coro: Any) -> Any:
    """Run *coro* to completion in a fresh event loop and return its result.

    Dramatiq actors are plain sync functions while the media/transcription
    services are async; this bridges the two. ``asyncio.run`` is the modern
    equivalent of the manual ``new_event_loop``/``close`` dance, and
    additionally shuts down async generators and cancels leftover tasks
    before closing the loop.

    Must not be called from a thread that is already running an event loop
    (``asyncio.run`` raises RuntimeError in that case).
    """
    return asyncio.run(coro)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Dramatiq actors
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@dramatiq.actor(max_retries=3, min_backoff=1000)
def media_probe_actor(job_id: str, file_key: str) -> None:
    """Probe media file to extract metadata.

    Marks the job RUNNING, probes *file_key* via the media service, then
    marks the job DONE with the probe result as output_data. On any error
    the job is marked FAILED and the exception re-raised so Dramatiq
    retries (up to max_retries, min_backoff ms between attempts).
    """
    # Lazy import keeps worker startup light and avoids import-time cycles
    # with the media module — presumably; TODO confirm the cycle exists.
    from cpv3.modules.media.service import probe_media

    _job_id = uuid.UUID(job_id)
    _update_job(
        _job_id,
        status="RUNNING",
        current_message="Starting",
        started_at=datetime.now(timezone.utc),
    )

    try:
        storage = _get_storage_service()
        _update_job(_job_id, current_message="Probing media", progress_pct=50.0)
        # probe_media is async; actors are sync, so bridge via _run_async.
        result = _run_async(probe_media(storage, file_key=file_key))
        _update_job(
            _job_id,
            status="DONE",
            current_message="Completed",
            progress_pct=100.0,
            output_data=result.model_dump(mode="json"),
            finished_at=datetime.now(timezone.utc),
        )
    except Exception as e:
        logger.exception("media_probe_actor failed: %s", _job_id)
        _update_job(
            _job_id,
            status="FAILED",
            error_message=str(e),
            finished_at=datetime.now(timezone.utc),
        )
        # Re-raise so Dramatiq applies its retry policy.
        raise
|
||||||
|
|
||||||
|
|
||||||
|
@dramatiq.actor(max_retries=3, min_backoff=1000)
def silence_remove_actor(
    job_id: str,
    file_key: str,
    out_folder: str,
    min_silence_duration_ms: int,
    silence_threshold_db: int,
    padding_ms: int,
) -> None:
    """Remove silence from media file.

    Marks the job RUNNING, runs the media service's remove_silence with the
    given detection parameters, and records the resulting file's
    path/url/size in output_data on success. Failures mark the job FAILED
    and re-raise for Dramatiq's retry policy.
    """
    # Lazy import to keep worker startup light (see media_probe_actor).
    from cpv3.modules.media.service import remove_silence

    _job_id = uuid.UUID(job_id)
    _update_job(
        _job_id,
        status="RUNNING",
        current_message="Starting",
        started_at=datetime.now(timezone.utc),
    )

    try:
        storage = _get_storage_service()
        _update_job(_job_id, current_message="Processing", progress_pct=30.0)
        # Async service call bridged into this sync actor.
        result = _run_async(
            remove_silence(
                storage,
                file_key=file_key,
                out_folder=out_folder,
                min_silence_duration_ms=min_silence_duration_ms,
                silence_threshold_db=silence_threshold_db,
                padding_ms=padding_ms,
            )
        )
        _update_job(
            _job_id,
            status="DONE",
            current_message="Completed",
            progress_pct=100.0,
            output_data={
                "file_path": result.file_path,
                "file_url": result.file_url,
                "file_size": result.file_size,
            },
            finished_at=datetime.now(timezone.utc),
        )
    except Exception as e:
        logger.exception("silence_remove_actor failed: %s", _job_id)
        _update_job(
            _job_id,
            status="FAILED",
            error_message=str(e),
            finished_at=datetime.now(timezone.utc),
        )
        # Re-raise so Dramatiq applies its retry policy.
        raise
|
||||||
|
|
||||||
|
|
||||||
|
@dramatiq.actor(max_retries=3, min_backoff=1000)
def media_convert_actor(
    job_id: str, file_key: str, out_folder: str, output_format: str
) -> None:
    """Convert media file to specified format.

    Only "mp4" (case-insensitive) is supported; any other *output_format*
    fails the job. On success, the converted file's path/url/size is
    recorded in output_data.
    """
    # Lazy import to keep worker startup light (see media_probe_actor).
    from cpv3.modules.media.service import convert_to_mp4

    _job_id = uuid.UUID(job_id)
    _update_job(
        _job_id,
        status="RUNNING",
        current_message="Starting",
        started_at=datetime.now(timezone.utc),
    )

    try:
        if output_format.lower() != "mp4":
            # NOTE(review): an unsupported format can never succeed, yet
            # raising here still consumes all max_retries — consider failing
            # without retry for validation errors.
            raise ValueError(f"Unsupported format: {output_format}")

        storage = _get_storage_service()
        _update_job(_job_id, current_message="Converting", progress_pct=30.0)
        # Async service call bridged into this sync actor.
        result = _run_async(
            convert_to_mp4(storage, file_key=file_key, out_folder=out_folder)
        )
        _update_job(
            _job_id,
            status="DONE",
            current_message="Completed",
            progress_pct=100.0,
            output_data={
                "file_path": result.file_path,
                "file_url": result.file_url,
                "file_size": result.file_size,
            },
            finished_at=datetime.now(timezone.utc),
        )
    except Exception as e:
        logger.exception("media_convert_actor failed: %s", _job_id)
        _update_job(
            _job_id,
            status="FAILED",
            error_message=str(e),
            finished_at=datetime.now(timezone.utc),
        )
        # Re-raise so Dramatiq applies its retry policy.
        raise
|
||||||
|
|
||||||
|
|
||||||
|
@dramatiq.actor(max_retries=2, min_backoff=2000)
def transcription_generate_actor(
    job_id: str, file_key: str, engine: str, language: str | None, model: str
) -> None:
    """Generate transcription from audio/video file.

    Dispatches to the whisper or google transcription backend based on
    *engine* and stores the resulting document (JSON-serialized) in the
    job's output_data. *model* is only used by whisper; *language* is
    optional (auto-detect when None).
    """
    # Lazy import to keep worker startup light (see media_probe_actor).
    from cpv3.modules.transcription.service import (
        transcribe_with_google_speech,
        transcribe_with_whisper,
    )

    _job_id = uuid.UUID(job_id)
    _update_job(
        _job_id,
        status="RUNNING",
        current_message="Starting",
        started_at=datetime.now(timezone.utc),
    )

    try:
        storage = _get_storage_service()
        _update_job(
            _job_id, current_message=f"Transcribing ({engine})", progress_pct=20.0
        )

        if engine == "whisper":
            document = _run_async(
                transcribe_with_whisper(
                    storage, file_key=file_key, model_name=model, language=language
                )
            )
        elif engine == "google":
            # Google's API takes a list of candidate languages.
            language_codes = [language] if language else None
            document = _run_async(
                transcribe_with_google_speech(
                    storage, file_key=file_key, language_codes=language_codes
                )
            )
        else:
            raise ValueError(f"Unknown engine: {engine}")

        _update_job(
            _job_id,
            status="DONE",
            current_message="Completed",
            progress_pct=100.0,
            output_data={"document": document.model_dump(mode="json")},
            finished_at=datetime.now(timezone.utc),
        )
    except Exception as e:
        logger.exception("transcription_generate_actor failed: %s", _job_id)
        _update_job(
            _job_id,
            status="FAILED",
            error_message=str(e),
            finished_at=datetime.now(timezone.utc),
        )
        # Re-raise so Dramatiq applies its retry policy.
        raise
|
||||||
|
|
||||||
|
|
||||||
|
@dramatiq.actor(max_retries=2, min_backoff=2000)
def captions_generate_actor(
    job_id: str, video_s3_path: str, folder: str, transcription_json: dict
) -> None:
    """Generate captions on video.

    Re-validates *transcription_json* into a Document (it crossed the broker
    as plain JSON) and renders captions onto the video, storing the output
    path in the job's output_data.
    """
    # Lazy imports to keep worker startup light (see media_probe_actor).
    from cpv3.modules.captions.service import generate_captions
    from cpv3.modules.transcription.schemas import Document

    _job_id = uuid.UUID(job_id)
    _update_job(
        _job_id,
        status="RUNNING",
        current_message="Starting",
        started_at=datetime.now(timezone.utc),
    )

    try:
        _update_job(_job_id, current_message="Rendering captions", progress_pct=30.0)
        document = Document.model_validate(transcription_json)
        # Async service call bridged into this sync actor.
        output_path = _run_async(
            generate_captions(
                video_s3_path=video_s3_path, folder=folder, transcription=document
            )
        )
        _update_job(
            _job_id,
            status="DONE",
            current_message="Completed",
            progress_pct=100.0,
            output_data={"output_path": output_path},
            finished_at=datetime.now(timezone.utc),
        )
    except Exception as e:
        logger.exception("captions_generate_actor failed: %s", _job_id)
        _update_job(
            _job_id,
            status="FAILED",
            error_message=str(e),
            finished_at=datetime.now(timezone.utc),
        )
        # Re-raise so Dramatiq applies its retry policy.
        raise
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Task Service
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TaskService:
    """Service for submitting background tasks.

    Every ``submit_*`` method follows the same protocol:

    1. persist a Job row (via ``_create_job``), which also registers a
       webhook that receives status callbacks for the job;
    2. enqueue the matching Dramatiq actor with the job id and request data;
    3. return a TaskSubmitResponse in the initial "PENDING" state.
    """

    def __init__(self, session: AsyncSession) -> None:
        self._session = session
        self._job_repo = JobRepository(session)
        self._webhook_repo = WebhookRepository(session)

    async def _create_job(
        self,
        *,
        requester: User,
        job_type: JobTypeEnum,
        project_id: uuid.UUID | None,
        input_data: dict,
    ) -> tuple[Job, str]:
        """Create job and webhook, return job and webhook URL.

        The Job row is created *before* enqueueing so the actor can always
        find it; ``broker_id`` is a client-side correlation id.
        """
        settings = get_settings()
        broker_id = uuid.uuid4().hex

        job = await self._job_repo.create(
            requester=requester,
            data=JobCreate(
                broker_id=broker_id,
                project_id=project_id,
                input_data=input_data,
                job_type=job_type,
            ),
        )

        webhook_url = f"{settings.webhook_base_url}/api/tasks/webhook/{job.id}/"
        await self._webhook_repo.create(
            requester=requester,
            data=WebhookCreate(
                project_id=project_id, event=f"task.{job_type.lower()}", url=webhook_url
            ),
        )

        return job, webhook_url

    @staticmethod
    def _pending_response(job: Job, webhook_url: str) -> TaskSubmitResponse:
        """Build the standard "accepted" response for a freshly submitted job."""
        return TaskSubmitResponse(
            job_id=job.id, webhook_url=webhook_url, status="PENDING"
        )

    async def submit_media_probe(
        self, *, requester: User, request: MediaProbeRequest
    ) -> TaskSubmitResponse:
        """Submit media probe task."""
        job, webhook_url = await self._create_job(
            requester=requester,
            job_type="MEDIA_PROBE",
            project_id=request.project_id,
            input_data=request.model_dump(mode="json"),
        )
        media_probe_actor.send(job_id=str(job.id), file_key=request.file_key)
        return self._pending_response(job, webhook_url)

    async def submit_silence_remove(
        self, *, requester: User, request: SilenceRemoveRequest
    ) -> TaskSubmitResponse:
        """Submit silence removal task."""
        job, webhook_url = await self._create_job(
            requester=requester,
            job_type="SILENCE_REMOVE",
            project_id=request.project_id,
            input_data=request.model_dump(mode="json"),
        )
        silence_remove_actor.send(
            job_id=str(job.id),
            file_key=request.file_key,
            out_folder=request.out_folder,
            min_silence_duration_ms=request.min_silence_duration_ms,
            silence_threshold_db=request.silence_threshold_db,
            padding_ms=request.padding_ms,
        )
        return self._pending_response(job, webhook_url)

    async def submit_media_convert(
        self, *, requester: User, request: MediaConvertRequest
    ) -> TaskSubmitResponse:
        """Submit media conversion task."""
        job, webhook_url = await self._create_job(
            requester=requester,
            job_type="MEDIA_CONVERT",
            project_id=request.project_id,
            input_data=request.model_dump(mode="json"),
        )
        media_convert_actor.send(
            job_id=str(job.id),
            file_key=request.file_key,
            out_folder=request.out_folder,
            output_format=request.output_format,
        )
        return self._pending_response(job, webhook_url)

    async def submit_transcription_generate(
        self, *, requester: User, request: TranscriptionGenerateRequest
    ) -> TaskSubmitResponse:
        """Submit transcription generation task."""
        job, webhook_url = await self._create_job(
            requester=requester,
            job_type="TRANSCRIPTION_GENERATE",
            project_id=request.project_id,
            input_data=request.model_dump(mode="json"),
        )
        transcription_generate_actor.send(
            job_id=str(job.id),
            file_key=request.file_key,
            engine=request.engine,
            language=request.language,
            model=request.model,
        )
        return self._pending_response(job, webhook_url)

    async def submit_captions_generate(
        self, *, requester: User, request: CaptionsGenerateRequest
    ) -> TaskSubmitResponse:
        """Submit captions generation task.

        Raises:
            ValueError: if the referenced transcription does not exist
                (mapped to HTTP 400 by the API layer).
        """
        transcription_repo = TranscriptionRepository(self._session)
        transcription = await transcription_repo.get_by_id(request.transcription_id)
        if transcription is None:
            raise ValueError(f"Transcription {request.transcription_id} not found")

        job, webhook_url = await self._create_job(
            requester=requester,
            job_type="CAPTIONS_GENERATE",
            project_id=request.project_id,
            input_data=request.model_dump(mode="json"),
        )
        captions_generate_actor.send(
            job_id=str(job.id),
            video_s3_path=request.video_s3_path,
            folder=request.folder,
            # The stored document crosses the broker as plain JSON; the
            # actor re-validates it into a Document.
            transcription_json=transcription.document,
        )
        return self._pending_response(job, webhook_url)
|
||||||
@@ -29,12 +29,27 @@ services:
|
|||||||
volumes:
|
volumes:
|
||||||
- cpv3_minio:/data
|
- cpv3_minio:/data
|
||||||
|
|
||||||
|
redis:
|
||||||
|
container_name: cpv3_redis
|
||||||
|
image: redis:7-alpine
|
||||||
|
ports:
|
||||||
|
- "6379:6379"
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "redis-cli", "ping"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 3s
|
||||||
|
retries: 10
|
||||||
|
volumes:
|
||||||
|
- cpv3_redis:/data
|
||||||
|
|
||||||
api:
|
api:
|
||||||
container_name: cpv3_api
|
container_name: cpv3_api
|
||||||
build: .
|
build: .
|
||||||
depends_on:
|
depends_on:
|
||||||
db:
|
db:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
environment:
|
environment:
|
||||||
DEBUG: ${DEBUG:-1}
|
DEBUG: ${DEBUG:-1}
|
||||||
JWT_SECRET_KEY: ${JWT_SECRET_KEY:-dev-secret}
|
JWT_SECRET_KEY: ${JWT_SECRET_KEY:-dev-secret}
|
||||||
@@ -54,6 +69,9 @@ services:
|
|||||||
# Used only for generated browser links (presigned URLs)
|
# Used only for generated browser links (presigned URLs)
|
||||||
S3_ENDPOINT_URL_PUBLIC: http://localhost:9000
|
S3_ENDPOINT_URL_PUBLIC: http://localhost:9000
|
||||||
|
|
||||||
|
REDIS_URL: redis://redis:6379/0
|
||||||
|
WEBHOOK_BASE_URL: http://api:8000
|
||||||
|
|
||||||
REMOTION_SERVICE_URL: ${REMOTION_SERVICE_URL:-http://localhost:8001}
|
REMOTION_SERVICE_URL: ${REMOTION_SERVICE_URL:-http://localhost:8001}
|
||||||
ports:
|
ports:
|
||||||
- "8000:8000"
|
- "8000:8000"
|
||||||
@@ -65,6 +83,41 @@ services:
|
|||||||
- ./alembic:/app/alembic
|
- ./alembic:/app/alembic
|
||||||
- ./alembic.ini:/app/alembic.ini
|
- ./alembic.ini:/app/alembic.ini
|
||||||
|
|
||||||
|
worker:
|
||||||
|
container_name: cpv3_worker
|
||||||
|
build: .
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
|
environment:
|
||||||
|
DEBUG: ${DEBUG:-1}
|
||||||
|
|
||||||
|
POSTGRES_USER: ${POSTGRES_USER:-postgres}
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
|
||||||
|
POSTGRES_HOST: db
|
||||||
|
POSTGRES_PORT: 5432
|
||||||
|
POSTGRES_DATABASE: ${POSTGRES_DATABASE:-coffee_project_db}
|
||||||
|
|
||||||
|
STORAGE_BACKEND: ${STORAGE_BACKEND:-S3}
|
||||||
|
|
||||||
|
S3_ACCESS_KEY: ${MINIO_ROOT_USER:-minioadmin}
|
||||||
|
S3_SECRET_KEY: ${MINIO_ROOT_PASSWORD:-minioadmin}
|
||||||
|
S3_BUCKET_NAME: ${S3_BUCKET_NAME:-coffee-bucket}
|
||||||
|
S3_ENDPOINT_URL_INTERNAL: http://minio:9000
|
||||||
|
S3_ENDPOINT_URL_PUBLIC: http://localhost:9000
|
||||||
|
|
||||||
|
REDIS_URL: redis://redis:6379/0
|
||||||
|
WEBHOOK_BASE_URL: http://api:8000
|
||||||
|
|
||||||
|
REMOTION_SERVICE_URL: ${REMOTION_SERVICE_URL:-http://localhost:8001}
|
||||||
|
command: >
|
||||||
|
uv run dramatiq cpv3.modules.tasks.service --processes 1 --threads 2
|
||||||
|
volumes:
|
||||||
|
- ./src:/app/src
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
cpv3_db:
|
cpv3_db:
|
||||||
cpv3_minio:
|
cpv3_minio:
|
||||||
|
cpv3_redis:
|
||||||
|
|||||||
@@ -22,14 +22,28 @@ dependencies = [
|
|||||||
"pydub>=0.25.1",
|
"pydub>=0.25.1",
|
||||||
"google-cloud-speech>=2.34.0",
|
"google-cloud-speech>=2.34.0",
|
||||||
"openai-whisper>=20250625",
|
"openai-whisper>=20250625",
|
||||||
|
"dramatiq[redis]>=1.17.0",
|
||||||
|
"redis>=5.0.0",
|
||||||
|
"psycopg2-binary>=2.9.9",
|
||||||
]
|
]
|
||||||
|
|
||||||
[dependency-groups]
|
[dependency-groups]
|
||||||
dev = [
|
dev = [
|
||||||
"mypy>=1.19.1",
|
"mypy>=1.19.1",
|
||||||
"ruff>=0.6.0",
|
"ruff>=0.6.0",
|
||||||
|
"pytest>=8.0.0",
|
||||||
|
"pytest-asyncio>=0.23.0",
|
||||||
|
"aiosqlite>=0.20.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
asyncio_mode = "auto"
|
||||||
|
asyncio_default_fixture_loop_scope = "function"
|
||||||
|
testpaths = ["tests"]
|
||||||
|
python_files = ["test_*.py"]
|
||||||
|
python_classes = ["Test*"]
|
||||||
|
python_functions = ["test_*"]
|
||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
line-length = 100
|
line-length = 100
|
||||||
|
|
||||||
|
|||||||
+217
-17
@@ -4,12 +4,22 @@ Shared test fixtures and configuration.
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import pytest # type: ignore[import-not-found]
|
import uuid
|
||||||
from fastapi.testclient import TestClient
|
from datetime import timedelta
|
||||||
|
from typing import AsyncGenerator
|
||||||
|
from unittest.mock import AsyncMock, MagicMock
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import ASGITransport, AsyncClient
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||||
|
|
||||||
from cpv3.db.base import Base
|
from cpv3.db.base import Base
|
||||||
|
from cpv3.db.session import get_db
|
||||||
|
from cpv3.infrastructure.auth import get_current_user
|
||||||
|
from cpv3.infrastructure.deps import get_storage
|
||||||
|
from cpv3.infrastructure.security import create_token, hash_password
|
||||||
from cpv3.main import app
|
from cpv3.main import app
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
|
||||||
# Use in-memory SQLite for tests (or configure a test database)
|
# Use in-memory SQLite for tests (or configure a test database)
|
||||||
@@ -17,28 +27,218 @@ TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"
|
|||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def test_client():
|
async def test_engine():
|
||||||
"""Create a test client for the FastAPI app."""
|
"""Create a test database engine with tables."""
|
||||||
with TestClient(app) as client:
|
engine = create_async_engine(TEST_DATABASE_URL, echo=False)
|
||||||
yield client
|
async with engine.begin() as conn:
|
||||||
|
await conn.run_sync(Base.metadata.create_all)
|
||||||
|
yield engine
|
||||||
|
async with engine.begin() as conn:
|
||||||
|
await conn.run_sync(Base.metadata.drop_all)
|
||||||
|
await engine.dispose()
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
async def test_db_session():
|
async def test_db_session(test_engine) -> AsyncGenerator[AsyncSession, None]:
|
||||||
"""Create a test database session."""
|
"""Create a test database session with per-test transaction isolation."""
|
||||||
engine = create_async_engine(TEST_DATABASE_URL, echo=False)
|
|
||||||
|
|
||||||
async with engine.begin() as conn:
|
|
||||||
await conn.run_sync(Base.metadata.create_all)
|
|
||||||
|
|
||||||
async_session = async_sessionmaker(
|
async_session = async_sessionmaker(
|
||||||
bind=engine, class_=AsyncSession, expire_on_commit=False
|
bind=test_engine, class_=AsyncSession, expire_on_commit=False
|
||||||
)
|
)
|
||||||
|
|
||||||
async with async_session() as session:
|
async with async_session() as session:
|
||||||
yield session
|
yield session
|
||||||
|
|
||||||
async with engine.begin() as conn:
|
|
||||||
await conn.run_sync(Base.metadata.drop_all)
|
|
||||||
|
|
||||||
await engine.dispose()
|
@pytest.fixture
async def test_user(test_db_session: AsyncSession) -> User:
    """Provide a persisted regular (non-staff, non-superuser) user."""
    account = User(
        id=uuid.uuid4(),
        username="testuser",
        email="test@example.com",
        first_name="Test",
        last_name="User",
        password_hash=hash_password("testpassword"),
        is_active=True,
        is_staff=False,
        is_superuser=False,
    )
    test_db_session.add(account)
    await test_db_session.commit()
    # Refresh so DB-generated defaults are loaded onto the instance.
    await test_db_session.refresh(account)
    return account
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
async def staff_user(test_db_session: AsyncSession) -> User:
    """Persist and return a staff (but not superuser) user."""
    fields = {
        "id": uuid.uuid4(),
        "username": "staffuser",
        "email": "staff@example.com",
        "password_hash": hash_password("staffpassword"),
        "first_name": "Staff",
        "last_name": "User",
        "is_active": True,
        "is_staff": True,
        "is_superuser": False,
    }
    user = User(**fields)
    test_db_session.add(user)
    await test_db_session.commit()
    # Refresh so DB-generated defaults are populated before the test runs.
    await test_db_session.refresh(user)
    return user
||||||
|
|
||||||
|
@pytest.fixture
async def other_user(test_db_session: AsyncSession) -> User:
    """Persist and return a second regular user, used to test permissions."""
    fields = {
        "id": uuid.uuid4(),
        "username": "otheruser",
        "email": "other@example.com",
        "password_hash": hash_password("otherpassword"),
        "first_name": "Other",
        "last_name": "User",
        "is_active": True,
        "is_staff": False,
        "is_superuser": False,
    }
    user = User(**fields)
    test_db_session.add(user)
    await test_db_session.commit()
    # Refresh so DB-generated defaults are populated before the test runs.
    await test_db_session.refresh(user)
    return user
||||||
|
|
||||||
|
@pytest.fixture
def auth_headers(test_user: User) -> dict[str, str]:
    """Authorization header carrying a short-lived access JWT for test_user."""
    access = create_token(
        subject=str(test_user.id),
        token_type="access",
        expires_in=timedelta(hours=1),
    )
    return {"Authorization": f"Bearer {access}"}
||||||
|
|
||||||
|
@pytest.fixture
def staff_auth_headers(staff_user: User) -> dict[str, str]:
    """Authorization header carrying a short-lived access JWT for staff_user."""
    access = create_token(
        subject=str(staff_user.id),
        token_type="access",
        expires_in=timedelta(hours=1),
    )
    return {"Authorization": f"Bearer {access}"}
||||||
|
|
||||||
|
@pytest.fixture
def other_auth_headers(other_user: User) -> dict[str, str]:
    """Authorization header carrying a short-lived access JWT for other_user."""
    access = create_token(
        subject=str(other_user.id),
        token_type="access",
        expires_in=timedelta(hours=1),
    )
    return {"Authorization": f"Bearer {access}"}
||||||
|
|
||||||
|
@pytest.fixture
def mock_storage() -> MagicMock:
    """Mock storage service with canned async results for upload/exists/info."""
    info = MagicMock()
    info.file_path = "uploads/test-file.txt"
    info.file_url = "http://example.com/uploads/test-file.txt"
    info.file_size = 1024
    info.filename = "test-file.txt"

    storage = MagicMock()
    storage.upload_fileobj = AsyncMock(return_value="uploads/test-file.txt")
    storage.exists = AsyncMock(return_value=True)
    storage.get_file_info = AsyncMock(return_value=info)
    return storage
||||||
|
|
||||||
|
@pytest.fixture
async def async_client(
    test_db_session: AsyncSession,
    mock_storage: MagicMock,
) -> AsyncGenerator[AsyncClient, None]:
    """Async test client with DB and storage overridden; auth is left real."""

    async def _db_override():
        yield test_db_session

    async def _storage_override():
        return mock_storage

    app.dependency_overrides.update(
        {get_db: _db_override, get_storage: _storage_override}
    )

    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        yield client

    app.dependency_overrides.clear()
||||||
|
|
||||||
|
@pytest.fixture
async def auth_client(
    test_db_session: AsyncSession,
    test_user: User,
    mock_storage: MagicMock,
) -> AsyncGenerator[AsyncClient, None]:
    """Async test client whose auth dependency always resolves to test_user."""

    async def _db_override():
        yield test_db_session

    def _current_user_override():
        return test_user

    async def _storage_override():
        return mock_storage

    app.dependency_overrides.update(
        {
            get_db: _db_override,
            get_current_user: _current_user_override,
            get_storage: _storage_override,
        }
    )

    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        yield client

    app.dependency_overrides.clear()
||||||
|
|
||||||
|
@pytest.fixture
async def staff_client(
    test_db_session: AsyncSession,
    staff_user: User,
    mock_storage: MagicMock,
) -> AsyncGenerator[AsyncClient, None]:
    """Async test client whose auth dependency always resolves to staff_user."""

    async def _db_override():
        yield test_db_session

    def _current_user_override():
        return staff_user

    async def _storage_override():
        return mock_storage

    app.dependency_overrides.update(
        {
            get_db: _db_override,
            get_current_user: _current_user_override,
            get_storage: _storage_override,
        }
    )

    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        yield client

    app.dependency_overrides.clear()
|
|||||||
@@ -0,0 +1,159 @@
|
|||||||
|
"""
|
||||||
|
Tests for authentication endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
|
||||||
|
from cpv3.infrastructure.security import create_token
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
|
||||||
|
class TestRegisterEndpoint:
    """Tests for POST /auth/register."""

    async def test_register_success(self, async_client: AsyncClient):
        """A valid registration returns 201 with a token pair and the user."""
        payload = {
            "username": "newuser",
            "email": "newuser@example.com",
            "password": "securepassword123",
            "first_name": "New",
            "last_name": "User",
        }
        response = await async_client.post("/auth/register", json=payload)

        assert response.status_code == 201
        body = response.json()
        assert "access" in body
        assert "refresh" in body
        assert body["user"]["username"] == "newuser"
        assert body["user"]["email"] == "newuser@example.com"

    async def test_register_duplicate_username(
        self, async_client: AsyncClient, test_user: User
    ):
        """Registering an already-taken username is rejected with 400."""
        payload = {
            "username": test_user.username,  # existing username
            "email": "another@example.com",
            "password": "password123",
        }
        response = await async_client.post("/auth/register", json=payload)

        assert response.status_code == 400

    async def test_register_missing_required_fields(self, async_client: AsyncClient):
        """Omitting email and password fails request validation (422)."""
        payload = {"username": "someuser"}  # missing email and password
        response = await async_client.post("/auth/register", json=payload)

        assert response.status_code == 422
||||||
|
|
||||||
|
class TestLoginEndpoint:
    """Tests for POST /auth/login."""

    async def test_login_success(self, async_client: AsyncClient, test_user: User):
        """Valid credentials yield a token pair plus the user payload."""
        credentials = {"username": "testuser", "password": "testpassword"}
        response = await async_client.post("/auth/login", json=credentials)

        assert response.status_code == 200
        body = response.json()
        assert "access" in body
        assert "refresh" in body
        assert body["user"]["username"] == "testuser"

    async def test_login_invalid_password(
        self, async_client: AsyncClient, test_user: User
    ):
        """A wrong password is rejected with 401."""
        credentials = {"username": "testuser", "password": "wrongpassword"}
        response = await async_client.post("/auth/login", json=credentials)

        assert response.status_code == 401
        assert response.json()["detail"] == "Invalid credentials"

    async def test_login_nonexistent_user(self, async_client: AsyncClient):
        """An unknown username is rejected with 401."""
        credentials = {"username": "nonexistent", "password": "password123"}
        response = await async_client.post("/auth/login", json=credentials)

        assert response.status_code == 401
        assert response.json()["detail"] == "Invalid credentials"
||||||
|
|
||||||
|
class TestRefreshEndpoint:
    """Tests for POST /auth/refresh."""

    async def test_refresh_success(self, async_client: AsyncClient, test_user: User):
        """A valid refresh token is exchanged for a new token pair."""
        refresh_token = create_token(
            subject=str(test_user.id),
            token_type="refresh",
            expires_in=timedelta(days=7),
        )

        response = await async_client.post(
            "/auth/refresh", json={"refresh": refresh_token}
        )

        assert response.status_code == 200
        body = response.json()
        assert "access" in body
        assert "refresh" in body

    async def test_refresh_with_access_token_fails(
        self, async_client: AsyncClient, test_user: User
    ):
        """An access token presented as a refresh token is rejected (401)."""
        access_token = create_token(
            subject=str(test_user.id),
            token_type="access",
            expires_in=timedelta(minutes=15),
        )

        response = await async_client.post(
            "/auth/refresh", json={"refresh": access_token}
        )

        assert response.status_code == 401
        assert response.json()["detail"] == "Invalid refresh token"

    async def test_refresh_with_invalid_token(self, async_client: AsyncClient):
        """A malformed token is rejected (401)."""
        response = await async_client.post(
            "/auth/refresh", json={"refresh": "invalid.token.here"}
        )

        assert response.status_code == 401
        assert response.json()["detail"] == "Invalid refresh token"
|
||||||
@@ -0,0 +1,80 @@
|
|||||||
|
"""
|
||||||
|
Tests for captions endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
|
||||||
|
|
||||||
|
class TestGetVideoEndpoint:
    """Tests for POST /api/captions/get_video/."""

    async def test_get_video_success(self, auth_client: AsyncClient):
        """With caption generation patched out, the endpoint echoes its result."""
        line = {
            "text": "Hello world",
            "semantic_tags": [],
            "structure_tags": [],
            "time": {"start": 0.0, "end": 2.0},
            "words": [],
        }
        segment = {
            "text": "Hello world",
            "semantic_tags": [],
            "structure_tags": [],
            "time": {"start": 0.0, "end": 2.0},
            "lines": [line],
        }
        mock_transcription = {"segments": [segment]}

        with patch(
            "cpv3.modules.captions.router.generate_captions",
            new_callable=AsyncMock,
            return_value="uploads/output/captioned_video.mp4",
        ):
            response = await auth_client.post(
                "/api/captions/get_video/",
                json={
                    "folder": "output",
                    "video_s3_path": "uploads/source_video.mp4",
                    "transcription": mock_transcription,
                },
            )

        assert response.status_code == 200
        body = response.json()
        assert "result" in body
        assert body["result"] == "uploads/output/captioned_video.mp4"

    async def test_get_video_unauthenticated(self, async_client: AsyncClient):
        """Without auth the endpoint responds 401."""
        response = await async_client.post(
            "/api/captions/get_video/",
            json={
                "folder": "output",
                "video_s3_path": "test.mp4",
                "transcription": {"segments": []},
            },
        )

        assert response.status_code == 401

    async def test_get_video_missing_fields(self, auth_client: AsyncClient):
        """Missing required fields fail request validation (422)."""
        response = await auth_client.post(
            "/api/captions/get_video/",
            json={
                "folder": "output",
                # missing video_s3_path and transcription
            },
        )

        assert response.status_code == 422
|
||||||
@@ -0,0 +1,293 @@
|
|||||||
|
"""
|
||||||
|
Tests for file management endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import io
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from cpv3.modules.files.models import File
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
async def test_file(test_db_session: AsyncSession, test_user: User) -> File:
    """Persist and return a file entry owned by test_user."""
    fields = {
        "id": uuid.uuid4(),
        "owner_id": test_user.id,
        "original_filename": "test-document.pdf",
        "path": "uploads/test-document.pdf",
        "storage_backend": "LOCAL",
        "mime_type": "application/pdf",
        "size_bytes": 1024,
        "is_uploaded": True,
        "is_active": True,
    }
    file = File(**fields)
    test_db_session.add(file)
    await test_db_session.commit()
    # Refresh so DB-generated defaults are populated before the test runs.
    await test_db_session.refresh(file)
    return file
||||||
|
|
||||||
|
@pytest.fixture
async def other_file(test_db_session: AsyncSession, other_user: User) -> File:
    """Persist and return a file entry owned by other_user (permission tests)."""
    fields = {
        "id": uuid.uuid4(),
        "owner_id": other_user.id,
        "original_filename": "other-document.pdf",
        "path": "uploads/other-document.pdf",
        "storage_backend": "LOCAL",
        "mime_type": "application/pdf",
        "size_bytes": 2048,
        "is_uploaded": True,
        "is_active": True,
    }
    file = File(**fields)
    test_db_session.add(file)
    await test_db_session.commit()
    # Refresh so DB-generated defaults are populated before the test runs.
    await test_db_session.refresh(file)
    return file
||||||
|
|
||||||
|
class TestUploadFileEndpoint:
    """Tests for POST /api/files/upload/."""

    async def test_upload_file_success(self, auth_client: AsyncClient):
        """An authenticated multipart upload returns 201 with file info."""
        upload = {
            "file": ("testfile.txt", io.BytesIO(b"test file content"), "text/plain")
        }
        response = await auth_client.post(
            "/api/files/upload/", files=upload, data={"folder": "uploads"}
        )

        assert response.status_code == 201
        body = response.json()
        assert "file_path" in body
        assert "file_url" in body

    async def test_upload_file_unauthenticated(self, async_client: AsyncClient):
        """Uploading without credentials is rejected with 401."""
        upload = {
            "file": ("testfile.txt", io.BytesIO(b"test file content"), "text/plain")
        }
        response = await async_client.post("/api/files/upload/", files=upload)

        assert response.status_code == 401
||||||
|
|
||||||
|
class TestGetFileInfoEndpoint:
    """Tests for GET /api/files/get_file/."""

    async def test_get_file_info_success(self, auth_client: AsyncClient):
        """Looking up an existing path returns its file info."""
        params = {"file_path": "uploads/test-file.txt"}
        response = await auth_client.get("/api/files/get_file/", params=params)

        assert response.status_code == 200
        body = response.json()
        assert "file_path" in body
        assert "file_url" in body

    async def test_get_file_info_not_found(
        self, auth_client: AsyncClient, mock_storage
    ):
        """When storage reports the path missing, the endpoint returns 404."""
        mock_storage.exists.return_value = False

        params = {"file_path": "nonexistent/file.txt"}
        response = await auth_client.get("/api/files/get_file/", params=params)

        assert response.status_code == 404

    async def test_get_file_info_unauthenticated(self, async_client: AsyncClient):
        """Without auth the lookup is rejected with 401."""
        params = {"file_path": "uploads/test.txt"}
        response = await async_client.get("/api/files/get_file/", params=params)

        assert response.status_code == 401
||||||
|
|
||||||
|
class TestListFileEntriesEndpoint:
    """Tests for GET /api/files/files/."""

    async def test_list_file_entries(self, auth_client: AsyncClient, test_file: File):
        """An authenticated list request returns a JSON array."""
        response = await auth_client.get("/api/files/files/")

        assert response.status_code == 200
        assert isinstance(response.json(), list)

    async def test_list_file_entries_unauthenticated(self, async_client: AsyncClient):
        """Without auth the listing is rejected with 401."""
        response = await async_client.get("/api/files/files/")

        assert response.status_code == 401
||||||
|
|
||||||
|
class TestCreateFileEntryEndpoint:
    """Tests for POST /api/files/files/."""

    async def test_create_file_entry_success(self, auth_client: AsyncClient):
        """Creating a file entry returns 201 and echoes the payload."""
        payload = {
            "original_filename": "new-file.pdf",
            "path": "uploads/new-file.pdf",
            "storage_backend": "LOCAL",
            "mime_type": "application/pdf",
            "size_bytes": 4096,
        }
        response = await auth_client.post("/api/files/files/", json=payload)

        assert response.status_code == 201
        body = response.json()
        assert body["original_filename"] == "new-file.pdf"
        assert body["path"] == "uploads/new-file.pdf"

    async def test_create_file_entry_unauthenticated(self, async_client: AsyncClient):
        """Without auth the creation is rejected with 401."""
        payload = {
            "original_filename": "test.pdf",
            "path": "test.pdf",
            "storage_backend": "LOCAL",
            "mime_type": "application/pdf",
            "size_bytes": 1024,
        }
        response = await async_client.post("/api/files/files/", json=payload)

        assert response.status_code == 401
||||||
|
|
||||||
|
class TestRetrieveFileEntryEndpoint:
    """Tests for GET /api/files/files/{file_id}/."""

    async def test_retrieve_own_file_entry(
        self, auth_client: AsyncClient, test_file: File
    ):
        """Owners can retrieve their own file entry."""
        response = await auth_client.get(f"/api/files/files/{test_file.id}/")

        assert response.status_code == 200
        body = response.json()
        assert body["id"] == str(test_file.id)
        assert body["original_filename"] == test_file.original_filename

    async def test_retrieve_other_file_as_staff(
        self, staff_client: AsyncClient, test_file: File
    ):
        """Staff may retrieve any user's file entry."""
        response = await staff_client.get(f"/api/files/files/{test_file.id}/")

        assert response.status_code == 200

    async def test_retrieve_nonexistent_file_entry(self, auth_client: AsyncClient):
        """An unknown id yields 404."""
        missing_id = uuid.uuid4()
        response = await auth_client.get(f"/api/files/files/{missing_id}/")

        assert response.status_code == 404

    async def test_retrieve_other_file_forbidden(
        self, auth_client: AsyncClient, other_file: File
    ):
        """A regular user may not read another user's file entry (403)."""
        response = await auth_client.get(f"/api/files/files/{other_file.id}/")

        assert response.status_code == 403
||||||
|
|
||||||
|
class TestPatchFileEntryEndpoint:
    """Tests for PATCH /api/files/files/{file_id}/."""

    async def test_patch_own_file_entry(
        self, auth_client: AsyncClient, test_file: File
    ):
        """Owners can rename their own file entry."""
        response = await auth_client.patch(
            f"/api/files/files/{test_file.id}/",
            json={"original_filename": "renamed-file.pdf"},
        )

        assert response.status_code == 200
        assert response.json()["original_filename"] == "renamed-file.pdf"

    async def test_patch_other_file_as_staff(
        self, staff_client: AsyncClient, test_file: File
    ):
        """Staff may update any user's file entry."""
        response = await staff_client.patch(
            f"/api/files/files/{test_file.id}/",
            json={"original_filename": "staff-renamed.pdf"},
        )

        assert response.status_code == 200

    async def test_patch_nonexistent_file_entry(self, auth_client: AsyncClient):
        """Patching an unknown id yields 404."""
        missing_id = uuid.uuid4()
        response = await auth_client.patch(
            f"/api/files/files/{missing_id}/",
            json={"original_filename": "test.pdf"},
        )

        assert response.status_code == 404

    async def test_patch_other_file_forbidden(
        self, auth_client: AsyncClient, other_file: File
    ):
        """A regular user may not modify another user's file entry (403)."""
        response = await auth_client.patch(
            f"/api/files/files/{other_file.id}/",
            json={"original_filename": "hacked.pdf"},
        )

        assert response.status_code == 403
||||||
|
|
||||||
|
class TestDeleteFileEntryEndpoint:
    """Tests for DELETE /api/files/files/{file_id}/."""

    async def test_delete_own_file_entry(
        self, auth_client: AsyncClient, test_file: File
    ):
        """Owners can delete their own file entry (204)."""
        response = await auth_client.delete(f"/api/files/files/{test_file.id}/")

        assert response.status_code == 204

    async def test_delete_other_file_as_staff(
        self, staff_client: AsyncClient, test_file: File
    ):
        """Staff may delete any user's file entry (204)."""
        response = await staff_client.delete(f"/api/files/files/{test_file.id}/")

        assert response.status_code == 204

    async def test_delete_nonexistent_file_entry(self, auth_client: AsyncClient):
        """Deleting an unknown id yields 404."""
        missing_id = uuid.uuid4()
        response = await auth_client.delete(f"/api/files/files/{missing_id}/")

        assert response.status_code == 404

    async def test_delete_other_file_forbidden(
        self, auth_client: AsyncClient, other_file: File
    ):
        """A regular user may not delete another user's file entry (403)."""
        response = await auth_client.delete(f"/api/files/files/{other_file.id}/")

        assert response.status_code == 403
|
||||||
@@ -0,0 +1,365 @@
|
|||||||
|
"""
|
||||||
|
Tests for jobs and events endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from cpv3.modules.jobs.models import Job, JobEvent
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
async def test_job(test_db_session: AsyncSession, test_user: User) -> Job:
    """Persist and return a PENDING job owned by test_user."""
    fields = {
        "id": uuid.uuid4(),
        "broker_id": "test-broker-123",
        "user_id": test_user.id,
        "status": "PENDING",
        # NOTE(review): job_type mirrors the status string here — presumably a
        # placeholder; confirm against the Job model's valid job_type values.
        "job_type": "PENDING",
        "is_active": True,
    }
    job = Job(**fields)
    test_db_session.add(job)
    await test_db_session.commit()
    # Refresh so DB-generated defaults are populated before the test runs.
    await test_db_session.refresh(job)
    return job
||||||
|
|
||||||
|
@pytest.fixture
async def other_job(test_db_session: AsyncSession, other_user: User) -> Job:
    """Persist and return a RUNNING job owned by other_user (permission tests)."""
    fields = {
        "id": uuid.uuid4(),
        "broker_id": "other-broker-456",
        "user_id": other_user.id,
        "status": "RUNNING",
        # NOTE(review): job_type mirrors the status string here — presumably a
        # placeholder; confirm against the Job model's valid job_type values.
        "job_type": "RUNNING",
        "is_active": True,
    }
    job = Job(**fields)
    test_db_session.add(job)
    await test_db_session.commit()
    # Refresh so DB-generated defaults are populated before the test runs.
    await test_db_session.refresh(job)
    return job
||||||
|
|
||||||
|
@pytest.fixture
async def test_event(test_db_session: AsyncSession, test_job: Job) -> JobEvent:
    """Persist and return a 'started' event attached to test_job."""
    fields = {
        "id": uuid.uuid4(),
        "job_id": test_job.id,
        "event_type": "started",
        "payload": {"message": "Job started"},
        "is_active": True,
    }
    event = JobEvent(**fields)
    test_db_session.add(event)
    await test_db_session.commit()
    # Refresh so DB-generated defaults are populated before the test runs.
    await test_db_session.refresh(event)
    return event
||||||
|
|
||||||
|
class TestListJobsEndpoint:
    """Tests for GET /api/jobs/jobs/."""

    async def test_list_jobs(self, auth_client: AsyncClient, test_job: Job):
        """An authenticated list request returns a JSON array."""
        response = await auth_client.get("/api/jobs/jobs/")

        assert response.status_code == 200
        assert isinstance(response.json(), list)

    async def test_list_jobs_unauthenticated(self, async_client: AsyncClient):
        """Without auth the listing is rejected with 401."""
        response = await async_client.get("/api/jobs/jobs/")

        assert response.status_code == 401
||||||
|
|
||||||
|
class TestCreateJobEndpoint:
    """Tests for POST /api/jobs/jobs/."""

    async def test_create_job_success(self, auth_client: AsyncClient):
        """Creating a job returns 201 and echoes broker_id/status."""
        payload = {
            "broker_id": "new-broker-789",
            "status": "PENDING",
            "job_type": "PENDING",
        }
        response = await auth_client.post("/api/jobs/jobs/", json=payload)

        assert response.status_code == 201
        body = response.json()
        assert body["broker_id"] == "new-broker-789"
        assert body["status"] == "PENDING"

    async def test_create_job_with_input_data(self, auth_client: AsyncClient):
        """input_data is persisted and returned as-is."""
        payload = {
            "broker_id": "broker-with-data",
            "input_data": {"file_path": "uploads/test.mp4"},
        }
        response = await auth_client.post("/api/jobs/jobs/", json=payload)

        assert response.status_code == 201
        body = response.json()
        assert body["input_data"]["file_path"] == "uploads/test.mp4"

    async def test_create_job_unauthenticated(self, async_client: AsyncClient):
        """Without auth the creation is rejected with 401."""
        response = await async_client.post(
            "/api/jobs/jobs/", json={"broker_id": "test"}
        )

        assert response.status_code == 401
||||||
|
|
||||||
|
class TestRetrieveJobEndpoint:
    """Tests for GET /api/jobs/jobs/{job_id}/."""

    async def test_retrieve_own_job(self, auth_client: AsyncClient, test_job: Job):
        """Owners can retrieve their own job."""
        response = await auth_client.get(f"/api/jobs/jobs/{test_job.id}/")

        assert response.status_code == 200
        body = response.json()
        assert body["id"] == str(test_job.id)
        assert body["broker_id"] == test_job.broker_id

    async def test_retrieve_other_job_as_staff(
        self, staff_client: AsyncClient, test_job: Job
    ):
        """Staff may retrieve any user's job."""
        response = await staff_client.get(f"/api/jobs/jobs/{test_job.id}/")

        assert response.status_code == 200

    async def test_retrieve_nonexistent_job(self, auth_client: AsyncClient):
        """An unknown id yields 404."""
        missing_id = uuid.uuid4()
        response = await auth_client.get(f"/api/jobs/jobs/{missing_id}/")

        assert response.status_code == 404

    async def test_retrieve_other_job_forbidden(
        self, auth_client: AsyncClient, other_job: Job
    ):
        """A regular user may not read another user's job (403)."""
        response = await auth_client.get(f"/api/jobs/jobs/{other_job.id}/")

        assert response.status_code == 403
||||||
|
|
||||||
|
class TestPatchJobEndpoint:
|
||||||
|
"""Tests for PATCH /api/jobs/jobs/{job_id}/."""
|
||||||
|
|
||||||
|
async def test_patch_own_job(self, auth_client: AsyncClient, test_job: Job):
|
||||||
|
"""Test updating own job."""
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/jobs/jobs/{test_job.id}/",
|
||||||
|
json={"status": "RUNNING", "current_message": "Processing..."},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["status"] == "RUNNING"
|
||||||
|
assert data["current_message"] == "Processing..."
|
||||||
|
|
||||||
|
async def test_patch_job_progress(self, auth_client: AsyncClient, test_job: Job):
|
||||||
|
"""Test updating job progress."""
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/jobs/jobs/{test_job.id}/",
|
||||||
|
json={"project_pct": 50.0},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["project_pct"] == 50.0
|
||||||
|
|
||||||
|
async def test_patch_other_job_as_staff(
|
||||||
|
self, staff_client: AsyncClient, test_job: Job
|
||||||
|
):
|
||||||
|
"""Test staff can update any job."""
|
||||||
|
response = await staff_client.patch(
|
||||||
|
f"/api/jobs/jobs/{test_job.id}/",
|
||||||
|
json={"status": "DONE"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
async def test_patch_nonexistent_job(self, auth_client: AsyncClient):
|
||||||
|
"""Test patching nonexistent job returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/jobs/jobs/{fake_id}/",
|
||||||
|
json={"status": "DONE"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
async def test_patch_other_job_forbidden(
|
||||||
|
self, auth_client: AsyncClient, other_job: Job
|
||||||
|
):
|
||||||
|
"""Test regular user cannot update other user's job."""
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/jobs/jobs/{other_job.id}/",
|
||||||
|
json={"status": "CANCELLED"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
class TestDeleteJobEndpoint:
|
||||||
|
"""Tests for DELETE /api/jobs/jobs/{job_id}/."""
|
||||||
|
|
||||||
|
async def test_delete_own_job(self, auth_client: AsyncClient, test_job: Job):
|
||||||
|
"""Test deleting own job."""
|
||||||
|
response = await auth_client.delete(f"/api/jobs/jobs/{test_job.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 204
|
||||||
|
|
||||||
|
async def test_delete_other_job_as_staff(
|
||||||
|
self, staff_client: AsyncClient, test_job: Job
|
||||||
|
):
|
||||||
|
"""Test staff can delete any job."""
|
||||||
|
response = await staff_client.delete(f"/api/jobs/jobs/{test_job.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 204
|
||||||
|
|
||||||
|
async def test_delete_nonexistent_job(self, auth_client: AsyncClient):
|
||||||
|
"""Test deleting nonexistent job returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.delete(f"/api/jobs/jobs/{fake_id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
async def test_delete_other_job_forbidden(
|
||||||
|
self, auth_client: AsyncClient, other_job: Job
|
||||||
|
):
|
||||||
|
"""Test regular user cannot delete other user's job."""
|
||||||
|
response = await auth_client.delete(f"/api/jobs/jobs/{other_job.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
class TestListEventsEndpoint:
|
||||||
|
"""Tests for GET /api/jobs/events/."""
|
||||||
|
|
||||||
|
async def test_list_events(self, auth_client: AsyncClient, test_event: JobEvent):
|
||||||
|
"""Test listing job events."""
|
||||||
|
response = await auth_client.get("/api/jobs/events/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert isinstance(data, list)
|
||||||
|
|
||||||
|
async def test_list_events_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test listing events without auth returns 401."""
|
||||||
|
response = await async_client.get("/api/jobs/events/")
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestCreateEventEndpoint:
|
||||||
|
"""Tests for POST /api/jobs/events/."""
|
||||||
|
|
||||||
|
async def test_create_event_success(self, auth_client: AsyncClient, test_job: Job):
|
||||||
|
"""Test creating a job event."""
|
||||||
|
response = await auth_client.post(
|
||||||
|
"/api/jobs/events/",
|
||||||
|
json={
|
||||||
|
"job_id": str(test_job.id),
|
||||||
|
"event_type": "progress",
|
||||||
|
"payload": {"percentage": 25},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
data = response.json()
|
||||||
|
assert data["event_type"] == "progress"
|
||||||
|
assert data["payload"]["percentage"] == 25
|
||||||
|
|
||||||
|
async def test_create_event_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test creating event without auth returns 401."""
|
||||||
|
response = await async_client.post(
|
||||||
|
"/api/jobs/events/",
|
||||||
|
json={
|
||||||
|
"job_id": str(uuid.uuid4()),
|
||||||
|
"event_type": "test",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestRetrieveEventEndpoint:
|
||||||
|
"""Tests for GET /api/jobs/events/{event_id}/."""
|
||||||
|
|
||||||
|
async def test_retrieve_event(self, auth_client: AsyncClient, test_event: JobEvent):
|
||||||
|
"""Test retrieving a job event."""
|
||||||
|
response = await auth_client.get(f"/api/jobs/events/{test_event.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["id"] == str(test_event.id)
|
||||||
|
assert data["event_type"] == test_event.event_type
|
||||||
|
|
||||||
|
async def test_retrieve_nonexistent_event(self, auth_client: AsyncClient):
|
||||||
|
"""Test retrieving nonexistent event returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.get(f"/api/jobs/events/{fake_id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestPatchEventEndpoint:
|
||||||
|
"""Tests for PATCH /api/jobs/events/{event_id}/."""
|
||||||
|
|
||||||
|
async def test_patch_event(self, auth_client: AsyncClient, test_event: JobEvent):
|
||||||
|
"""Test updating a job event."""
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/jobs/events/{test_event.id}/",
|
||||||
|
json={"payload": {"updated": True}},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["payload"]["updated"] is True
|
||||||
|
|
||||||
|
async def test_patch_nonexistent_event(self, auth_client: AsyncClient):
|
||||||
|
"""Test patching nonexistent event returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/jobs/events/{fake_id}/",
|
||||||
|
json={"payload": {}},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestDeleteEventEndpoint:
|
||||||
|
"""Tests for DELETE /api/jobs/events/{event_id}/."""
|
||||||
|
|
||||||
|
async def test_delete_event(self, auth_client: AsyncClient, test_event: JobEvent):
|
||||||
|
"""Test deleting a job event."""
|
||||||
|
response = await auth_client.delete(f"/api/jobs/events/{test_event.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 204
|
||||||
|
|
||||||
|
async def test_delete_nonexistent_event(self, auth_client: AsyncClient):
|
||||||
|
"""Test deleting nonexistent event returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.delete(f"/api/jobs/events/{fake_id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
@@ -0,0 +1,461 @@
|
|||||||
|
"""
|
||||||
|
Tests for media management endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from cpv3.modules.media.models import ArtifactMediaFile, MediaFile
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
async def test_media_file(test_db_session: AsyncSession, test_user: User) -> MediaFile:
|
||||||
|
"""Create a test media file owned by test_user."""
|
||||||
|
media_file = MediaFile(
|
||||||
|
id=uuid.uuid4(),
|
||||||
|
owner_id=test_user.id,
|
||||||
|
duration_seconds=120.5,
|
||||||
|
frame_rate=30.0,
|
||||||
|
width=1920,
|
||||||
|
height=1080,
|
||||||
|
is_deleted=False,
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
test_db_session.add(media_file)
|
||||||
|
await test_db_session.commit()
|
||||||
|
await test_db_session.refresh(media_file)
|
||||||
|
return media_file
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
async def other_media_file(
|
||||||
|
test_db_session: AsyncSession, other_user: User
|
||||||
|
) -> MediaFile:
|
||||||
|
"""Create a media file owned by another user."""
|
||||||
|
media_file = MediaFile(
|
||||||
|
id=uuid.uuid4(),
|
||||||
|
owner_id=other_user.id,
|
||||||
|
duration_seconds=60.0,
|
||||||
|
frame_rate=24.0,
|
||||||
|
width=1280,
|
||||||
|
height=720,
|
||||||
|
is_deleted=False,
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
test_db_session.add(media_file)
|
||||||
|
await test_db_session.commit()
|
||||||
|
await test_db_session.refresh(media_file)
|
||||||
|
return media_file
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
async def test_artifact(
|
||||||
|
test_db_session: AsyncSession, test_media_file: MediaFile
|
||||||
|
) -> ArtifactMediaFile:
|
||||||
|
"""Create a test artifact linked to test_media_file."""
|
||||||
|
artifact = ArtifactMediaFile(
|
||||||
|
id=uuid.uuid4(),
|
||||||
|
media_file_id=test_media_file.id,
|
||||||
|
artifact_type="THUMBNAIL",
|
||||||
|
is_deleted=False,
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
test_db_session.add(artifact)
|
||||||
|
await test_db_session.commit()
|
||||||
|
await test_db_session.refresh(artifact)
|
||||||
|
return artifact
|
||||||
|
|
||||||
|
|
||||||
|
class TestGetMetaEndpoint:
|
||||||
|
"""Tests for GET /api/media/get_meta/."""
|
||||||
|
|
||||||
|
async def test_get_meta_success(self, auth_client: AsyncClient):
|
||||||
|
"""Test getting media metadata."""
|
||||||
|
with patch(
|
||||||
|
"cpv3.modules.media.router.probe_media",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
return_value={
|
||||||
|
"streams": [],
|
||||||
|
"format": {"filename": "test.mp4", "duration": "120.5"},
|
||||||
|
},
|
||||||
|
):
|
||||||
|
response = await auth_client.get(
|
||||||
|
"/api/media/get_meta/", params={"file_path": "uploads/test.mp4"}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
async def test_get_meta_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test getting metadata without auth returns 401."""
|
||||||
|
response = await async_client.get(
|
||||||
|
"/api/media/get_meta/", params={"file_path": "test.mp4"}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestSilenceRemoveEndpoint:
|
||||||
|
"""Tests for POST /api/media/silence_remove."""
|
||||||
|
|
||||||
|
async def test_silence_remove_success(self, auth_client: AsyncClient, mock_storage):
|
||||||
|
"""Test silence removal returns file info."""
|
||||||
|
with patch(
|
||||||
|
"cpv3.modules.media.router.remove_silence",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
) as mock_remove:
|
||||||
|
mock_remove.return_value = mock_storage.get_file_info.return_value
|
||||||
|
|
||||||
|
response = await auth_client.post(
|
||||||
|
"/api/media/silence_remove",
|
||||||
|
json={"file_path": "uploads/test.mp4", "folder": "processed"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert "file_path" in data
|
||||||
|
|
||||||
|
async def test_silence_remove_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test silence removal without auth returns 401."""
|
||||||
|
response = await async_client.post(
|
||||||
|
"/api/media/silence_remove",
|
||||||
|
json={"file_path": "test.mp4"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestConvertEndpoint:
|
||||||
|
"""Tests for POST /api/media/convert."""
|
||||||
|
|
||||||
|
async def test_convert_success(self, auth_client: AsyncClient, mock_storage):
|
||||||
|
"""Test media conversion returns file info."""
|
||||||
|
with patch(
|
||||||
|
"cpv3.modules.media.router.convert_to_mp4",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
) as mock_convert:
|
||||||
|
mock_convert.return_value = mock_storage.get_file_info.return_value
|
||||||
|
|
||||||
|
response = await auth_client.post(
|
||||||
|
"/api/media/convert",
|
||||||
|
json={"file_path": "uploads/test.mov", "folder": "converted"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert "file_path" in data
|
||||||
|
|
||||||
|
async def test_convert_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test conversion without auth returns 401."""
|
||||||
|
response = await async_client.post(
|
||||||
|
"/api/media/convert",
|
||||||
|
json={"file_path": "test.mov"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestListMediaFilesEndpoint:
|
||||||
|
"""Tests for GET /api/media/mediafiles/."""
|
||||||
|
|
||||||
|
async def test_list_mediafiles(
|
||||||
|
self, auth_client: AsyncClient, test_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test listing media files."""
|
||||||
|
response = await auth_client.get("/api/media/mediafiles/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert isinstance(data, list)
|
||||||
|
|
||||||
|
async def test_list_mediafiles_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test listing media files without auth returns 401."""
|
||||||
|
response = await async_client.get("/api/media/mediafiles/")
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestCreateMediaFileEndpoint:
|
||||||
|
"""Tests for POST /api/media/mediafiles/."""
|
||||||
|
|
||||||
|
async def test_create_mediafile_success(self, auth_client: AsyncClient):
|
||||||
|
"""Test creating a media file entry."""
|
||||||
|
response = await auth_client.post(
|
||||||
|
"/api/media/mediafiles/",
|
||||||
|
json={
|
||||||
|
"duration_seconds": 180.0,
|
||||||
|
"frame_rate": 60.0,
|
||||||
|
"width": 3840,
|
||||||
|
"height": 2160,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
data = response.json()
|
||||||
|
assert data["duration_seconds"] == 180.0
|
||||||
|
assert data["width"] == 3840
|
||||||
|
|
||||||
|
async def test_create_mediafile_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test creating media file without auth returns 401."""
|
||||||
|
response = await async_client.post(
|
||||||
|
"/api/media/mediafiles/",
|
||||||
|
json={"duration_seconds": 60.0},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestRetrieveMediaFileEndpoint:
|
||||||
|
"""Tests for GET /api/media/mediafiles/{media_file_id}/."""
|
||||||
|
|
||||||
|
async def test_retrieve_own_mediafile(
|
||||||
|
self, auth_client: AsyncClient, test_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test retrieving own media file."""
|
||||||
|
response = await auth_client.get(f"/api/media/mediafiles/{test_media_file.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["id"] == str(test_media_file.id)
|
||||||
|
|
||||||
|
async def test_retrieve_other_mediafile_as_staff(
|
||||||
|
self, staff_client: AsyncClient, test_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test staff can retrieve any media file."""
|
||||||
|
response = await staff_client.get(
|
||||||
|
f"/api/media/mediafiles/{test_media_file.id}/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
async def test_retrieve_nonexistent_mediafile(self, auth_client: AsyncClient):
|
||||||
|
"""Test retrieving nonexistent media file returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.get(f"/api/media/mediafiles/{fake_id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
async def test_retrieve_other_mediafile_forbidden(
|
||||||
|
self, auth_client: AsyncClient, other_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test regular user cannot retrieve other user's media file."""
|
||||||
|
response = await auth_client.get(
|
||||||
|
f"/api/media/mediafiles/{other_media_file.id}/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
class TestPatchMediaFileEndpoint:
|
||||||
|
"""Tests for PATCH /api/media/mediafiles/{media_file_id}/."""
|
||||||
|
|
||||||
|
async def test_patch_own_mediafile(
|
||||||
|
self, auth_client: AsyncClient, test_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test updating own media file."""
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/media/mediafiles/{test_media_file.id}/",
|
||||||
|
json={"notes": "Updated notes"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["notes"] == "Updated notes"
|
||||||
|
|
||||||
|
async def test_patch_other_mediafile_as_staff(
|
||||||
|
self, staff_client: AsyncClient, test_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test staff can update any media file."""
|
||||||
|
response = await staff_client.patch(
|
||||||
|
f"/api/media/mediafiles/{test_media_file.id}/",
|
||||||
|
json={"notes": "Staff updated"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
async def test_patch_nonexistent_mediafile(self, auth_client: AsyncClient):
|
||||||
|
"""Test patching nonexistent media file returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/media/mediafiles/{fake_id}/",
|
||||||
|
json={"notes": "test"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
async def test_patch_other_mediafile_forbidden(
|
||||||
|
self, auth_client: AsyncClient, other_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test regular user cannot update other user's media file."""
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/media/mediafiles/{other_media_file.id}/",
|
||||||
|
json={"notes": "hacked"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
class TestDeleteMediaFileEndpoint:
|
||||||
|
"""Tests for DELETE /api/media/mediafiles/{media_file_id}/."""
|
||||||
|
|
||||||
|
async def test_delete_own_mediafile(
|
||||||
|
self, auth_client: AsyncClient, test_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test deleting own media file."""
|
||||||
|
response = await auth_client.delete(
|
||||||
|
f"/api/media/mediafiles/{test_media_file.id}/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 204
|
||||||
|
|
||||||
|
async def test_delete_other_mediafile_as_staff(
|
||||||
|
self, staff_client: AsyncClient, test_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test staff can delete any media file."""
|
||||||
|
response = await staff_client.delete(
|
||||||
|
f"/api/media/mediafiles/{test_media_file.id}/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 204
|
||||||
|
|
||||||
|
async def test_delete_nonexistent_mediafile(self, auth_client: AsyncClient):
|
||||||
|
"""Test deleting nonexistent media file returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.delete(f"/api/media/mediafiles/{fake_id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
async def test_delete_other_mediafile_forbidden(
|
||||||
|
self, auth_client: AsyncClient, other_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test regular user cannot delete other user's media file."""
|
||||||
|
response = await auth_client.delete(
|
||||||
|
f"/api/media/mediafiles/{other_media_file.id}/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
class TestListArtifactsEndpoint:
|
||||||
|
"""Tests for GET /api/media/artifacts/."""
|
||||||
|
|
||||||
|
async def test_list_artifacts(
|
||||||
|
self, auth_client: AsyncClient, test_artifact: ArtifactMediaFile
|
||||||
|
):
|
||||||
|
"""Test listing artifacts."""
|
||||||
|
response = await auth_client.get("/api/media/artifacts/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert isinstance(data, list)
|
||||||
|
|
||||||
|
async def test_list_artifacts_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test listing artifacts without auth returns 401."""
|
||||||
|
response = await async_client.get("/api/media/artifacts/")
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestCreateArtifactEndpoint:
|
||||||
|
"""Tests for POST /api/media/artifacts/."""
|
||||||
|
|
||||||
|
async def test_create_artifact_success(
|
||||||
|
self, auth_client: AsyncClient, test_media_file: MediaFile
|
||||||
|
):
|
||||||
|
"""Test creating an artifact."""
|
||||||
|
response = await auth_client.post(
|
||||||
|
"/api/media/artifacts/",
|
||||||
|
json={
|
||||||
|
"media_file_id": str(test_media_file.id),
|
||||||
|
"artifact_type": "AUDIO_PROXY",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
data = response.json()
|
||||||
|
assert data["artifact_type"] == "AUDIO_PROXY"
|
||||||
|
|
||||||
|
async def test_create_artifact_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test creating artifact without auth returns 401."""
|
||||||
|
response = await async_client.post(
|
||||||
|
"/api/media/artifacts/",
|
||||||
|
json={
|
||||||
|
"media_file_id": str(uuid.uuid4()),
|
||||||
|
"artifact_type": "THUMBNAIL",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestRetrieveArtifactEndpoint:
|
||||||
|
"""Tests for GET /api/media/artifacts/{artifact_id}/."""
|
||||||
|
|
||||||
|
async def test_retrieve_artifact(
|
||||||
|
self, auth_client: AsyncClient, test_artifact: ArtifactMediaFile
|
||||||
|
):
|
||||||
|
"""Test retrieving an artifact."""
|
||||||
|
response = await auth_client.get(f"/api/media/artifacts/{test_artifact.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["id"] == str(test_artifact.id)
|
||||||
|
|
||||||
|
async def test_retrieve_nonexistent_artifact(self, auth_client: AsyncClient):
|
||||||
|
"""Test retrieving nonexistent artifact returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.get(f"/api/media/artifacts/{fake_id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestPatchArtifactEndpoint:
|
||||||
|
"""Tests for PATCH /api/media/artifacts/{artifact_id}/."""
|
||||||
|
|
||||||
|
async def test_patch_artifact(
|
||||||
|
self, auth_client: AsyncClient, test_artifact: ArtifactMediaFile
|
||||||
|
):
|
||||||
|
"""Test updating an artifact."""
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/media/artifacts/{test_artifact.id}/",
|
||||||
|
json={"is_deleted": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["is_deleted"] is True
|
||||||
|
|
||||||
|
async def test_patch_nonexistent_artifact(self, auth_client: AsyncClient):
|
||||||
|
"""Test patching nonexistent artifact returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.patch(
|
||||||
|
f"/api/media/artifacts/{fake_id}/",
|
||||||
|
json={"is_deleted": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestDeleteArtifactEndpoint:
|
||||||
|
"""Tests for DELETE /api/media/artifacts/{artifact_id}/."""
|
||||||
|
|
||||||
|
async def test_delete_artifact(
|
||||||
|
self, auth_client: AsyncClient, test_artifact: ArtifactMediaFile
|
||||||
|
):
|
||||||
|
"""Test deleting an artifact."""
|
||||||
|
response = await auth_client.delete(f"/api/media/artifacts/{test_artifact.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 204
|
||||||
|
|
||||||
|
async def test_delete_nonexistent_artifact(self, auth_client: AsyncClient):
|
||||||
|
"""Test deleting nonexistent artifact returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.delete(f"/api/media/artifacts/{fake_id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
@@ -0,0 +1,254 @@
|
|||||||
|
"""
|
||||||
|
Tests for project management endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from cpv3.modules.projects.models import Project
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
async def test_project(test_db_session: AsyncSession, test_user: User) -> Project:
|
||||||
|
"""Create a test project owned by test_user."""
|
||||||
|
project = Project(
|
||||||
|
id=uuid.uuid4(),
|
||||||
|
owner_id=test_user.id,
|
||||||
|
name="Test Project",
|
||||||
|
description="A test project",
|
||||||
|
language="en",
|
||||||
|
status="DRAFT",
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
test_db_session.add(project)
|
||||||
|
await test_db_session.commit()
|
||||||
|
await test_db_session.refresh(project)
|
||||||
|
return project
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
async def other_project(test_db_session: AsyncSession, other_user: User) -> Project:
|
||||||
|
"""Create a project owned by another user."""
|
||||||
|
project = Project(
|
||||||
|
id=uuid.uuid4(),
|
||||||
|
owner_id=other_user.id,
|
||||||
|
name="Other Project",
|
||||||
|
description="Another user's project",
|
||||||
|
language="en",
|
||||||
|
status="DRAFT",
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
test_db_session.add(project)
|
||||||
|
await test_db_session.commit()
|
||||||
|
await test_db_session.refresh(project)
|
||||||
|
return project
|
||||||
|
|
||||||
|
|
||||||
|
class TestListProjectsEndpoint:
|
||||||
|
"""Tests for GET /api/projects/."""
|
||||||
|
|
||||||
|
async def test_list_projects_authenticated(
|
||||||
|
self, auth_client: AsyncClient, test_project: Project
|
||||||
|
):
|
||||||
|
"""Test listing projects as authenticated user."""
|
||||||
|
response = await auth_client.get("/api/projects/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert isinstance(data, list)
|
||||||
|
|
||||||
|
async def test_list_projects_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test listing projects without auth returns 401."""
|
||||||
|
response = await async_client.get("/api/projects/")
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestCreateProjectEndpoint:
|
||||||
|
"""Tests for POST /api/projects/."""
|
||||||
|
|
||||||
|
async def test_create_project_success(self, auth_client: AsyncClient):
|
||||||
|
"""Test creating a new project."""
|
||||||
|
response = await auth_client.post(
|
||||||
|
"/api/projects/",
|
||||||
|
json={
|
||||||
|
"name": "New Project",
|
||||||
|
"description": "A new project",
|
||||||
|
"language": "en",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
data = response.json()
|
||||||
|
assert data["name"] == "New Project"
|
||||||
|
assert data["description"] == "A new project"
|
||||||
|
assert data["language"] == "en"
|
||||||
|
assert data["status"] == "DRAFT"
|
||||||
|
|
||||||
|
async def test_create_project_minimal(self, auth_client: AsyncClient):
|
||||||
|
"""Test creating a project with minimal fields."""
|
||||||
|
response = await auth_client.post(
|
||||||
|
"/api/projects/",
|
||||||
|
json={"name": "Minimal Project"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
data = response.json()
|
||||||
|
assert data["name"] == "Minimal Project"
|
||||||
|
|
||||||
|
async def test_create_project_unauthenticated(self, async_client: AsyncClient):
|
||||||
|
"""Test creating project without auth returns 401."""
|
||||||
|
response = await async_client.post(
|
||||||
|
"/api/projects/",
|
||||||
|
json={"name": "Unauthorized Project"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestRetrieveProjectEndpoint:
|
||||||
|
"""Tests for GET /api/projects/{project_id}/."""
|
||||||
|
|
||||||
|
async def test_retrieve_own_project(
|
||||||
|
self, auth_client: AsyncClient, test_project: Project
|
||||||
|
):
|
||||||
|
"""Test retrieving own project."""
|
||||||
|
response = await auth_client.get(f"/api/projects/{test_project.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["id"] == str(test_project.id)
|
||||||
|
assert data["name"] == test_project.name
|
||||||
|
|
||||||
|
async def test_retrieve_other_project_as_staff(
|
||||||
|
self, staff_client: AsyncClient, test_project: Project
|
||||||
|
):
|
||||||
|
"""Test staff can retrieve any project."""
|
||||||
|
response = await staff_client.get(f"/api/projects/{test_project.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["id"] == str(test_project.id)
|
||||||
|
|
||||||
|
async def test_retrieve_nonexistent_project(self, auth_client: AsyncClient):
|
||||||
|
"""Test retrieving nonexistent project returns 404."""
|
||||||
|
fake_id = uuid.uuid4()
|
||||||
|
response = await auth_client.get(f"/api/projects/{fake_id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
assert response.json()["detail"] == "Not found"
|
||||||
|
|
||||||
|
async def test_retrieve_other_project_forbidden(
|
||||||
|
self, auth_client: AsyncClient, other_project: Project
|
||||||
|
):
|
||||||
|
"""Test regular user cannot retrieve other user's project."""
|
||||||
|
response = await auth_client.get(f"/api/projects/{other_project.id}/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
assert response.json()["detail"] == "Forbidden"
|
||||||
|
|
||||||
|
|
||||||
|
class TestPatchProjectEndpoint:
    """Tests for PATCH /api/projects/{project_id}/."""

    async def test_patch_own_project(
        self, auth_client: AsyncClient, test_project: Project
    ):
        """Test updating own project's name and description."""
        response = await auth_client.patch(
            f"/api/projects/{test_project.id}/",
            json={"name": "Updated Project", "description": "Updated description"},
        )

        assert response.status_code == 200
        data = response.json()
        assert data["name"] == "Updated Project"
        assert data["description"] == "Updated description"

    async def test_patch_project_status(
        self, auth_client: AsyncClient, test_project: Project
    ):
        """Test updating project status."""
        response = await auth_client.patch(
            f"/api/projects/{test_project.id}/",
            json={"status": "PROCESSING"},
        )

        assert response.status_code == 200
        data = response.json()
        assert data["status"] == "PROCESSING"

    async def test_patch_other_project_as_staff(
        self, staff_client: AsyncClient, test_project: Project
    ):
        """Test staff can update any project."""
        response = await staff_client.patch(
            f"/api/projects/{test_project.id}/",
            json={"name": "Staff Updated"},
        )

        assert response.status_code == 200
        data = response.json()
        assert data["name"] == "Staff Updated"

    async def test_patch_nonexistent_project(self, auth_client: AsyncClient):
        """Test patching nonexistent project returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.patch(
            f"/api/projects/{fake_id}/",
            json={"name": "Test"},
        )

        assert response.status_code == 404

    async def test_patch_other_project_forbidden(
        self, auth_client: AsyncClient, other_project: Project
    ):
        """Test regular user cannot update other user's project."""
        response = await auth_client.patch(
            f"/api/projects/{other_project.id}/",
            json={"name": "Hacked"},
        )

        assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
class TestDeleteProjectEndpoint:
    """Tests for DELETE /api/projects/{project_id}/."""

    async def test_delete_own_project(
        self, auth_client: AsyncClient, test_project: Project
    ):
        """Test deleting (deactivating) own project."""
        response = await auth_client.delete(f"/api/projects/{test_project.id}/")

        assert response.status_code == 204

    async def test_delete_other_project_as_staff(
        self, staff_client: AsyncClient, test_project: Project
    ):
        """Test staff can delete any project."""
        response = await staff_client.delete(f"/api/projects/{test_project.id}/")

        assert response.status_code == 204

    async def test_delete_nonexistent_project(self, auth_client: AsyncClient):
        """Test deleting nonexistent project returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.delete(f"/api/projects/{fake_id}/")

        assert response.status_code == 404

    async def test_delete_other_project_forbidden(
        self, auth_client: AsyncClient, other_project: Project
    ):
        """Test regular user cannot delete other user's project."""
        response = await auth_client.delete(f"/api/projects/{other_project.id}/")

        assert response.status_code == 403
|
||||||
@@ -0,0 +1,26 @@
|
|||||||
|
"""
|
||||||
|
Tests for system endpoints (health check).
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
|
||||||
|
|
||||||
|
class TestSystemEndpoints:
    """Tests for GET /api/ping/."""

    async def test_ping_returns_ok(self, async_client: AsyncClient):
        """Test health check endpoint returns ok status."""
        response = await async_client.get("/api/ping/")

        assert response.status_code == 200
        assert response.json() == {"status": "ok"}

    async def test_ping_no_auth_required(self, async_client: AsyncClient):
        """Test health check endpoint works without authentication."""
        # async_client has no auth header set
        response = await async_client.get("/api/ping/")

        assert response.status_code == 200
|
||||||
@@ -0,0 +1,295 @@
|
|||||||
|
"""
|
||||||
|
Tests for transcription endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from cpv3.modules.files.models import File
|
||||||
|
from cpv3.modules.transcription.models import Transcription
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
async def source_file(test_db_session: AsyncSession, test_user: User) -> File:
    """Create and persist a source audio file for transcription tests."""
    file = File(
        id=uuid.uuid4(),
        owner_id=test_user.id,
        original_filename="audio.mp3",
        path="uploads/audio.mp3",
        storage_backend="LOCAL",
        mime_type="audio/mpeg",
        size_bytes=5000000,
        is_uploaded=True,
        is_active=True,
    )
    test_db_session.add(file)
    await test_db_session.commit()
    # Refresh so server-generated fields are populated on the returned object.
    await test_db_session.refresh(file)
    return file
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
async def test_transcription(
    test_db_session: AsyncSession, source_file: File
) -> Transcription:
    """Create and persist a test transcription bound to source_file."""
    transcription = Transcription(
        id=uuid.uuid4(),
        source_file_id=source_file.id,
        engine="LOCAL_WHISPER",
        language="en",
        document={"segments": []},
        is_active=True,
    )
    test_db_session.add(transcription)
    await test_db_session.commit()
    # Refresh so server-generated fields are populated on the returned object.
    await test_db_session.refresh(transcription)
    return transcription
|
||||||
|
|
||||||
|
|
||||||
|
class TestListTranscriptionsEndpoint:
    """Tests for GET /api/transcribe/transcriptions/."""

    async def test_list_transcriptions(
        self, auth_client: AsyncClient, test_transcription: Transcription
    ):
        """Test listing transcriptions returns a JSON list."""
        response = await auth_client.get("/api/transcribe/transcriptions/")

        assert response.status_code == 200
        data = response.json()
        assert isinstance(data, list)

    async def test_list_transcriptions_unauthenticated(self, async_client: AsyncClient):
        """Test listing transcriptions without auth returns 401."""
        response = await async_client.get("/api/transcribe/transcriptions/")

        assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestCreateTranscriptionEndpoint:
    """Tests for POST /api/transcribe/transcriptions/."""

    async def test_create_transcription_success(
        self, auth_client: AsyncClient, source_file: File
    ):
        """Test creating a transcription entry with a minimal document."""
        response = await auth_client.post(
            "/api/transcribe/transcriptions/",
            json={
                "source_file_id": str(source_file.id),
                "engine": "LOCAL_WHISPER",
                "language": "en",
                "document": {
                    "segments": [
                        {
                            "text": "Hello world",
                            "semantic_tags": [],
                            "structure_tags": [],
                            "time": {"start": 0.0, "end": 2.0},
                            "lines": [],
                        }
                    ]
                },
            },
        )

        assert response.status_code == 201
        data = response.json()
        assert data["engine"] == "LOCAL_WHISPER"
        assert data["language"] == "en"

    async def test_create_transcription_unauthenticated(
        self, async_client: AsyncClient
    ):
        """Test creating transcription without auth returns 401."""
        response = await async_client.post(
            "/api/transcribe/transcriptions/",
            json={
                "source_file_id": str(uuid.uuid4()),
                "document": {"segments": []},
            },
        )

        assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestRetrieveTranscriptionEndpoint:
    """Tests for GET /api/transcribe/transcriptions/{transcription_id}/."""

    async def test_retrieve_transcription(
        self, auth_client: AsyncClient, test_transcription: Transcription
    ):
        """Test retrieving a transcription by id."""
        response = await auth_client.get(
            f"/api/transcribe/transcriptions/{test_transcription.id}/"
        )

        assert response.status_code == 200
        data = response.json()
        assert data["id"] == str(test_transcription.id)
        assert data["engine"] == test_transcription.engine

    async def test_retrieve_nonexistent_transcription(self, auth_client: AsyncClient):
        """Test retrieving nonexistent transcription returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.get(f"/api/transcribe/transcriptions/{fake_id}/")

        assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestPatchTranscriptionEndpoint:
    """Tests for PATCH /api/transcribe/transcriptions/{transcription_id}/."""

    async def test_patch_transcription(
        self, auth_client: AsyncClient, test_transcription: Transcription
    ):
        """Test replacing a transcription's document."""
        updated_document = {
            "segments": [
                {
                    "text": "Updated text",
                    "semantic_tags": [],
                    "structure_tags": [],
                    "time": {"start": 0.0, "end": 3.0},
                    "lines": [],
                }
            ]
        }

        response = await auth_client.patch(
            f"/api/transcribe/transcriptions/{test_transcription.id}/",
            json={"document": updated_document},
        )

        assert response.status_code == 200
        data = response.json()
        assert data["document"]["segments"][0]["text"] == "Updated text"

    async def test_patch_nonexistent_transcription(self, auth_client: AsyncClient):
        """Test patching nonexistent transcription returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.patch(
            f"/api/transcribe/transcriptions/{fake_id}/",
            json={"document": {"segments": []}},
        )

        assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestDeleteTranscriptionEndpoint:
    """Tests for DELETE /api/transcribe/transcriptions/{transcription_id}/."""

    async def test_delete_transcription(
        self, auth_client: AsyncClient, test_transcription: Transcription
    ):
        """Test deleting a transcription."""
        response = await auth_client.delete(
            f"/api/transcribe/transcriptions/{test_transcription.id}/"
        )

        assert response.status_code == 204

    async def test_delete_nonexistent_transcription(self, auth_client: AsyncClient):
        """Test deleting nonexistent transcription returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.delete(
            f"/api/transcribe/transcriptions/{fake_id}/"
        )

        assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
class TestWhisperTranscribeEndpoint:
    """Tests for POST /api/transcribe/whisper/."""

    async def test_whisper_transcribe_success(self, auth_client: AsyncClient):
        """Test Whisper transcription endpoint with the engine call mocked out."""
        mock_result = {
            "segments": [
                {
                    "text": "Hello from Whisper",
                    "semantic_tags": [],
                    "structure_tags": [],
                    "time": {"start": 0.0, "end": 2.5},
                    "lines": [],
                }
            ]
        }

        # Patch the async engine function at its use site in the router so the
        # endpoint never touches a real Whisper model.
        with patch(
            "cpv3.modules.transcription.router.transcribe_with_whisper",
            new_callable=AsyncMock,
            return_value=mock_result,
        ):
            response = await auth_client.post(
                "/api/transcribe/whisper/",
                json={
                    "file_path": "uploads/audio.mp3",
                    "model_name": "tiny",
                    "language": "en",
                },
            )

        assert response.status_code == 200

    async def test_whisper_transcribe_unauthenticated(self, async_client: AsyncClient):
        """Test Whisper transcription without auth returns 401."""
        response = await async_client.post(
            "/api/transcribe/whisper/",
            json={"file_path": "test.mp3"},
        )

        assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestGoogleSpeechTranscribeEndpoint:
    """Tests for POST /api/transcribe/google-speech/."""

    async def test_google_speech_transcribe_success(self, auth_client: AsyncClient):
        """Test Google Speech transcription endpoint with the API call mocked out."""
        mock_result = {
            "segments": [
                {
                    "text": "Hello from Google",
                    "semantic_tags": [],
                    "structure_tags": [],
                    "time": {"start": 0.0, "end": 2.0},
                    "lines": [],
                }
            ]
        }

        # Patch the async engine function at its use site in the router so the
        # endpoint never calls the real Google Speech API.
        with patch(
            "cpv3.modules.transcription.router.transcribe_with_google_speech",
            new_callable=AsyncMock,
            return_value=mock_result,
        ):
            response = await auth_client.post(
                "/api/transcribe/google-speech/",
                json={
                    "file_path": "uploads/audio.mp3",
                    "language_codes": ["en-US"],
                },
            )

        assert response.status_code == 200

    async def test_google_speech_transcribe_unauthenticated(
        self, async_client: AsyncClient
    ):
        """Test Google Speech transcription without auth returns 401."""
        response = await async_client.post(
            "/api/transcribe/google-speech/",
            json={"file_path": "test.mp3"},
        )

        assert response.status_code == 401
|
||||||
@@ -0,0 +1,212 @@
|
|||||||
|
"""
|
||||||
|
Tests for user management endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
|
||||||
|
class TestListUsersEndpoint:
    """Tests for GET /api/users/."""

    async def test_list_users_authenticated(
        self, auth_client: AsyncClient, test_user: User
    ):
        """Test listing users as authenticated user returns a JSON list."""
        response = await auth_client.get("/api/users/")

        assert response.status_code == 200
        data = response.json()
        assert isinstance(data, list)

    async def test_list_users_unauthenticated(self, async_client: AsyncClient):
        """Test listing users without auth returns 401."""
        response = await async_client.get("/api/users/")

        assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestCreateUserEndpoint:
    """Tests for POST /api/users/."""

    async def test_create_user_as_staff(self, staff_client: AsyncClient):
        """Test staff can create a new user."""
        response = await staff_client.post(
            "/api/users/",
            json={
                "username": "newcreateduser",
                "email": "newcreated@example.com",
                "password": "password123",
                "first_name": "New",
                "last_name": "Created",
            },
        )

        assert response.status_code == 201
        data = response.json()
        assert data["username"] == "newcreateduser"
        assert data["email"] == "newcreated@example.com"

    async def test_create_user_unauthenticated(self, async_client: AsyncClient):
        """Test creating user without auth returns 401."""
        response = await async_client.post(
            "/api/users/",
            json={
                "username": "newuser",
                "email": "new@example.com",
                "password": "password123",
            },
        )

        assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestMeEndpoint:
    """Tests for GET /api/users/me/."""

    async def test_me_returns_current_user(
        self, auth_client: AsyncClient, test_user: User
    ):
        """Test getting current user info matches the authenticated user."""
        response = await auth_client.get("/api/users/me/")

        assert response.status_code == 200
        data = response.json()
        assert data["username"] == test_user.username
        assert data["email"] == test_user.email
        assert data["id"] == str(test_user.id)

    async def test_me_unauthenticated(self, async_client: AsyncClient):
        """Test getting current user without auth returns 401."""
        response = await async_client.get("/api/users/me/")

        assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestRetrieveUserEndpoint:
    """Tests for GET /api/users/{user_id}/."""

    async def test_retrieve_own_user(self, auth_client: AsyncClient, test_user: User):
        """Test user can retrieve their own info."""
        response = await auth_client.get(f"/api/users/{test_user.id}/")

        assert response.status_code == 200
        data = response.json()
        assert data["id"] == str(test_user.id)
        assert data["username"] == test_user.username

    async def test_retrieve_other_user_as_staff(
        self, staff_client: AsyncClient, test_user: User
    ):
        """Test staff can retrieve other user's info."""
        response = await staff_client.get(f"/api/users/{test_user.id}/")

        assert response.status_code == 200
        data = response.json()
        assert data["id"] == str(test_user.id)

    async def test_retrieve_nonexistent_user(self, auth_client: AsyncClient):
        """Test retrieving nonexistent user returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.get(f"/api/users/{fake_id}/")

        assert response.status_code == 404
        assert response.json()["detail"] == "Not found"

    async def test_retrieve_other_user_forbidden(
        self, auth_client: AsyncClient, other_user: User
    ):
        """Test regular user cannot retrieve other user's info."""
        response = await auth_client.get(f"/api/users/{other_user.id}/")

        assert response.status_code == 403
        assert response.json()["detail"] == "Forbidden"
|
||||||
|
|
||||||
|
|
||||||
|
class TestPatchUserEndpoint:
    """Tests for PATCH /api/users/{user_id}/."""

    async def test_patch_own_user(self, auth_client: AsyncClient, test_user: User):
        """Test user can update their own info."""
        response = await auth_client.patch(
            f"/api/users/{test_user.id}/",
            json={"first_name": "Updated", "last_name": "Name"},
        )

        assert response.status_code == 200
        data = response.json()
        assert data["first_name"] == "Updated"
        assert data["last_name"] == "Name"

    async def test_patch_other_user_as_staff(
        self, staff_client: AsyncClient, test_user: User
    ):
        """Test staff can update other user's info."""
        response = await staff_client.patch(
            f"/api/users/{test_user.id}/",
            json={"first_name": "StaffUpdated"},
        )

        assert response.status_code == 200
        data = response.json()
        assert data["first_name"] == "StaffUpdated"

    async def test_patch_nonexistent_user(self, auth_client: AsyncClient):
        """Test patching nonexistent user returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.patch(
            f"/api/users/{fake_id}/",
            json={"first_name": "Test"},
        )

        assert response.status_code == 404

    async def test_patch_other_user_forbidden(
        self, auth_client: AsyncClient, other_user: User
    ):
        """Test regular user cannot update other user's info."""
        response = await auth_client.patch(
            f"/api/users/{other_user.id}/",
            json={"first_name": "Hacked"},
        )

        assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
class TestDeleteUserEndpoint:
    """Tests for DELETE /api/users/{user_id}/."""

    async def test_delete_own_user(self, auth_client: AsyncClient, test_user: User):
        """Test user can delete (deactivate) their own account."""
        response = await auth_client.delete(f"/api/users/{test_user.id}/")

        assert response.status_code == 204

    async def test_delete_other_user_as_staff(
        self, staff_client: AsyncClient, test_user: User
    ):
        """Test staff can delete other user's account."""
        response = await staff_client.delete(f"/api/users/{test_user.id}/")

        assert response.status_code == 204

    async def test_delete_nonexistent_user(self, auth_client: AsyncClient):
        """Test deleting nonexistent user returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.delete(f"/api/users/{fake_id}/")

        assert response.status_code == 404

    async def test_delete_other_user_forbidden(
        self, auth_client: AsyncClient, other_user: User
    ):
        """Test regular user cannot delete other user's account."""
        response = await auth_client.delete(f"/api/users/{other_user.id}/")

        assert response.status_code == 403
|
||||||
@@ -0,0 +1,247 @@
|
|||||||
|
"""
|
||||||
|
Tests for webhooks endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from cpv3.modules.webhooks.models import Webhook
|
||||||
|
from cpv3.modules.users.models import User
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
async def test_webhook(test_db_session: AsyncSession, test_user: User) -> Webhook:
    """Create and persist a test webhook owned by test_user."""
    webhook = Webhook(
        id=uuid.uuid4(),
        user_id=test_user.id,
        event="job.completed",
        url="https://example.com/webhook",
        secret="test-secret-123",
        is_active=True,
    )
    test_db_session.add(webhook)
    await test_db_session.commit()
    # Refresh so server-generated fields are populated on the returned object.
    await test_db_session.refresh(webhook)
    return webhook
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
async def other_webhook(test_db_session: AsyncSession, other_user: User) -> Webhook:
    """Create and persist a webhook owned by another user (for 403 tests)."""
    webhook = Webhook(
        id=uuid.uuid4(),
        user_id=other_user.id,
        event="job.failed",
        url="https://other.com/webhook",
        secret="other-secret-456",
        is_active=True,
    )
    test_db_session.add(webhook)
    await test_db_session.commit()
    # Refresh so server-generated fields are populated on the returned object.
    await test_db_session.refresh(webhook)
    return webhook
|
||||||
|
|
||||||
|
|
||||||
|
class TestListWebhooksEndpoint:
    """Tests for GET /api/webhooks/."""

    async def test_list_webhooks(self, auth_client: AsyncClient, test_webhook: Webhook):
        """Test listing webhooks returns a JSON list."""
        response = await auth_client.get("/api/webhooks/")

        assert response.status_code == 200
        data = response.json()
        assert isinstance(data, list)

    async def test_list_webhooks_unauthenticated(self, async_client: AsyncClient):
        """Test listing webhooks without auth returns 401."""
        response = await async_client.get("/api/webhooks/")

        assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestCreateWebhookEndpoint:
    """Tests for POST /api/webhooks/."""

    async def test_create_webhook_success(self, auth_client: AsyncClient):
        """Test creating a webhook with all fields."""
        response = await auth_client.post(
            "/api/webhooks/",
            json={
                "event": "transcription.completed",
                "url": "https://myapp.com/webhook",
                "secret": "my-webhook-secret",
            },
        )

        assert response.status_code == 201
        data = response.json()
        assert data["event"] == "transcription.completed"
        assert data["url"] == "https://myapp.com/webhook"

    async def test_create_webhook_minimal(self, auth_client: AsyncClient):
        """Test creating a webhook with minimal fields (url only)."""
        response = await auth_client.post(
            "/api/webhooks/",
            json={"url": "https://minimal.com/hook"},
        )

        assert response.status_code == 201
        data = response.json()
        assert data["url"] == "https://minimal.com/hook"

    async def test_create_webhook_unauthenticated(self, async_client: AsyncClient):
        """Test creating webhook without auth returns 401."""
        response = await async_client.post(
            "/api/webhooks/",
            json={"url": "https://test.com/hook"},
        )

        assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
class TestRetrieveWebhookEndpoint:
    """Tests for GET /api/webhooks/{webhook_id}/."""

    async def test_retrieve_own_webhook(
        self, auth_client: AsyncClient, test_webhook: Webhook
    ):
        """Test retrieving own webhook."""
        response = await auth_client.get(f"/api/webhooks/{test_webhook.id}/")

        assert response.status_code == 200
        data = response.json()
        assert data["id"] == str(test_webhook.id)
        assert data["url"] == test_webhook.url

    async def test_retrieve_other_webhook_as_staff(
        self, staff_client: AsyncClient, test_webhook: Webhook
    ):
        """Test staff can retrieve any webhook."""
        response = await staff_client.get(f"/api/webhooks/{test_webhook.id}/")

        assert response.status_code == 200

    async def test_retrieve_nonexistent_webhook(self, auth_client: AsyncClient):
        """Test retrieving nonexistent webhook returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.get(f"/api/webhooks/{fake_id}/")

        assert response.status_code == 404
        assert response.json()["detail"] == "Not found"

    async def test_retrieve_other_webhook_forbidden(
        self, auth_client: AsyncClient, other_webhook: Webhook
    ):
        """Test regular user cannot retrieve other user's webhook."""
        response = await auth_client.get(f"/api/webhooks/{other_webhook.id}/")

        assert response.status_code == 403
        assert response.json()["detail"] == "Forbidden"
|
||||||
|
|
||||||
|
|
||||||
|
class TestPatchWebhookEndpoint:
    """Tests for PATCH /api/webhooks/{webhook_id}/."""

    async def test_patch_own_webhook(
        self, auth_client: AsyncClient, test_webhook: Webhook
    ):
        """Test updating own webhook's url and event."""
        response = await auth_client.patch(
            f"/api/webhooks/{test_webhook.id}/",
            json={
                "url": "https://updated.com/webhook",
                "event": "job.started",
            },
        )

        assert response.status_code == 200
        data = response.json()
        assert data["url"] == "https://updated.com/webhook"
        assert data["event"] == "job.started"

    async def test_patch_webhook_deactivate(
        self, auth_client: AsyncClient, test_webhook: Webhook
    ):
        """Test deactivating a webhook via is_active flag."""
        response = await auth_client.patch(
            f"/api/webhooks/{test_webhook.id}/",
            json={"is_active": False},
        )

        assert response.status_code == 200
        data = response.json()
        assert data["is_active"] is False

    async def test_patch_other_webhook_as_staff(
        self, staff_client: AsyncClient, test_webhook: Webhook
    ):
        """Test staff can update any webhook."""
        response = await staff_client.patch(
            f"/api/webhooks/{test_webhook.id}/",
            json={"event": "staff.updated"},
        )

        assert response.status_code == 200

    async def test_patch_nonexistent_webhook(self, auth_client: AsyncClient):
        """Test patching nonexistent webhook returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.patch(
            f"/api/webhooks/{fake_id}/",
            json={"url": "https://test.com"},
        )

        assert response.status_code == 404

    async def test_patch_other_webhook_forbidden(
        self, auth_client: AsyncClient, other_webhook: Webhook
    ):
        """Test regular user cannot update other user's webhook."""
        response = await auth_client.patch(
            f"/api/webhooks/{other_webhook.id}/",
            json={"url": "https://hacked.com"},
        )

        assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
class TestDeleteWebhookEndpoint:
    """Tests for DELETE /api/webhooks/{webhook_id}/."""

    async def test_delete_own_webhook(
        self, auth_client: AsyncClient, test_webhook: Webhook
    ):
        """Test deleting own webhook."""
        response = await auth_client.delete(f"/api/webhooks/{test_webhook.id}/")

        assert response.status_code == 204

    async def test_delete_other_webhook_as_staff(
        self, staff_client: AsyncClient, test_webhook: Webhook
    ):
        """Test staff can delete any webhook."""
        response = await staff_client.delete(f"/api/webhooks/{test_webhook.id}/")

        assert response.status_code == 204

    async def test_delete_nonexistent_webhook(self, auth_client: AsyncClient):
        """Test deleting nonexistent webhook returns 404."""
        fake_id = uuid.uuid4()
        response = await auth_client.delete(f"/api/webhooks/{fake_id}/")

        assert response.status_code == 404

    async def test_delete_other_webhook_forbidden(
        self, auth_client: AsyncClient, other_webhook: Webhook
    ):
        """Test regular user cannot delete other user's webhook."""
        response = await auth_client.delete(f"/api/webhooks/{other_webhook.id}/")

        assert response.status_code == 403
|
||||||
@@ -0,0 +1,595 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for S3 storage backend.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import io
|
||||||
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from botocore.exceptions import ClientError
|
||||||
|
|
||||||
|
from cpv3.infrastructure.storage.s3 import S3Config, S3StorageBackend
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def s3_config() -> S3Config:
    """Provide an S3Config aimed at a local test bucket."""
    return S3Config(
        access_key="test-access-key",
        secret_key="test-secret-key",
        bucket_name="test-bucket",
        endpoint_url_internal="http://localhost:9000",
        endpoint_url_public="http://localhost:9000",
        presign_expires_seconds=3600,
    )
|
||||||
|
|
||||||
|
@pytest.fixture
def mock_boto3_session():
    """Patch boto3 so the backend receives two distinct mocked clients.

    The backend constructor asks the session for two clients; this fixture
    hands them out in a fixed order (internal first, presign second) and
    keeps cycling should more be requested.
    """
    with patch("cpv3.infrastructure.storage.s3.boto3.session.Session") as mock_session:
        internal_client = MagicMock()
        presign_client = MagicMock()

        def alternating_clients():
            # Infinite alternation: odd requests get the internal client,
            # even requests the presign client.
            while True:
                yield internal_client
                yield presign_client

        # side_effect given an iterator returns its next item per call.
        mock_session.return_value.client.side_effect = alternating_clients()

        yield {
            "session": mock_session,
            "client": internal_client,
            "presign_client": presign_client,
        }
||||||
|
@pytest.fixture
def s3_backend(s3_config: S3Config, mock_boto3_session) -> S3StorageBackend:
    """Construct the backend under test against the patched boto3 session."""
    return S3StorageBackend(s3_config)
||||||
|
|
||||||
|
class TestS3Config:
    """Tests for the S3Config dataclass."""

    def test_config_creation(self):
        """All explicitly supplied fields are stored as given."""
        cfg = S3Config(
            access_key="key",
            secret_key="secret",
            bucket_name="bucket",
            endpoint_url_internal="http://internal:9000",
            endpoint_url_public="http://public:9000",
            presign_expires_seconds=7200,
        )

        assert cfg.access_key == "key"
        assert cfg.secret_key == "secret"
        assert cfg.bucket_name == "bucket"
        assert cfg.endpoint_url_internal == "http://internal:9000"
        assert cfg.endpoint_url_public == "http://public:9000"
        assert cfg.presign_expires_seconds == 7200

    def test_config_default_presign_expires(self):
        """Omitting presign_expires_seconds falls back to one hour."""
        cfg = S3Config(
            access_key="key",
            secret_key="secret",
            bucket_name="bucket",
            endpoint_url_internal=None,
            endpoint_url_public=None,
        )

        assert cfg.presign_expires_seconds == 3600

    def test_config_is_frozen(self):
        """The config is immutable; attribute assignment raises."""
        cfg = S3Config(
            access_key="key",
            secret_key="secret",
            bucket_name="bucket",
            endpoint_url_internal=None,
            endpoint_url_public=None,
        )

        with pytest.raises(AttributeError):
            cfg.access_key = "new_key"  # type: ignore
||||||
|
class TestS3StorageBackendInit:
    """Tests for S3StorageBackend construction."""

    def test_init_creates_two_clients(self, s3_config: S3Config, mock_boto3_session):
        """Construction builds one internal client and one presign client."""
        S3StorageBackend(s3_config)

        assert mock_boto3_session["session"].return_value.client.call_count == 2

    def test_init_uses_correct_endpoints(self, mock_boto3_session):
        """Each client is pointed at its configured endpoint."""
        cfg = S3Config(
            access_key="key",
            secret_key="secret",
            bucket_name="bucket",
            endpoint_url_internal="http://internal:9000",
            endpoint_url_public="http://public:9000",
        )

        S3StorageBackend(cfg)

        calls = mock_boto3_session["session"].return_value.client.call_args_list
        internal_call, presign_call = calls[0], calls[1]

        assert internal_call[1]["endpoint_url"] == "http://internal:9000"
        assert presign_call[1]["endpoint_url"] == "http://public:9000"

    def test_init_uses_internal_for_presign_when_no_public(self, mock_boto3_session):
        """Without a public endpoint, presigning reuses the internal one."""
        cfg = S3Config(
            access_key="key",
            secret_key="secret",
            bucket_name="bucket",
            endpoint_url_internal="http://internal:9000",
            endpoint_url_public=None,
        )

        S3StorageBackend(cfg)

        calls = mock_boto3_session["session"].return_value.client.call_args_list
        # Both the internal and the presign client fall back to the
        # internal endpoint.
        for call in calls[:2]:
            assert call[1]["endpoint_url"] == "http://internal:9000"
||||||
|
class TestEnsureBucket:
    """Tests for the ensure_bucket method."""

    def test_ensure_bucket_creates_bucket_if_not_exists(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """A 404 from head_bucket triggers bucket creation."""
        client = mock_boto3_session["client"]
        client.head_bucket.side_effect = ClientError(
            {"Error": {"Code": "404"}}, "HeadBucket"
        )

        s3_backend.ensure_bucket()

        client.create_bucket.assert_called_once_with(Bucket="test-bucket")

    def test_ensure_bucket_creates_bucket_no_such_bucket_error(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """A NoSuchBucket error also triggers bucket creation."""
        client = mock_boto3_session["client"]
        client.head_bucket.side_effect = ClientError(
            {"Error": {"Code": "NoSuchBucket"}}, "HeadBucket"
        )

        s3_backend.ensure_bucket()

        client.create_bucket.assert_called_once_with(Bucket="test-bucket")

    def test_ensure_bucket_does_not_create_if_exists(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """No creation call is made when head_bucket succeeds."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        s3_backend.ensure_bucket()

        client.create_bucket.assert_not_called()

    def test_ensure_bucket_caches_result(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """Repeated ensure_bucket calls only hit head_bucket once."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        for _ in range(3):
            s3_backend.ensure_bucket()

        assert client.head_bucket.call_count == 1

    def test_ensure_bucket_raises_on_other_errors(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """Errors other than missing-bucket propagate to the caller."""
        client = mock_boto3_session["client"]
        client.head_bucket.side_effect = ClientError(
            {"Error": {"Code": "AccessDenied"}}, "HeadBucket"
        )

        with pytest.raises(ClientError):
            s3_backend.ensure_bucket()
||||||
|
class TestUploadFileobj:
    """Tests for the upload_fileobj method."""

    def test_upload_fileobj_with_content_type(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """Uploading with a content type forwards it via ExtraArgs."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        buffer = io.BytesIO(b"test content")
        s3_backend.upload_fileobj("test/key.txt", buffer, content_type="text/plain")

        client.upload_fileobj.assert_called_once()
        kwargs = client.upload_fileobj.call_args[1]

        assert kwargs["Bucket"] == "test-bucket"
        assert kwargs["Key"] == "test/key.txt"
        assert kwargs["ExtraArgs"] == {"ContentType": "text/plain"}

    def test_upload_fileobj_without_content_type(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """Uploading without a content type passes ExtraArgs=None."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        buffer = io.BytesIO(b"test content")
        s3_backend.upload_fileobj("test/key.txt", buffer, content_type=None)

        assert client.upload_fileobj.call_args[1]["ExtraArgs"] is None

    def test_upload_fileobj_ensures_bucket(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """upload_fileobj checks the bucket exists before writing."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        s3_backend.upload_fileobj(
            "test/key.txt", io.BytesIO(b"test content"), content_type=None
        )

        client.head_bucket.assert_called_once()
||||||
|
class TestDownloadFileobj:
    """Tests for the download_fileobj method."""

    def test_download_fileobj(self, s3_backend: S3StorageBackend, mock_boto3_session):
        """download_fileobj delegates to boto3 with bucket, key and sink."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        sink = io.BytesIO()
        s3_backend.download_fileobj("test/key.txt", sink)

        client.download_fileobj.assert_called_once_with(
            "test-bucket", "test/key.txt", sink
        )

    def test_download_fileobj_ensures_bucket(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """download_fileobj checks the bucket exists before reading."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        s3_backend.download_fileobj("test/key.txt", io.BytesIO())

        client.head_bucket.assert_called_once()
||||||
|
class TestExists:
    """Tests for the exists method."""

    def test_exists_returns_true_when_object_exists(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """A successful head_object means the key exists."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}
        client.head_object.return_value = {}

        assert s3_backend.exists("test/key.txt") is True
        client.head_object.assert_called_once_with(
            Bucket="test-bucket", Key="test/key.txt"
        )

    def test_exists_returns_false_when_object_not_found_404(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """A 404 from head_object maps to False."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}
        client.head_object.side_effect = ClientError(
            {"Error": {"Code": "404"}}, "HeadObject"
        )

        assert s3_backend.exists("test/key.txt") is False

    def test_exists_returns_false_when_no_such_key(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """A NoSuchKey error maps to False."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}
        client.head_object.side_effect = ClientError(
            {"Error": {"Code": "NoSuchKey"}}, "HeadObject"
        )

        assert s3_backend.exists("test/key.txt") is False

    def test_exists_raises_on_other_errors(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """Unexpected client errors propagate instead of returning False."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}
        client.head_object.side_effect = ClientError(
            {"Error": {"Code": "AccessDenied"}}, "HeadObject"
        )

        with pytest.raises(ClientError):
            s3_backend.exists("test/key.txt")
||||||
|
class TestSize:
    """Tests for the size method."""

    def test_size_returns_content_length(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """size reports the ContentLength returned by head_object."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}
        client.head_object.return_value = {"ContentLength": 12345}

        assert s3_backend.size("test/key.txt") == 12345
        client.head_object.assert_called_once_with(
            Bucket="test-bucket", Key="test/key.txt"
        )

    def test_size_returns_zero_when_no_content_length(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """A missing ContentLength field is reported as size 0."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}
        client.head_object.return_value = {}

        assert s3_backend.size("test/key.txt") == 0
||||||
|
class TestDelete:
    """Tests for the delete method."""

    def test_delete_calls_delete_object(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """delete issues delete_object for the bucket and key."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        s3_backend.delete("test/key.txt")

        client.delete_object.assert_called_once_with(
            Bucket="test-bucket", Key="test/key.txt"
        )

    def test_delete_ensures_bucket(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """delete checks the bucket exists before removing the object."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        s3_backend.delete("test/key.txt")

        client.head_bucket.assert_called_once()
||||||
|
class TestRead:
    """Tests for the read method."""

    def test_read_returns_body_content(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """read returns the bytes of the object's Body stream."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        body = MagicMock()
        body.read.return_value = b"file content"
        client.get_object.return_value = {"Body": body}

        assert s3_backend.read("test/key.txt") == b"file content"
        client.get_object.assert_called_once_with(
            Bucket="test-bucket", Key="test/key.txt"
        )

    def test_read_ensures_bucket(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """read checks the bucket exists before fetching the object."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        body = MagicMock()
        body.read.return_value = b"content"
        client.get_object.return_value = {"Body": body}

        s3_backend.read("test/key.txt")

        client.head_bucket.assert_called_once()
||||||
|
class TestGenerateUrl:
    """Tests for the generate_url method."""

    def test_generate_url_returns_presigned_url(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """generate_url hands back whatever the presign client produced."""
        mock_boto3_session["client"].head_bucket.return_value = {}
        presign = mock_boto3_session["presign_client"]
        presign.generate_presigned_url.return_value = (
            "https://example.com/presigned-url"
        )

        assert s3_backend.generate_url("test/key.txt") == (
            "https://example.com/presigned-url"
        )

    def test_generate_url_uses_correct_parameters(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """generate_url presigns get_object for the bucket/key with the default TTL."""
        mock_boto3_session["client"].head_bucket.return_value = {}
        presign = mock_boto3_session["presign_client"]

        s3_backend.generate_url("test/key.txt")

        presign.generate_presigned_url.assert_called_once_with(
            ClientMethod="get_object",
            Params={"Bucket": "test-bucket", "Key": "test/key.txt"},
            ExpiresIn=3600,
        )

    def test_generate_url_uses_custom_expiration(self, mock_boto3_session):
        """A configured presign_expires_seconds overrides the default TTL."""
        cfg = S3Config(
            access_key="key",
            secret_key="secret",
            bucket_name="bucket",
            endpoint_url_internal="http://localhost:9000",
            endpoint_url_public=None,
            presign_expires_seconds=7200,
        )

        backend = S3StorageBackend(cfg)
        mock_boto3_session["client"].head_bucket.return_value = {}
        presign = mock_boto3_session["presign_client"]

        backend.generate_url("test/key.txt")

        assert presign.generate_presigned_url.call_args[1]["ExpiresIn"] == 7200
||||||
|
class TestIntegrationScenarios:
    """Integration-style tests covering common usage flows."""

    def test_upload_then_check_exists(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """A file uploaded through the backend is reported as existing."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}
        client.head_object.return_value = {}

        s3_backend.upload_fileobj(
            "test/file.txt", io.BytesIO(b"test content"), content_type="text/plain"
        )

        assert s3_backend.exists("test/file.txt") is True

    def test_upload_then_read_back(
        self, s3_backend: S3StorageBackend, mock_boto3_session
    ):
        """Content uploaded through the backend can be read back."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        payload = b"test file content"
        body = MagicMock()
        body.read.return_value = payload
        client.get_object.return_value = {"Body": body}

        s3_backend.upload_fileobj(
            "test/file.txt", io.BytesIO(payload), content_type="text/plain"
        )

        assert s3_backend.read("test/file.txt") == payload

    def test_upload_then_delete(self, s3_backend: S3StorageBackend, mock_boto3_session):
        """A file can be uploaded and then removed."""
        client = mock_boto3_session["client"]
        client.head_bucket.return_value = {}

        s3_backend.upload_fileobj(
            "test/file.txt", io.BytesIO(b"test content"), content_type="text/plain"
        )
        s3_backend.delete("test/file.txt")

        client.delete_object.assert_called_once_with(
            Bucket="test-bucket", Key="test/file.txt"
        )

    def test_full_lifecycle(self, s3_backend: S3StorageBackend, mock_boto3_session):
        """Upload, existence check, size, presigned URL and delete cooperate."""
        client = mock_boto3_session["client"]
        presign = mock_boto3_session["presign_client"]
        client.head_bucket.return_value = {}
        client.head_object.return_value = {"ContentLength": 100}
        presign.generate_presigned_url.return_value = "https://url"

        s3_backend.upload_fileobj(
            "media/file.mp4",
            io.BytesIO(b"test file content"),
            content_type="video/mp4",
        )

        assert s3_backend.exists("media/file.mp4") is True
        assert s3_backend.size("media/file.mp4") == 100
        assert s3_backend.generate_url("media/file.mp4") == "https://url"

        s3_backend.delete("media/file.mp4")

        # Sanity-check the full call trail on the mocked client.
        assert client.upload_fileobj.called
        assert client.delete_object.called
||||||
@@ -0,0 +1,374 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for StorageService (async wrapper for storage backends).
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import io
|
||||||
|
from unittest.mock import MagicMock
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from cpv3.infrastructure.storage.base import StorageService
|
||||||
|
from cpv3.infrastructure.storage.types import FileInfo
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def mock_backend() -> MagicMock:
    """Build a backend double with canned return values for every operation."""
    backend = MagicMock()
    backend.exists.return_value = True
    backend.size.return_value = 1024
    backend.read.return_value = b"file content"
    backend.generate_url.return_value = "https://example.com/file.txt"
    # upload_fileobj / download_fileobj / delete keep the default MagicMock
    # behavior: they record their calls and return values nobody inspects.
    return backend
|
||||||
|
@pytest.fixture
def storage_service(mock_backend: MagicMock) -> StorageService:
    """Wire a StorageService over the mocked backend."""
    return StorageService(mock_backend)
|
||||||
|
class TestStorageServiceUpload:
    """Tests for StorageService upload functionality."""

    @pytest.mark.asyncio
    async def test_upload_fileobj_with_generated_name(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """With gen_name=True the key is a generated name under the folder,
        keeping the original extension but not the original stem."""
        fileobj = io.BytesIO(b"test content")

        key = await storage_service.upload_fileobj(
            fileobj=fileobj,
            file_name="original.txt",
            folder="uploads",
            gen_name=True,
            content_type="text/plain",
        )

        # Key keeps the folder prefix and the .txt extension...
        assert key.startswith("uploads/")
        assert key.endswith(".txt")
        # ...but must not leak the original filename.
        assert "original" not in key

        mock_backend.upload_fileobj.assert_called_once()

    @pytest.mark.asyncio
    async def test_upload_fileobj_with_original_name(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """With gen_name=False the original filename is kept under the folder."""
        fileobj = io.BytesIO(b"test content")

        key = await storage_service.upload_fileobj(
            fileobj=fileobj,
            file_name="myfile.txt",
            folder="uploads",
            gen_name=False,
            content_type="text/plain",
        )

        assert key == "uploads/myfile.txt"
        mock_backend.upload_fileobj.assert_called_once()

    @pytest.mark.asyncio
    async def test_upload_fileobj_without_folder(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """An empty folder yields a key with no directory prefix."""
        fileobj = io.BytesIO(b"test content")

        key = await storage_service.upload_fileobj(
            fileobj=fileobj,
            file_name="myfile.txt",
            folder="",
            gen_name=False,
        )

        assert key == "myfile.txt"

    @pytest.mark.asyncio
    async def test_upload_fileobj_without_extension(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """Generated names for extensionless files carry no extension either."""
        fileobj = io.BytesIO(b"test content")

        key = await storage_service.upload_fileobj(
            fileobj=fileobj,
            file_name="noextension",
            folder="uploads",
            gen_name=True,
        )

        assert key.startswith("uploads/")
        assert "." not in key.split("/")[-1]  # no extension in generated name

    @pytest.mark.asyncio
    async def test_upload_fileobj_seeks_to_start(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """upload_fileobj rewinds the stream so the backend sees it from byte 0."""
        fileobj = io.BytesIO(b"test content")
        fileobj.seek(5)  # simulate a partially-consumed stream

        await storage_service.upload_fileobj(
            fileobj=fileobj,
            file_name="test.txt",
            folder="",
            gen_name=False,
        )

        mock_backend.upload_fileobj.assert_called_once()
        # Fixed: the original assertion was
        # `fileobj.tell() == 0 or mock_backend.upload_fileobj.called`,
        # which is vacuously true once the previous line passed and never
        # actually verified the rewind. Assert the rewind itself.
        assert fileobj.tell() == 0
|
||||||
|
class TestStorageServiceExists:
    """Tests for StorageService exists functionality."""

    @pytest.mark.asyncio
    async def test_exists_returns_true(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """exists mirrors a truthy backend answer and forwards the key."""
        mock_backend.exists.return_value = True

        assert await storage_service.exists("test/file.txt") is True
        mock_backend.exists.assert_called_once_with("test/file.txt")

    @pytest.mark.asyncio
    async def test_exists_returns_false(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """exists mirrors a falsy backend answer."""
        mock_backend.exists.return_value = False

        assert await storage_service.exists("nonexistent.txt") is False
|
||||||
|
class TestStorageServiceDelete:
    """Tests for StorageService delete functionality."""

    @pytest.mark.asyncio
    async def test_delete_calls_backend(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """delete forwards the key straight to the backend."""
        await storage_service.delete("test/file.txt")

        mock_backend.delete.assert_called_once_with("test/file.txt")
|
||||||
|
class TestStorageServiceSize:
    """Tests for StorageService size functionality."""

    @pytest.mark.asyncio
    async def test_size_returns_file_size(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """size returns whatever the backend reports for the key."""
        mock_backend.size.return_value = 12345

        assert await storage_service.size("test/file.txt") == 12345
        mock_backend.size.assert_called_once_with("test/file.txt")
|
||||||
|
class TestStorageServiceRead:
    """Tests for StorageService read functionality."""

    @pytest.mark.asyncio
    async def test_read_returns_content(
        self, storage_service: StorageService, mock_backend: MagicMock
    ):
        """read hands back the backend's bytes unchanged."""
        payload = b"file content bytes"
        mock_backend.read.return_value = payload

        assert await storage_service.read("test/file.txt") == payload
        mock_backend.read.assert_called_once_with("test/file.txt")
|
||||||
|
class TestStorageServiceUrl:
|
||||||
|
"""Tests for StorageService url functionality."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_url_returns_presigned_url(
|
||||||
|
self, storage_service: StorageService, mock_backend: MagicMock
|
||||||
|
):
|
||||||
|
"""Test url returns presigned URL from backend."""
|
||||||
|
expected_url = "https://s3.example.com/bucket/file.txt?signature=xyz"
|
||||||
|
mock_backend.generate_url.return_value = expected_url
|
||||||
|
|
||||||
|
result = await storage_service.url("test/file.txt")
|
||||||
|
|
||||||
|
assert result == expected_url
|
||||||
|
mock_backend.generate_url.assert_called_once_with("test/file.txt")
|
||||||
|
|
||||||
|
|
||||||
|
class TestStorageServiceGetFileInfo:
|
||||||
|
"""Tests for StorageService get_file_info functionality."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_get_file_info_returns_file_info(
|
||||||
|
self, storage_service: StorageService, mock_backend: MagicMock
|
||||||
|
):
|
||||||
|
"""Test get_file_info returns FileInfo with all details."""
|
||||||
|
mock_backend.exists.return_value = True
|
||||||
|
mock_backend.generate_url.return_value = "https://example.com/uploads/file.txt"
|
||||||
|
mock_backend.size.return_value = 2048
|
||||||
|
|
||||||
|
result = await storage_service.get_file_info("uploads/file.txt")
|
||||||
|
|
||||||
|
assert isinstance(result, FileInfo)
|
||||||
|
assert result.file_path == "uploads/file.txt"
|
||||||
|
assert result.file_url == "https://example.com/uploads/file.txt"
|
||||||
|
assert result.file_size == 2048
|
||||||
|
assert result.filename == "file.txt"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_get_file_info_raises_when_not_found(
|
||||||
|
self, storage_service: StorageService, mock_backend: MagicMock
|
||||||
|
):
|
||||||
|
"""Test get_file_info raises FileNotFoundError when file doesn't exist."""
|
||||||
|
mock_backend.exists.return_value = False
|
||||||
|
|
||||||
|
with pytest.raises(FileNotFoundError) as exc_info:
|
||||||
|
await storage_service.get_file_info("nonexistent.txt")
|
||||||
|
|
||||||
|
assert "nonexistent.txt" in str(exc_info.value)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_get_file_info_extracts_filename_from_path(
|
||||||
|
self, storage_service: StorageService, mock_backend: MagicMock
|
||||||
|
):
|
||||||
|
"""Test get_file_info correctly extracts filename from path."""
|
||||||
|
mock_backend.exists.return_value = True
|
||||||
|
mock_backend.generate_url.return_value = "https://example.com/path"
|
||||||
|
mock_backend.size.return_value = 100
|
||||||
|
|
||||||
|
result = await storage_service.get_file_info("deep/nested/path/myfile.mp4")
|
||||||
|
|
||||||
|
assert result.filename == "myfile.mp4"
|
||||||
|
|
||||||
|
|
||||||
|
class TestStorageServiceIntegration:
|
||||||
|
"""Integration-style tests for StorageService."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_upload_then_get_info(
|
||||||
|
self, storage_service: StorageService, mock_backend: MagicMock
|
||||||
|
):
|
||||||
|
"""Test uploading a file and getting its info."""
|
||||||
|
mock_backend.exists.return_value = True
|
||||||
|
mock_backend.size.return_value = 1024
|
||||||
|
mock_backend.generate_url.return_value = "https://example.com/url"
|
||||||
|
|
||||||
|
# Upload
|
||||||
|
fileobj = io.BytesIO(b"test content")
|
||||||
|
key = await storage_service.upload_fileobj(
|
||||||
|
fileobj=fileobj,
|
||||||
|
file_name="video.mp4",
|
||||||
|
folder="media",
|
||||||
|
gen_name=False,
|
||||||
|
content_type="video/mp4",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get info
|
||||||
|
info = await storage_service.get_file_info(key)
|
||||||
|
|
||||||
|
assert info.file_path == key
|
||||||
|
assert info.file_size == 1024
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_full_workflow(
|
||||||
|
self, storage_service: StorageService, mock_backend: MagicMock
|
||||||
|
):
|
||||||
|
"""Test full file management workflow."""
|
||||||
|
mock_backend.exists.return_value = True
|
||||||
|
mock_backend.size.return_value = 500
|
||||||
|
mock_backend.read.return_value = b"content"
|
||||||
|
mock_backend.generate_url.return_value = "https://url"
|
||||||
|
|
||||||
|
# Upload
|
||||||
|
fileobj = io.BytesIO(b"content")
|
||||||
|
key = await storage_service.upload_fileobj(
|
||||||
|
fileobj=fileobj,
|
||||||
|
file_name="doc.pdf",
|
||||||
|
folder="documents",
|
||||||
|
gen_name=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check exists
|
||||||
|
exists = await storage_service.exists(key)
|
||||||
|
assert exists is True
|
||||||
|
|
||||||
|
# Get size
|
||||||
|
size = await storage_service.size(key)
|
||||||
|
assert size == 500
|
||||||
|
|
||||||
|
# Read content
|
||||||
|
content = await storage_service.read(key)
|
||||||
|
assert content == b"content"
|
||||||
|
|
||||||
|
# Get URL
|
||||||
|
url = await storage_service.url(key)
|
||||||
|
assert url == "https://url"
|
||||||
|
|
||||||
|
# Delete
|
||||||
|
await storage_service.delete(key)
|
||||||
|
mock_backend.delete.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
|
class TestFileInfoDataclass:
|
||||||
|
"""Tests for FileInfo dataclass."""
|
||||||
|
|
||||||
|
def test_file_info_creation(self):
|
||||||
|
"""Test creating FileInfo with all fields."""
|
||||||
|
info = FileInfo(
|
||||||
|
file_path="path/to/file.txt",
|
||||||
|
file_url="https://example.com/file.txt",
|
||||||
|
file_size=1024,
|
||||||
|
filename="file.txt",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert info.file_path == "path/to/file.txt"
|
||||||
|
assert info.file_url == "https://example.com/file.txt"
|
||||||
|
assert info.file_size == 1024
|
||||||
|
assert info.filename == "file.txt"
|
||||||
|
|
||||||
|
def test_file_info_optional_fields(self):
|
||||||
|
"""Test creating FileInfo with optional fields as None."""
|
||||||
|
info = FileInfo(
|
||||||
|
file_path="path/to/file.txt",
|
||||||
|
file_url="https://example.com/file.txt",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert info.file_size is None
|
||||||
|
assert info.filename is None
|
||||||
|
|
||||||
|
def test_file_info_is_frozen(self):
|
||||||
|
"""Test that FileInfo is immutable."""
|
||||||
|
info = FileInfo(
|
||||||
|
file_path="path/to/file.txt",
|
||||||
|
file_url="https://example.com/file.txt",
|
||||||
|
)
|
||||||
|
|
||||||
|
with pytest.raises(AttributeError):
|
||||||
|
info.file_path = "new/path" # type: ignore
|
||||||
@@ -8,6 +8,15 @@ resolution-markers = [
|
|||||||
"python_full_version < '3.12'",
|
"python_full_version < '3.12'",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "aiosqlite"
|
||||||
|
version = "0.22.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "alembic"
|
name = "alembic"
|
||||||
version = "1.18.0"
|
version = "1.18.0"
|
||||||
@@ -53,6 +62,15 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
|
{ url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "async-timeout"
|
||||||
|
version = "5.0.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "asyncpg"
|
name = "asyncpg"
|
||||||
version = "0.31.0"
|
version = "0.31.0"
|
||||||
@@ -332,24 +350,30 @@ dependencies = [
|
|||||||
{ name = "asyncpg" },
|
{ name = "asyncpg" },
|
||||||
{ name = "bcrypt" },
|
{ name = "bcrypt" },
|
||||||
{ name = "boto3" },
|
{ name = "boto3" },
|
||||||
|
{ name = "dramatiq", extra = ["redis"] },
|
||||||
{ name = "fastapi" },
|
{ name = "fastapi" },
|
||||||
{ name = "google-cloud-speech" },
|
{ name = "google-cloud-speech" },
|
||||||
{ name = "httpx" },
|
{ name = "httpx" },
|
||||||
{ name = "openai-whisper" },
|
{ name = "openai-whisper" },
|
||||||
{ name = "passlib", extra = ["bcrypt"] },
|
{ name = "passlib", extra = ["bcrypt"] },
|
||||||
|
{ name = "psycopg2-binary" },
|
||||||
{ name = "pydantic" },
|
{ name = "pydantic" },
|
||||||
{ name = "pydantic-settings" },
|
{ name = "pydantic-settings" },
|
||||||
{ name = "pydub" },
|
{ name = "pydub" },
|
||||||
{ name = "pyjwt" },
|
{ name = "pyjwt" },
|
||||||
{ name = "python-dotenv" },
|
{ name = "python-dotenv" },
|
||||||
{ name = "python-multipart" },
|
{ name = "python-multipart" },
|
||||||
|
{ name = "redis" },
|
||||||
{ name = "sqlalchemy" },
|
{ name = "sqlalchemy" },
|
||||||
{ name = "uvicorn", extra = ["standard"] },
|
{ name = "uvicorn", extra = ["standard"] },
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dev-dependencies]
|
[package.dev-dependencies]
|
||||||
dev = [
|
dev = [
|
||||||
|
{ name = "aiosqlite" },
|
||||||
{ name = "mypy" },
|
{ name = "mypy" },
|
||||||
|
{ name = "pytest" },
|
||||||
|
{ name = "pytest-asyncio" },
|
||||||
{ name = "ruff" },
|
{ name = "ruff" },
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -359,27 +383,47 @@ requires-dist = [
|
|||||||
{ name = "asyncpg", specifier = ">=0.29.0" },
|
{ name = "asyncpg", specifier = ">=0.29.0" },
|
||||||
{ name = "bcrypt", specifier = ">=3.2.2,<4.0.0" },
|
{ name = "bcrypt", specifier = ">=3.2.2,<4.0.0" },
|
||||||
{ name = "boto3", specifier = ">=1.42.1" },
|
{ name = "boto3", specifier = ">=1.42.1" },
|
||||||
|
{ name = "dramatiq", extras = ["redis"], specifier = ">=1.17.0" },
|
||||||
{ name = "fastapi", specifier = ">=0.115.0" },
|
{ name = "fastapi", specifier = ">=0.115.0" },
|
||||||
{ name = "google-cloud-speech", specifier = ">=2.34.0" },
|
{ name = "google-cloud-speech", specifier = ">=2.34.0" },
|
||||||
{ name = "httpx", specifier = ">=0.27.0" },
|
{ name = "httpx", specifier = ">=0.27.0" },
|
||||||
{ name = "openai-whisper", specifier = ">=20250625" },
|
{ name = "openai-whisper", specifier = ">=20250625" },
|
||||||
{ name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" },
|
{ name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" },
|
||||||
|
{ name = "psycopg2-binary", specifier = ">=2.9.9" },
|
||||||
{ name = "pydantic", specifier = ">=2.7.0" },
|
{ name = "pydantic", specifier = ">=2.7.0" },
|
||||||
{ name = "pydantic-settings", specifier = ">=2.3.0" },
|
{ name = "pydantic-settings", specifier = ">=2.3.0" },
|
||||||
{ name = "pydub", specifier = ">=0.25.1" },
|
{ name = "pydub", specifier = ">=0.25.1" },
|
||||||
{ name = "pyjwt", specifier = ">=2.8.0" },
|
{ name = "pyjwt", specifier = ">=2.8.0" },
|
||||||
{ name = "python-dotenv", specifier = ">=1.0.1" },
|
{ name = "python-dotenv", specifier = ">=1.0.1" },
|
||||||
{ name = "python-multipart", specifier = ">=0.0.9" },
|
{ name = "python-multipart", specifier = ">=0.0.9" },
|
||||||
|
{ name = "redis", specifier = ">=5.0.0" },
|
||||||
{ name = "sqlalchemy", specifier = ">=2.0.30" },
|
{ name = "sqlalchemy", specifier = ">=2.0.30" },
|
||||||
{ name = "uvicorn", extras = ["standard"], specifier = ">=0.30.0" },
|
{ name = "uvicorn", extras = ["standard"], specifier = ">=0.30.0" },
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.metadata.requires-dev]
|
[package.metadata.requires-dev]
|
||||||
dev = [
|
dev = [
|
||||||
|
{ name = "aiosqlite", specifier = ">=0.20.0" },
|
||||||
{ name = "mypy", specifier = ">=1.19.1" },
|
{ name = "mypy", specifier = ">=1.19.1" },
|
||||||
|
{ name = "pytest", specifier = ">=8.0.0" },
|
||||||
|
{ name = "pytest-asyncio", specifier = ">=0.23.0" },
|
||||||
{ name = "ruff", specifier = ">=0.6.0" },
|
{ name = "ruff", specifier = ">=0.6.0" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "dramatiq"
|
||||||
|
version = "2.0.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/fa/bb/56b5d615c32ec8e136beee243efc54afa099b384c057e896d16268e35e08/dramatiq-2.0.1.tar.gz", hash = "sha256:3caa0587057eee67bd3a0e6d439d78d6cf88b300b5185dad1f4044a0c5f57fc2", size = 104165, upload-time = "2026-01-18T11:31:09.807Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ca/28/4bfc19a3b12177febcb3d28767933c823c056727872a8792a87d6f68df67/dramatiq-2.0.1-py3-none-any.whl", hash = "sha256:0cdfe5fdd1028adf65c6f3b2f0c5e6909053d6e41cf6556ff4def991d2419c89", size = 124391, upload-time = "2026-01-18T11:31:08.803Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.optional-dependencies]
|
||||||
|
redis = [
|
||||||
|
{ name = "redis" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "fastapi"
|
name = "fastapi"
|
||||||
version = "0.128.0"
|
version = "0.128.0"
|
||||||
@@ -665,6 +709,15 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
|
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "iniconfig"
|
||||||
|
version = "2.3.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "jinja2"
|
name = "jinja2"
|
||||||
version = "3.1.6"
|
version = "3.1.6"
|
||||||
@@ -1192,6 +1245,15 @@ dependencies = [
|
|||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/35/8e/d36f8880bcf18ec026a55807d02fe4c7357da9f25aebd92f85178000c0dc/openai_whisper-20250625.tar.gz", hash = "sha256:37a91a3921809d9f44748ffc73c0a55c9f366c85a3ef5c2ae0cc09540432eb96", size = 803191, upload-time = "2025-06-26T01:06:13.34Z" }
|
sdist = { url = "https://files.pythonhosted.org/packages/35/8e/d36f8880bcf18ec026a55807d02fe4c7357da9f25aebd92f85178000c0dc/openai_whisper-20250625.tar.gz", hash = "sha256:37a91a3921809d9f44748ffc73c0a55c9f366c85a3ef5c2ae0cc09540432eb96", size = 803191, upload-time = "2025-06-26T01:06:13.34Z" }
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "packaging"
|
||||||
|
version = "26.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "passlib"
|
name = "passlib"
|
||||||
version = "1.7.4"
|
version = "1.7.4"
|
||||||
@@ -1215,6 +1277,15 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" },
|
{ url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pluggy"
|
||||||
|
version = "1.6.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proto-plus"
|
name = "proto-plus"
|
||||||
version = "1.27.0"
|
version = "1.27.0"
|
||||||
@@ -1242,6 +1313,58 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/75/b1/1dc83c2c661b4c62d56cc081706ee33a4fc2835bd90f965baa2663ef7676/protobuf-6.33.4-py3-none-any.whl", hash = "sha256:1fe3730068fcf2e595816a6c34fe66eeedd37d51d0400b72fabc848811fdc1bc", size = 170532, upload-time = "2026-01-12T18:33:39.199Z" },
|
{ url = "https://files.pythonhosted.org/packages/75/b1/1dc83c2c661b4c62d56cc081706ee33a4fc2835bd90f965baa2663ef7676/protobuf-6.33.4-py3-none-any.whl", hash = "sha256:1fe3730068fcf2e595816a6c34fe66eeedd37d51d0400b72fabc848811fdc1bc", size = 170532, upload-time = "2026-01-12T18:33:39.199Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "psycopg2-binary"
|
||||||
|
version = "2.9.11"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = "2025-10-30T02:55:07.332Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ff/a8/a2709681b3ac11b0b1786def10006b8995125ba268c9a54bea6f5ae8bd3e/psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c", size = 3756572, upload-time = "2025-10-10T11:12:32.873Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/62/e1/c2b38d256d0dafd32713e9f31982a5b028f4a3651f446be70785f484f472/psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee", size = 3864529, upload-time = "2025-10-10T11:12:36.791Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/11/32/b2ffe8f3853c181e88f0a157c5fb4e383102238d73c52ac6d93a5c8bffe6/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0", size = 4411242, upload-time = "2025-10-10T11:12:42.388Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/10/04/6ca7477e6160ae258dc96f67c371157776564679aefd247b66f4661501a2/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766", size = 4468258, upload-time = "2025-10-10T11:12:48.654Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3c/7e/6a1a38f86412df101435809f225d57c1a021307dd0689f7a5e7fe83588b1/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3", size = 4166295, upload-time = "2025-10-10T11:12:52.525Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f2/7d/c07374c501b45f3579a9eb761cbf2604ddef3d96ad48679112c2c5aa9c25/psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f", size = 3983133, upload-time = "2025-10-30T02:55:24.329Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/82/56/993b7104cb8345ad7d4516538ccf8f0d0ac640b1ebd8c754a7b024e76878/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4", size = 3652383, upload-time = "2025-10-10T11:12:56.387Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2d/ac/eaeb6029362fd8d454a27374d84c6866c82c33bfc24587b4face5a8e43ef/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c", size = 3298168, upload-time = "2025-10-10T11:13:00.403Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2b/39/50c3facc66bded9ada5cbc0de867499a703dc6bca6be03070b4e3b65da6c/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60", size = 3044712, upload-time = "2025-10-30T02:55:27.975Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9c/8e/b7de019a1f562f72ada81081a12823d3c1590bedc48d7d2559410a2763fe/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1", size = 3347549, upload-time = "2025-10-10T11:13:03.971Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/80/2d/1bb683f64737bbb1f86c82b7359db1eb2be4e2c0c13b947f80efefa7d3e5/psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa", size = 2714215, upload-time = "2025-10-10T11:13:07.14Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/64/12/93ef0098590cf51d9732b4f139533732565704f45bdc1ffa741b7c95fb54/psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1", size = 3756567, upload-time = "2025-10-10T11:13:11.885Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7c/a9/9d55c614a891288f15ca4b5209b09f0f01e3124056924e17b81b9fa054cc/psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f", size = 3864755, upload-time = "2025-10-10T11:13:17.727Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/13/1e/98874ce72fd29cbde93209977b196a2edae03f8490d1bd8158e7f1daf3a0/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5", size = 4411646, upload-time = "2025-10-10T11:13:24.432Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5a/bd/a335ce6645334fb8d758cc358810defca14a1d19ffbc8a10bd38a2328565/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8", size = 4468701, upload-time = "2025-10-10T11:13:29.266Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/44/d6/c8b4f53f34e295e45709b7568bf9b9407a612ea30387d35eb9fa84f269b4/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c", size = 4166293, upload-time = "2025-10-10T11:13:33.336Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4b/e0/f8cc36eadd1b716ab36bb290618a3292e009867e5c97ce4aba908cb99644/psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f", size = 3983184, upload-time = "2025-10-30T02:55:32.483Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/53/3e/2a8fe18a4e61cfb3417da67b6318e12691772c0696d79434184a511906dc/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747", size = 3652650, upload-time = "2025-10-10T11:13:38.181Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/76/36/03801461b31b29fe58d228c24388f999fe814dfc302856e0d17f97d7c54d/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f", size = 3298663, upload-time = "2025-10-10T11:13:44.878Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/97/77/21b0ea2e1a73aa5fa9222b2a6b8ba325c43c3a8d54272839c991f2345656/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b", size = 3044737, upload-time = "2025-10-30T02:55:35.69Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/67/69/f36abe5f118c1dca6d3726ceae164b9356985805480731ac6712a63f24f0/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d", size = 3347643, upload-time = "2025-10-10T11:13:53.499Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e1/36/9c0c326fe3a4227953dfb29f5d0c8ae3b8eb8c1cd2967aa569f50cb3c61f/psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316", size = 2803913, upload-time = "2025-10-10T11:13:57.058Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyasn1"
|
name = "pyasn1"
|
||||||
version = "0.6.1"
|
version = "0.6.1"
|
||||||
@@ -1407,6 +1530,15 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/a6/53/d78dc063216e62fc55f6b2eebb447f6a4b0a59f55c8406376f76bf959b08/pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6", size = 32327, upload-time = "2021-03-10T02:09:53.503Z" },
|
{ url = "https://files.pythonhosted.org/packages/a6/53/d78dc063216e62fc55f6b2eebb447f6a4b0a59f55c8406376f76bf959b08/pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6", size = 32327, upload-time = "2021-03-10T02:09:53.503Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pygments"
|
||||||
|
version = "2.19.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyjwt"
|
name = "pyjwt"
|
||||||
version = "2.10.1"
|
version = "2.10.1"
|
||||||
@@ -1416,6 +1548,35 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" },
|
{ url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest"
|
||||||
|
version = "9.0.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||||
|
{ name = "iniconfig" },
|
||||||
|
{ name = "packaging" },
|
||||||
|
{ name = "pluggy" },
|
||||||
|
{ name = "pygments" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest-asyncio"
|
||||||
|
version = "1.3.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "pytest" },
|
||||||
|
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "python-dateutil"
|
name = "python-dateutil"
|
||||||
version = "2.9.0.post0"
|
version = "2.9.0.post0"
|
||||||
@@ -1501,6 +1662,18 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
|
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "redis"
|
||||||
|
version = "6.4.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "async-timeout", marker = "python_full_version < '3.11.3'" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/0d/d6/e8b92798a5bd67d659d51a18170e91c16ac3b59738d91894651ee255ed49/redis-6.4.0.tar.gz", hash = "sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010", size = 4647399, upload-time = "2025-08-07T08:10:11.441Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex"
|
name = "regex"
|
||||||
version = "2025.11.3"
|
version = "2025.11.3"
|
||||||
|
|||||||
Reference in New Issue
Block a user