new features
@@ -28,6 +28,8 @@ This document provides guidelines and best practices for AI agents working with
- Use enums or `Literal` types for fixed sets of values (see `ArtifactTypeEnum` pattern)
- Configuration values belong in `Settings` class with explicit defaults
- Never hardcode timeouts, limits, or thresholds inline
- Store user-facing error messages as module-level constants with `ERROR_` prefix
  - Example: `ERROR_NO_AUDIO_STREAM = "Файл не содержит аудиодорожки"` ("The file contains no audio stream")

```python
# BAD
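The hunk above cuts off inside the opening of its fenced example. As a reading aid, here is a minimal sketch of the BAD/GOOD contrast the new guideline implies — the function names are illustrative, not taken from the repository:

```python
# BAD: user-facing message hardcoded inline, duplicated at every call site
def probe_bad(has_audio: bool) -> None:
    if not has_audio:
        raise ValueError("Файл не содержит аудиодорожки")


# GOOD: module-level constant with the ERROR_ prefix the guideline requires
ERROR_NO_AUDIO_STREAM = "Файл не содержит аудиодорожки"  # "The file contains no audio stream"


def probe_good(has_audio: bool) -> None:
    if not has_audio:
        raise ValueError(ERROR_NO_AUDIO_STREAM)
```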
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,55 @@
"""add notifications table

Revision ID: 6a41fa07bd94
Revises: 0001
Create Date: 2026-02-20 19:07:40.634385

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '6a41fa07bd94'
down_revision: Union[str, None] = '0001'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('notifications',
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('job_id', sa.UUID(), nullable=True),
        sa.Column('project_id', sa.UUID(), nullable=True),
        sa.Column('notification_type', sa.String(length=32), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('message', sa.Text(), nullable=True),
        sa.Column('payload', sa.JSON(), nullable=True),
        sa.Column('is_read', sa.Boolean(), nullable=False),
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['job_id'], ['jobs.id'], ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_notifications_user_id'), 'notifications', ['user_id'], unique=False)
    op.drop_constraint(op.f('uq_users_username'), 'users', type_='unique')
    op.drop_index(op.f('ix_users_username'), table_name='users')
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_users_username'), table_name='users')
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=False)
    op.create_unique_constraint(op.f('uq_users_username'), 'users', ['username'], postgresql_nulls_not_distinct=False)
    op.drop_index(op.f('ix_notifications_user_id'), table_name='notifications')
    op.drop_table('notifications')
    # ### end Alembic commands ###
@@ -0,0 +1,71 @@
"""make artifact media_file_id nullable and fix transcription data

Revision ID: b3c4d5e6f7a8
Revises: a1b2c3d4e5f6
Create Date: 2026-02-21 18:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "b3c4d5e6f7a8"
down_revision: Union[str, None] = "a1b2c3d4e5f6"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Schema change: make media_file_id nullable
    op.alter_column(
        "artifact_media_files",
        "media_file_id",
        existing_type=sa.UUID(),
        nullable=True,
    )

    conn = op.get_bind()

    # Soft-delete MediaFiles that back transcription artifacts (BEFORE nulling the link)
    conn.execute(
        sa.text(
            "UPDATE media_files SET is_deleted = true "
            "WHERE id IN ("
            " SELECT amf.media_file_id FROM artifact_media_files amf "
            " WHERE amf.artifact_type = 'TRANSCRIPTION_JSON' "
            " AND amf.media_file_id IS NOT NULL"
            ")"
        )
    )

    # Null out media_file_id on transcription artifacts
    conn.execute(
        sa.text(
            "UPDATE artifact_media_files "
            "SET media_file_id = NULL "
            "WHERE artifact_type = 'TRANSCRIPTION_JSON'"
        )
    )

    # Null out project_id on File records backing transcription artifacts
    conn.execute(
        sa.text(
            "UPDATE files SET project_id = NULL "
            "FROM artifact_media_files "
            "WHERE artifact_media_files.file_id = files.id "
            " AND artifact_media_files.artifact_type = 'TRANSCRIPTION_JSON'"
        )
    )


def downgrade() -> None:
    # Restore NOT NULL constraint (data changes are not reversible)
    op.alter_column(
        "artifact_media_files",
        "media_file_id",
        existing_type=sa.UUID(),
        nullable=False,
    )
@@ -0,0 +1,26 @@
"""add workspace_state to projects

Revision ID: c4d5e6f7a8b9
Revises: b3c4d5e6f7a8
Create Date: 2026-02-22 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "c4d5e6f7a8b9"
down_revision: Union[str, None] = "b3c4d5e6f7a8"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.add_column("projects", sa.Column("workspace_state", sa.JSON(), nullable=True))


def downgrade() -> None:
    op.drop_column("projects", "workspace_state")
@@ -0,0 +1,58 @@
"""strip presigned query params from avatar URLs

Revision ID: a1b2c3d4e5f6
Revises: 6a41fa07bd94
Create Date: 2026-02-21 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = "a1b2c3d4e5f6"
down_revision: Union[str, None] = "6a41fa07bd94"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    conn = op.get_bind()

    # Extract bare S3 key from presigned URLs.
    # Example: http://localhost:9000/coffee-bucket/avatars/abc.jpg?X-Amz-...
    #   -> avatars/abc.jpg
    #
    # Strategy: strip the query string, then remove the scheme+host+bucket prefix.
    # Only touch rows where avatar looks like a full URL (contains '://').
    result = conn.execute(
        sa.text("SELECT id, avatar FROM users WHERE avatar IS NOT NULL AND avatar LIKE :pattern"),
        {"pattern": "%://%"},
    )
    for row in result:
        avatar_url: str = row[1]
        # Remove query string
        path = avatar_url.split("?")[0]
        # Remove scheme + host + bucket prefix: everything up to and including the bucket segment
        # e.g. "http://localhost:9000/coffee-bucket/avatars/abc.jpg" -> "avatars/abc.jpg"
        parts = path.split("/")
        # Find the bucket segment (after host) and take everything after it
        # URL structure: scheme: / / host:port / bucket / key...
        # parts: ['http:', '', 'localhost:9000', 'coffee-bucket', 'avatars', 'abc.jpg']
        try:
            # Skip scheme ('http:'), empty (''), host, bucket -> index 4 onward is the key
            key = "/".join(parts[4:])
        except IndexError:
            continue

        if key:
            conn.execute(
                sa.text("UPDATE users SET avatar = :key WHERE id = :id"),
                {"key": key, "id": row[0]},
            )


def downgrade() -> None:
    # Data-only migration; cannot restore original presigned URLs.
    pass
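The index-based split in the loop above assumes path-style S3 URLs (scheme, empty segment, host, bucket, then the key). Note that slicing past the end of a list returns an empty list rather than raising, so the `except IndexError` branch never fires; malformed rows fall through to the `if key:` guard instead. A hedged equivalent using `urllib.parse`, shown for comparison only:

```python
from urllib.parse import urlparse


def extract_s3_key(avatar_url: str) -> str:
    """Return the bare key from a path-style presigned URL, or '' if malformed."""
    parsed = urlparse(avatar_url)  # the query string is separated out of .path automatically
    # .path == "/coffee-bucket/avatars/abc.jpg" -> drop the leading bucket segment
    segments = parsed.path.lstrip("/").split("/", 1)
    return segments[1] if len(segments) == 2 else ""


assert extract_s3_key(
    "http://localhost:9000/coffee-bucket/avatars/abc.jpg?X-Amz-Signature=abc"
) == "avatars/abc.jpg"
```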
@@ -15,6 +15,7 @@ from cpv3.modules.system.router import router as system_router
 from cpv3.modules.tasks.router import router as tasks_router
 from cpv3.modules.transcription.router import router as transcription_router
 from cpv3.modules.users.router import auth_router, users_router
+from cpv3.modules.notifications.router import router as notifications_router
 from cpv3.modules.webhooks.router import router as webhooks_router

 api_router = APIRouter()
@@ -48,5 +49,8 @@ api_router.include_router(events_router)
 # Tasks (background processing)
 api_router.include_router(tasks_router)

+# Notifications
+api_router.include_router(notifications_router)
+
 # Webhooks
 api_router.include_router(webhooks_router)
@@ -5,6 +5,7 @@ from cpv3.modules.projects.models import Project
 from cpv3.modules.files.models import File
 from cpv3.modules.transcription.models import Transcription
 from cpv3.modules.users.models import User
+from cpv3.modules.notifications.models import Notification
 from cpv3.modules.webhooks.models import Webhook

 __all__ = [
@@ -17,5 +18,6 @@ __all__ = [
     "Transcription",
     "Job",
     "JobEvent",
+    "Notification",
     "Webhook",
 ]
@@ -2,19 +2,43 @@ from __future__ import annotations

 from collections.abc import AsyncGenerator

-from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
+from sqlalchemy.ext.asyncio import (
+    AsyncSession,
+    async_sessionmaker,
+    create_async_engine,
+)

 from cpv3.infrastructure.settings import get_settings


 _settings = get_settings()
+_database_url = _settings.get_database_url()
+
+_engine_kwargs: dict[str, bool | int] = {
+    "echo": _settings.debug,
+    "pool_pre_ping": True,
+}
+
+if not _database_url.startswith("sqlite"):
+    _engine_kwargs.update(
+        {
+            "pool_size": _settings.db_pool_size,
+            "max_overflow": _settings.db_max_overflow,
+            "pool_timeout": _settings.db_pool_timeout,
+            "pool_recycle": _settings.db_pool_recycle_seconds,
+        }
+    )

 _engine = create_async_engine(
-    _settings.get_database_url(),
-    echo=_settings.debug,
-    pool_pre_ping=True,
+    _database_url,
+    **_engine_kwargs,
 )

-SessionLocal = async_sessionmaker(bind=_engine, class_=AsyncSession, expire_on_commit=False)
+SessionLocal = async_sessionmaker(
+    bind=_engine,
+    class_=AsyncSession,
+    expire_on_commit=False,
+)


 async def get_db() -> AsyncGenerator[AsyncSession, None]:
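Since the new pool knobs are ordinary pydantic-settings fields with env aliases, they can be tuned without code changes. A rough usage sketch — the import path comes from this diff, the values are illustrative, and it assumes the environment is set before the first (typically cached) `get_settings()` call:

```python
import os

# Must happen before get_settings() is first called (it is typically lru_cached).
os.environ["DB_POOL_SIZE"] = "10"
os.environ["DB_MAX_OVERFLOW"] = "20"
os.environ["DB_POOL_RECYCLE_SECONDS"] = "900"

from cpv3.infrastructure.settings import get_settings

settings = get_settings()
assert settings.db_pool_size == 10  # picked up via the DB_POOL_SIZE alias
```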
@@ -17,44 +17,48 @@ _bearer = HTTPBearer(auto_error=True)

 async def get_current_user(
     credentials: HTTPAuthorizationCredentials = Depends(_bearer),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_db, use_cache=False),
 ) -> User:
-    token = credentials.credentials
-
-    try:
-        payload = decode_token(token)
-    except ExpiredSignatureError as e:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED, detail="Token expired"
-        ) from e
-    except InvalidTokenError as e:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
-        ) from e
-
-    if payload.get("type") != "access":
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
-        )
-
-    sub = payload.get("sub")
-    if not sub:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
-        )
-
-    try:
-        user_id = uuid.UUID(str(sub))
-    except ValueError as e:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
-        ) from e
-
-    user_repo = UserRepository(db)
-    user = await user_repo.get_by_id(user_id)
-    if user is None or not user.is_active:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials"
-        )
-
-    return user
+    try:
+        token = credentials.credentials
+
+        try:
+            payload = decode_token(token)
+        except ExpiredSignatureError as e:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED, detail="Token expired"
+            ) from e
+        except InvalidTokenError as e:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
+            ) from e
+
+        if payload.get("type") != "access":
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
+            )
+
+        sub = payload.get("sub")
+        if not sub:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
+            )
+
+        try:
+            user_id = uuid.UUID(str(sub))
+        except ValueError as e:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
+            ) from e
+
+        user_repo = UserRepository(db)
+        user = await user_repo.get_by_id(user_id)
+        if user is None or not user.is_active:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials"
+            )
+
+        return user
+    finally:
+        # Free the auth session immediately so long-running handlers don't pin a pool slot.
+        await db.close()
@@ -17,7 +17,11 @@ class Settings(BaseSettings):
     # App
     debug: bool = Field(default=True, alias="DEBUG")
     cors_allowed_origins: list[str] = Field(
-        default_factory=lambda: ["http://localhost:3000", "http://localhost:8000"],
+        default_factory=lambda: [
+            "http://localhost:3000",
+            "http://localhost:3001",
+            "http://localhost:8000",
+        ],
         alias="CORS_ALLOWED_ORIGINS",
     )

@@ -37,6 +41,13 @@ class Settings(BaseSettings):
     )

     database_url: str | None = Field(default=None, alias="DATABASE_URL")
+    db_pool_size: int = Field(default=5, alias="DB_POOL_SIZE")
+    db_max_overflow: int = Field(default=10, alias="DB_MAX_OVERFLOW")
+    db_pool_timeout: int = Field(default=30, alias="DB_POOL_TIMEOUT")
+    db_pool_recycle_seconds: int = Field(
+        default=1800,
+        alias="DB_POOL_RECYCLE_SECONDS",
+    )

     # Storage
     storage_backend: str = Field(default="S3", alias="STORAGE_BACKEND")
@@ -0,0 +1,13 @@
"""Storage utility helpers."""

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from cpv3.modules.users.models import User


def get_user_folder(user: User) -> str:
    """Return the per-user S3 folder prefix: ``<username>_<user_id>``."""
    return f"{user.username}_{user.id}"
@@ -33,6 +33,12 @@ class FileRepository:
             return None
         return file

+    async def get_by_path(self, path: str) -> File | None:
+        result = await self._session.execute(
+            select(File).where(File.path == path, File.is_deleted.is_(False))
+        )
+        return result.scalar_one_or_none()
+
     async def create(self, *, requester: User, data: FileCreate) -> File:
         file = File(
             owner_id=requester.id,
@@ -19,6 +19,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
 from cpv3.infrastructure.auth import get_current_user
 from cpv3.infrastructure.deps import get_storage
 from cpv3.infrastructure.storage.base import StorageService
+from cpv3.infrastructure.storage.utils import get_user_folder
 from cpv3.infrastructure.settings import get_settings
 from cpv3.db.session import get_db
 from cpv3.modules.files.schemas import (
@@ -32,7 +33,7 @@ from cpv3.modules.users.models import User

 router = APIRouter(prefix="/api/files", tags=["Files"])

-MAX_MB_SIZE = 100
+MAX_MB_SIZE = 1024


 @router.post(
@@ -44,8 +45,6 @@ async def upload_file(
     current_user: User = Depends(get_current_user),
     storage: StorageService = Depends(get_storage),
 ) -> FileInfoResponse:
-    _ = current_user
-
     # Validate max file size (matches old behavior).
     file.file.seek(0, 2)
     size_bytes = file.file.tell()
@@ -58,10 +57,13 @@ async def upload_file(
             detail=f"File size exceeds the maximum limit of {MAX_MB_SIZE} MB.",
         )

+    user_folder = get_user_folder(current_user)
+    resolved_folder = f"{user_folder}/{folder}" if folder else f"{user_folder}/user_upload"
+
     key = await storage.upload_fileobj(
         fileobj=file.file,
         file_name=file.filename or "upload.bin",
-        folder=folder,
+        folder=resolved_folder,
         gen_name=True,
         content_type=file.content_type,
     )
@@ -81,7 +83,10 @@ async def get_file_info(
     current_user: User = Depends(get_current_user),
     storage: StorageService = Depends(get_storage),
 ) -> FileInfoResponse:
-    _ = current_user
+    if not current_user.is_staff:
+        user_prefix = f"{get_user_folder(current_user)}/"
+        if not file_path.startswith(user_prefix):
+            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")

     if not await storage.exists(file_path):
         raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
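To illustrate the ownership check added to `get_file_info` above — usernames, ids, and keys below are made up:

```python
# get_user_folder(current_user) returns f"{user.username}_{user.id}", so a
# non-staff user may only read keys under their own prefix.
user_prefix = "alice_9f1c2d3e/"  # get_user_folder(alice) + "/"

assert "alice_9f1c2d3e/user_upload/a.mp4".startswith(user_prefix)    # served
assert not "bob_11112222/user_upload/b.mp4".startswith(user_prefix)  # 403 Forbidden
```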
@@ -11,9 +11,12 @@ JobStatusEnum = Literal["PENDING", "RUNNING", "FAILED", "CANCELLED", "DONE"]
 JobTypeEnum = Literal[
     "MEDIA_PROBE",
     "SILENCE_REMOVE",
+    "SILENCE_DETECT",
+    "SILENCE_APPLY",
     "MEDIA_CONVERT",
     "TRANSCRIPTION_GENERATE",
     "CAPTIONS_GENERATE",
+    "FRAME_EXTRACT",
 ]
@@ -46,8 +46,11 @@ class ArtifactMediaFile(Base, BaseModelMixin):
     file_id: Mapped[uuid.UUID | None] = mapped_column(
         UUID(as_uuid=True), ForeignKey("files.id", ondelete="RESTRICT"), nullable=True, index=True
     )
-    media_file_id: Mapped[uuid.UUID] = mapped_column(
-        UUID(as_uuid=True), ForeignKey("media_files.id", ondelete="RESTRICT"), index=True
+    media_file_id: Mapped[uuid.UUID | None] = mapped_column(
+        UUID(as_uuid=True),
+        ForeignKey("media_files.id", ondelete="RESTRICT"),
+        nullable=True,
+        index=True,
     )

     artifact_type: Mapped[str] = mapped_column(String(32), default="TRANSCRIPTION_JSON")
@@ -1,6 +1,8 @@
 from __future__ import annotations

+import math
 import uuid
+from os import path

 from fastapi import APIRouter, Depends, HTTPException, Query, Response, status
 from sqlalchemy.ext.asyncio import AsyncSession
@@ -8,11 +10,14 @@ from sqlalchemy.ext.asyncio import AsyncSession
 from cpv3.infrastructure.auth import get_current_user
 from cpv3.infrastructure.deps import get_storage
 from cpv3.infrastructure.storage.base import StorageService
+from cpv3.infrastructure.storage.utils import get_user_folder
 from cpv3.db.session import get_db
 from cpv3.modules.media.schemas import (
     ArtifactMediaFileCreate,
     ArtifactMediaFileRead,
     ArtifactMediaFileUpdate,
+    FrameItem,
+    FrameRangeResponse,
     MediaConverterParams,
     MediaFileCreate,
     MediaFileRead,
@@ -20,7 +25,13 @@ from cpv3.modules.media.schemas import (
     MediaProbeSchema,
     MediaSilencerParams,
 )
-from cpv3.modules.media.service import convert_to_mp4, probe_media, remove_silence
+from cpv3.modules.media.service import (
+    convert_to_mp4,
+    get_frames_folder,
+    probe_media,
+    read_frames_metadata,
+    remove_silence,
+)
 from cpv3.modules.media.repository import ArtifactRepository, MediaFileRepository
 from cpv3.modules.files.schemas import FileInfoResponse
 from cpv3.modules.users.models import User
@@ -46,12 +57,13 @@ async def silence_remove(
     current_user: User = Depends(get_current_user),
     storage: StorageService = Depends(get_storage),
 ) -> FileInfoResponse:
-    _ = current_user
+    user_folder = get_user_folder(current_user)
+    resolved_folder = f"{user_folder}/{body.folder}" if body.folder else f"{user_folder}/output_files"

     info = await remove_silence(
         storage,
         file_key=body.file_path,
-        out_folder=body.folder,
+        out_folder=resolved_folder,
         min_silence_duration_ms=body.min_silence_duration_ms,
         silence_threshold_db=body.silence_threshold_db,
         padding_ms=body.padding_ms,
@@ -71,9 +83,10 @@ async def convert(
     current_user: User = Depends(get_current_user),
     storage: StorageService = Depends(get_storage),
 ) -> FileInfoResponse:
-    _ = current_user
+    user_folder = get_user_folder(current_user)
+    resolved_folder = f"{user_folder}/{body.folder}" if body.folder else f"{user_folder}/output_files"

-    info = await convert_to_mp4(storage, file_key=body.file_path, out_folder=body.folder)
+    info = await convert_to_mp4(storage, file_key=body.file_path, out_folder=resolved_folder)
     return FileInfoResponse(
         file_path=info.file_path,
         file_url=info.file_url,
@@ -82,6 +95,36 @@ async def convert(
     )


+@media_router.get("/frames/", response_model=FrameRangeResponse)
+async def get_frames(
+    file_key: str = Query(..., description="S3 key of the source video"),
+    start: float = Query(0.0, ge=0, description="Start time in seconds"),
+    end: float = Query(..., gt=0, description="End time in seconds"),
+    current_user: User = Depends(get_current_user),
+    storage: StorageService = Depends(get_storage),
+) -> FrameRangeResponse:
+    """Return presigned URLs for extracted frames within a time range."""
+    user_folder = get_user_folder(current_user)
+    frames_folder = get_frames_folder(user_folder, file_key)
+
+    metadata = await read_frames_metadata(storage, frames_folder=frames_folder)
+    if metadata is None:
+        return FrameRangeResponse(interval=1.0, frames=[])
+
+    interval = metadata.interval
+    first_index = max(1, math.floor(start / interval) + 1)
+    last_index = min(metadata.frame_count, math.ceil(end / interval) + 1)
+
+    frames: list[FrameItem] = []
+    for i in range(first_index, last_index + 1):
+        key = path.join(frames_folder, f"{i:06d}.jpg")
+        timestamp = (i - 1) * interval
+        url = await storage.url(key)
+        frames.append(FrameItem(timestamp=timestamp, url=url))
+
+    return FrameRangeResponse(interval=interval, frames=frames)
+
+
 @mediafiles_router.get("/mediafiles/", response_model=list[MediaFileRead])
 async def list_mediafiles(
     current_user: User = Depends(get_current_user),
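A quick worked example of the index arithmetic in `get_frames`, assuming the default 1 fps extraction (`interval = 1.0`); frames are 1-based, so frame `i` sits at timestamp `(i - 1) * interval`:

```python
import math

interval = 1.0           # seconds per frame (FRAME_FPS = 1)
start, end = 2.5, 4.2    # requested window in seconds

first_index = max(1, math.floor(start / interval) + 1)  # -> 3 (frame at t=2.0s)
last_index = math.ceil(end / interval) + 1              # -> 6 (frame at t=5.0s), then clamped to frame_count

timestamps = [(i - 1) * interval for i in range(first_index, last_index + 1)]
assert timestamps == [2.0, 3.0, 4.0, 5.0]  # one frame of padding on each side of the window
```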
@@ -12,9 +12,11 @@ from cpv3.common.schemas import Schema
 ArtifactTypeEnum = Literal[
     "TRANSCRIPTION_JSON",
     "SILENCE_REMOVED_VIDEO",
     "CONVERTED_VIDEO",
     "THUMBNAIL",
     "AUDIO_PROXY",
     "RENDERED_VIDEO",
     "FRAME_SPRITES",
 ]
@@ -60,7 +62,7 @@ class ArtifactMediaFileRead(Schema):
     id: UUID
     project_id: UUID | None
     file_id: UUID | None
-    media_file_id: UUID
+    media_file_id: UUID | None

     artifact_type: ArtifactTypeEnum
@@ -74,7 +76,7 @@ class ArtifactMediaFileRead(Schema):
 class ArtifactMediaFileCreate(Schema):
     project_id: UUID | None = None
     file_id: UUID | None = None
-    media_file_id: UUID
+    media_file_id: UUID | None = None
     artifact_type: ArtifactTypeEnum
@@ -148,3 +150,28 @@ class MediaSilencerParams(Schema):
 class MediaConverterParams(Schema):
     file_path: str
     folder: str = ""


+class FrameSpriteMetadata(Schema):
+    """Metadata stored in ArtifactMediaFile.meta for extracted frames."""
+
+    frame_count: int
+    interval: float
+    width: int
+    height: int
+    folder_key: str
+    source_file_key: str
+
+
+class FrameItem(Schema):
+    """Single frame in a range query response."""
+
+    timestamp: float
+    url: str
+
+
+class FrameRangeResponse(Schema):
+    """Response for GET /api/media/frames/ range query."""
+
+    interval: float
+    frames: list[FrameItem]
@@ -1,15 +1,30 @@
 from __future__ import annotations

 import asyncio
 import glob as glob_mod
 import hashlib
 import io
 import json
 from os import path
-from tempfile import NamedTemporaryFile
+from tempfile import NamedTemporaryFile, mkdtemp
 from typing import Callable

 import anyio

 from cpv3.infrastructure.storage.base import StorageService
 from cpv3.infrastructure.storage.types import FileInfo
-from cpv3.modules.media.schemas import MediaProbeSchema
+from cpv3.modules.media.schemas import FrameSpriteMetadata, MediaProbeSchema

 FRAME_WIDTH_PX = 128
 FRAME_FPS = 1
 FRAME_JPEG_QUALITY = 5
 FRAMES_META_FILENAME = "meta.json"


 def get_frames_folder(user_folder: str, file_key: str) -> str:
     """Build deterministic S3 folder path for frames based on file_key hash."""
     key_hash = hashlib.sha256(file_key.encode()).hexdigest()[:16]
     return path.join(user_folder, "frames", key_hash)


 async def probe_media(storage: StorageService, *, file_key: str) -> MediaProbeSchema:
@@ -68,6 +83,160 @@ def _compute_non_silent_segments(
    return segments


async def detect_silence(
    storage: StorageService,
    *,
    file_key: str,
    min_silence_duration_ms: int = 200,
    silence_threshold_db: int = 16,
    padding_ms: int = 100,
) -> dict:
    """Detect silent segments in a media file and return their intervals."""
    input_tmp = await storage.download_to_temp(file_key)

    try:
        from pydub import AudioSegment  # type: ignore[import-untyped]

        audio: AudioSegment = await anyio.to_thread.run_sync(
            lambda: AudioSegment.from_file(input_tmp.path)
        )
        duration_ms = len(audio)

        non_silent = await anyio.to_thread.run_sync(
            lambda: _compute_non_silent_segments(
                local_audio_path=input_tmp.path,
                min_silence_duration_ms=min_silence_duration_ms,
                silence_threshold_db=silence_threshold_db,
                padding_ms=padding_ms,
            )
        )

        # Invert non-silent segments to get silent segments
        silent_segments: list[dict[str, int]] = []
        prev_end = 0
        for start_ms, end_ms in non_silent:
            if start_ms > prev_end:
                silent_segments.append({"start_ms": prev_end, "end_ms": start_ms})
            prev_end = end_ms
        if prev_end < duration_ms:
            silent_segments.append({"start_ms": prev_end, "end_ms": duration_ms})

        return {
            "silent_segments": silent_segments,
            "duration_ms": duration_ms,
            "file_key": file_key,
        }
    finally:
        input_tmp.cleanup()


async def apply_silence_cuts(
    storage: StorageService,
    *,
    file_key: str,
    out_folder: str,
    cuts: list[dict],
    output_name: str | None = None,
) -> FileInfo:
    """Apply explicit cut regions to a media file, concatenating the non-cut parts."""
    input_tmp = await storage.download_to_temp(file_key)

    try:
        from pydub import AudioSegment  # type: ignore[import-untyped]

        audio: AudioSegment = await anyio.to_thread.run_sync(
            lambda: AudioSegment.from_file(input_tmp.path)
        )
        duration_ms = len(audio)

        # Sort cuts and compute non-cut (keep) segments
        sorted_cuts = sorted(cuts, key=lambda c: c["start_ms"])
        segments: list[tuple[int, int]] = []
        prev_end = 0
        for cut in sorted_cuts:
            cut_start = max(0, cut["start_ms"])
            cut_end = min(duration_ms, cut["end_ms"])
            if cut_start > prev_end:
                segments.append((prev_end, cut_start))
            prev_end = max(prev_end, cut_end)
        if prev_end < duration_ms:
            segments.append((prev_end, duration_ms))

        if not segments:
            return await storage.get_file_info(file_key)

        with NamedTemporaryFile(
            suffix=path.splitext(file_key)[1] or ".mp4", delete=False
        ) as out:
            out_path = out.name

        try:
            cmd: list[str] = ["ffmpeg"]
            for start_ms, end_ms in segments:
                start_s = start_ms / 1000.0
                duration_s = (end_ms - start_ms) / 1000.0
                cmd.extend(
                    [
                        "-ss",
                        f"{start_s:.3f}",
                        "-t",
                        f"{duration_s:.3f}",
                        "-y",
                        "-i",
                        input_tmp.path,
                    ]
                )

            seg_count = len(segments)
            parts = [f"[{i}:v:0][{i}:a:0]" for i in range(seg_count)]
            filter_complex = "".join(parts) + f"concat=n={seg_count}:v=1:a=1[v][a]"

            cmd.extend(
                [
                    "-filter_complex",
                    filter_complex,
                    "-map",
                    "[v]",
                    "-map",
                    "[a]",
                    "-c:v",
                    "libx264",
                    "-c:a",
                    "aac",
                    "-preset",
                    "medium",
                    out_path,
                ]
            )

            proc = await asyncio.create_subprocess_exec(
                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
            )
            _, stderr = await proc.communicate()
            if proc.returncode != 0:
                raise RuntimeError(f"ffmpeg failed: {stderr.decode(errors='ignore')}")

            base_name = output_name or path.basename(file_key)
            output_key = path.join(out_folder or "", "silent", base_name)
            with open(out_path, "rb") as out_file:
                _ = await storage.upload_fileobj(
                    fileobj=out_file,
                    file_name=path.basename(output_key),
                    folder=path.dirname(output_key),
                    gen_name=False,
                    content_type="video/mp4",
                )

            return await storage.get_file_info(output_key)
        finally:
            import os

            if os.path.exists(out_path):
                os.remove(out_path)
    finally:
        input_tmp.cleanup()


async def remove_silence(
    storage: StorageService,
    *,
@@ -264,3 +433,131 @@ async def convert_to_ogg_temp(

    _ = filename_without_ext
    return out_path, _cleanup


async def extract_frames(
    storage: StorageService,
    *,
    file_key: str,
    frames_folder: str,
    on_progress: Callable[[int, int], None] | None = None,
) -> FrameSpriteMetadata:
    """Extract video frames at 1fps via ffmpeg and upload to S3.

    Also writes a ``meta.json`` alongside the frames for fast lookup.
    Returns metadata about the extracted frames.
    """
    input_tmp = await storage.download_to_temp(file_key)
    tmp_dir = mkdtemp(prefix="frames_")

    try:
        cmd = [
            "ffmpeg",
            "-y",
            "-i",
            input_tmp.path,
            "-vf",
            f"fps={FRAME_FPS},scale={FRAME_WIDTH_PX}:-1",
            "-q:v",
            str(FRAME_JPEG_QUALITY),
            path.join(tmp_dir, "%06d.jpg"),
        ]

        proc = await asyncio.create_subprocess_exec(
            *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
        )
        _, stderr = await proc.communicate()
        if proc.returncode != 0:
            raise RuntimeError(f"ffmpeg frame extraction failed: {stderr.decode(errors='ignore')}")

        frame_files = sorted(glob_mod.glob(path.join(tmp_dir, "*.jpg")))
        frame_count = len(frame_files)

        if frame_count == 0:
            raise RuntimeError("No frames extracted from video")

        # Read first frame dimensions via ffprobe (avoids PIL dependency)
        probe_proc = await asyncio.create_subprocess_exec(
            "ffprobe",
            "-v", "error",
            "-select_streams", "v:0",
            "-show_entries", "stream=width,height",
            "-of", "json",
            frame_files[0],
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        probe_stdout, _ = await probe_proc.communicate()
        probe_data = json.loads(probe_stdout.decode())
        stream = probe_data.get("streams", [{}])[0]
        width = stream.get("width", FRAME_WIDTH_PX)
        height = stream.get("height", FRAME_WIDTH_PX)

        # Upload each frame to S3
        for idx, frame_path in enumerate(frame_files):
            frame_name = path.basename(frame_path)
            with open(frame_path, "rb") as f:
                await storage.upload_fileobj(
                    fileobj=f,
                    file_name=frame_name,
                    folder=frames_folder,
                    gen_name=False,
                    content_type="image/jpeg",
                )
            if on_progress is not None:
                on_progress(idx + 1, frame_count)

        metadata = FrameSpriteMetadata(
            frame_count=frame_count,
            interval=1.0 / FRAME_FPS,
            width=width,
            height=height,
            folder_key=frames_folder,
            source_file_key=file_key,
        )

        # Write metadata JSON to S3 for fast lookup by the frames endpoint
        meta_bytes = json.dumps(metadata.model_dump(mode="json")).encode("utf-8")
        await storage.upload_fileobj(
            fileobj=io.BytesIO(meta_bytes),
            file_name=FRAMES_META_FILENAME,
            folder=frames_folder,
            gen_name=False,
            content_type="application/json",
        )

        return metadata
    finally:
        import shutil

        input_tmp.cleanup()
        shutil.rmtree(tmp_dir, ignore_errors=True)


async def read_frames_metadata(
    storage: StorageService, *, frames_folder: str
) -> FrameSpriteMetadata | None:
    """Read frame extraction metadata from S3. Returns None if not found."""
    meta_key = path.join(frames_folder, FRAMES_META_FILENAME)
    if not await storage.exists(meta_key):
        return None
    raw = await storage.read(meta_key)
    return FrameSpriteMetadata.model_validate(json.loads(raw))


async def delete_frames(
    storage: StorageService, *, frames_folder: str, frame_count: int
) -> None:
    """Delete all frame files and metadata from S3 for a given folder."""
    for i in range(1, frame_count + 1):
        key = path.join(frames_folder, f"{i:06d}.jpg")
        try:
            await storage.delete(key)
        except Exception:
            pass
    # Delete metadata file
    meta_key = path.join(frames_folder, FRAMES_META_FILENAME)
    try:
        await storage.delete(meta_key)
    except Exception:
        pass
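For orientation, the command `apply_silence_cuts` assembles for a single cut at 5–9 s of a 12 s clip (keep-segments 0–5 s and 9–12 s) has roughly this shape — paths are illustrative:

```python
# Each keep-segment becomes its own seeked input; the concat filter then joins them.
cmd = [
    "ffmpeg",
    "-ss", "0.000", "-t", "5.000", "-y", "-i", "/tmp/input.mp4",  # segment 1: 0-5s
    "-ss", "9.000", "-t", "3.000", "-y", "-i", "/tmp/input.mp4",  # segment 2: 9-12s
    "-filter_complex", "[0:v:0][0:a:0][1:v:0][1:a:0]concat=n=2:v=1:a=1[v][a]",
    "-map", "[v]", "-map", "[a]",
    "-c:v", "libx264", "-c:a", "aac", "-preset", "medium",
    "/tmp/output.mp4",
]
```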
@@ -0,0 +1,31 @@
from __future__ import annotations

import uuid

from sqlalchemy import Boolean, ForeignKey, JSON, String, Text
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column

from cpv3.db.base import Base, BaseModelMixin


class Notification(Base, BaseModelMixin):
    __tablename__ = "notifications"

    user_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), index=True
    )
    job_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True), ForeignKey("jobs.id", ondelete="SET NULL"), nullable=True
    )
    project_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("projects.id", ondelete="SET NULL"),
        nullable=True,
    )

    notification_type: Mapped[str] = mapped_column(String(32))
    title: Mapped[str] = mapped_column(String(255))
    message: Mapped[str | None] = mapped_column(Text, nullable=True)
    payload: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    is_read: Mapped[bool] = mapped_column(Boolean, default=False)
@@ -0,0 +1,79 @@
from __future__ import annotations

import uuid

from sqlalchemy import Select, func, select, update
from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.modules.notifications.models import Notification
from cpv3.modules.notifications.schemas import NotificationCreate


class NotificationRepository:
    def __init__(self, session: AsyncSession) -> None:
        self._session = session

    async def list_for_user(
        self,
        user_id: uuid.UUID,
        *,
        limit: int = 50,
        unread_only: bool = False,
    ) -> list[Notification]:
        stmt: Select[tuple[Notification]] = (
            select(Notification)
            .where(Notification.user_id == user_id)
            .where(Notification.is_active.is_(True))
            .order_by(Notification.created_at.desc())
            .limit(limit)
        )
        if unread_only:
            stmt = stmt.where(Notification.is_read.is_(False))

        result = await self._session.execute(stmt)
        return list(result.scalars().all())

    async def create(self, data: NotificationCreate) -> Notification:
        notification = Notification(
            user_id=data.user_id,
            job_id=data.job_id,
            project_id=data.project_id,
            notification_type=data.notification_type,
            title=data.title,
            message=data.message,
            payload=data.payload,
        )
        self._session.add(notification)
        await self._session.commit()
        await self._session.refresh(notification)
        return notification

    async def mark_read(self, notification_id: uuid.UUID, user_id: uuid.UUID) -> bool:
        result = await self._session.execute(
            update(Notification)
            .where(Notification.id == notification_id)
            .where(Notification.user_id == user_id)
            .values(is_read=True)
        )
        await self._session.commit()
        return result.rowcount > 0  # type: ignore[union-attr]

    async def mark_all_read(self, user_id: uuid.UUID) -> int:
        result = await self._session.execute(
            update(Notification)
            .where(Notification.user_id == user_id)
            .where(Notification.is_read.is_(False))
            .values(is_read=True)
        )
        await self._session.commit()
        return result.rowcount  # type: ignore[return-value]

    async def count_unread(self, user_id: uuid.UUID) -> int:
        result = await self._session.execute(
            select(func.count())
            .select_from(Notification)
            .where(Notification.user_id == user_id)
            .where(Notification.is_active.is_(True))
            .where(Notification.is_read.is_(False))
        )
        return result.scalar_one()
@@ -0,0 +1,102 @@
from __future__ import annotations

import uuid

from fastapi import APIRouter, Depends, HTTPException, Query, WebSocket, WebSocketDisconnect, status
from jwt import ExpiredSignatureError, InvalidTokenError
from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.db.session import SessionLocal, get_db
from cpv3.infrastructure.auth import get_current_user
from cpv3.infrastructure.security import decode_token
from cpv3.modules.notifications.repository import NotificationRepository
from cpv3.modules.notifications.schemas import NotificationRead
from cpv3.modules.notifications.service import subscribe_and_forward
from cpv3.modules.users.models import User
from cpv3.modules.users.repository import UserRepository

router = APIRouter(prefix="/api/notifications", tags=["notifications"])


@router.websocket("/ws/")
async def notifications_ws(
    websocket: WebSocket,
    token: str = Query(...),
) -> None:
    """WebSocket endpoint for real-time notifications."""
    try:
        payload = decode_token(token)
    except (ExpiredSignatureError, InvalidTokenError):
        await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
        return

    if payload.get("type") != "access":
        await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
        return

    sub = payload.get("sub")
    if not sub:
        await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
        return

    try:
        user_id = uuid.UUID(str(sub))
    except ValueError:
        await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
        return

    async with SessionLocal() as db:
        user_repo = UserRepository(db)
        user = await user_repo.get_by_id(user_id)
        if user is None or not user.is_active:
            await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
            return

    await websocket.accept()

    try:
        await subscribe_and_forward(websocket, user_id)
    except WebSocketDisconnect:
        pass


@router.get("/", response_model=list[NotificationRead])
async def list_notifications(
    unread_only: bool = Query(False),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> list[NotificationRead]:
    repo = NotificationRepository(db)
    items = await repo.list_for_user(current_user.id, unread_only=unread_only)
    return [NotificationRead.model_validate(n) for n in items]


@router.post("/{notification_id}/read/", status_code=status.HTTP_204_NO_CONTENT)
async def mark_notification_read(
    notification_id: uuid.UUID,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> None:
    repo = NotificationRepository(db)
    found = await repo.mark_read(notification_id, current_user.id)
    if not found:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")


@router.post("/read-all/", status_code=status.HTTP_204_NO_CONTENT)
async def mark_all_read(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> None:
    repo = NotificationRepository(db)
    await repo.mark_all_read(current_user.id)


@router.get("/unread-count/")
async def unread_count(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> dict[str, int]:
    repo = NotificationRepository(db)
    count = await repo.count_unread(current_user.id)
    return {"count": count}
@@ -0,0 +1,53 @@
from __future__ import annotations

from datetime import datetime
from typing import Literal
from uuid import UUID

from cpv3.common.schemas import Schema


NotificationTypeEnum = Literal["task_progress", "task_complete", "task_failed"]


class NotificationCreate(Schema):
    user_id: UUID
    job_id: UUID | None = None
    project_id: UUID | None = None
    notification_type: NotificationTypeEnum
    title: str
    message: str | None = None
    payload: dict | None = None


class NotificationRead(Schema):
    id: UUID
    user_id: UUID
    job_id: UUID | None
    project_id: UUID | None
    notification_type: NotificationTypeEnum
    title: str
    message: str | None
    payload: dict | None
    is_read: bool
    created_at: datetime
    updated_at: datetime


class NotificationUpdate(Schema):
    is_read: bool | None = None


class WebSocketMessage(Schema):
    """JSON shape pushed over WebSocket."""

    event: str
    notification_id: UUID | None = None
    job_id: UUID | None = None
    project_id: UUID | None = None
    job_type: str | None = None
    status: str | None = None
    progress_pct: float | None = None
    message: str | None = None
    title: str | None = None
    created_at: datetime | None = None
@@ -0,0 +1,152 @@
from __future__ import annotations

import json
import logging
import uuid

import redis.asyncio as aioredis
from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.db.base import utcnow
from cpv3.infrastructure.settings import get_settings
from cpv3.modules.jobs.models import Job
from cpv3.modules.notifications.repository import NotificationRepository
from cpv3.modules.notifications.schemas import (
    NotificationCreate,
    NotificationTypeEnum,
    WebSocketMessage,
)
from cpv3.modules.tasks.schemas import TaskWebhookEvent

logger = logging.getLogger(__name__)

JOB_TYPE_LABELS: dict[str, str] = {
    "MEDIA_PROBE": "Анализ медиа",
    "SILENCE_REMOVE": "Удаление тишины",
    "SILENCE_DETECT": "Обнаружение тишины",
    "SILENCE_APPLY": "Применение вырезок",
    "MEDIA_CONVERT": "Конвертация",
    "TRANSCRIPTION_GENERATE": "Транскрипция",
    "CAPTIONS_GENERATE": "Генерация субтитров",
}

STATUS_TITLES: dict[str, str] = {
    "RUNNING": "Задача запущена",
    "DONE": "Задача завершена",
    "FAILED": "Ошибка выполнения",
}


# ---------------------------------------------------------------------------
# ConnectionManager — singleton for WebSocket pub/sub via Redis
# ---------------------------------------------------------------------------

_redis_client: aioredis.Redis | None = None


async def _get_redis() -> aioredis.Redis:
    global _redis_client
    if _redis_client is None:
        settings = get_settings()
        _redis_client = aioredis.from_url(settings.redis_url, decode_responses=True)
    return _redis_client


def _channel_name(user_id: uuid.UUID) -> str:
    return f"notifications:{user_id}"


async def publish_to_user(user_id: uuid.UUID, message: WebSocketMessage) -> None:
    """Publish a notification message to a user's Redis channel."""
    redis = await _get_redis()
    payload = message.model_dump_json()
    await redis.publish(_channel_name(user_id), payload)


async def subscribe_and_forward(websocket: object, user_id: uuid.UUID) -> None:
    """Subscribe to a user's Redis channel and forward messages to WebSocket.

    ``websocket`` must be a ``fastapi.WebSocket`` instance — typed as ``object``
    to avoid importing FastAPI at module level.
    """
    from fastapi import WebSocket as _WS

    ws: _WS = websocket  # type: ignore[assignment]
    redis = await _get_redis()
    pubsub = redis.pubsub()
    await pubsub.subscribe(_channel_name(user_id))

    try:
        async for raw_message in pubsub.listen():
            if raw_message["type"] != "message":
                continue
            await ws.send_text(raw_message["data"])
    finally:
        await pubsub.unsubscribe(_channel_name(user_id))
        await pubsub.aclose()


# ---------------------------------------------------------------------------
# NotificationService
# ---------------------------------------------------------------------------


class NotificationService:
    def __init__(self, session: AsyncSession) -> None:
        self._repo = NotificationRepository(session)

    async def create_task_notification(
        self, *, user_id: uuid.UUID, job: Job, event: TaskWebhookEvent
    ) -> None:
        """Create a notification for a task status change and push via WebSocket."""
        notification_type: NotificationTypeEnum | None = None
        if event.status == "RUNNING":
            notification_type = "task_progress"
        elif event.status == "DONE":
            notification_type = "task_complete"
        elif event.status == "FAILED":
            notification_type = "task_failed"

        job_type_label = JOB_TYPE_LABELS.get(job.job_type, job.job_type)
        now = utcnow()

        # Only persist notifications on status changes (not progress-only updates)
        notification_id: uuid.UUID | None = None
        if notification_type is not None:
            title = STATUS_TITLES.get(event.status or "", job_type_label)
            notification = await self._repo.create(
                NotificationCreate(
                    user_id=user_id,
                    job_id=job.id,
                    project_id=job.project_id,
                    notification_type=notification_type,
                    title=title,
                    message=event.error_message or event.current_message,
                    payload={
                        "job_type": job.job_type,
                        "progress_pct": event.progress_pct,
                        "status": event.status,
                    },
                )
            )
            notification_id = notification.id

        # Always push WebSocket message (including progress-only updates)
        ws_event = "task_update"
        ws_message = WebSocketMessage(
            event=ws_event,
            notification_id=notification_id,
            job_id=job.id,
            project_id=job.project_id,
            job_type=job.job_type,
            status=event.status or job.status,
            progress_pct=event.progress_pct or job.progress_pct,
            message=event.error_message or event.current_message or job.current_message,
            title=job_type_label,
            created_at=now,
        )

        try:
            await publish_to_user(user_id, ws_message)
        except Exception:
            logger.exception("Failed to publish WebSocket notification for user %s", user_id)
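A hedged end-to-end sketch of the pub/sub flow above — the channel naming and message shape come from this file; the running Redis instance and the event-loop setup are assumed:

```python
import asyncio
import uuid

from cpv3.modules.notifications.schemas import WebSocketMessage
from cpv3.modules.notifications.service import publish_to_user


async def demo() -> None:
    user_id = uuid.uuid4()
    # Any process can publish; every WebSocket attached via subscribe_and_forward
    # on the "notifications:<user_id>" channel receives the serialized JSON.
    await publish_to_user(
        user_id,
        WebSocketMessage(event="task_update", status="RUNNING", progress_pct=42.0),
    )


asyncio.run(demo())
```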
@@ -2,7 +2,7 @@ from __future__ import annotations

 import uuid

-from sqlalchemy import ForeignKey, String, Text
+from sqlalchemy import ForeignKey, JSON, String, Text
 from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.orm import Mapped, mapped_column

@@ -23,3 +23,4 @@ class Project(Base, BaseModelMixin):
     language: Mapped[str] = mapped_column(String(4), default="auto")
     folder: Mapped[str | None] = mapped_column(String(1024), nullable=True)
     status: Mapped[str] = mapped_column(String(16), default="DRAFT")
+    workspace_state: Mapped[dict | None] = mapped_column(JSON, nullable=True)
@@ -2,7 +2,7 @@ from __future__ import annotations

 import uuid

-from sqlalchemy import Select, select
+from sqlalchemy import Select, or_, select
 from sqlalchemy.ext.asyncio import AsyncSession

 from cpv3.modules.projects.models import Project
@@ -16,13 +16,31 @@ class ProjectRepository:
     def __init__(self, session: AsyncSession) -> None:
         self._session = session

-    async def list_all(self, *, requester: User) -> list[Project]:
+    async def list_all(
+        self,
+        *,
+        requester: User,
+        search: str | None = None,
+        status: str | None = None,
+    ) -> list[Project]:
         stmt: Select[tuple[Project]] = select(Project).where(
             Project.is_active.is_(True)
         )
         if not requester.is_staff:
             stmt = stmt.where(Project.owner_id == requester.id)

+        if search:
+            pattern = f"%{search}%"
+            stmt = stmt.where(
+                or_(
+                    Project.name.ilike(pattern),
+                    Project.description.ilike(pattern),
+                )
+            )
+
+        if status:
+            stmt = stmt.where(Project.status == status)
+
         result = await self._session.execute(stmt.order_by(Project.created_at.desc()))
         return list(result.scalars().all())

@@ -34,14 +52,16 @@ class ProjectRepository:
         )
         return result.scalar_one_or_none()

-    async def create(self, *, requester: User, data: ProjectCreate) -> Project:
+    async def create(
+        self, *, requester: User, data: ProjectCreate, folder: str, status: str,
+    ) -> Project:
         project = Project(
             owner_id=requester.id,
             name=data.name,
             description=data.description,
             language=data.language,
-            folder=data.folder,
-            status=data.status,
+            folder=folder,
+            status=status,
         )

         self._session.add(project)
@@ -2,7 +2,7 @@ from __future__ import annotations

 import uuid

-from fastapi import APIRouter, Depends, HTTPException, Response, status
+from fastapi import APIRouter, Depends, HTTPException, Query, Response, status
 from sqlalchemy.ext.asyncio import AsyncSession

 from cpv3.infrastructure.auth import get_current_user
@@ -16,11 +16,17 @@ router = APIRouter(prefix="/api/projects", tags=["Projects"])

 @router.get("/", response_model=list[ProjectRead])
 async def list_all_projects(
+    search: str | None = Query(None, description="Поиск по названию или описанию"),
+    status_filter: str | None = Query(
+        None, alias="status", description="Фильтр по статусу проекта"
+    ),
     current_user: User = Depends(get_current_user),
     db: AsyncSession = Depends(get_db),
 ) -> list[ProjectRead]:
     service = ProjectService(db)
-    projects = await service.list_projects(requester=current_user)
+    projects = await service.list_projects(
+        requester=current_user, search=search, status=status_filter,
+    )
     return [ProjectRead.model_validate(p) for p in projects]
@@ -19,6 +19,8 @@ class ProjectRead(Schema):
     folder: str | None
     status: ProjectStatusEnum

+    workspace_state: dict | None
+
     is_active: bool
     created_at: datetime
     updated_at: datetime
@@ -28,8 +30,6 @@ class ProjectCreate(Schema):
     name: str
     description: str | None = None
     language: str = "auto"
-    folder: str | None = None
-    status: ProjectStatusEnum = "DRAFT"


 class ProjectUpdate(Schema):
@@ -38,3 +38,4 @@ class ProjectUpdate(Schema):
     language: str | None = None
     folder: str | None = None
     status: ProjectStatusEnum | None = None
+    workspace_state: dict | None = None
@@ -16,14 +16,25 @@ class ProjectService:
    def __init__(self, session: AsyncSession) -> None:
        self._repo = ProjectRepository(session)

    async def list_projects(self, *, requester: User) -> list[Project]:
        return await self._repo.list_all(requester=requester)
    async def list_projects(
        self,
        *,
        requester: User,
        search: str | None = None,
        status: str | None = None,
    ) -> list[Project]:
        return await self._repo.list_all(
            requester=requester, search=search, status=status,
        )

    async def get_project(self, project_id: uuid.UUID) -> Project | None:
        return await self._repo.get_by_id(project_id)

    async def create_project(self, *, requester: User, data: ProjectCreate) -> Project:
        return await self._repo.create(requester=requester, data=data)
        folder = f"/{requester.username}/{data.name}"
        return await self._repo.create(
            requester=requester, data=data, folder=folder, status="DRAFT",
        )

    async def update_project(self, project: Project, data: ProjectUpdate) -> Project:
        return await self._repo.update(project, data)

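Taken together, the router, service, and repository changes thread the new `search` and `status` filters from the query string down to the SQL query, while `folder` and `status` are no longer client-controlled on create. A minimal sketch of exercising the filter from a client, assuming a locally served API; `BASE_URL` and the bearer token are placeholders, not part of this commit:

```python
import httpx

BASE_URL = "http://localhost:8000"  # placeholder host
HEADERS = {"Authorization": "Bearer <access-token>"}  # placeholder token

# List only DRAFT projects whose name or description matches "demo".
resp = httpx.get(
    f"{BASE_URL}/api/projects/",
    params={"search": "demo", "status": "DRAFT"},
    headers=HEADERS,
)
resp.raise_for_status()
for project in resp.json():
    print(project["name"], project["status"])
```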
@@ -15,8 +15,11 @@ from cpv3.infrastructure.auth import get_current_user
from cpv3.modules.jobs.service import JobService
from cpv3.modules.tasks.schemas import (
    CaptionsGenerateRequest,
    FrameExtractRequest,
    MediaConvertRequest,
    MediaProbeRequest,
    SilenceApplyRequest,
    SilenceDetectRequest,
    SilenceRemoveRequest,
    TaskStatusEnum,
    TaskStatusResponse,
@@ -61,6 +64,36 @@ async def submit_silence_remove(
    return await service.submit_silence_remove(requester=current_user, request=body)


@router.post(
    "/silence-detect/",
    response_model=TaskSubmitResponse,
    status_code=status.HTTP_202_ACCEPTED,
)
async def submit_silence_detect(
    body: SilenceDetectRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TaskSubmitResponse:
    """Submit a background task to detect silent segments in a media file."""
    service = TaskService(db)
    return await service.submit_silence_detect(requester=current_user, request=body)


@router.post(
    "/silence-apply/",
    response_model=TaskSubmitResponse,
    status_code=status.HTTP_202_ACCEPTED,
)
async def submit_silence_apply(
    body: SilenceApplyRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TaskSubmitResponse:
    """Submit a background task to apply silence cuts to a media file."""
    service = TaskService(db)
    return await service.submit_silence_apply(requester=current_user, request=body)


@router.post(
    "/media-convert/",
    response_model=TaskSubmitResponse,
@@ -93,6 +126,21 @@ async def submit_transcription_generate(
)


@router.post(
    "/frame-extract/",
    response_model=TaskSubmitResponse,
    status_code=status.HTTP_202_ACCEPTED,
)
async def submit_frame_extract(
    body: FrameExtractRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TaskSubmitResponse:
    """Submit a background task to extract video frames for timeline thumbnails."""
    service = TaskService(db)
    return await service.submit_frame_extract(requester=current_user, request=body)


@router.post(
    "/captions-generate/",
    response_model=TaskSubmitResponse,

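All of the new routes follow the same submit contract: validate the body against its schema, enqueue a Dramatiq actor, and return 202 with a job reference the client can poll. A sketch for frame extraction; the host and token are placeholders, and the `job_id` field name is an assumption about `TaskSubmitResponse`, whose definition is not shown in this diff:

```python
import httpx

resp = httpx.post(
    "http://localhost:8000/api/tasks/frame-extract/",  # placeholder host
    json={"file_key": "users/alice/input.mp4", "regenerate": False},
    headers={"Authorization": "Bearer <access-token>"},  # placeholder token
)
assert resp.status_code == 202
job_id = resp.json()["job_id"]  # field name assumed, not confirmed by this diff
```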
@@ -45,6 +45,36 @@ class SilenceRemoveRequest(Schema):
    )


class SilenceDetectRequest(Schema):
    """Request to detect silent segments in a media file."""

    file_key: str = Field(..., description="Storage key of the input file")
    project_id: UUID | None = Field(default=None, description="Associated project ID")
    min_silence_duration_ms: int = Field(
        default=200, description="Minimum silence duration in milliseconds"
    )
    silence_threshold_db: int = Field(
        default=16, description="Silence threshold in decibels"
    )
    padding_ms: int = Field(
        default=100, description="Padding around non-silent segments in milliseconds"
    )


class SilenceApplyRequest(Schema):
    """Request to apply silence cuts to a media file."""

    file_key: str = Field(..., description="Storage key of the input file")
    out_folder: str = Field(..., description="Output folder for the processed file")
    project_id: UUID | None = Field(default=None, description="Associated project ID")
    output_name: str | None = Field(
        default=None, description="Display name for the output file"
    )
    cuts: list[dict] = Field(
        ..., description="Cut regions: [{'start_ms': int, 'end_ms': int}, ...]"
    )


class MediaConvertRequest(Schema):
    """Request to convert a media file to a different format."""

@@ -75,6 +105,16 @@ class CaptionsGenerateRequest(Schema):
    project_id: UUID | None = Field(default=None, description="Associated project ID")


class FrameExtractRequest(Schema):
    """Request to extract video frames for timeline thumbnails."""

    file_key: str = Field(..., description="S3 key of the video file")
    project_id: UUID | None = Field(default=None, description="Associated project ID")
    regenerate: bool = Field(
        default=False, description="Delete existing frames and re-extract"
    )


# --- Response schemas ---

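The two silence schemas are designed as a pair: detect is read-only and returns segment data in the finished job's `output_data`, while apply consumes explicit cut regions and writes a new file. A sketch of the round trip, assuming the detect output can be reshaped into the `cuts` list the apply schema expects (the exact output shape is defined in `detect_silence`, not in this diff; host and token are placeholders):

```python
import httpx

BASE_URL = "http://localhost:8000"  # placeholder host
HEADERS = {"Authorization": "Bearer <access-token>"}  # placeholder token

# 1. Enqueue detection with a stricter minimum silence length than the default.
httpx.post(
    f"{BASE_URL}/api/tasks/silence-detect/",
    json={"file_key": "users/alice/input.mp4", "min_silence_duration_ms": 300},
    headers=HEADERS,
)

# 2. After polling the job to DONE, reshape its output into cut regions.
cuts = [{"start_ms": 1200, "end_ms": 2400}]  # illustrative values

# 3. Enqueue the apply step; out_folder is resolved under the user's folder.
httpx.post(
    f"{BASE_URL}/api/tasks/silence-apply/",
    json={"file_key": "users/alice/input.mp4", "out_folder": "edits", "cuts": cuts},
    headers=HEADERS,
)
```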
@@ -5,8 +5,12 @@ Task service for submitting and managing background tasks.

from __future__ import annotations

import asyncio
import io
import json
import logging
import time
import uuid
from pathlib import Path
from datetime import datetime, timezone
from typing import Any

@@ -17,6 +21,8 @@ from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.infrastructure.deps import _get_storage_service
from cpv3.infrastructure.settings import get_settings
from cpv3.modules.files.repository import FileRepository
from cpv3.modules.files.schemas import FileCreate
from cpv3.modules.jobs.models import Job
from cpv3.modules.jobs.repository import JobEventRepository, JobRepository
from cpv3.modules.jobs.schemas import (
@@ -26,17 +32,26 @@ from cpv3.modules.jobs.schemas import (
    JobTypeEnum,
    JobUpdate,
)
from cpv3.modules.media.repository import ArtifactRepository
from cpv3.modules.media.schemas import ArtifactMediaFileCreate
from cpv3.modules.tasks.schemas import (
    CaptionsGenerateRequest,
    FrameExtractRequest,
    MediaConvertRequest,
    MediaProbeRequest,
    SilenceApplyRequest,
    SilenceDetectRequest,
    SilenceRemoveRequest,
    TaskSubmitResponse,
    TaskWebhookEvent,
    TranscriptionGenerateRequest,
)
from cpv3.infrastructure.storage.utils import get_user_folder
from cpv3.modules.notifications.service import NotificationService
from cpv3.modules.transcription.repository import TranscriptionRepository
from cpv3.modules.transcription.schemas import TranscriptionCreate
from cpv3.modules.users.models import User
from cpv3.modules.users.repository import UserRepository
from cpv3.modules.webhooks.repository import WebhookRepository
from cpv3.modules.webhooks.schemas import WebhookCreate

@@ -49,9 +64,12 @@ JOB_STATUS_FAILED: JobStatusEnum = "FAILED"

JOB_TYPE_MEDIA_PROBE: JobTypeEnum = "MEDIA_PROBE"
JOB_TYPE_SILENCE_REMOVE: JobTypeEnum = "SILENCE_REMOVE"
JOB_TYPE_SILENCE_DETECT: JobTypeEnum = "SILENCE_DETECT"
JOB_TYPE_SILENCE_APPLY: JobTypeEnum = "SILENCE_APPLY"
JOB_TYPE_MEDIA_CONVERT: JobTypeEnum = "MEDIA_CONVERT"
JOB_TYPE_TRANSCRIPTION_GENERATE: JobTypeEnum = "TRANSCRIPTION_GENERATE"
JOB_TYPE_CAPTIONS_GENERATE: JobTypeEnum = "CAPTIONS_GENERATE"
JOB_TYPE_FRAME_EXTRACT: JobTypeEnum = "FRAME_EXTRACT"

EVENT_TYPE_STATUS_PREFIX = "status_"
EVENT_TYPE_PROGRESS = "progress"
@@ -62,19 +80,41 @@ EVENT_TYPE_ERROR = "error"
TASK_WEBHOOK_PATH = "/api/tasks/webhook/{job_id}/"
WEBHOOK_TIMEOUT_SECONDS = 10

ERROR_NO_AUDIO_STREAM = "Файл не содержит аудиодорожки"
ERROR_UNKNOWN_ENGINE = "Неизвестный движок транскрипции: {engine}"

ENGINE_MAP: dict[str, str] = {
    "whisper": "LOCAL_WHISPER",
    "google": "GOOGLE_SPEECH_CLOUD",
}

MESSAGE_STARTING = "Starting"
MESSAGE_COMPLETED = "Completed"
MESSAGE_PROBING_MEDIA = "Probing media"
MESSAGE_PROCESSING = "Processing"
MESSAGE_CONVERTING = "Converting"
MESSAGE_RENDERING_CAPTIONS = "Rendering captions"
MESSAGE_EXTRACTING_FRAMES = "Извлечение кадров"
MESSAGE_UPLOADING_FRAMES = "Загрузка кадров"
MESSAGE_DELETING_OLD_FRAMES = "Удаление старых кадров"

PROGRESS_COMPLETE = 100.0
PROGRESS_MEDIA_PROBE = 50.0
PROGRESS_SILENCE_REMOVE = 30.0
PROGRESS_MEDIA_CONVERT = 30.0
PROGRESS_TRANSCRIPTION = 20.0
PROGRESS_TRANSCRIPTION_START = 20.0
PROGRESS_TRANSCRIPTION_END = 95.0
PROGRESS_CAPTIONS = 30.0
PROGRESS_FRAME_EXTRACT_START = 10.0
PROGRESS_FRAME_EXTRACT_END = 95.0

PROGRESS_SILENCE_DETECT = 30.0
PROGRESS_SILENCE_APPLY = 30.0

MESSAGE_DETECTING_SILENCE = "Обнаружение тишины"
MESSAGE_APPLYING_CUTS = "Применение вырезок"

PROGRESS_THROTTLE_SECONDS = 3.0

# ---------------------------------------------------------------------------
# Dramatiq broker setup
@@ -95,6 +135,18 @@ def _utc_now() -> datetime:
    return datetime.now(timezone.utc)


def _parse_frame_rate(rate_str: str) -> float | None:
    """Parse an ffprobe frame rate string like '30/1' or '30000/1001'."""
    try:
        if "/" in rate_str:
            num, den = rate_str.split("/")
            den_val = int(den)
            return round(int(num) / den_val, 3) if den_val else None
        return float(rate_str)
    except (ValueError, ZeroDivisionError):
        return None


def _build_webhook_url(job_id: uuid.UUID) -> str:
    """Build the internal webhook URL for task updates."""
    settings = get_settings()
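`_parse_frame_rate` accepts both of ffprobe's `r_frame_rate` spellings and degrades to `None` on anything else. A few values worked by hand from the helper above:

```python
assert _parse_frame_rate("30/1") == 30.0
assert _parse_frame_rate("30000/1001") == 29.97  # NTSC: round(29.97002..., 3)
assert _parse_frame_rate("25") == 25.0           # plain float string
assert _parse_frame_rate("0/0") is None          # zero denominator is guarded
assert _parse_frame_rate("n/a") is None          # garbage falls into the except
```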
@@ -267,6 +319,136 @@ def silence_remove_actor(
        raise


@dramatiq.actor(max_retries=3, min_backoff=1000)
def silence_detect_actor(
    job_id: str,
    webhook_url: str,
    file_key: str,
    min_silence_duration_ms: int,
    silence_threshold_db: int,
    padding_ms: int,
) -> None:
    """Detect silent segments in a media file."""
    from cpv3.modules.media.service import detect_silence

    job_uuid = uuid.UUID(job_id)
    _send_webhook_event(
        webhook_url,
        TaskWebhookEvent(
            status=JOB_STATUS_RUNNING,
            current_message=MESSAGE_STARTING,
            started_at=_utc_now(),
        ),
    )

    try:
        storage = _get_storage_service()
        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                current_message=MESSAGE_DETECTING_SILENCE,
                progress_pct=PROGRESS_SILENCE_DETECT,
            ),
        )
        result = _run_async(
            detect_silence(
                storage,
                file_key=file_key,
                min_silence_duration_ms=min_silence_duration_ms,
                silence_threshold_db=silence_threshold_db,
                padding_ms=padding_ms,
            )
        )
        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                status=JOB_STATUS_DONE,
                current_message=MESSAGE_COMPLETED,
                progress_pct=PROGRESS_COMPLETE,
                output_data=result,
                finished_at=_utc_now(),
            ),
        )
    except Exception as exc:
        logger.exception("silence_detect_actor failed: %s", job_uuid)
        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                status=JOB_STATUS_FAILED,
                error_message=str(exc),
                finished_at=_utc_now(),
            ),
        )
        raise


@dramatiq.actor(max_retries=3, min_backoff=1000)
def silence_apply_actor(
    job_id: str,
    webhook_url: str,
    file_key: str,
    out_folder: str,
    cuts: list[dict],
    output_name: str | None,
) -> None:
    """Apply silence cuts to a media file."""
    from cpv3.modules.media.service import apply_silence_cuts

    job_uuid = uuid.UUID(job_id)
    _send_webhook_event(
        webhook_url,
        TaskWebhookEvent(
            status=JOB_STATUS_RUNNING,
            current_message=MESSAGE_STARTING,
            started_at=_utc_now(),
        ),
    )

    try:
        storage = _get_storage_service()
        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                current_message=MESSAGE_APPLYING_CUTS,
                progress_pct=PROGRESS_SILENCE_APPLY,
            ),
        )
        result = _run_async(
            apply_silence_cuts(
                storage,
                file_key=file_key,
                out_folder=out_folder,
                cuts=cuts,
                output_name=output_name,
            )
        )
        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                status=JOB_STATUS_DONE,
                current_message=MESSAGE_COMPLETED,
                progress_pct=PROGRESS_COMPLETE,
                output_data={
                    "file_path": result.file_path,
                    "file_url": result.file_url,
                    "file_size": result.file_size,
                },
                finished_at=_utc_now(),
            ),
        )
    except Exception as exc:
        logger.exception("silence_apply_actor failed: %s", job_uuid)
        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                status=JOB_STATUS_FAILED,
                error_message=str(exc),
                finished_at=_utc_now(),
            ),
        )
        raise


@dramatiq.actor(max_retries=3, min_backoff=1000)
def media_convert_actor(
    job_id: str,
@@ -356,19 +538,60 @@ def transcription_generate_actor(
    )

    try:
        from cpv3.modules.media.service import probe_media

        storage = _get_storage_service()

        probe = _run_async(probe_media(storage, file_key=file_key))
        has_audio = any(s.codec_type == "audio" for s in probe.streams)
        if not has_audio:
            raise ValueError(ERROR_NO_AUDIO_STREAM)

        # Extract probe metadata for artifact creation
        duration_seconds = (
            float(probe.format.duration)
            if probe.format and probe.format.duration
            else 0.0
        )
        video_stream = next((s for s in probe.streams if s.codec_type == "video"), None)
        probe_meta = {
            "duration_seconds": duration_seconds,
            "frame_rate": (
                _parse_frame_rate(video_stream.r_frame_rate)
                if video_stream and video_stream.r_frame_rate
                else None
            ),
            "width": video_stream.width if video_stream else None,
            "height": video_stream.height if video_stream else None,
        }

        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                current_message=f"Transcribing ({engine})",
                progress_pct=PROGRESS_TRANSCRIPTION,
                current_message=f"Транскрибирование ({engine})",
                progress_pct=PROGRESS_TRANSCRIPTION_START,
            ),
        )

        last_report_time = time.monotonic()

        def _on_whisper_progress(pct: float) -> None:
            nonlocal last_report_time
            now = time.monotonic()
            if now - last_report_time < PROGRESS_THROTTLE_SECONDS:
                return
            last_report_time = now
            mapped = PROGRESSS_TRANSCRIPTION_START + (
                pct / 100.0
            ) * (PROGRESS_TRANSCRIPTION_END - PROGRESS_TRANSCRIPTION_START)
            _send_webhook_event(
                webhook_url,
                TaskWebhookEvent(
                    current_message=f"Транскрибирование ({engine})",
                    progress_pct=round(mapped, 1),
                ),
            )

        if engine == "whisper":
            document = _run_async(
                transcribe_with_whisper(
                    storage, file_key=file_key, model_name=model, language=language
                    storage,
                    file_key=file_key,
                    model_name=model,
                    language=language,
                    on_progress=_on_whisper_progress,
                )
            )
        elif engine == "google":
@@ -379,7 +602,7 @@ def transcription_generate_actor(
                )
            )
        else:
            raise ValueError(f"Unknown engine: {engine}")
            raise ValueError(ERROR_UNKNOWN_ENGINE.format(engine=engine))

        _send_webhook_event(
            webhook_url,
@@ -387,7 +610,22 @@ def transcription_generate_actor(
                status=JOB_STATUS_DONE,
                current_message=MESSAGE_COMPLETED,
                progress_pct=PROGRESS_COMPLETE,
                output_data={"document": document.model_dump(mode="json")},
                output_data={
                    "document": document.model_dump(mode="json"),
                    "probe": probe_meta,
                },
                finished_at=_utc_now(),
            ),
        )
    except (ValueError, RuntimeError) as exc:
        logger.exception(
            "transcription_generate_actor failed (non-transient): %s", job_uuid
        )
        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                status=JOB_STATUS_FAILED,
                error_message=str(exc),
                finished_at=_utc_now(),
            ),
        )
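The throttled callback rescales Whisper's raw 0-100% linearly into the reserved transcription band, leaving headroom before and after for probing and artifact saving. Checking the mapping by hand with the constants above:

```python
def mapped(pct: float) -> float:
    # PROGRESS_TRANSCRIPTION_START + (pct / 100) * (END - START)
    return 20.0 + (pct / 100.0) * (95.0 - 20.0)

assert mapped(0.0) == 20.0    # band start, right after the probe step
assert mapped(50.0) == 57.5
assert mapped(100.0) == 95.0  # band end; the jump to 100.0 happens only on DONE
```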
@@ -463,6 +701,115 @@ def captions_generate_actor(
        raise


@dramatiq.actor(max_retries=2, min_backoff=2000)
def frame_extract_actor(
    job_id: str,
    webhook_url: str,
    file_key: str,
    frames_folder: str,
    regenerate: bool,
) -> None:
    """Extract video frames at 1fps for timeline thumbnails."""
    from cpv3.modules.media.service import (
        delete_frames,
        extract_frames,
        read_frames_metadata,
    )

    job_uuid = uuid.UUID(job_id)
    _send_webhook_event(
        webhook_url,
        TaskWebhookEvent(
            status=JOB_STATUS_RUNNING,
            current_message=MESSAGE_STARTING,
            started_at=_utc_now(),
        ),
    )

    try:
        storage = _get_storage_service()

        # Delete old frames if regenerating
        if regenerate:
            _send_webhook_event(
                webhook_url,
                TaskWebhookEvent(
                    current_message=MESSAGE_DELETING_OLD_FRAMES,
                    progress_pct=PROGRESS_FRAME_EXTRACT_START,
                ),
            )
            old_meta = _run_async(
                read_frames_metadata(storage, frames_folder=frames_folder)
            )
            if old_meta is not None:
                _run_async(
                    delete_frames(
                        storage,
                        frames_folder=frames_folder,
                        frame_count=old_meta.frame_count,
                    )
                )

        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                current_message=MESSAGE_EXTRACTING_FRAMES,
                progress_pct=PROGRESS_FRAME_EXTRACT_START,
            ),
        )

        last_report_time = time.monotonic()

        def _on_progress(current: int, total: int) -> None:
            nonlocal last_report_time
            now = time.monotonic()
            if now - last_report_time < PROGRESS_THROTTLE_SECONDS:
                return
            last_report_time = now
            pct = current / total
            mapped = PROGRESS_FRAME_EXTRACT_START + pct * (
                PROGRESS_FRAME_EXTRACT_END - PROGRESS_FRAME_EXTRACT_START
            )
            _send_webhook_event(
                webhook_url,
                TaskWebhookEvent(
                    current_message=MESSAGE_UPLOADING_FRAMES,
                    progress_pct=round(mapped, 1),
                ),
            )

        metadata = _run_async(
            extract_frames(
                storage,
                file_key=file_key,
                frames_folder=frames_folder,
                on_progress=_on_progress,
            )
        )

        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                status=JOB_STATUS_DONE,
                current_message=MESSAGE_COMPLETED,
                progress_pct=PROGRESS_COMPLETE,
                output_data=metadata.model_dump(mode="json"),
                finished_at=_utc_now(),
            ),
        )
    except Exception as exc:
        logger.exception("frame_extract_actor failed: %s", job_uuid)
        _send_webhook_event(
            webhook_url,
            TaskWebhookEvent(
                status=JOB_STATUS_FAILED,
                error_message=str(exc),
                finished_at=_utc_now(),
            ),
        )
        raise


# ---------------------------------------------------------------------------
# Task Service
# ---------------------------------------------------------------------------
@@ -557,8 +904,193 @@ class TaskService:
        await self._event_repo.create(
            JobEventCreate(job_id=job.id, event_type=event_type, payload=payload)
        )

        # Save artifacts BEFORE sending notifications so the data exists when the frontend refetches
        if (
            job.job_type == JOB_TYPE_TRANSCRIPTION_GENERATE
            and event.status == JOB_STATUS_DONE
        ):
            try:
                await self._save_transcription_artifacts(job)
            except Exception:
                logger.exception(
                    "Failed to save transcription artifacts for job %s", job_id
                )

        if job.job_type == JOB_TYPE_MEDIA_CONVERT and event.status == JOB_STATUS_DONE:
            try:
                await self._save_convert_artifacts(job)
            except Exception:
                logger.exception(
                    "Failed to save convert artifacts for job %s", job_id
                )

        # Push a real-time notification via WebSocket (after artifacts are persisted)
        if job.user_id is not None:
            try:
                notification_service = NotificationService(self._session)
                await notification_service.create_task_notification(
                    user_id=job.user_id, job=job, event=event
                )
            except Exception:
                logger.exception("Failed to create notification for job %s", job_id)

        return job

    async def _save_transcription_artifacts(self, job: Job) -> None:
        """Create Transcription, ArtifactMediaFile, and File records."""
        input_data = job.input_data or {}
        output_data = job.output_data or {}

        file_key: str = input_data["file_key"]
        project_id: uuid.UUID | None = (
            uuid.UUID(input_data["project_id"]) if input_data.get("project_id") else None
        )
        engine_raw: str = input_data.get("engine", "whisper")
        language: str | None = input_data.get("language")

        document: dict = output_data["document"]

        # Resolve the user
        user_repo = UserRepository(self._session)
        user = await user_repo.get_by_id(job.user_id)  # type: ignore[arg-type]
        if user is None:
            logger.warning("User %s not found, skipping artifact save", job.user_id)
            return

        # Find or create the source File record
        file_repo = FileRepository(self._session)
        source_file = await file_repo.get_by_path(file_key)
        if source_file is None:
            source_file = await file_repo.create(
                requester=user,
                data=FileCreate(
                    project_id=project_id,
                    original_filename=file_key.rsplit("/", 1)[-1],
                    path=file_key,
                    storage_backend="S3",
                    mime_type="application/octet-stream",
                    size_bytes=0,
                    is_uploaded=True,
                ),
            )

        # Upload the document JSON to S3
        storage = _get_storage_service()
        user_folder = get_user_folder(user)
        json_bytes = json.dumps(document, ensure_ascii=False).encode("utf-8")

        # Build the display name: "Транскрипция <video_name>.json"
        video_stem = Path(source_file.original_filename).stem
        transcription_filename = f"Транскрипция {video_stem}.json"

        artifact_key = await storage.upload_fileobj(
            fileobj=io.BytesIO(json_bytes),
            file_name=transcription_filename,
            folder=f"{user_folder}/artifacts",
            gen_name=True,
            content_type="application/json",
        )

        # Create a File record for the JSON artifact (no project_id: only reachable via the artifact)
        json_file = await file_repo.create(
            requester=user,
            data=FileCreate(
                project_id=None,
                original_filename=transcription_filename,
                path=artifact_key,
                storage_backend="S3",
                mime_type="application/json",
                size_bytes=len(json_bytes),
                file_format="json",
                is_uploaded=True,
            ),
        )

        # Create the ArtifactMediaFile (no media_file_id: a transcription is not a media file)
        artifact_repo = ArtifactRepository(self._session)
        artifact = await artifact_repo.create(
            data=ArtifactMediaFileCreate(
                project_id=project_id,
                file_id=json_file.id,
                media_file_id=None,
                artifact_type="TRANSCRIPTION_JSON",
            ),
        )

        # Create the Transcription record
        transcription_repo = TranscriptionRepository(self._session)
        engine_db = ENGINE_MAP.get(engine_raw, "LOCAL_WHISPER")
        await transcription_repo.create(
            data=TranscriptionCreate(
                project_id=project_id,
                source_file_id=source_file.id,
                artifact_id=artifact.id,
                engine=engine_db,  # type: ignore[arg-type]
                language=language,
                document=document,
            ),
        )

        logger.info("Saved transcription artifacts for job %s", job.id)

    async def _save_convert_artifacts(self, job: Job) -> None:
        """Create File and ArtifactMediaFile records for the converted MP4."""
        input_data = job.input_data or {}
        output_data = job.output_data or {}

        file_key: str = input_data["file_key"]
        project_id: uuid.UUID | None = (
            uuid.UUID(input_data["project_id"]) if input_data.get("project_id") else None
        )

        file_path: str = output_data["file_path"]
        file_size: int = output_data.get("file_size", 0)

        # Resolve the user
        user_repo = UserRepository(self._session)
        user = await user_repo.get_by_id(job.user_id)  # type: ignore[arg-type]
        if user is None:
            logger.warning("User %s not found, skipping convert artifact save", job.user_id)
            return

        # Derive the output filename from the source file
        file_repo = FileRepository(self._session)
        source_file = await file_repo.get_by_path(file_key)
        if source_file is not None:
            stem = Path(source_file.original_filename).stem
        else:
            stem = Path(file_key).stem
        converted_filename = f"{stem}.mp4"

        # Create a File record for the converted MP4 (no project_id: only reachable via the artifact)
        converted_file = await file_repo.create(
            requester=user,
            data=FileCreate(
                project_id=None,
                original_filename=converted_filename,
                path=file_path,
                storage_backend="S3",
                mime_type="video/mp4",
                size_bytes=file_size,
                file_format="mp4",
                is_uploaded=True,
            ),
        )

        # Create the ArtifactMediaFile record
        artifact_repo = ArtifactRepository(self._session)
        await artifact_repo.create(
            data=ArtifactMediaFileCreate(
                project_id=project_id,
                file_id=converted_file.id,
                media_file_id=None,
                artifact_type="CONVERTED_VIDEO",
            ),
        )

        logger.info("Saved convert artifacts for job %s", job.id)

    async def submit_media_probe(
        self, *, requester: User, request: MediaProbeRequest
    ) -> TaskSubmitResponse:
@@ -576,6 +1108,12 @@ class TaskService:
        self, *, requester: User, request: SilenceRemoveRequest
    ) -> TaskSubmitResponse:
        """Submit a silence removal task."""
        user_folder = get_user_folder(requester)
        resolved_folder = (
            f"{user_folder}/{request.out_folder}"
            if request.out_folder
            else f"{user_folder}/output_files"
        )
        return await self._submit_task(
            requester=requester,
            job_type=JOB_TYPE_SILENCE_REMOVE,
@@ -584,17 +1122,65 @@ class TaskService:
            actor=silence_remove_actor,
            actor_kwargs={
                "file_key": request.file_key,
                "out_folder": request.out_folder,
                "out_folder": resolved_folder,
                "min_silence_duration_ms": request.min_silence_duration_ms,
                "silence_threshold_db": request.silence_threshold_db,
                "padding_ms": request.padding_ms,
            },
        )

    async def submit_silence_detect(
        self, *, requester: User, request: SilenceDetectRequest
    ) -> TaskSubmitResponse:
        """Submit a silence detection task."""
        return await self._submit_task(
            requester=requester,
            job_type=JOB_TYPE_SILENCE_DETECT,
            project_id=request.project_id,
            input_data=request.model_dump(mode="json"),
            actor=silence_detect_actor,
            actor_kwargs={
                "file_key": request.file_key,
                "min_silence_duration_ms": request.min_silence_duration_ms,
                "silence_threshold_db": request.silence_threshold_db,
                "padding_ms": request.padding_ms,
            },
        )

    async def submit_silence_apply(
        self, *, requester: User, request: SilenceApplyRequest
    ) -> TaskSubmitResponse:
        """Submit a silence apply task."""
        user_folder = get_user_folder(requester)
        resolved_folder = (
            f"{user_folder}/{request.out_folder}"
            if request.out_folder
            else f"{user_folder}/output_files"
        )
        return await self._submit_task(
            requester=requester,
            job_type=JOB_TYPE_SILENCE_APPLY,
            project_id=request.project_id,
            input_data=request.model_dump(mode="json"),
            actor=silence_apply_actor,
            actor_kwargs={
                "file_key": request.file_key,
                "out_folder": resolved_folder,
                "cuts": request.cuts,
                "output_name": request.output_name,
            },
        )

    async def submit_media_convert(
        self, *, requester: User, request: MediaConvertRequest
    ) -> TaskSubmitResponse:
        """Submit a media conversion task."""
        user_folder = get_user_folder(requester)
        resolved_folder = (
            f"{user_folder}/{request.out_folder}"
            if request.out_folder
            else f"{user_folder}/output_files"
        )
        return await self._submit_task(
            requester=requester,
            job_type=JOB_TYPE_MEDIA_CONVERT,
@@ -603,7 +1189,7 @@ class TaskService:
            actor=media_convert_actor,
            actor_kwargs={
                "file_key": request.file_key,
                "out_folder": request.out_folder,
                "out_folder": resolved_folder,
                "output_format": request.output_format,
            },
        )
@@ -626,6 +1212,28 @@ class TaskService:
            },
        )

    async def submit_frame_extract(
        self, *, requester: User, request: FrameExtractRequest
    ) -> TaskSubmitResponse:
        """Submit a frame extraction task."""
        from cpv3.modules.media.service import get_frames_folder

        user_folder = get_user_folder(requester)
        frames_folder = get_frames_folder(user_folder, request.file_key)

        return await self._submit_task(
            requester=requester,
            job_type=JOB_TYPE_FRAME_EXTRACT,
            project_id=request.project_id,
            input_data=request.model_dump(mode="json"),
            actor=frame_extract_actor,
            actor_kwargs={
                "file_key": request.file_key,
                "frames_folder": frames_folder,
                "regenerate": request.regenerate,
            },
        )

    async def submit_captions_generate(
        self, *, requester: User, request: CaptionsGenerateRequest
    ) -> TaskSubmitResponse:
@@ -635,6 +1243,13 @@ class TaskService:
        if transcription is None:
            raise ValueError(f"Transcription {request.transcription_id} not found")

        user_folder = get_user_folder(requester)
        resolved_folder = (
            f"{user_folder}/{request.folder}"
            if request.folder
            else f"{user_folder}/output_files"
        )

        return await self._submit_task(
            requester=requester,
            job_type=JOB_TYPE_CAPTIONS_GENERATE,
@@ -643,7 +1258,7 @@ class TaskService:
            actor=captions_generate_actor,
            actor_kwargs={
                "video_s3_path": request.video_s3_path,
                "folder": request.folder,
                "folder": resolved_folder,
                "transcription_json": transcription.document,
            },
        )

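The user-folder resolution is now repeated verbatim in four submit methods. One possible consolidation, sketched as a suggestion rather than as part of this commit (the helper name `_resolve_out_folder` is hypothetical):

```python
def _resolve_out_folder(requester: User, out_folder: str | None) -> str:
    """Prefix a client-supplied folder with the user's root, defaulting to output_files.

    Hypothetical helper, not part of this commit.
    """
    user_folder = get_user_folder(requester)
    return f"{user_folder}/{out_folder}" if out_folder else f"{user_folder}/output_files"
```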
@@ -32,6 +32,14 @@ class TranscriptionRepository:
        )
        return result.scalar_one_or_none()

    async def get_by_artifact_id(self, artifact_id: uuid.UUID) -> Transcription | None:
        result = await self._session.execute(
            select(Transcription)
            .where(Transcription.artifact_id == artifact_id)
            .where(Transcription.is_active.is_(True))
        )
        return result.scalar_one_or_none()

    async def create(self, data: TranscriptionCreate) -> Transcription:
        transcription = Transcription(
            project_id=data.project_id,

@@ -67,6 +67,21 @@ async def retrieve_transcription_entry(
    return TranscriptionRead.model_validate(transcription)


@router.get("/transcriptions/by-artifact/{artifact_id}/", response_model=TranscriptionRead)
async def retrieve_transcription_by_artifact(
    artifact_id: uuid.UUID,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> TranscriptionRead:
    _ = current_user
    repo = TranscriptionRepository(db)
    transcription = await repo.get_by_artifact_id(artifact_id)
    if transcription is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")

    return TranscriptionRead.model_validate(transcription)


@router.patch("/transcriptions/{transcription_id}/", response_model=TranscriptionRead)
async def patch_transcription_entry(
    transcription_id: uuid.UUID,

@@ -240,11 +240,15 @@ def _make_document_from_segments(
    return Document(segments=result_segments)


ProgressCallback = Callable[[float], None]


def _whisper_transcribe_sync(
    *,
    local_file_path: str,
    model_name: str,
    language: str | None,
    on_progress: ProgressCallback | None = None,
) -> Document:
    import whisper  # type: ignore[import-untyped]

@@ -267,14 +271,35 @@ def _whisper_transcribe_sync(
        probs = cast(dict[str, float], probs_raw)
        language = max(probs, key=lambda k: probs[k])

    result = whisper.transcribe(
        audio=whisper.load_audio(local_file_path),
        model=model,
        word_timestamps=True,
        temperature=0.2,
        language=language,
        verbose=False,
    )
    if on_progress is not None:
        from unittest.mock import patch
        from tqdm import tqdm as _orig_tqdm

        class _ProgressTqdm(_orig_tqdm):
            def update(self, n=1):
                super().update(n)
                if self.total:
                    on_progress(min(self.n / self.total * 100.0, 100.0))

        with patch("whisper.transcribe.tqdm.tqdm", _ProgressTqdm):
            result = whisper.transcribe(
                audio=whisper.load_audio(local_file_path),
                model=model,
                word_timestamps=True,
                temperature=0.2,
                language=language,
                verbose=False,
            )
        on_progress(100.0)
    else:
        result = whisper.transcribe(
            audio=whisper.load_audio(local_file_path),
            model=model,
            word_timestamps=True,
            temperature=0.2,
            language=language,
            verbose=None,
        )

    parsed = WhisperResult.model_validate(result)

@@ -296,6 +321,7 @@ async def transcribe_with_whisper(
    file_key: str,
    model_name: str = "tiny",
    language: str | None = None,
    on_progress: ProgressCallback | None = None,
) -> Document:
    tmp = await storage.download_to_temp(file_key)
    try:
@@ -304,6 +330,7 @@ async def transcribe_with_whisper(
                local_file_path=tmp.path,
                model_name=model_name,
                language=language,
                on_progress=on_progress,
            )
        )
    finally:

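The monkeypatch works because openai-whisper builds its progress bar through the `tqdm` module object imported inside `whisper/transcribe.py`, and that bar is only enabled when `verbose=False` (with `verbose=None` whisper suppresses it, which is why the no-callback branch switches to `None`). A standalone sketch of the same interception pattern; `library_work` is an illustrative stand-in for whisper's decode loop, not a real API:

```python
import tqdm as tqdm_module
from tqdm import tqdm as _orig_tqdm
from unittest.mock import patch


def library_work() -> None:
    """Stand-in for whisper's loop: looks up tqdm via the module attribute."""
    bar = tqdm_module.tqdm(total=10, disable=False)
    for _ in range(10):
        bar.update(1)  # whisper drives its bar with pbar.update(...) the same way
    bar.close()


def run_with_progress(on_progress) -> None:
    class _ProgressTqdm(_orig_tqdm):
        def update(self, n=1):
            super().update(n)
            if self.total:
                on_progress(min(self.n / self.total * 100.0, 100.0))

    # Swap the class the library resolves at call time.
    with patch("tqdm.tqdm", _ProgressTqdm):
        library_work()


run_with_progress(lambda pct: print(f"{pct:.0f}%"))
```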
@@ -71,6 +71,10 @@ class UserRepository:
        await self._session.refresh(user)
        return user

    async def update_password(self, user: User, new_hash: str) -> None:
        user.password_hash = new_hash
        await self._session.commit()

    async def deactivate(self, user: User) -> None:
        user.is_active = False
        await self._session.commit()

@@ -2,17 +2,21 @@ from __future__ import annotations

import uuid
from datetime import timedelta
from urllib.parse import urlparse

from fastapi import APIRouter, Depends, HTTPException, Response, status
from jwt import ExpiredSignatureError, InvalidTokenError
from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.infrastructure.auth import get_current_user
from cpv3.infrastructure.deps import get_storage
from cpv3.infrastructure.security import create_token, decode_token
from cpv3.infrastructure.settings import get_settings
from cpv3.infrastructure.storage.base import StorageService
from cpv3.db.session import get_db
from cpv3.modules.users.models import User
from cpv3.modules.users.schemas import (
    PasswordChange,
    TokenRefresh,
    TokenRefreshResponse,
    UserCreate,
@@ -28,6 +32,21 @@ users_router = APIRouter(prefix="/api/users", tags=["Users"])
auth_router = APIRouter(prefix="/auth", tags=["auth"])


def _is_s3_key(value: str) -> bool:
    """Return True if value looks like a bare S3 key, not a full URL."""
    parsed = urlparse(value)
    return not parsed.scheme and not parsed.netloc


async def _resolve_avatar(user: User, storage: StorageService) -> UserRead:
    """Build UserRead with a fresh presigned avatar URL."""
    data = UserRead.model_validate(user)
    if data.avatar:
        if _is_s3_key(data.avatar):
            data.avatar = await storage.url(data.avatar)
    return data
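`_is_s3_key` tells stored keys apart from already-presigned URLs by the absence of a scheme and host, so only bare keys get re-signed on every read. A few sanity checks of the heuristic (values are illustrative):

```python
assert _is_s3_key("users/alice/avatar.png") is True           # bare key: re-sign it
assert _is_s3_key("https://cdn.example.com/a.png") is False   # full URL: keep as-is
assert _is_s3_key("//cdn.example.com/a.png") is False         # scheme-relative URL
```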

def _issue_tokens(user: User) -> tuple[str, str]:
    settings = get_settings()

@@ -49,10 +68,11 @@ def _issue_tokens(user: User) -> tuple[str, str]:
async def list_all_users(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
    storage: StorageService = Depends(get_storage),
) -> list[UserRead]:
    service = UserService(db)
    users = await service.list_users(requester=current_user)
    return [UserRead.model_validate(u) for u in users]
    return [await _resolve_avatar(u, storage) for u in users]


@users_router.post("/", response_model=UserRead, status_code=status.HTTP_201_CREATED)
@@ -60,6 +80,7 @@ async def create_user_endpoint(
    body: UserCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
    storage: StorageService = Depends(get_storage),
) -> UserRead:
    service = UserService(db)
    try:
@@ -67,12 +88,29 @@ async def create_user_endpoint(
    except ValueError as e:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) from e

    return UserRead.model_validate(user)
    return await _resolve_avatar(user, storage)


@users_router.get("/me/", response_model=UserRead)
async def me(current_user: User = Depends(get_current_user)) -> UserRead:
    return UserRead.model_validate(current_user)
async def me(
    current_user: User = Depends(get_current_user),
    storage: StorageService = Depends(get_storage),
) -> UserRead:
    return await _resolve_avatar(current_user, storage)


@users_router.post("/me/change-password/", status_code=status.HTTP_204_NO_CONTENT)
async def change_password(
    body: PasswordChange,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> Response:
    service = UserService(db)
    try:
        await service.change_password(current_user, body.current_password, body.new_password)
    except ValueError as e:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
    return Response(status_code=status.HTTP_204_NO_CONTENT)


@users_router.get("/{user_id}/", response_model=UserRead)
@@ -80,6 +118,7 @@ async def retrieve_user(
    user_id: uuid.UUID,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
    storage: StorageService = Depends(get_storage),
) -> UserRead:
    service = UserService(db)
    user = await service.get_user_by_id(user_id)
@@ -89,7 +128,7 @@ async def retrieve_user(
    if not current_user.is_staff and user.id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")

    return UserRead.model_validate(user)
    return await _resolve_avatar(user, storage)


@users_router.patch("/{user_id}/", response_model=UserRead)
@@ -98,6 +137,7 @@ async def patch_user(
    body: UserUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
    storage: StorageService = Depends(get_storage),
) -> UserRead:
    service = UserService(db)
    user = await service.get_user_by_id(user_id)
@@ -112,7 +152,7 @@ async def patch_user(
    except ValueError as e:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) from e

    return UserRead.model_validate(user)
    return await _resolve_avatar(user, storage)


@users_router.delete("/{user_id}/", status_code=status.HTTP_204_NO_CONTENT)
@@ -136,7 +176,11 @@ async def delete_user(
@auth_router.post(
    "/register", response_model=UserRegisterResponse, status_code=status.HTTP_201_CREATED
)
async def register(body: UserRegister, db: AsyncSession = Depends(get_db)) -> UserRegisterResponse:
async def register(
    body: UserRegister,
    db: AsyncSession = Depends(get_db),
    storage: StorageService = Depends(get_storage),
) -> UserRegisterResponse:
    service = UserService(db)
    try:
        user = await service.register_user(body)
@@ -144,18 +188,24 @@ async def register(body: UserRegister, db: AsyncSession = Depends(get_db)) -> Us
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) from e

    access, refresh = _issue_tokens(user)
    return UserRegisterResponse(user=UserRead.model_validate(user), access=access, refresh=refresh)
    user_read = await _resolve_avatar(user, storage)
    return UserRegisterResponse(user=user_read, access=access, refresh=refresh)


@auth_router.post("/login", response_model=UserRegisterResponse)
async def login(body: UserLogin, db: AsyncSession = Depends(get_db)) -> UserRegisterResponse:
async def login(
    body: UserLogin,
    db: AsyncSession = Depends(get_db),
    storage: StorageService = Depends(get_storage),
) -> UserRegisterResponse:
    service = UserService(db)
    user = await service.authenticate(body.username, body.password)
    if user is None:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")

    access, refresh = _issue_tokens(user)
    user_read = await _resolve_avatar(user, storage)
    return UserRegisterResponse(user=user_read, access=access, refresh=refresh)


@auth_router.post("/refresh", response_model=TokenRefreshResponse)

@@ -66,6 +66,11 @@ class UserRegisterResponse(Schema):
    refresh: str


class PasswordChange(Schema):
    current_password: str
    new_password: str


class TokenRefresh(Schema):
    refresh: str

@@ -4,7 +4,7 @@ import uuid

from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.infrastructure.security import verify_password
from cpv3.infrastructure.security import hash_password, verify_password
from cpv3.modules.users.models import User
from cpv3.modules.users.repository import UserRepository
from cpv3.modules.users.schemas import UserCreate, UserRegister, UserUpdate
@@ -40,6 +40,12 @@ class UserService:
    async def deactivate_user(self, user: User) -> None:
        await self._repo.deactivate(user)

    async def change_password(self, user: User, current_password: str, new_password: str) -> None:
        if not verify_password(current_password, user.password_hash):
            raise ValueError("Current password is incorrect")
        new_hash = hash_password(new_password)
        await self._repo.update_password(user, new_hash)

    async def authenticate(self, username: str, password: str) -> User | None:
        user = await self._repo.get_by_username(username)
        if user is None:
@@ -87,6 +93,13 @@ async def deactivate_user(session: AsyncSession, user: User) -> None:
    await service.deactivate_user(user)


async def change_password(
    session: AsyncSession, user: User, current_password: str, new_password: str
) -> None:
    service = UserService(session)
    await service.change_password(user, current_password, new_password)


async def authenticate(session: AsyncSession, username: str, password: str) -> User | None:
    service = UserService(session)
    return await service.authenticate(username, password)

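End to end, the flow verifies the old password against the stored hash before writing the new one, and the router converts a mismatch into a 400. A client-side sketch; the host and token are placeholders:

```python
import httpx

resp = httpx.post(
    "http://localhost:8000/api/users/me/change-password/",  # placeholder host
    json={"current_password": "old-secret", "new_password": "new-secret"},
    headers={"Authorization": "Bearer <access-token>"},  # placeholder token
)
assert resp.status_code == 204  # a wrong current password yields 400 instead
```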
@@ -1918,56 +1918,21 @@ wheels = [

[[package]]
name = "tiktoken"
version = "0.12.0"
version = "0.3.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "regex" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8e/3a/20704b89b271cfebb1c981ef9f172fb18cb879b5c5cfc3b209083f71b229/tiktoken-0.3.3.tar.gz", hash = "sha256:97b58b7bfda945791ec855e53d166e8ec20c6378942b93851a6c919ddf9d0496", size = 25347, upload-time = "2023-03-28T23:39:18.396Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" },
{ url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" },
{ url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" },
{ url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" },
{ url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" },
{ url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" },
{ url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" },
{ url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" },
{ url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" },
{ url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" },
{ url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" },
{ url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" },
{ url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" },
{ url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" },
{ url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" },
{ url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" },
{ url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" },
{ url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" },
{ url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" },
{ url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" },
{ url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" },
{ url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" },
{ url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" },
{ url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" },
{ url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" },
{ url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" },
{ url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" },
{ url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" },
{ url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" },
{ url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" },
{ url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" },
{ url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" },
{ url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" },
{ url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" },
{ url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" },
{ url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" },
{ url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" },
{ url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" },
{ url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" },
{ url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" },
{ url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" },
{ url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" },
{ url = "https://files.pythonhosted.org/packages/e9/b4/c0dfa23742e72ac608e73932a8df2dde6730fc5398c0d594fd36c927891b/tiktoken-0.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db2c40f79f8f7a21a9fdbf1c6dee32dea77b0d7402355dc584a3083251d2e15", size = 735570, upload-time = "2023-03-28T23:38:33.89Z" },
{ url = "https://files.pythonhosted.org/packages/d6/a1/e7b2bd5a447c9de9b4a842af0f13872ee844f584832542c6eceb53bf7c8c/tiktoken-0.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3c0f2231aa3829a1a431a882201dc27858634fd9989898e0f7d991dbc6bcc9d", size = 706352, upload-time = "2023-03-28T23:38:36.687Z" },
{ url = "https://files.pythonhosted.org/packages/a6/02/dac0c35f81b025eff0b947aae4f22505aab852ac225f2418ff33bde521e5/tiktoken-0.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48c13186a479de16cfa2c72bb0631fa9c518350a5b7569e4d77590f7fee96be9", size = 1616907, upload-time = "2023-03-28T23:38:38.852Z" },
{ url = "https://files.pythonhosted.org/packages/ba/c0/44066de4a112e26808e0bd1ce200020446e211e8ae1ba5f27eaec5a723da/tiktoken-0.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6674e4e37ab225020135cd66a392589623d5164c6456ba28cc27505abed10d9e", size = 1661065, upload-time = "2023-03-28T23:38:40.84Z" },
{ url = "https://files.pythonhosted.org/packages/b0/dd/573493be4608a803ae85ba51b3f0be3fc92a321a3c4ca2abc0c594e4bf94/tiktoken-0.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4a0c1357f6191211c544f935d5aa3cb9d7abd118c8f3c7124196d5ecd029b4af", size = 1645494, upload-time = "2023-03-28T23:38:43.126Z" },
{ url = "https://files.pythonhosted.org/packages/c4/1c/15e8a21bf4256b622c3ff3ce41b01f2cae7d0d2e4ee15d19e1f1eb990884/tiktoken-0.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2e948d167fc3b04483cbc33426766fd742e7cefe5346cd62b0cbd7279ef59539", size = 1689481, upload-time = "2023-03-28T23:38:44.979Z" },
{ url = "https://files.pythonhosted.org/packages/16/8c/63ae6e51c5db57d1c3a0d039eec9ba083d433dfad946098b2c80aed3e26d/tiktoken-0.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:5dca434c8680b987eacde2dbc449e9ea4526574dbf9f3d8938665f638095be82", size = 579436, upload-time = "2023-03-28T23:38:47.402Z" },
]
[[package]]