chore: something changed, commit before reorg

This commit is contained in:
Daniil
2026-04-27 23:19:04 +03:00
parent 259d3da89f
commit b9030a863e
19 changed files with 2753 additions and 146 deletions
+17 -11
View File
@@ -6,11 +6,14 @@ from __future__ import annotations
import uuid
from datetime import timedelta
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import AsyncGenerator
from unittest.mock import AsyncMock, MagicMock
import pytest
from httpx import ASGITransport, AsyncClient
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from cpv3.db.base import Base
@@ -22,20 +25,23 @@ from cpv3.main import app
from cpv3.modules.users.models import User
# Use in-memory SQLite for tests (or configure a test database)
TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"
@pytest.fixture
async def test_engine():
    """Create a test database engine backed by a temporary SQLite file.

    Yields an async engine whose schema is created up front via a sync
    engine on the same file, and removes the file on teardown.

    NOTE(review): the source span contained both the old in-memory
    implementation and this file-based replacement (diff-rendering
    artifact); only the replacement is kept here.
    """
    # delete=False so both engines can reopen the path after the
    # NamedTemporaryFile handle is closed.
    with NamedTemporaryFile(suffix=".sqlite3", delete=False) as tmp_db:
        db_path = Path(tmp_db.name)
    # Build the schema synchronously, then hand tests an async engine
    # pointed at the same database file.
    sync_engine = create_engine(f"sqlite:///{db_path}", echo=False)
    Base.metadata.create_all(bind=sync_engine)
    engine = create_async_engine(f"sqlite+aiosqlite:///{db_path}", echo=False)
    try:
        yield engine
    finally:
        # Tear down in reverse order of construction, then remove the file.
        await engine.dispose()
        Base.metadata.drop_all(bind=sync_engine)
        sync_engine.dispose()
        db_path.unlink(missing_ok=True)
@pytest.fixture
@@ -0,0 +1,212 @@
from __future__ import annotations
import uuid
import pytest
from httpx import AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession
from cpv3.modules.files.models import File
from cpv3.modules.projects.models import Project
from cpv3.modules.tasks.schemas import TaskSubmitResponse
from cpv3.modules.tasks.service import TaskService
from cpv3.modules.users.models import User
pytest.importorskip("greenlet")
@pytest.fixture
async def workflow_project(test_db_session: AsyncSession, test_user: User) -> Project:
    """Persist and return a draft project owned by ``test_user``."""
    created = Project(
        id=uuid.uuid4(),
        owner_id=test_user.id,
        name="Workflow Project",
        description="Typed workflow test project",
        language="ru",
        status="DRAFT",
        is_active=True,
    )
    test_db_session.add(created)
    await test_db_session.commit()
    # Refresh so DB-generated/default columns are populated on the instance.
    await test_db_session.refresh(created)
    return created
@pytest.fixture
async def source_file(
    test_db_session: AsyncSession,
    test_user: User,
    workflow_project: Project,
) -> File:
    """Persist and return an uploaded mp4 source file for the workflow project."""
    attrs = dict(
        id=uuid.uuid4(),
        owner_id=test_user.id,
        project_id=workflow_project.id,
        original_filename="source.mp4",
        path="users/test/source.mp4",
        storage_backend="S3",
        mime_type="video/mp4",
        size_bytes=1024,
        file_format="mp4",
        is_uploaded=True,
        is_deleted=False,
    )
    record = File(**attrs)
    test_db_session.add(record)
    await test_db_session.commit()
    # Refresh so DB-generated/default columns are populated on the instance.
    await test_db_session.refresh(record)
    return record
class TestProjectWorkspaceEndpoints:
    """HTTP-level tests for the project workspace and workflow-action endpoints."""

    async def test_get_workspace_returns_default_state(
        self,
        auth_client: AsyncClient,
        workflow_project: Project,
    ) -> None:
        """A fresh project exposes the default INGEST/upload workspace state."""
        response = await auth_client.get(f"/api/projects/{workflow_project.id}/workspace")
        assert response.status_code == 200
        data = response.json()
        assert data["project_id"] == str(workflow_project.id)
        assert data["revision"] == 0
        assert data["version"] == 1
        assert data["phase"] == "INGEST"
        assert data["current_screen"] == "upload"
        assert data["source_file_id"] is None
        assert data["active_job"] is None
        # No files selected or used yet on a brand-new workspace.
        assert data["workspace_view"] == {
            "used_file_ids": [],
            "selected_file_id": None,
        }

    async def test_get_workspace_forbidden_for_other_users_project(
        self,
        auth_client: AsyncClient,
        test_db_session: AsyncSession,
        other_user: User,
    ) -> None:
        """Reading another user's project workspace must return 403."""
        foreign_project = Project(
            id=uuid.uuid4(),
            owner_id=other_user.id,
            name="Other Project",
            description=None,
            language="ru",
            status="DRAFT",
            is_active=True,
        )
        test_db_session.add(foreign_project)
        await test_db_session.commit()
        response = await auth_client.get(f"/api/projects/{foreign_project.id}/workspace")
        assert response.status_code == 403

    async def test_set_source_file_action_updates_workspace(
        self,
        auth_client: AsyncClient,
        workflow_project: Project,
        source_file: File,
    ) -> None:
        """SET_SOURCE_FILE advances the workspace to VERIFY and records the file."""
        response = await auth_client.post(
            f"/api/projects/{workflow_project.id}/workflow/actions",
            json={
                "type": "SET_SOURCE_FILE",
                "revision": 0,
                "file_id": str(source_file.id),
            },
        )
        assert response.status_code == 200
        data = response.json()
        # Revision is bumped by exactly one per accepted action.
        assert data["revision"] == 1
        assert data["phase"] == "VERIFY"
        assert data["current_screen"] == "verify"
        assert data["source_file_id"] == str(source_file.id)
        assert data["workspace_view"] == {
            "used_file_ids": [str(source_file.id)],
            "selected_file_id": str(source_file.id),
        }

    async def test_action_returns_conflict_on_stale_revision(
        self,
        auth_client: AsyncClient,
        workflow_project: Project,
        source_file: File,
    ) -> None:
        """Re-sending an action with an outdated revision yields HTTP 409."""
        first_response = await auth_client.post(
            f"/api/projects/{workflow_project.id}/workflow/actions",
            json={
                "type": "SET_SOURCE_FILE",
                "revision": 0,
                "file_id": str(source_file.id),
            },
        )
        assert first_response.status_code == 200
        # Second action still claims revision 0, which is now stale.
        response = await auth_client.post(
            f"/api/projects/{workflow_project.id}/workflow/actions",
            json={
                "type": "RESET_SOURCE_FILE",
                "revision": 0,
            },
        )
        assert response.status_code == 409

    async def test_start_media_convert_action_sets_active_job(
        self,
        auth_client: AsyncClient,
        workflow_project: Project,
        source_file: File,
        monkeypatch: pytest.MonkeyPatch,
    ) -> None:
        """START_MEDIA_CONVERT submits a task and records it as the active job."""

        async def fake_submit_media_convert(
            self,
            *,
            requester: User,
            request,
        ) -> TaskSubmitResponse:
            # Verify the workflow forwards owner/file/project correctly
            # before returning a canned submission response.
            assert requester.id == workflow_project.owner_id
            assert request.file_key == source_file.path
            assert request.project_id == workflow_project.id
            return TaskSubmitResponse(
                job_id=uuid.UUID("00000000-0000-4000-a000-000000000123"),
                webhook_url=("http://test/api/tasks/webhook/00000000-0000-4000-a000-000000000123/"),
                status="PENDING",
            )

        # Patch at the class level so the endpoint's own service instance is affected.
        monkeypatch.setattr(
            TaskService,
            "submit_media_convert",
            fake_submit_media_convert,
        )
        set_source_response = await auth_client.post(
            f"/api/projects/{workflow_project.id}/workflow/actions",
            json={
                "type": "SET_SOURCE_FILE",
                "revision": 0,
                "file_id": str(source_file.id),
            },
        )
        assert set_source_response.status_code == 200
        response = await auth_client.post(
            f"/api/projects/{workflow_project.id}/workflow/actions",
            json={
                "type": "START_MEDIA_CONVERT",
                "revision": 1,
                "output_format": "mp4",
                "out_folder": "output_files",
            },
        )
        assert response.status_code == 200
        data = response.json()
        assert data["revision"] == 2
        assert data["phase"] == "VERIFY"
        assert data["current_screen"] == "verify"
        assert data["active_job"] == {
            "job_id": "00000000-0000-4000-a000-000000000123",
            "job_type": "MEDIA_CONVERT",
        }
@@ -0,0 +1,210 @@
from __future__ import annotations
import uuid
from types import SimpleNamespace
import pytest
from cpv3.modules.project_workspaces.schemas import (
ProjectWorkspaceState,
build_workspace_state_from_legacy,
)
from cpv3.modules.project_workspaces.service import ProjectWorkspaceService
def test_build_workspace_state_from_legacy_maps_known_fields() -> None:
    """Legacy wizard-state dicts map onto the typed workspace schema.

    Unknown keys and malformed UUIDs in the legacy payload must be
    silently dropped rather than raising.
    """
    source_file_id = uuid.uuid4()
    active_job_id = uuid.uuid4()
    silence_job_id = uuid.uuid4()
    artifact_id = uuid.uuid4()
    workspace = build_workspace_state_from_legacy(
        {
            "wizard": {
                "current_step": "subtitle-revision",
                "primary_file_id": str(source_file_id),
                "active_job_id": str(active_job_id),
                "active_job_type": "TRANSCRIPTION_GENERATE",
                "silence_job_id": str(silence_job_id),
                "transcription_artifact_id": str(artifact_id),
                "silence_settings": {
                    "min_silence_duration_ms": 350,
                    "silence_threshold_db": 21,
                    "padding_ms": 180,
                },
                # Must be ignored by the converter.
                "unknown_field": "ignored",
            },
            "used_files": [
                {"id": str(source_file_id), "path": "users/test/source.mp4"},
                # Invalid UUID entry must be skipped, not fail the mapping.
                {"id": "not-a-uuid", "path": "broken"},
            ],
            "unknown_root": {"ignored": True},
        }
    )
    # "subtitle-revision" step implies the TRANSCRIPTION phase.
    assert workspace.phase == "TRANSCRIPTION"
    assert workspace.source_file_id == source_file_id
    assert workspace.active_job is not None
    assert workspace.active_job.job_id == active_job_id
    assert workspace.active_job.job_type == "TRANSCRIPTION_GENERATE"
    # Only the valid used-file entry survives.
    assert workspace.workspace_view.used_file_ids == [source_file_id]
    assert workspace.workspace_view.selected_file_id == source_file_id
    assert workspace.silence.detect_job_id == silence_job_id
    assert workspace.silence.settings.min_silence_duration_ms == 350
    assert workspace.silence.settings.silence_threshold_db == 21
    assert workspace.silence.settings.padding_ms == 180
    assert workspace.transcription.artifact_id == artifact_id
    assert workspace.captions.output_file_id is None
@pytest.mark.asyncio
@pytest.mark.parametrize(
    # Each case: a DONE job of a given type, the workspace state before the
    # event, and the expected phase/screen/status transitions afterwards.
    ("job_type", "output_data", "initial_state", "expected"),
    [
        (
            "MEDIA_CONVERT",
            {"file_id": "00000000-0000-4000-a000-000000000101"},
            {
                "phase": "VERIFY",
                "source_file_id": "00000000-0000-4000-a000-000000000001",
                "active_job": {
                    "job_id": "00000000-0000-4000-a000-000000000010",
                    "job_type": "MEDIA_CONVERT",
                },
            },
            {
                # Conversion output replaces the source file; job is cleared.
                "phase": "VERIFY",
                "source_file_id": "00000000-0000-4000-a000-000000000101",
                "active_job": None,
                "current_screen": "verify",
            },
        ),
        (
            "SILENCE_DETECT",
            {
                "silent_segments": [{"start_ms": 100, "end_ms": 220}],
                "duration_ms": 1000,
            },
            {
                "phase": "SILENCE",
                "source_file_id": "00000000-0000-4000-a000-000000000001",
                "active_job": {
                    "job_id": "00000000-0000-4000-a000-000000000011",
                    "job_type": "SILENCE_DETECT",
                },
                "silence": {"status": "DETECTING"},
            },
            {
                # Detection done -> user reviews the found fragments.
                "phase": "SILENCE",
                "active_job": None,
                "silence_status": "REVIEWING",
                "current_screen": "fragments",
            },
        ),
        (
            "SILENCE_APPLY",
            {"file_id": "00000000-0000-4000-a000-000000000102"},
            {
                "phase": "SILENCE",
                "source_file_id": "00000000-0000-4000-a000-000000000001",
                "active_job": {
                    "job_id": "00000000-0000-4000-a000-000000000012",
                    "job_type": "SILENCE_APPLY",
                },
                "silence": {"status": "APPLYING"},
            },
            {
                # Applying silence cuts completes the phase and moves on.
                "phase": "TRANSCRIPTION",
                "active_job": None,
                "silence_status": "COMPLETED",
                "current_screen": "transcription-settings",
            },
        ),
        (
            "TRANSCRIPTION_GENERATE",
            {
                "artifact_id": "00000000-0000-4000-a000-000000000103",
                "transcription_id": "00000000-0000-4000-a000-000000000104",
            },
            {
                "phase": "TRANSCRIPTION",
                "source_file_id": "00000000-0000-4000-a000-000000000001",
                "active_job": {
                    "job_id": "00000000-0000-4000-a000-000000000013",
                    "job_type": "TRANSCRIPTION_GENERATE",
                },
                "transcription": {"status": "PROCESSING"},
            },
            {
                # Transcript ready -> user reviews subtitles.
                "phase": "TRANSCRIPTION",
                "active_job": None,
                "transcription_status": "REVIEWING",
                "current_screen": "subtitle-revision",
            },
        ),
        (
            "CAPTIONS_GENERATE",
            {"file_id": "00000000-0000-4000-a000-000000000105"},
            {
                "phase": "CAPTIONS",
                "source_file_id": "00000000-0000-4000-a000-000000000001",
                "active_job": {
                    "job_id": "00000000-0000-4000-a000-000000000014",
                    "job_type": "CAPTIONS_GENERATE",
                },
                "captions": {"status": "PROCESSING"},
            },
            {
                # Final phase: captions rendered, workflow complete.
                "phase": "DONE",
                "active_job": None,
                "captions_status": "COMPLETED",
                "current_screen": "caption-result",
            },
        ),
    ],
)
async def test_apply_job_event_advances_workspace_for_done_jobs(
    job_type: str,
    output_data: dict[str, object],
    initial_state: dict[str, object],
    expected: dict[str, object],
) -> None:
    """A DONE job event advances the workspace through the expected phase."""
    # Session is never touched by the pure state-transition helpers.
    service = ProjectWorkspaceService(session=SimpleNamespace())
    state = ProjectWorkspaceState.model_validate(
        {
            "version": 1,
            "phase": "INGEST",
            "active_job": None,
            "source_file_id": None,
            "workspace_view": {"used_file_ids": [], "selected_file_id": None},
            "silence": {},
            "transcription": {},
            "captions": {},
            # Case-specific overrides win over the defaults above.
            **initial_state,
        }
    )
    job = SimpleNamespace(
        # Reuse the active job's id when the state has one so the event matches.
        id=uuid.UUID(str(state.active_job.job_id)) if state.active_job else uuid.uuid4(),
        project_id=uuid.uuid4(),
        job_type=job_type,
        status="DONE",
        output_data=output_data,
    )
    next_state = service._apply_job_event_to_state(state, job)
    current_screen = service._derive_current_screen(next_state)
    assert next_state.phase == expected["phase"]
    assert next_state.active_job == expected["active_job"]
    assert current_screen == expected["current_screen"]
    # Optional per-case assertions, only checked when the case declares them.
    if "source_file_id" in expected:
        assert str(next_state.source_file_id) == expected["source_file_id"]
    if "silence_status" in expected:
        assert next_state.silence.status == expected["silence_status"]
    if "transcription_status" in expected:
        assert next_state.transcription.status == expected["transcription_status"]
    if "captions_status" in expected:
        assert next_state.captions.status == expected["captions_status"]
@@ -0,0 +1,331 @@
from __future__ import annotations
import uuid
from types import SimpleNamespace
from unittest.mock import AsyncMock
import pytest
from cpv3.modules.project_workspaces.schemas import (
ActiveJobState,
ProjectWorkspaceState,
SetSourceFileAction,
SilenceSettingsState,
SilenceState,
StartSilenceDetectAction,
StartTranscriptionAction,
TranscriptionRequestState,
)
from cpv3.modules.project_workspaces.service import (
ProjectWorkspaceService,
WorkspaceRevisionConflictError,
)
@pytest.mark.asyncio
async def test_get_workspace_returns_default_state_when_workspace_missing() -> None:
    """When no workspace row exists, the service creates and returns the default state."""
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    service = ProjectWorkspaceService(session=AsyncMock())
    # Repo lookup misses, forcing the service down the create() path.
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=None),
        create=AsyncMock(
            return_value=SimpleNamespace(
                project_id=project.id,
                revision=0,
                state=ProjectWorkspaceState().model_dump(mode="json"),
            )
        ),
    )
    workspace = await service.get_workspace(project=project)
    assert workspace.revision == 0
    assert workspace.phase == "INGEST"
    assert workspace.current_screen == "upload"
    assert workspace.active_job is None
    assert workspace.source_file_id is None
    assert workspace.workspace_view.used_file_ids == []
    assert workspace.workspace_view.selected_file_id is None
@pytest.mark.asyncio
async def test_apply_action_set_source_file_moves_workspace_to_verify() -> None:
    """SET_SOURCE_FILE persists the file choice and moves the workspace to VERIFY."""
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    requester = SimpleNamespace(id=uuid.uuid4(), is_staff=False)
    file_id = uuid.uuid4()
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=0,
        state=ProjectWorkspaceState().model_dump(mode="json"),
    )
    # Captures the state dict the service asks the repo to persist.
    saved_state: dict[str, object] = {}

    async def update_state(*, project_id, expected_revision, state):
        saved_state.update(state)
        return SimpleNamespace(
            project_id=project_id,
            revision=expected_revision + 1,
            state=state,
        )

    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(side_effect=update_state),
    )
    # File repo returns a file owned by the requester within this project,
    # so the ownership check passes.
    service._file_repo = SimpleNamespace(
        get_by_id=AsyncMock(
            return_value=SimpleNamespace(
                id=file_id,
                owner_id=requester.id,
                project_id=project.id,
                path="users/test/source.mp4",
            )
        )
    )
    workspace = await service.apply_action(
        requester=requester,
        project=project,
        action=SetSourceFileAction(
            type="SET_SOURCE_FILE",
            revision=0,
            file_id=file_id,
        ),
    )
    assert workspace.revision == 1
    assert workspace.phase == "VERIFY"
    assert workspace.current_screen == "verify"
    assert workspace.source_file_id == file_id
    # Persisted JSON stores UUIDs as strings.
    assert saved_state["source_file_id"] == str(file_id)
    assert saved_state["workspace_view"] == {
        "used_file_ids": [str(file_id)],
        "selected_file_id": str(file_id),
    }
@pytest.mark.asyncio
async def test_apply_action_rejects_stale_revision() -> None:
    """An action carrying an outdated revision raises WorkspaceRevisionConflictError."""
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    # Stored workspace is at revision 2; the action below claims revision 1.
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=2,
        state=ProjectWorkspaceState().model_dump(mode="json"),
    )
    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(),
    )
    with pytest.raises(WorkspaceRevisionConflictError):
        await service.apply_action(
            requester=SimpleNamespace(id=uuid.uuid4(), is_staff=False),
            project=project,
            action=SetSourceFileAction(
                type="SET_SOURCE_FILE",
                revision=1,
                file_id=uuid.uuid4(),
            ),
        )
@pytest.mark.asyncio
async def test_start_silence_detect_submits_task_and_tracks_active_job() -> None:
    """START_SILENCE_DETECT submits a task and records it as the active job."""
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    requester = SimpleNamespace(id=uuid.uuid4(), is_staff=False)
    source_file_id = uuid.uuid4()
    # Workspace already configured for silence detection.
    workspace_state = ProjectWorkspaceState(
        phase="SILENCE",
        source_file_id=source_file_id,
        silence=SilenceState(
            status="CONFIGURED",
            settings=SilenceSettingsState(
                min_silence_duration_ms=250,
                silence_threshold_db=18,
                padding_ms=125,
            ),
        ),
    )
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=0,
        state=workspace_state.model_dump(mode="json"),
    )
    submitted_response = SimpleNamespace(job_id=uuid.uuid4(), status="PENDING")

    async def update_state(*, project_id, expected_revision, state):
        return SimpleNamespace(
            project_id=project_id,
            revision=expected_revision + 1,
            state=state,
        )

    task_service = SimpleNamespace(
        submit_silence_detect=AsyncMock(return_value=submitted_response),
    )
    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(side_effect=update_state),
    )
    service._file_repo = SimpleNamespace(
        get_by_id=AsyncMock(
            return_value=SimpleNamespace(
                id=source_file_id,
                owner_id=requester.id,
                project_id=project.id,
                path="projects/test/video.mp4",
            )
        )
    )
    # Inject the stub task service via the factory hook.
    service._task_service_factory = lambda: task_service
    workspace = await service.apply_action(
        requester=requester,
        project=project,
        action=StartSilenceDetectAction(
            type="START_SILENCE_DETECT",
            revision=0,
        ),
    )
    task_service.submit_silence_detect.assert_awaited_once()
    assert workspace.current_screen == "processing"
    assert workspace.active_job == ActiveJobState(
        job_id=submitted_response.job_id,
        job_type="SILENCE_DETECT",
    )
    # The submitted job id is also tracked on the silence sub-state.
    assert workspace.silence.detect_job_id == submitted_response.job_id
@pytest.mark.asyncio
async def test_start_transcription_persists_request_and_processing_job() -> None:
    """START_TRANSCRIPTION stores the request params and tracks the submitted job."""
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    requester = SimpleNamespace(id=uuid.uuid4(), is_staff=False)
    source_file_id = uuid.uuid4()
    workspace_state = ProjectWorkspaceState(
        phase="TRANSCRIPTION",
        source_file_id=source_file_id,
    )
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=3,
        state=workspace_state.model_dump(mode="json"),
    )
    submitted_response = SimpleNamespace(job_id=uuid.uuid4(), status="PENDING")

    async def update_state(*, project_id, expected_revision, state):
        return SimpleNamespace(
            project_id=project_id,
            revision=expected_revision + 1,
            state=state,
        )

    task_service = SimpleNamespace(
        submit_transcription_generate=AsyncMock(return_value=submitted_response),
    )
    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(side_effect=update_state),
    )
    service._file_repo = SimpleNamespace(
        get_by_id=AsyncMock(
            return_value=SimpleNamespace(
                id=source_file_id,
                owner_id=requester.id,
                project_id=project.id,
                path="projects/test/video.mp4",
            )
        )
    )
    # Inject the stub task service via the factory hook.
    service._task_service_factory = lambda: task_service
    request = TranscriptionRequestState(engine="whisper", language="ru", model="base")
    workspace = await service.apply_action(
        requester=requester,
        project=project,
        action=StartTranscriptionAction(
            type="START_TRANSCRIPTION",
            revision=3,
            request=request,
        ),
    )
    assert workspace.current_screen == "transcription-processing"
    # The exact request payload is persisted on the transcription sub-state.
    assert workspace.transcription.request == request
    assert workspace.active_job == ActiveJobState(
        job_id=submitted_response.job_id,
        job_type="TRANSCRIPTION_GENERATE",
    )
@pytest.mark.asyncio
async def test_apply_job_update_moves_transcription_job_to_review() -> None:
    """A DONE transcription job event moves the workspace to subtitle review."""
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    job_id = uuid.uuid4()
    transcription_id = uuid.uuid4()
    artifact_id = uuid.uuid4()
    # Workspace currently tracking the transcription job as active/in-progress.
    workspace_state = ProjectWorkspaceState(
        phase="TRANSCRIPTION",
        active_job=ActiveJobState(job_id=job_id, job_type="TRANSCRIPTION_GENERATE"),
        transcription={
            "status": "PROCESSING",
            "job_id": job_id,
            "artifact_id": None,
            "transcription_id": None,
            "reviewed": False,
        },
    )
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=4,
        state=workspace_state.model_dump(mode="json"),
    )

    async def update_state(*, project_id, expected_revision, state):
        return SimpleNamespace(
            project_id=project_id,
            revision=expected_revision + 1,
            state=state,
        )

    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(side_effect=update_state),
    )
    workspace = await service.apply_job_update(
        project=project,
        job=SimpleNamespace(
            id=job_id,
            project_id=project.id,
            job_type="TRANSCRIPTION_GENERATE",
            status="DONE",
            output_data={
                "transcription_id": str(transcription_id),
                "artifact_id": str(artifact_id),
            },
        ),
    )
    assert workspace is not None
    assert workspace.revision == 5
    assert workspace.phase == "TRANSCRIPTION"
    assert workspace.current_screen == "subtitle-revision"
    # Job completed: active job cleared, ids from output_data recorded.
    assert workspace.active_job is None
    assert workspace.transcription.transcription_id == transcription_id
    assert workspace.transcription.artifact_id == artifact_id
    assert workspace.transcription.reviewed is False
+190 -106
View File
@@ -12,134 +12,218 @@ from cpv3.modules.tasks.service import TaskService
@pytest.mark.asyncio
async def test_submit_captions_generate_reuses_existing_active_job() -> None:
    """A duplicate captions submission returns the existing active job.

    NOTE(review): the source span contained every statement twice (old and
    new diff lines rendered without markers); the deduplicated test is kept.
    """
    service = TaskService(session=AsyncMock())
    existing_job_id = uuid.uuid4()
    existing_job = SimpleNamespace(
        id=existing_job_id,
        status="RUNNING",
    )
    # An identical job is already active, so no new task may be submitted.
    service._find_duplicate_active_job = AsyncMock(return_value=existing_job)
    service._submit_task = AsyncMock()
    response = await service.submit_captions_generate(
        requester=SimpleNamespace(id=uuid.uuid4()),
        request=CaptionsGenerateRequest(
            video_s3_path="projects/test/video.mp4",
            folder="output_files",
            transcription_id=uuid.uuid4(),
            project_id=uuid.uuid4(),
            preset_id=uuid.uuid4(),
        ),
    )
    # The response mirrors the existing job rather than a new submission.
    assert response.job_id == existing_job_id
    assert response.status == "RUNNING"
    assert response.webhook_url.endswith(f"/api/tasks/webhook/{existing_job_id}/")
    service._submit_task.assert_not_awaited()
@pytest.mark.asyncio
async def test_record_webhook_event_ignores_cancelled_job() -> None:
    """Webhook events for a CANCELLED job are dropped without updates.

    NOTE(review): the source span contained every statement twice (old and
    new diff lines rendered without markers); the deduplicated test is kept.
    """
    cancelled_job = SimpleNamespace(
        id=uuid.uuid4(),
        status="CANCELLED",
    )
    job_repo = SimpleNamespace(
        get_by_id=AsyncMock(return_value=cancelled_job),
        update=AsyncMock(),
    )
    event_repo = SimpleNamespace(create=AsyncMock())
    service = TaskService(session=AsyncMock())
    service._job_repo = job_repo
    service._event_repo = event_repo
    result = await service.record_webhook_event(
        job_id=cancelled_job.id,
        event=TaskWebhookEvent(
            status="DONE",
            current_message="Готово",
            output_data={"output_path": "projects/test/output.mp4"},
        ),
    )
    # The cancelled job is returned untouched; nothing is written.
    assert result is cancelled_job
    job_repo.update.assert_not_awaited()
    event_repo.create.assert_not_awaited()
@pytest.mark.asyncio
async def test_cancel_job_marks_job_cancelled_and_keeps_record() -> None:
    """cancel_job marks the job CANCELLED and fans out all cancellation hooks.

    NOTE(review): the source span interleaved the old and new diff lines of
    this test; the variant that wires and asserts
    ``_sync_project_workspace_after_webhook`` is the one kept.
    """
    job_id = uuid.uuid4()
    user_id = uuid.uuid4()
    job = SimpleNamespace(
        id=job_id,
        status="PENDING",
        broker_id="default:redis-message-id",
        job_type="CAPTIONS_GENERATE",
        user_id=user_id,
    )
    cancelled_job = SimpleNamespace(
        id=job_id,
        status="CANCELLED",
        broker_id="default:redis-message-id",
        job_type="CAPTIONS_GENERATE",
        user_id=user_id,
        current_message="Отменено пользователем",
    )
    service = TaskService(session=AsyncMock())
    service._job_repo = SimpleNamespace(update=AsyncMock(return_value=cancelled_job))
    service._event_repo = SimpleNamespace(create=AsyncMock())
    # Stub every side-effect hook so the test only checks orchestration.
    service._cancel_dramatiq_message = AsyncMock()
    service._cancel_caption_render = AsyncMock()
    service._create_cancellation_notification = AsyncMock()
    service._sync_project_workspace_after_webhook = AsyncMock()
    result = await service.cancel_job(job)
    assert result is cancelled_job
    service._job_repo.update.assert_awaited_once()
    service._event_repo.create.assert_awaited_once()
    service._cancel_dramatiq_message.assert_awaited_once_with(job.broker_id)
    service._cancel_caption_render.assert_awaited_once_with(job)
    service._create_cancellation_notification.assert_awaited_once_with(cancelled_job)
    # The workspace must also observe the cancellation.
    service._sync_project_workspace_after_webhook.assert_awaited_once_with(cancelled_job)
@pytest.mark.asyncio
async def test_record_webhook_event_updates_progress_for_conversion_job() -> None:
    """A progress-only webhook updates percent/message and records an event.

    NOTE(review): the source span interleaved old and new diff lines; the
    variant with ``project_id`` on the job and the stubbed
    ``_sync_project_workspace_after_webhook`` is the one kept.
    """
    job = SimpleNamespace(
        id=uuid.uuid4(),
        status="RUNNING",
        job_type="MEDIA_CONVERT",
        project_id=uuid.uuid4(),
        user_id=None,
    )
    updated_job = SimpleNamespace(**job.__dict__, project_pct=52.5)
    job_repo = SimpleNamespace(
        get_by_id=AsyncMock(return_value=job),
        update=AsyncMock(return_value=updated_job),
    )
    event_repo = SimpleNamespace(create=AsyncMock())
    service = TaskService(session=AsyncMock())
    service._job_repo = job_repo
    service._event_repo = event_repo
    service._sync_project_workspace_after_webhook = AsyncMock()
    result = await service.record_webhook_event(
        job_id=job.id,
        event=TaskWebhookEvent(
            progress_pct=52.5,
            current_message="Конвертация видео",
        ),
    )
    # Inspect what the service wrote to the job row and the event log.
    update_call = job_repo.update.await_args.args[1]
    event_call = event_repo.create.await_args.args[0]
    assert result is updated_job
    assert update_call.project_pct == 52.5
    assert update_call.current_message == "Конвертация видео"
    assert event_call.event_type == "progress"
    assert event_call.payload["progress_pct"] == 52.5
@pytest.mark.asyncio
async def test_record_webhook_event_syncs_workspace_for_completed_supported_job() -> None:
    """A DONE webhook for a workspace-supported job type triggers workspace sync."""
    job = SimpleNamespace(
        id=uuid.uuid4(),
        status="RUNNING",
        job_type="SILENCE_DETECT",
        user_id=None,
        project_id=uuid.uuid4(),
        output_data={"silent_segments": []},
    )
    # Same job after the repo applies the DONE status.
    updated_job = SimpleNamespace(**{**job.__dict__, "status": "DONE"})
    job_repo = SimpleNamespace(
        get_by_id=AsyncMock(return_value=job),
        update=AsyncMock(return_value=updated_job),
    )
    event_repo = SimpleNamespace(create=AsyncMock())
    service = TaskService(session=AsyncMock())
    service._job_repo = job_repo
    service._event_repo = event_repo
    service._sync_project_workspace_after_webhook = AsyncMock()
    result = await service.record_webhook_event(
        job_id=job.id,
        event=TaskWebhookEvent(
            status="DONE",
            current_message="Готово",
            output_data={"silent_segments": []},
        ),
    )
    assert result is updated_job
    # The workspace sync hook receives the updated (DONE) job.
    service._sync_project_workspace_after_webhook.assert_awaited_once_with(updated_job)
@pytest.mark.asyncio
async def test_record_webhook_event_projects_workspace_after_done_job() -> None:
    """After a DONE conversion, artifacts are saved and the workspace service is notified."""
    job = SimpleNamespace(
        id=uuid.uuid4(),
        status="RUNNING",
        job_type="MEDIA_CONVERT",
        project_id=uuid.uuid4(),
        user_id=None,
        output_data={"file_path": "users/test/converted.mp4"},
    )
    # Post-completion job: DONE status plus the file_id added by artifact saving.
    updated_job = SimpleNamespace(
        **{
            **job.__dict__,
            "status": "DONE",
            "output_data": {
                "file_path": "users/test/converted.mp4",
                "file_id": "00000000-0000-4000-a000-000000000777",
            },
        }
    )
    job_repo = SimpleNamespace(
        get_by_id=AsyncMock(return_value=job),
        update=AsyncMock(return_value=updated_job),
    )
    event_repo = SimpleNamespace(create=AsyncMock())
    workspace_service = SimpleNamespace(handle_job_update=AsyncMock())
    service = TaskService(session=AsyncMock())
    service._job_repo = job_repo
    service._event_repo = event_repo
    service._save_convert_artifacts = AsyncMock(return_value=updated_job)
    # Inject the stub workspace service via the accessor hook.
    service._get_project_workspace_service = lambda: workspace_service
    result = await service.record_webhook_event(
        job_id=job.id,
        event=TaskWebhookEvent(
            status="DONE",
            current_message="Готово",
            output_data={"file_path": "users/test/converted.mp4"},
        ),
    )
    assert result is updated_job
    service._save_convert_artifacts.assert_awaited_once_with(updated_job)
    # The workspace is projected from the final (artifact-enriched) job.
    workspace_service.handle_job_update.assert_awaited_once_with(job=updated_job)