chore: something changed, commit before reorg

This commit is contained in:
Daniil
2026-04-27 23:28:28 +03:00
parent 46f34bdcac
commit 20928e9a60
16 changed files with 1967 additions and 1262 deletions
@@ -43,57 +43,16 @@ export const CaptionResultStep: FunctionComponent<ICaptionResultStepProps> = ({
className,
}): JSX.Element => {
const {
projectId,
captionedVideoFileId,
captionedVideoPath,
goToStep,
markStepCompleted,
setCaptionedVideoFileId,
setCaptionedVideoPath,
reopenCaptionConfig,
} = useWizard()
// Recovery: if wizard state lost the file data, look up the latest caption job
const needsRecovery = !captionedVideoFileId && !captionedVideoPath
const { data: jobs } = api.useQuery(
"get",
"/api/jobs/jobs/",
{},
{ enabled: needsRecovery },
)
const recoveredJob = useMemo(() => {
if (!needsRecovery || !jobs) return null
return jobs.find(
(j) =>
j.project_id === projectId &&
j.job_type === "CAPTIONS_GENERATE" &&
j.status === "DONE" &&
j.output_data?.file_id,
)
}, [needsRecovery, jobs, projectId])
const effectiveFileId =
captionedVideoFileId ??
(recoveredJob?.output_data?.file_id as string | undefined) ??
null
const effectivePath =
captionedVideoPath ??
(recoveredJob?.output_data?.output_path as string | undefined) ??
null
// Persist recovered values back to wizard state
if (recoveredJob && !captionedVideoFileId && effectiveFileId) {
setCaptionedVideoFileId(effectiveFileId)
}
if (recoveredJob && !captionedVideoPath && effectivePath) {
setCaptionedVideoPath(effectivePath)
}
const { data: fileInfo, isLoading } = api.useQuery(
"get",
"/api/files/files/{file_id}/resolve/",
{ params: { path: { file_id: effectiveFileId ?? "" } } },
{ enabled: !!effectiveFileId },
{ params: { path: { file_id: captionedVideoFileId ?? "" } } },
{ enabled: !!captionedVideoFileId },
)
const videoUrl = fileInfo?.file_url ?? ""
@@ -107,7 +66,7 @@ export const CaptionResultStep: FunctionComponent<ICaptionResultStepProps> = ({
}
const handleRerender = () => {
goToStep("caption-settings")
void reopenCaptionConfig()
}
const handleFinish = () => {
@@ -210,21 +169,21 @@ export const CaptionResultStep: FunctionComponent<ICaptionResultStepProps> = ({
{/* Footer */}
<div className={styles.footer}>
<Button variant="ghost" onClick={handleRerender}>
<RefreshCw size={16} />
Перегенерировать
</Button>
<div className={styles.rightActions}>
<Button variant="primary" onClick={handleDownload}>
<Button variant="ghost" onClick={handleRerender}>
<RefreshCw size={16} />
Перегенерировать
</Button>
<div className={styles.rightActions}>
<Button variant="primary" onClick={handleDownload}>
<Download size={16} />
Скачать
</Button>
<Button variant="outline" onClick={handleFinish}>
<Check size={16} />
Завершить
</Button>
<Button variant="outline" onClick={handleFinish}>
<Check size={16} />
Завершить
</Button>
</div>
</div>
</div>
</div>
)
}
@@ -6,142 +6,58 @@ import type { JSX } from "react"
import {
FunctionComponent,
useEffect,
useMemo,
useRef,
useState,
} from "react"
import cs from "classnames"
import api from "@shared/api"
import { useWizard } from "@shared/context/WizardContext"
import { Button } from "@shared/ui"
import { PresetGrid } from "./PresetGrid"
import { StyleEditor } from "./StyleEditor"
import { useSubmitCaptionGenerate } from "./useSubmitCaptionGenerate"
import styles from "./CaptionSettingsStep.module.scss"
type CaptionPresetRead = components["schemas"]["CaptionPresetRead"]
const ERROR_SUBMIT = "Не удалось запустить генерацию субтитров"
const ERROR_MISSING_DATA =
"Для генерации субтитров необходимы видеофайл и транскрипция. Пройдите предыдущие шаги."
const TRANSCRIPTION_ARTIFACT_TYPE = "TRANSCRIPTION_JSON"
export const CaptionSettingsStep: FunctionComponent<
ICaptionSettingsStepProps
> = ({ className }): JSX.Element => {
const {
projectId,
primaryFileKey,
transcriptionArtifactId: contextArtifactId,
captionPresetId,
setCaptionPresetId,
setTranscriptionArtifactId,
startProcessingJob,
selectCaptionPreset,
startCaptionRender,
goBack,
} = useWizard()
const { data: artifacts, isLoading: isArtifactsLoading } = api.useQuery(
"get",
"/api/media/artifacts/",
{},
{ enabled: !contextArtifactId },
)
const transcriptionArtifactId = useMemo(() => {
if (contextArtifactId) return contextArtifactId
if (!artifacts) return null
const match = artifacts.find(
(artifact) =>
artifact.project_id === projectId &&
artifact.artifact_type === TRANSCRIPTION_ARTIFACT_TYPE &&
!artifact.is_deleted,
)
return match?.id ?? null
}, [artifacts, contextArtifactId, projectId])
useEffect(() => {
if (
!transcriptionArtifactId ||
transcriptionArtifactId === contextArtifactId
) {
return
}
setTranscriptionArtifactId(transcriptionArtifactId)
}, [
contextArtifactId,
setTranscriptionArtifactId,
transcriptionArtifactId,
])
const { data: transcriptionEntry, isLoading: isTranscriptionLoading } =
api.useQuery(
"get",
"/api/transcribe/transcriptions/by-artifact/{artifact_id}/",
{
params: {
path: { artifact_id: transcriptionArtifactId ?? "" },
},
},
{ enabled: !!transcriptionArtifactId },
)
const [activeTab, setActiveTab] = useState<"select" | "editor">("select")
const [editingPreset, setEditingPreset] = useState<CaptionPresetRead | null>(
null,
)
const [submitError, setSubmitError] = useState<string | null>(null)
const [isSubmitting, setIsSubmitting] = useState(false)
const submitLockRef = useRef(false)
const isResolvingSourceData = isArtifactsLoading || isTranscriptionLoading
const { mutate, isPending } = useSubmitCaptionGenerate({
onSuccess: (data) => {
if (!data?.job_id) {
submitLockRef.current = false
return
}
if (data?.job_id) {
startProcessingJob(
data.job_id,
"CAPTIONS_GENERATE",
"caption-processing",
"caption-settings",
)
}
},
onError: () => {
submitLockRef.current = false
setSubmitError(ERROR_SUBMIT)
},
})
const handleGenerate = () => {
if (submitLockRef.current || isPending) return
const transcriptionId = transcriptionEntry?.id
if (!primaryFileKey || !transcriptionId) {
setSubmitError(ERROR_MISSING_DATA)
return
}
const handleGenerate = async () => {
if (submitLockRef.current || isSubmitting) return
if (!captionPresetId) return
submitLockRef.current = true
setSubmitError(null)
mutate({
body: {
video_s3_path: primaryFileKey,
folder: "output_files",
transcription_id: transcriptionId,
project_id: projectId,
preset_id: captionPresetId,
},
})
setIsSubmitting(true)
try {
await startCaptionRender()
submitLockRef.current = false
} catch {
submitLockRef.current = false
setSubmitError(ERROR_SUBMIT)
} finally {
setIsSubmitting(false)
}
}
const handleEdit = (preset: CaptionPresetRead) => {
@@ -155,10 +71,14 @@ export const CaptionSettingsStep: FunctionComponent<
}
const handleSaved = (presetId: string) => {
setCaptionPresetId(presetId)
void selectCaptionPreset(presetId)
setActiveTab("select")
}
const handleSelectPreset = (presetId: string | null) => {
void selectCaptionPreset(presetId)
}
if (activeTab === "editor") {
return (
<div
@@ -187,7 +107,7 @@ export const CaptionSettingsStep: FunctionComponent<
<div className={styles.scrollArea}>
<PresetGrid
selectedPresetId={captionPresetId}
onSelect={setCaptionPresetId}
onSelect={handleSelectPreset}
onEdit={handleEdit}
onCreateNew={handleCreateNew}
/>
@@ -202,11 +122,9 @@ export const CaptionSettingsStep: FunctionComponent<
<Button
variant="primary"
onClick={handleGenerate}
disabled={
!captionPresetId || isPending || isResolvingSourceData
}
disabled={!captionPresetId || isSubmitting}
>
{isPending ? "Запуск..." : "Генерировать"}
{isSubmitting ? "Запуск..." : "Генерировать"}
</Button>
</div>
</div>
@@ -23,12 +23,11 @@ import {
import WaveSurfer from "wavesurfer.js"
import api from "@shared/api"
import { useProjectWorkspaceQuery } from "@shared/api/projectWorkflow"
import { useWizard } from "@shared/context/WizardContext"
import { useSegmentResize } from "@shared/hooks/useSegmentResize"
import { Button } from "@shared/ui"
import { useSubmitSilenceApply } from "../SilenceResultModal/useSubmitSilenceApply"
import styles from "./FragmentsStep.module.scss"
const MIN_REGION_MS = 100
@@ -71,14 +70,12 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
}): JSX.Element => {
const {
projectId,
silenceJobId,
primaryFileId,
primaryFileKey,
startProcessingJob,
startSilenceApply,
skipSilenceApply,
goBack,
markStepCompleted,
goToStep,
} = useWizard()
const { data: workspace } = useProjectWorkspaceQuery(projectId)
const [cutRegions, setCutRegions] = useState<CutRegion[]>([])
const [pixelsPerSecond, setPixelsPerSecond] = useState(DEFAULT_PPS)
@@ -94,16 +91,7 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
const waveformRef = useRef<HTMLDivElement>(null)
const wsRef = useRef<WaveSurfer | null>(null)
/* ---- Data loading ---- */
const { data: taskStatus } = api.useQuery(
"get",
"/api/tasks/status/{job_id}/",
{ params: { path: { job_id: silenceJobId ?? "" } } },
{ enabled: !!silenceJobId },
)
const outputData = taskStatus?.output_data as Record<string, unknown> | null
const fileKey = primaryFileKey ?? ((outputData?.file_key as string) ?? "")
const silenceState = workspace?.silence
const { data: fileInfo } = api.useQuery(
"get",
@@ -116,11 +104,12 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
/* ---- Initialize cut regions from detection results ---- */
useEffect(() => {
if (!outputData) return
const segments = outputData.silent_segments as
| { start_ms: number; end_ms: number }[]
| undefined
const dur = outputData.duration_ms as number | undefined
if (!silenceState) return
const segments =
silenceState.reviewed_cuts.length > 0
? silenceState.reviewed_cuts
: silenceState.detected_segments
const dur = silenceState.duration_ms
if (segments && dur) {
setDurationMs(dur)
@@ -132,8 +121,7 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
})),
)
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [outputData])
}, [silenceState])
/* ---- Timeline calculations ---- */
const totalWidth = Math.max(1, (durationMs / 1000) * pixelsPerSecond)
@@ -599,49 +587,30 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
}
}, [drawRuler, drawFrames])
/* ---- Apply ---- */
const { mutate: applyMutate, isPending: isApplying } =
useSubmitSilenceApply({
onSuccess: (data) => {
const result = data as { job_id?: string }
if (result?.job_id) {
startProcessingJob(
result.job_id,
"SILENCE_APPLY",
"silence-apply-processing",
"fragments",
)
}
},
onError: (error) => {
console.error("Silence apply failed:", error)
},
})
const [isApplying, setIsApplying] = useState(false)
const handleApply = () => {
const handleApply = async () => {
if (cutRegions.length === 0) {
markStepCompleted("fragments")
goToStep("transcription-settings")
setIsApplying(true)
try {
await skipSilenceApply()
} finally {
setIsApplying(false)
}
return
}
if (!fileKey) return
const fileName = fileKey.split("/").pop() ?? "video.mp4"
const outputName = `Без тишины ${fileName}`
;(applyMutate as (args: { body: Record<string, unknown> }) => void)({
body: {
file_key: fileKey,
out_folder: "",
project_id: projectId,
output_name: outputName,
cuts: cutRegions.map((r) => ({
setIsApplying(true)
try {
await startSilenceApply(
cutRegions.map((r) => ({
start_ms: Math.round(r.startMs),
end_ms: Math.round(r.endMs),
})),
},
})
)
} finally {
setIsApplying(false)
}
}
return (
@@ -23,6 +23,7 @@ import {
import WaveSurfer from "wavesurfer.js"
import api from "@shared/api"
import { useProjectWorkspaceQuery } from "@shared/api/projectWorkflow"
import { useSegmentResize } from "@shared/hooks/useSegmentResize"
import { Button, Modal } from "@shared/ui"
@@ -70,6 +71,7 @@ export const SilenceResultModal: FunctionComponent<ISilenceResultModalProps> = (
projectId,
jobId,
}): JSX.Element => {
const { data: workspace } = useProjectWorkspaceQuery(projectId)
const [cutRegions, setCutRegions] = useState<CutRegion[]>([])
const [pixelsPerSecond, setPixelsPerSecond] = useState(DEFAULT_PPS)
const [durationMs, setDurationMs] = useState(0)
@@ -95,16 +97,7 @@ export const SilenceResultModal: FunctionComponent<ISilenceResultModalProps> = (
const outputData = taskStatus?.output_data as Record<string, unknown> | null
const fileKey = (outputData?.file_key as string) ?? ""
const { data: project } = api.useQuery(
"get",
"/api/projects/{project_id}/",
{ params: { path: { project_id: projectId } } },
{ enabled: open },
)
const primaryFileId =
(project?.workspace_state as { wizard?: { primary_file_id?: string | null } } | null)
?.wizard?.primary_file_id ?? null
const primaryFileId = workspace?.source_file_id ?? null
const { data: fileInfo } = api.useQuery(
"get",
@@ -4,57 +4,32 @@ import type { ISilenceSettingsStepProps } from "./SilenceSettingsStep.d"
import type { JSX } from "react"
import cs from "classnames"
import { FunctionComponent, useCallback } from "react"
import { FunctionComponent, useCallback, useEffect, useState } from "react"
import { useWizard } from "@shared/context/WizardContext"
import { Button, Slider } from "@shared/ui"
import { useSubmitSilenceDetect } from "../SilenceSettingsModal/useSubmitSilenceDetect"
import styles from "./SilenceSettingsStep.module.scss"
export const SilenceSettingsStep: FunctionComponent<
ISilenceSettingsStepProps
> = ({ className }): JSX.Element => {
const {
projectId,
primaryFileKey,
silenceSettings,
setSilenceSettings,
startProcessingJob,
startSilenceDetect,
goBack,
} = useWizard()
const [localSettings, setLocalSettings] = useState(silenceSettings)
const { mutate, isPending } = useSubmitSilenceDetect({
onSuccess: (data) => {
const result = data as { job_id?: string }
if (result?.job_id) {
startProcessingJob(
result.job_id,
"SILENCE_DETECT",
"processing",
"silence-settings",
)
}
},
onError: (error) => {
console.error("Silence detect submit failed:", error)
},
})
useEffect(() => {
setLocalSettings(silenceSettings)
}, [silenceSettings])
const handleSubmit = useCallback(() => {
if (!primaryFileKey) return
;(mutate as (args: { body: Record<string, unknown> }) => void)({
body: {
file_key: primaryFileKey,
project_id: projectId,
min_silence_duration_ms: silenceSettings.min_silence_duration_ms,
silence_threshold_db: silenceSettings.silence_threshold_db,
padding_ms: silenceSettings.padding_ms,
},
})
}, [mutate, primaryFileKey, projectId, silenceSettings])
void startSilenceDetect(localSettings)
}, [localSettings, primaryFileKey, startSilenceDetect])
return (
<div
@@ -73,15 +48,15 @@ export const SilenceSettingsStep: FunctionComponent<
<div className={styles.fields}>
<Slider
label="Мин. длительность тишины"
value={silenceSettings.min_silence_duration_ms}
value={localSettings.min_silence_duration_ms}
min={100}
max={2000}
step={50}
unit="мс"
helpText="Минимальная длительность тихого участка для обнаружения"
onChange={(v) =>
setSilenceSettings({
...silenceSettings,
setLocalSettings({
...localSettings,
min_silence_duration_ms: v,
})
}
@@ -89,15 +64,15 @@ export const SilenceSettingsStep: FunctionComponent<
<Slider
label="Порог тишины"
value={silenceSettings.silence_threshold_db}
value={localSettings.silence_threshold_db}
min={6}
max={40}
step={2}
unit="дБ"
helpText="Уровень громкости ниже которого звук считается тишиной"
onChange={(v) =>
setSilenceSettings({
...silenceSettings,
setLocalSettings({
...localSettings,
silence_threshold_db: v,
})
}
@@ -105,15 +80,15 @@ export const SilenceSettingsStep: FunctionComponent<
<Slider
label="Отступ"
value={silenceSettings.padding_ms}
value={localSettings.padding_ms}
min={0}
max={500}
step={25}
unit="мс"
helpText="Дополнительный отступ по краям тихих участков"
onChange={(v) =>
setSilenceSettings({
...silenceSettings,
setLocalSettings({
...localSettings,
padding_ms: v,
})
}
@@ -123,15 +98,15 @@ export const SilenceSettingsStep: FunctionComponent<
{/* Footer */}
<div className={styles.footer}>
<Button variant="outline" onClick={goBack} disabled={isPending}>
<Button variant="outline" onClick={goBack}>
Назад
</Button>
<Button
variant="primary"
onClick={handleSubmit}
disabled={isPending || !primaryFileKey}
disabled={!primaryFileKey}
>
{isPending ? "Запуск..." : "Далее"}
Далее
</Button>
</div>
</div>
@@ -15,13 +15,10 @@ import {
import "@vidstack/react/player/styles/default/theme.css"
import "@vidstack/react/player/styles/default/layouts/video.css"
import cs from "classnames"
import { FunctionComponent, useEffect, useMemo, useRef } from "react"
import { FunctionComponent, useEffect, useRef } from "react"
import api from "@shared/api"
import {
StaticWorkspaceProvider,
useWorkspaceFiles,
} from "@shared/context/WorkspaceContext"
import { useWorkspaceFiles } from "@shared/context/WorkspaceContext"
import { useWizard } from "@shared/context/WizardContext"
import { Button } from "@shared/ui"
import { TranscriptionEditor } from "@features/project"
@@ -29,8 +26,6 @@ import { TimelinePanel } from "@widgets/TimelinePanel"
import styles from "./SubtitleRevisionStep.module.scss"
const TRANSCRIPTION_ARTIFACT_TYPE = "TRANSCRIPTION_JSON"
/**
* Auto-initializes WorkspaceContext with the video file
* and transcription artifact so TimelinePanel and
@@ -87,51 +82,15 @@ const SubtitleRevisionContent: FunctionComponent<{
projectId,
videoUrl,
primaryFileKey,
transcriptionArtifactId: contextArtifactId,
setTranscriptionArtifactId,
transcriptionArtifactId,
goBack,
goToStep,
markStepCompleted,
markTranscriptionReviewed,
} = useWizard()
const { data: artifacts, isLoading: isArtifactsLoading } = api.useQuery(
"get",
"/api/media/artifacts/",
{},
{ enabled: !contextArtifactId },
)
const transcriptionArtifactId = useMemo(() => {
if (contextArtifactId) return contextArtifactId
if (!artifacts) return null
const match = artifacts.find(
(a) =>
a.project_id === projectId &&
a.artifact_type === TRANSCRIPTION_ARTIFACT_TYPE &&
!a.is_deleted,
)
return match?.id ?? null
}, [contextArtifactId, artifacts, projectId])
const isArtifactResolving = !contextArtifactId && isArtifactsLoading
const isArtifactResolving = false
const isTranscriptionReady = Boolean(transcriptionArtifactId)
const isTranscriptionUnavailable =
!isTranscriptionReady && !isArtifactResolving
useEffect(() => {
if (
!transcriptionArtifactId ||
transcriptionArtifactId === contextArtifactId
) {
return
}
setTranscriptionArtifactId(transcriptionArtifactId)
}, [
contextArtifactId,
setTranscriptionArtifactId,
transcriptionArtifactId,
])
// Auto-trigger frame extraction so video frames appear in timeline
const frameExtractMutation = api.useMutation(
"post",
@@ -154,9 +113,7 @@ const SubtitleRevisionContent: FunctionComponent<{
const handleFinish = () => {
if (!isTranscriptionReady) return
markStepCompleted("subtitle-revision")
goToStep("caption-settings")
void markTranscriptionReviewed()
}
return (
@@ -280,9 +237,5 @@ const SubtitleRevisionContent: FunctionComponent<{
export const SubtitleRevisionStep: FunctionComponent<
ISubtitleRevisionStepProps
> = ({ className }): JSX.Element => {
return (
<StaticWorkspaceProvider>
<SubtitleRevisionContent className={className} />
</StaticWorkspaceProvider>
)
return <SubtitleRevisionContent className={className} />
}
@@ -13,8 +13,6 @@ import api from "@shared/api"
import { useWizard } from "@shared/context/WizardContext"
import { useAppSelector } from "@shared/hooks/useAppSelector"
import { Button, CircularProgress, Form, Select, SelectItem } from "@shared/ui"
import { useSubmitTranscription } from "../TranscriptionModal/useSubmitTranscription"
import { buildCancelJobPayload, useCancelJob } from "../useCancelJob"
import styles from "./TranscriptionSettingsStep.module.scss"
@@ -53,12 +51,11 @@ export const TranscriptionSettingsStep: FunctionComponent<
ITranscriptionSettingsStepProps
> = ({ className }): JSX.Element => {
const {
projectId,
primaryFileKey,
activeJobId,
activeJobType,
setActiveJob,
startProcessingJob,
startTranscription,
goToStep,
} = useWizard()
@@ -66,6 +63,7 @@ export const TranscriptionSettingsStep: FunctionComponent<
!!activeJobId && activeJobType === "TRANSCRIPTION_GENERATE"
const [submitError, setSubmitError] = useState<string | null>(null)
const [isSubmitting, setIsSubmitting] = useState(false)
const { mutate: cancelJob, isPending: isCancelling } = useCancelJob()
const { control, handleSubmit, watch, setValue } =
@@ -87,36 +85,23 @@ export const TranscriptionSettingsStep: FunctionComponent<
}
}, [engine, setValue])
const { mutate, isPending } = useSubmitTranscription({
onSuccess: (data) => {
if (data?.job_id) {
startProcessingJob(
data.job_id,
"TRANSCRIPTION_GENERATE",
"transcription-processing",
"transcription-settings",
)
}
},
onError: (error) => {
console.error("Transcription submit failed:", error)
setSubmitError("Не удалось запустить транскрипцию")
},
})
const onSubmit = (data: ITranscriptionFormData): void => {
const onSubmit = async (data: ITranscriptionFormData): Promise<void> => {
if (!primaryFileKey) return
setSubmitError(null)
setIsSubmitting(true)
mutate({
body: {
file_key: primaryFileKey,
project_id: projectId,
try {
await startTranscription({
engine: data.engine,
language: data.language === "auto" ? undefined : data.language,
model: data.model,
},
})
})
} catch (error) {
console.error("Transcription submit failed:", error)
setSubmitError("Не удалось запустить транскрипцию")
} finally {
setIsSubmitting(false)
}
}
/* ---- Processing state (inline) ---- */
@@ -309,7 +294,7 @@ export const TranscriptionSettingsStep: FunctionComponent<
<Button
type="button"
variant="outline"
disabled={isPending}
disabled={isSubmitting}
onClick={() => goToStep("fragments")}
>
Назад
@@ -317,9 +302,9 @@ export const TranscriptionSettingsStep: FunctionComponent<
<Button
type="submit"
variant="primary"
disabled={isPending || !primaryFileKey}
disabled={isSubmitting || !primaryFileKey}
>
{isPending ? "Запуск..." : "Сгенерировать субтитры"}
{isSubmitting ? "Запуск..." : "Сгенерировать субтитры"}
</Button>
</div>
</Form>
@@ -20,7 +20,7 @@ const ERROR_UPLOAD_FAILED = "Не удалось загрузить файл"
export const UploadStep: FunctionComponent<IUploadStepProps> = ({
className,
}): JSX.Element => {
const { projectId, setFileKey, markStepCompleted, goNext } = useWizard()
const { projectId, setFileKey } = useWizard()
const [isDragging, setIsDragging] = useState(false)
const [isUploading, setIsUploading] = useState(false)
const [progress, setProgress] = useState(0)
@@ -39,16 +39,18 @@ export const UploadStep: FunctionComponent<IUploadStepProps> = ({
`projects/${projectId}`,
setProgress,
)
setFileKey(result.file_path, result.file_id, result.filename ?? null)
markStepCompleted("upload")
goNext()
await setFileKey(
result.file_path,
result.file_id,
result.filename ?? null,
)
} catch {
setError(ERROR_UPLOAD_FAILED)
} finally {
setIsUploading(false)
}
},
[projectId, setFileKey, markStepCompleted, goNext],
[projectId, setFileKey],
)
const handleFileChange = useCallback(
+11 -57
View File
@@ -32,7 +32,7 @@ import {
import cs from "classnames"
import api, { fetchClient } from "@shared/api"
import api from "@shared/api"
import { useWizard } from "@shared/context/WizardContext"
import { useTaskProgressState } from "@shared/hooks/useTaskProgressState"
import { Badge, Button, CircularProgress } from "@shared/ui"
@@ -55,19 +55,16 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
className,
}): JSX.Element => {
const {
projectId,
primaryFileKey,
videoUrl,
originalFileName,
activeJobId,
activeJobType,
goBack,
goNext,
goToStep,
markStepCompleted,
confirmVerify,
setFileKey,
setActiveJob,
startProcessingJob,
startMediaConvert,
} = useWizard()
const [convertError, setConvertError] = useState<string | null>(null)
@@ -113,27 +110,11 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
/* ---- Conversion logic ---- */
const convertMutation = api.useMutation("post", "/api/tasks/media-convert/", {
onSuccess: (data) => {
startProcessingJob(data.job_id, "MEDIA_CONVERT", "verify")
setConvertError(null)
},
onError: () => {
setConvertError(ERROR_CONVERT_FAILED)
},
})
const handleConvert = useCallback(() => {
if (!primaryFileKey) return
convertMutation.mutate({
body: {
file_key: primaryFileKey,
out_folder: `projects/${projectId}`,
output_format: "mp4",
project_id: projectId,
},
void startMediaConvert().catch(() => {
setConvertError(ERROR_CONVERT_FAILED)
})
}, [convertMutation, primaryFileKey, projectId])
}, [startMediaConvert])
const {
progressPct: convertProgressPct,
@@ -149,47 +130,20 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
useEffect(() => {
if (!convertJobId || convertStatus !== "converting") return
if (convertTaskStatus === "DONE") {
fetchConvertedFileFromJob(convertJobId)
}
if (convertTaskStatus === "FAILED") {
setActiveJob(null)
setConvertError(convertErrorMessage ?? "Ошибка конвертации")
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [convertErrorMessage, convertJobId, convertStatus, convertTaskStatus])
const fetchConvertedFileFromJob = useCallback(
async (jobId: string) => {
const { data: taskStatus } = await fetchClient.GET(
"/api/tasks/status/{job_id}/",
{ params: { path: { job_id: jobId } } },
)
const outputData = taskStatus?.output_data as {
file_id?: string
file_path?: string
} | null
if (outputData?.file_id && outputData?.file_path) {
const convertedName = outputData.file_path.split("/").pop() ?? null
setFileKey(outputData.file_path, outputData.file_id, convertedName)
setActiveJob(null)
}
},
[setFileKey, setActiveJob],
)
}, [convertErrorMessage, convertJobId, convertStatus, convertTaskStatus, setActiveJob])
/* ---- Handlers ---- */
const handleReplace = () => {
setFileKey(null, null, null)
goToStep("upload")
void setFileKey(null, null, null)
}
const handleNext = () => {
markStepCompleted("verify")
goNext()
void confirmVerify()
}
/* ---- Converting view ---- */
@@ -266,7 +220,7 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
variant="primary"
size="sm"
onClick={handleConvert}
disabled={convertMutation.isPending}
disabled={!primaryFileKey}
>
Конвертировать в MP4
</Button>
@@ -357,7 +311,7 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
variant="primary"
size="sm"
onClick={handleConvert}
disabled={convertMutation.isPending}
disabled={!primaryFileKey}
>
Конвертировать в MP4
</Button>
+477 -8
View File
@@ -200,6 +200,40 @@ export interface paths {
patch: operations["patch_project_api_projects__project_id___patch"];
trace?: never;
};
"/api/projects/{project_id}/workspace": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
/** Get Project Workspace */
get: operations["get_project_workspace_api_projects__project_id__workspace_get"];
put?: never;
post?: never;
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
"/api/projects/{project_id}/workflow/actions": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
get?: never;
put?: never;
/** Dispatch Project Workflow Action */
post: operations["dispatch_project_workflow_action_api_projects__project_id__workflow_actions_post"];
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
"/api/files/upload/": {
parameters: {
query?: never;
@@ -987,6 +1021,19 @@ export interface paths {
export type webhooks = Record<string, never>;
export interface components {
schemas: {
/** ActiveJobState */
ActiveJobState: {
/**
* Job Id
* Format: uuid
*/
job_id: string;
/**
* Job Type
* @enum {string}
*/
job_type: "MEDIA_PROBE" | "SILENCE_REMOVE" | "SILENCE_DETECT" | "SILENCE_APPLY" | "MEDIA_CONVERT" | "TRANSCRIPTION_GENERATE" | "CAPTIONS_GENERATE" | "FRAME_EXTRACT";
};
/** ArtifactMediaFileCreate */
ArtifactMediaFileCreate: {
/** Project Id */
@@ -1286,6 +1333,43 @@ export interface components {
/** Result */
result: string;
};
/** CaptionsState */
CaptionsState: {
/** @default IDLE */
status: components["schemas"]["CaptionsWorkflowStatus"];
/** Preset Id */
preset_id?: string | null;
/** Style Config */
style_config?: {
[key: string]: unknown;
} | null;
/** Render Job Id */
render_job_id?: string | null;
/** Output File Id */
output_file_id?: string | null;
};
/**
* CaptionsWorkflowStatus
* @enum {string}
*/
CaptionsWorkflowStatus: "IDLE" | "CONFIGURED" | "PROCESSING" | "COMPLETED";
/** ConfirmVerifyAction */
ConfirmVerifyAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "CONFIRM_VERIFY";
/** Revision */
revision: number;
};
/** CutRegionState */
CutRegionState: {
/** Start Ms */
start_ms: number;
/** End Ms */
end_ms: number;
};
/** DispositionSchema */
DispositionSchema: {
/** Default */
@@ -1653,6 +1737,16 @@ export interface components {
/** Words */
words: components["schemas"]["WordNode"][];
};
/** MarkTranscriptionReviewedAction */
MarkTranscriptionReviewedAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "MARK_TRANSCRIPTION_REVIEWED";
/** Revision */
revision: number;
};
/**
* MediaConvertRequest
* @description Request to convert media file to different format.
@@ -1909,10 +2003,6 @@ export interface components {
* @enum {string}
*/
status: "DRAFT" | "PROCESSING" | "DONE" | "FAILED";
/** Workspace State */
workspace_state: {
[key: string]: unknown;
} | null;
/** Is Active */
is_active: boolean;
/**
@@ -1938,10 +2028,71 @@ export interface components {
folder?: string | null;
/** Status */
status?: ("DRAFT" | "PROCESSING" | "DONE" | "FAILED") | null;
/** Workspace State */
workspace_state?: {
[key: string]: unknown;
} | null;
};
/** ProjectWorkspaceRead */
ProjectWorkspaceRead: {
/**
* Project Id
* Format: uuid
*/
project_id: string;
/** Revision */
revision: number;
/** Version */
version: number;
phase: components["schemas"]["WorkflowPhase"];
/**
* Current Screen
* @enum {string}
*/
current_screen: "upload" | "verify" | "silence-settings" | "processing" | "fragments" | "silence-apply-processing" | "transcription-settings" | "transcription-processing" | "subtitle-revision" | "caption-settings" | "caption-processing" | "caption-result";
active_job: components["schemas"]["ActiveJobState"] | null;
/** Source File Id */
source_file_id: string | null;
workspace_view: components["schemas"]["WorkspaceViewState"];
silence: components["schemas"]["SilenceState"];
transcription: components["schemas"]["TranscriptionState"];
captions: components["schemas"]["CaptionsState"];
};
/** ReopenCaptionConfigAction */
ReopenCaptionConfigAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "REOPEN_CAPTION_CONFIG";
/** Revision */
revision: number;
};
/** ReopenSilenceReviewAction */
ReopenSilenceReviewAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "REOPEN_SILENCE_REVIEW";
/** Revision */
revision: number;
};
/** ReopenTranscriptionConfigAction */
ReopenTranscriptionConfigAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "REOPEN_TRANSCRIPTION_CONFIG";
/** Revision */
revision: number;
};
/** ResetSourceFileAction */
ResetSourceFileAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "RESET_SOURCE_FILE";
/** Revision */
revision: number;
};
/** SaluteSpeechParams */
SaluteSpeechParams: {
@@ -1979,6 +2130,71 @@ export interface components {
/** Lines */
lines: components["schemas"]["LineNode-Output"][];
};
/** SelectCaptionPresetAction */
SelectCaptionPresetAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SELECT_CAPTION_PRESET";
/** Revision */
revision: number;
/** Preset Id */
preset_id?: string | null;
/** Style Config */
style_config?: {
[key: string]: unknown;
} | null;
};
/** SetSilenceCutsAction */
SetSilenceCutsAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SET_SILENCE_CUTS";
/** Revision */
revision: number;
/** Cuts */
cuts: components["schemas"]["CutRegionState"][];
};
/** SetSilenceSettingsAction */
SetSilenceSettingsAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SET_SILENCE_SETTINGS";
/** Revision */
revision: number;
settings?: components["schemas"]["SilenceSettingsState"];
};
/** SetSourceFileAction */
SetSourceFileAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SET_SOURCE_FILE";
/** Revision */
revision: number;
/**
* File Id
* Format: uuid
*/
file_id: string;
};
/** SetWorkspaceViewAction */
SetWorkspaceViewAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SET_WORKSPACE_VIEW";
/** Revision */
revision: number;
workspace_view: components["schemas"]["WorkspaceViewState"];
};
/**
* SilenceApplyRequest
* @description Request to apply silence cuts to media file.
@@ -2085,6 +2301,143 @@ export interface components {
*/
padding_ms: number;
};
/** SilenceSettingsState */
SilenceSettingsState: {
/**
* Min Silence Duration Ms
* @default 200
*/
min_silence_duration_ms: number;
/**
* Silence Threshold Db
* @default 16
*/
silence_threshold_db: number;
/**
* Padding Ms
* @default 100
*/
padding_ms: number;
};
/** SilenceState */
SilenceState: {
/** @default IDLE */
status: components["schemas"]["SilenceWorkflowStatus"];
settings?: components["schemas"]["SilenceSettingsState"];
/** Detect Job Id */
detect_job_id?: string | null;
/** Detected Segments */
detected_segments?: components["schemas"]["CutRegionState"][];
/** Reviewed Cuts */
reviewed_cuts?: components["schemas"]["CutRegionState"][];
/** Duration Ms */
duration_ms?: number | null;
/** Applied Output File Id */
applied_output_file_id?: string | null;
};
/**
* SilenceWorkflowStatus
* @enum {string}
*/
SilenceWorkflowStatus: "IDLE" | "CONFIGURED" | "DETECTING" | "REVIEWING" | "APPLYING" | "COMPLETED" | "SKIPPED";
/** SkipSilenceApplyAction */
SkipSilenceApplyAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SKIP_SILENCE_APPLY";
/** Revision */
revision: number;
};
/** StartCaptionRenderAction */
StartCaptionRenderAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_CAPTION_RENDER";
/** Revision */
revision: number;
/**
* Folder
* @default output_files
*/
folder: string;
};
/** StartMediaConvertAction */
StartMediaConvertAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_MEDIA_CONVERT";
/** Revision */
revision: number;
/**
* Output Format
* @default mp4
*/
output_format: string;
/**
* Out Folder
* @default output_files
*/
out_folder: string;
};
/** StartSilenceApplyAction */
StartSilenceApplyAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_SILENCE_APPLY";
/** Revision */
revision: number;
/** Cuts */
cuts?: components["schemas"]["CutRegionState"][] | null;
/**
* Out Folder
* @default output_files
*/
out_folder: string;
/** Output Name */
output_name?: string | null;
};
/** StartSilenceDetectAction */
StartSilenceDetectAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_SILENCE_DETECT";
/** Revision */
revision: number;
};
/** StartTranscriptionAction */
StartTranscriptionAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_TRANSCRIPTION";
/** Revision */
revision: number;
/**
* Engine
* @default whisper
* @enum {string}
*/
engine: "whisper" | "google" | "salutespeech";
/** Language */
language?: string | null;
/**
* Model
* @default base
*/
model: string;
request?: components["schemas"]["TranscriptionRequestState"] | null;
};
/** StreamSchema */
StreamSchema: {
/** Index */
@@ -2324,6 +2677,39 @@ export interface components {
*/
updated_at: string;
};
/** TranscriptionRequestState */
TranscriptionRequestState: {
/**
* Engine
* @default whisper
* @enum {string}
*/
engine: "whisper" | "google" | "salutespeech";
/** Language */
language?: string | null;
/**
* Model
* @default base
*/
model: string;
};
/** TranscriptionState */
TranscriptionState: {
/** @default IDLE */
status: components["schemas"]["TranscriptionWorkflowStatus"];
request?: components["schemas"]["TranscriptionRequestState"];
/** Job Id */
job_id?: string | null;
/** Artifact Id */
artifact_id?: string | null;
/** Transcription Id */
transcription_id?: string | null;
/**
* Reviewed
* @default false
*/
reviewed: boolean;
};
/** TranscriptionUpdate */
TranscriptionUpdate: {
/** Document */
@@ -2335,6 +2721,11 @@ export interface components {
[key: string]: unknown;
} | null;
};
/**
* TranscriptionWorkflowStatus
* @enum {string}
*/
TranscriptionWorkflowStatus: "IDLE" | "PROCESSING" | "REVIEWING" | "COMPLETED";
/** UserCreate */
UserCreate: {
/** Username */
@@ -2542,6 +2933,18 @@ export interface components {
structure_tags: components["schemas"]["Tag"][];
time: components["schemas"]["TimeRange"];
};
/**
* WorkflowPhase
* @enum {string}
*/
WorkflowPhase: "INGEST" | "VERIFY" | "SILENCE" | "TRANSCRIPTION" | "CAPTIONS" | "DONE";
/** WorkspaceViewState */
WorkspaceViewState: {
/** Used File Ids */
used_file_ids?: string[];
/** Selected File Id */
selected_file_id?: string | null;
};
};
responses: never;
parameters: never;
@@ -3055,6 +3458,72 @@ export interface operations {
};
};
};
get_project_workspace_api_projects__project_id__workspace_get: {
parameters: {
query?: never;
header?: never;
path: {
project_id: string;
};
cookie?: never;
};
requestBody?: never;
responses: {
/** @description Successful Response */
200: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": components["schemas"]["ProjectWorkspaceRead"];
};
};
/** @description Validation Error */
422: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": components["schemas"]["HTTPValidationError"];
};
};
};
};
dispatch_project_workflow_action_api_projects__project_id__workflow_actions_post: {
parameters: {
query?: never;
header?: never;
path: {
project_id: string;
};
cookie?: never;
};
requestBody: {
content: {
"application/json": components["schemas"]["SetSourceFileAction"] | components["schemas"]["ResetSourceFileAction"] | components["schemas"]["StartMediaConvertAction"] | components["schemas"]["ConfirmVerifyAction"] | components["schemas"]["SetSilenceSettingsAction"] | components["schemas"]["StartSilenceDetectAction"] | components["schemas"]["SetSilenceCutsAction"] | components["schemas"]["SkipSilenceApplyAction"] | components["schemas"]["StartSilenceApplyAction"] | components["schemas"]["ReopenSilenceReviewAction"] | components["schemas"]["StartTranscriptionAction"] | components["schemas"]["ReopenTranscriptionConfigAction"] | components["schemas"]["MarkTranscriptionReviewedAction"] | components["schemas"]["SelectCaptionPresetAction"] | components["schemas"]["StartCaptionRenderAction"] | components["schemas"]["ReopenCaptionConfigAction"] | components["schemas"]["SetWorkspaceViewAction"];
};
};
responses: {
/** @description Successful Response */
200: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": components["schemas"]["ProjectWorkspaceRead"];
};
};
/** @description Validation Error */
422: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": components["schemas"]["HTTPValidationError"];
};
};
};
};
upload_file_api_files_upload__post: {
parameters: {
query?: never;
+246
View File
@@ -0,0 +1,246 @@
"use client"
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"
import { ACCESS_TOKEN_REGEXP, API_URL } from "@shared/lib/constants"
/**
 * Server-side phase of the project workflow state machine.
 * Mirrors the API's `WorkflowPhase` enum
 * ("INGEST" | "VERIFY" | "SILENCE" | "TRANSCRIPTION" | "CAPTIONS" | "DONE").
 */
export type WorkflowPhase =
  | "INGEST"
  | "VERIFY"
  | "SILENCE"
  | "TRANSCRIPTION"
  | "CAPTIONS"
  | "DONE"

/**
 * Client screen the workspace asks the wizard UI to display.
 * Returned by the server as `current_screen` on the workspace document.
 */
export type WorkflowScreen =
  | "upload"
  | "verify"
  | "silence-settings"
  | "processing"
  | "fragments"
  | "silence-apply-processing"
  | "transcription-settings"
  | "transcription-processing"
  | "subtitle-revision"
  | "caption-settings"
  | "caption-processing"
  | "caption-result"
/**
 * Silence-detection tuning parameters.
 * Field names mirror the API's `SilenceSettingsState` schema.
 */
export interface SilenceSettingsPayload {
  min_silence_duration_ms: number
  silence_threshold_db: number
  padding_ms: number
}

/** A single cut region, in milliseconds from the start of the media. */
export interface WorkflowCutRegionPayload {
  start_ms: number
  end_ms: number
}

/** Background job currently driving the workflow, if any. */
export interface WorkflowActiveJob {
  job_id: string
  job_type: string
}

/**
 * Persisted per-project file-panel state: which files the user has added
 * to the workspace and which one is currently selected.
 */
export interface WorkflowWorkspaceView {
  used_file_ids: string[]
  selected_file_id: string | null
}
/** Silence-removal workflow sub-state of the workspace document. */
export interface WorkflowSilenceState {
  // NOTE(review): server enum is SilenceWorkflowStatus; typed loosely as string here.
  status: string | null
  settings: SilenceSettingsPayload | null
  detect_job_id: string | null
  detected_segments: WorkflowCutRegionPayload[]
  reviewed_cuts: WorkflowCutRegionPayload[]
  duration_ms: number | null
  applied_output_file_id: string | null
}

/** Transcription engine selection (mirrors `TranscriptionRequestState`). */
export interface WorkflowTranscriptionRequest {
  engine: "whisper" | "google" | "salutespeech"
  language?: string
  model: string
}

/** Transcription workflow sub-state of the workspace document. */
export interface WorkflowTranscriptionState {
  // NOTE(review): server enum is TranscriptionWorkflowStatus; typed loosely as string here.
  status: string | null
  job_id: string | null
  request: WorkflowTranscriptionRequest | null
  artifact_id: string | null
  transcription_id: string | null
  reviewed: boolean
}

/** Caption-rendering workflow sub-state of the workspace document. */
export interface WorkflowCaptionsState {
  status: string | null
  preset_id: string | null
  style_config: Record<string, unknown> | null
  render_job_id: string | null
  output_file_id: string | null
}

/**
 * Full workspace document returned by
 * GET /api/projects/{project_id}/workspace.
 * `revision` implements optimistic concurrency: every dispatched action
 * must echo the revision it was computed against.
 */
export interface ProjectWorkspaceRead {
  revision: number
  phase: WorkflowPhase
  current_screen: WorkflowScreen
  active_job: WorkflowActiveJob | null
  source_file_id: string | null
  workspace_view: WorkflowWorkspaceView
  silence: WorkflowSilenceState
  transcription: WorkflowTranscriptionState
  captions: WorkflowCaptionsState
}
/**
 * Common fields of every workflow action: the discriminating `type` tag and
 * the workspace `revision` the action was computed against (optimistic
 * concurrency token — a stale revision yields HTTP 409).
 */
type WorkflowActionBase<TActionType extends string> = {
  type: TActionType
  revision: number
}

/**
 * Discriminated union of every action accepted by
 * POST /api/projects/{project_id}/workflow/actions.
 * Mirrors the server-side action schemas; payload-carrying variants add
 * their extra fields via intersection with the tagged base.
 */
export type WorkflowActionRequest =
  | (WorkflowActionBase<"SET_SOURCE_FILE"> & {
      file_id: string
    })
  | WorkflowActionBase<"RESET_SOURCE_FILE">
  | (WorkflowActionBase<"START_MEDIA_CONVERT"> & {
      output_format?: "mp4"
    })
  | WorkflowActionBase<"CONFIRM_VERIFY">
  | (WorkflowActionBase<"SET_SILENCE_SETTINGS"> & {
      settings: SilenceSettingsPayload
    })
  | WorkflowActionBase<"START_SILENCE_DETECT">
  | (WorkflowActionBase<"SET_SILENCE_CUTS"> & {
      cuts: WorkflowCutRegionPayload[]
    })
  | WorkflowActionBase<"SKIP_SILENCE_APPLY">
  | (WorkflowActionBase<"START_SILENCE_APPLY"> & {
      cuts: WorkflowCutRegionPayload[]
    })
  | WorkflowActionBase<"REOPEN_SILENCE_REVIEW">
  | (WorkflowActionBase<"START_TRANSCRIPTION"> & {
      request: WorkflowTranscriptionRequest
    })
  | WorkflowActionBase<"REOPEN_TRANSCRIPTION_CONFIG">
  | WorkflowActionBase<"MARK_TRANSCRIPTION_REVIEWED">
  | (WorkflowActionBase<"SELECT_CAPTION_PRESET"> & {
      preset_id: string | null
    })
  | WorkflowActionBase<"START_CAPTION_RENDER">
  | WorkflowActionBase<"REOPEN_CAPTION_CONFIG">
  | (WorkflowActionBase<"SET_WORKSPACE_VIEW"> & {
      workspace_view: WorkflowWorkspaceView
    })
/**
 * Error thrown when a workflow API request returns a non-OK status.
 * Carries the HTTP status so callers can branch on it (e.g. 409 conflicts).
 */
class WorkflowApiError extends Error {
  /** HTTP status code of the failed response. */
  status: number

  constructor(httpStatus: number, errorMessage: string) {
    super(errorMessage)
    this.status = httpStatus
    this.name = "WorkflowApiError"
  }
}
/**
 * Resolve the API origin: the configured API_URL wins; in a browser we fall
 * back to the page origin; during SSR with no configuration we return "".
 */
function getBaseApiUrl(): string {
  if (API_URL && API_URL.length > 0) {
    return API_URL
  }
  return typeof window === "undefined" ? "" : window.location.origin
}
/**
 * Read the access token from the cookie string, or null when absent
 * (including on the server where `document` does not exist).
 */
function getAccessToken(): string | null {
  if (typeof document === "undefined") {
    return null
  }
  // NOTE(review): assumes ACCESS_TOKEN_REGEXP matches the entire cookie
  // string — String.replace returns the input unchanged on no match. Confirm.
  const extracted = document.cookie.replace(ACCESS_TOKEN_REGEXP, "$1")
  return extracted === "" ? null : extracted
}
/**
 * Perform an authenticated JSON request against the workflow API.
 *
 * Adds the Bearer token (when present) and a JSON content type; callers may
 * still override anything via `init` / `init.headers`, which are spread last.
 *
 * @param path API path appended to the base URL (must start with "/").
 * @param init Extra fetch options merged over the defaults.
 * @returns The parsed JSON body, or null (as TResponse) for 204 No Content.
 * @throws WorkflowApiError for any non-2xx response.
 */
async function requestJson<TResponse>(
  path: string,
  init?: RequestInit,
): Promise<TResponse> {
  const token = getAccessToken()
  const response = await fetch(`${getBaseApiUrl()}${path}`, {
    credentials: "include",
    ...init,
    headers: {
      "Content-Type": "application/json",
      ...(token ? { Authorization: `Bearer ${token}` } : {}),
      ...(init?.headers ?? {}),
    },
  })
  if (!response.ok) {
    // statusText is routinely empty over HTTP/2, so prefer the response body
    // (e.g. a FastAPI {"detail": ...} payload) before falling back to it.
    const bodyText = await response.text().catch(() => "")
    const message =
      bodyText || response.statusText || "Workflow request failed"
    throw new WorkflowApiError(response.status, message)
  }
  // 204 No Content carries no body to parse.
  if (response.status === 204) {
    return null as TResponse
  }
  return (await response.json()) as TResponse
}
/** React Query cache key under which a project's workspace document lives. */
export function getProjectWorkspaceQueryKey(projectId: string) {
  const key = ["project-workspace", projectId] as const
  return key
}
/** Fetch the current workspace document for the given project. */
export async function fetchProjectWorkspace(
  projectId: string,
): Promise<ProjectWorkspaceRead> {
  const path = `/api/projects/${projectId}/workspace`
  return requestJson<ProjectWorkspaceRead>(path, { method: "GET" })
}
/**
 * Dispatch one workflow action for the project.
 * Returns the updated workspace document, or null when the server replies
 * with no content.
 */
export async function postWorkflowAction(
  projectId: string,
  action: WorkflowActionRequest,
): Promise<ProjectWorkspaceRead | null> {
  const path = `/api/projects/${projectId}/workflow/actions`
  const init: RequestInit = {
    method: "POST",
    body: JSON.stringify(action),
  }
  return requestJson<ProjectWorkspaceRead | null>(path, init)
}
/**
 * React Query hook for the project workspace document.
 * Disabled until a non-empty projectId is available.
 */
export function useProjectWorkspaceQuery(projectId: string) {
  const queryKey = getProjectWorkspaceQueryKey(projectId)
  return useQuery({
    queryKey,
    queryFn: () => fetchProjectWorkspace(projectId),
    enabled: projectId.length > 0,
  })
}
export function useWorkflowAction(projectId: string) {
const queryClient = useQueryClient()
const queryKey = getProjectWorkspaceQueryKey(projectId)
return useMutation({
mutationFn: (action: WorkflowActionRequest) =>
postWorkflowAction(projectId, action),
onSuccess: (workspace) => {
if (workspace) {
queryClient.setQueryData(queryKey, workspace)
return
}
queryClient.invalidateQueries({ queryKey })
},
onError: (error) => {
if (
error instanceof WorkflowApiError &&
error.status === 409
) {
queryClient.invalidateQueries({ queryKey })
}
},
})
}
/** True when the error is a WorkflowApiError carrying HTTP 409 (stale revision). */
export function isWorkflowConflictError(error: unknown): boolean {
  if (!(error instanceof WorkflowApiError)) {
    return false
  }
  return error.status === 409
}
+7
View File
@@ -14,6 +14,7 @@ import {
NotificationItem,
setNotifications,
} from "@shared/store/notifications"
import { getProjectWorkspaceQueryKey } from "@shared/api/projectWorkflow"
interface SocketContextValue {
isConnected: boolean
@@ -246,6 +247,12 @@ export const SocketProvider = ({
queryKey: ["get", "/api/files/files/"],
})
}
if (data.project_id) {
queryClient.invalidateQueries({
queryKey: getProjectWorkspaceQueryKey(data.project_id),
})
}
} catch {
// Ignore malformed messages
}
File diff suppressed because it is too large Load Diff
+156 -131
View File
@@ -13,12 +13,13 @@ import {
} from "react"
import api from "@shared/api"
import {
type WorkflowWorkspaceView,
useProjectWorkspaceQuery,
useWorkflowAction,
} from "@shared/api/projectWorkflow"
import { useDebounce } from "@shared/hooks/useDebounce"
/* ------------------------------------------------------------------ */
/* Types */
/* ------------------------------------------------------------------ */
export type SelectedFile = {
id: string
path: string
@@ -43,98 +44,182 @@ interface WorkspaceFileContextValue {
isLoaded: boolean
}
/* ------------------------------------------------------------------ */
/* Context */
/* ------------------------------------------------------------------ */
const FileContext = createContext<WorkspaceFileContextValue | null>(null)
/* ------------------------------------------------------------------ */
/* Provider */
/* ------------------------------------------------------------------ */
const DEBOUNCE_MS = 300
const DEBOUNCE_MS = 1000
function getFileIconType(mimeType: string | null | undefined) {
if (!mimeType) return "other" as const
if (mimeType.startsWith("video/")) return "video" as const
if (mimeType.startsWith("audio/")) return "audio" as const
if (mimeType.includes("json") || mimeType.startsWith("text/")) {
return "text" as const
}
return "other" as const
}
function getArtifactDisplayName(artifactType: string | null | undefined): string {
switch (artifactType) {
case "TRANSCRIPTION_JSON":
return "Субтитры"
default:
return artifactType ?? "Артефакт"
}
}
export const WorkspaceProvider: FunctionComponent<{
projectId: string
children: ReactNode
}> = ({ projectId, children }) => {
const [selectedFile, setSelectedFileState] = useState<SelectedFile | null>(
const { data: workspace } = useProjectWorkspaceQuery(projectId)
const workflowAction = useWorkflowAction(projectId)
const [usedFileIds, setUsedFileIds] = useState<string[]>([])
const [selectedPersistedId, setSelectedPersistedId] = useState<string | null>(
null,
)
const [usedFiles, setUsedFiles] = useState<UsedFile[]>([])
const isInitializedRef = useRef(false)
const initialValueRef = useRef<string | null>(null)
const [selectedFile, setSelectedFileState] = useState<SelectedFile | null>(null)
const latestRevisionRef = useRef<number | null>(null)
/* ---- Load from server ---- */
useEffect(() => {
if (!workspace) return
const { data: project, isSuccess } = api.useQuery(
"get",
"/api/projects/{project_id}/",
{ params: { path: { project_id: projectId } } },
{ enabled: !!projectId },
if (latestRevisionRef.current === workspace.revision) {
return
}
latestRevisionRef.current = workspace.revision
setUsedFileIds(workspace.workspace_view.used_file_ids)
setSelectedPersistedId(workspace.workspace_view.selected_file_id)
}, [workspace])
const { data: files } = api.useQuery("get", "/api/files/files/", {})
const { data: artifacts } = api.useQuery("get", "/api/media/artifacts/", {})
const fileMap = useMemo(() => {
const nextMap = new Map<string, UsedFile>()
for (const file of files ?? []) {
if (file.project_id !== projectId || file.is_deleted) continue
nextMap.set(file.id, {
id: file.id,
path: file.path,
source: "file",
mimeType: file.mime_type,
displayName: file.original_filename,
iconType: getFileIconType(file.mime_type),
})
}
return nextMap
}, [files, projectId])
const artifactMap = useMemo(() => {
const nextMap = new Map<string, UsedFile>()
for (const artifact of artifacts ?? []) {
if (artifact.project_id !== projectId || artifact.is_deleted) continue
nextMap.set(artifact.id, {
id: artifact.id,
path: "transcription",
source: "artifact",
artifactType: artifact.artifact_type,
displayName: getArtifactDisplayName(artifact.artifact_type),
iconType:
artifact.artifact_type === "TRANSCRIPTION_JSON" ? "text" : "other",
})
}
return nextMap
}, [artifacts, projectId])
const resolveUsedFile = useCallback(
(fileId: string, previous?: UsedFile | null): UsedFile | null => {
return fileMap.get(fileId) ?? artifactMap.get(fileId) ?? previous ?? null
},
[fileMap, artifactMap],
)
const usedFiles = useMemo(
() =>
usedFileIds
.map((fileId) => resolveUsedFile(fileId))
.filter((file): file is UsedFile => file !== null),
[resolveUsedFile, usedFileIds],
)
useEffect(() => {
if (!isSuccess || isInitializedRef.current) return
setSelectedFileState((prev) => {
if (!selectedPersistedId) return null
const saved = project?.workspace_state as
| { used_files?: UsedFile[] }
| null
| undefined
const loaded = saved?.used_files ?? []
const resolved = resolveUsedFile(
selectedPersistedId,
prev as UsedFile | null,
)
if (!resolved) return prev
setUsedFiles(loaded)
initialValueRef.current = JSON.stringify(loaded)
isInitializedRef.current = true
}, [isSuccess, project])
if (prev?.id === selectedPersistedId) {
return {
...resolved,
scrollToSegmentIndex: prev.scrollToSegmentIndex,
}
}
/* ---- Save to server (debounced) ---- */
const debouncedUsedFiles = useDebounce(usedFiles, DEBOUNCE_MS)
const saveMutation = api.useMutation(
"patch",
"/api/projects/{project_id}/",
)
useEffect(() => {
if (!isInitializedRef.current) return
const serialized = JSON.stringify(debouncedUsedFiles)
if (serialized === initialValueRef.current) return
initialValueRef.current = serialized
saveMutation.mutate({
params: { path: { project_id: projectId } },
body: {
workspace_state: { used_files: debouncedUsedFiles },
},
return resolved
})
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [debouncedUsedFiles, projectId])
}, [resolveUsedFile, selectedPersistedId])
/* ---- Actions ---- */
const setSelectedFile = useCallback(
(file: SelectedFile | null) => setSelectedFileState(file),
[],
const persistableWorkspaceView = useMemo<WorkflowWorkspaceView>(
() => ({
used_file_ids: usedFileIds,
selected_file_id: selectedPersistedId,
}),
[selectedPersistedId, usedFileIds],
)
const debouncedWorkspaceView = useDebounce(
persistableWorkspaceView,
DEBOUNCE_MS,
)
useEffect(() => {
if (!workspace) return
const localSignature = JSON.stringify(debouncedWorkspaceView)
const serverSignature = JSON.stringify(workspace.workspace_view)
if (localSignature === serverSignature) return
void workflowAction.mutateAsync({
type: "SET_WORKSPACE_VIEW",
revision: workspace.revision,
workspace_view: debouncedWorkspaceView,
})
}, [debouncedWorkspaceView, workflowAction, workspace])
const setSelectedFile = useCallback((file: SelectedFile | null) => {
setSelectedFileState(file)
setSelectedPersistedId(file?.id ?? null)
}, [])
const addUsedFile = useCallback((file: UsedFile) => {
setUsedFiles((prev) => {
if (prev.some((f) => f.id === file.id)) return prev
return [...prev, file]
setUsedFileIds((prev) => {
if (prev.includes(file.id)) return prev
return [...prev, file.id]
})
}, [])
const removeUsedFile = useCallback((id: string) => {
setUsedFiles((prev) => prev.filter((f) => f.id !== id))
setUsedFileIds((prev) => prev.filter((fileId) => fileId !== id))
setSelectedPersistedId((prev) => (prev === id ? null : prev))
setSelectedFileState((prev) => (prev?.id === id ? null : prev))
}, [])
const isFileUsed = useCallback(
(id: string) => usedFiles.some((f) => f.id === id),
[usedFiles],
(id: string) => usedFileIds.includes(id),
[usedFileIds],
)
const value = useMemo<WorkspaceFileContextValue>(
@@ -145,82 +230,22 @@ export const WorkspaceProvider: FunctionComponent<{
addUsedFile,
removeUsedFile,
isFileUsed,
isLoaded: isInitializedRef.current,
isLoaded: Boolean(workspace),
}),
[
addUsedFile,
isFileUsed,
removeUsedFile,
selectedFile,
setSelectedFile,
usedFiles,
addUsedFile,
removeUsedFile,
isFileUsed,
workspace,
],
)
return <FileContext.Provider value={value}>{children}</FileContext.Provider>
}
/* ------------------------------------------------------------------ */
/* Static provider (in-memory only, no server persistence) */
/* ------------------------------------------------------------------ */
export const StaticWorkspaceProvider: FunctionComponent<{
children: ReactNode
}> = ({ children }) => {
const [selectedFile, setSelectedFileState] = useState<SelectedFile | null>(
null,
)
const [usedFiles, setUsedFiles] = useState<UsedFile[]>([])
const setSelectedFile = useCallback(
(file: SelectedFile | null) => setSelectedFileState(file),
[],
)
const addUsedFile = useCallback((file: UsedFile) => {
setUsedFiles((prev) => {
if (prev.some((f) => f.id === file.id)) return prev
return [...prev, file]
})
}, [])
const removeUsedFile = useCallback((id: string) => {
setUsedFiles((prev) => prev.filter((f) => f.id !== id))
}, [])
const isFileUsed = useCallback(
(id: string) => usedFiles.some((f) => f.id === id),
[usedFiles],
)
const value = useMemo<WorkspaceFileContextValue>(
() => ({
selectedFile,
setSelectedFile,
usedFiles,
addUsedFile,
removeUsedFile,
isFileUsed,
isLoaded: true,
}),
[
selectedFile,
setSelectedFile,
usedFiles,
addUsedFile,
removeUsedFile,
isFileUsed,
],
)
return <FileContext.Provider value={value}>{children}</FileContext.Provider>
}
/* ------------------------------------------------------------------ */
/* Hook */
/* ------------------------------------------------------------------ */
/** File selection & used-files list — stable during playback */
export function useWorkspaceFiles(): WorkspaceFileContextValue {
const ctx = useContext(FileContext)
if (!ctx) {
+166 -118
View File
@@ -2,12 +2,14 @@ import { expect, test } from "@playwright/test"
const USER_ID = "00000000-0000-0000-0000-000000000001"
const PROJECT_ID = "65df675b-013b-4b1f-ab2d-075dadbcd0d9"
const SOURCE_FILE_ID = "00000000-0000-0000-0000-000000000011"
const CAPTION_PRESET_ID = "00000000-0000-0000-0000-000000000010"
const TRANSCRIPTION_ARTIFACT_ID =
"00000000-0000-0000-0000-000000000020"
const TRANSCRIPTION_ID = "00000000-0000-0000-0000-000000000030"
const CAPTION_JOB_ID = "00000000-0000-0000-0000-000000000040"
const PRIMARY_FILE_KEY = "projects/test/video.mp4"
const PRIMARY_FILE_URL = "http://localhost:4444/files/video.mp4"
const DEFAULT_USER = {
id: USER_ID,
@@ -26,53 +28,49 @@ const DEFAULT_USER = {
}
test.describe("Caption Settings Step", () => {
test("should recover a missing transcription artifact from project data", async ({
test("should render from typed workspace and start caption render via workflow action", async ({
page,
}) => {
let project: Record<string, unknown> = {
id: PROJECT_ID,
owner_id: USER_ID,
name: "Тестовый проект",
description: null,
language: "auto",
folder: null,
status: "DRAFT",
workspace_state: {
wizard: {
current_step: "caption-settings",
completed_steps: [
"upload",
"verify",
"silence-settings",
"processing",
"fragments",
"transcription-settings",
"transcription-processing",
"subtitle-revision",
],
primary_file_key: PRIMARY_FILE_KEY,
video_url: "http://localhost:9000/projects/test/video.mp4",
silence_settings: {
min_silence_duration_ms: 200,
silence_threshold_db: 16,
padding_ms: 100,
},
active_job_id: null,
active_job_type: null,
silence_job_id: null,
transcription_artifact_id: null,
caption_preset_id: CAPTION_PRESET_ID,
caption_style_config: null,
captioned_video_path: null,
},
let workflowActions: Array<Record<string, unknown>> = []
let workspace: Record<string, any> = {
revision: 1,
phase: "CAPTIONS",
current_screen: "caption-settings",
active_job: null,
source_file_id: SOURCE_FILE_ID,
workspace_view: {
used_file_ids: [],
selected_file_id: null,
},
silence: {
status: "SKIPPED",
settings: {
min_silence_duration_ms: 200,
silence_threshold_db: 16,
padding_ms: 100,
},
detect_job_id: null,
detected_segments: [],
reviewed_cuts: [],
duration_ms: null,
applied_output_file_id: null,
},
transcription: {
status: "REVIEW_READY",
job_id: null,
request: null,
artifact_id: TRANSCRIPTION_ARTIFACT_ID,
transcription_id: TRANSCRIPTION_ID,
reviewed: true,
},
captions: {
status: "CONFIG_READY",
preset_id: CAPTION_PRESET_ID,
style_config: null,
render_job_id: null,
output_file_id: null,
},
is_active: true,
created_at: "2025-06-01T00:00:00Z",
updated_at: "2025-06-01T00:00:00Z",
}
let savedWizardState: Record<string, unknown> | null = null
let generateRequestBody: Record<string, unknown> | null = null
let generateRequestCount = 0
await page.context().addCookies([
{
@@ -98,37 +96,128 @@ test.describe("Caption Settings Step", () => {
})
await page.route(`**/api/projects/${PROJECT_ID}/`, async (route) => {
if (route.request().method() === "GET") {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify(project),
})
return
}
if (route.request().method() === "PATCH") {
const body = route.request().postDataJSON() as {
workspace_state?: { wizard?: Record<string, unknown> }
}
savedWizardState = body.workspace_state?.wizard ?? null
project = {
...project,
workspace_state: body.workspace_state ?? project.workspace_state,
}
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify(project),
})
return
}
await route.fallback()
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
id: PROJECT_ID,
owner_id: USER_ID,
name: "Тестовый проект",
description: null,
language: "auto",
folder: null,
status: "DRAFT",
workspace_state: null,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
updated_at: "2025-06-01T00:00:00Z",
}),
})
})
await page.route(`**/api/projects/${PROJECT_ID}/workspace*`, async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify(workspace),
})
})
await page.route(
`**/api/projects/${PROJECT_ID}/workflow/actions*`,
async (route) => {
const action = route.request().postDataJSON() as Record<string, unknown>
workflowActions.push(action)
if (action.type === "START_CAPTION_RENDER") {
workspace = {
...workspace,
revision: 2,
current_screen: "caption-processing",
active_job: {
job_id: CAPTION_JOB_ID,
job_type: "CAPTIONS_GENERATE",
},
captions: {
...workspace.captions,
status: "RUNNING",
render_job_id: CAPTION_JOB_ID,
},
}
}
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify(workspace),
})
},
)
await page.route("**/api/files/files/", async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify([
{
id: SOURCE_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: "video.mp4",
path: PRIMARY_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
},
]),
})
})
await page.route(`**/api/files/files/${SOURCE_FILE_ID}/`, async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
id: SOURCE_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: "video.mp4",
path: PRIMARY_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
}),
})
})
await page.route(
`**/api/files/files/${SOURCE_FILE_ID}/resolve/`,
async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
file_id: SOURCE_FILE_ID,
file_url: PRIMARY_FILE_URL,
file_path: PRIMARY_FILE_KEY,
filename: "video.mp4",
}),
})
},
)
await page.route("**/api/media/artifacts/", async (route) => {
await route.fulfill({
status: 200,
@@ -149,20 +238,6 @@ test.describe("Caption Settings Step", () => {
})
})
await page.route(
`**/api/transcribe/transcriptions/by-artifact/${TRANSCRIPTION_ARTIFACT_ID}/`,
async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
id: TRANSCRIPTION_ID,
artifact_id: TRANSCRIPTION_ARTIFACT_ID,
}),
})
},
)
await page.route("**/api/captions/presets/", async (route) => {
await route.fulfill({
status: 200,
@@ -183,27 +258,13 @@ test.describe("Caption Settings Step", () => {
})
})
await page.route("**/api/tasks/captions-generate/", async (route) => {
generateRequestCount += 1
generateRequestBody = route.request().postDataJSON() as Record<
string,
unknown
>
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({ job_id: CAPTION_JOB_ID }),
})
})
await page.route("**/api/tasks/status/**", async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
status: "RUNNING",
progress_pct: 0,
progress_pct: 25,
output_data: null,
}),
})
@@ -220,26 +281,13 @@ test.describe("Caption Settings Step", () => {
await expect(captionStep.getByText("Системный пресет")).toBeVisible()
await expect(generateButton).toBeEnabled()
await expect
.poll(() => savedWizardState?.transcription_artifact_id ?? null)
.toBe(TRANSCRIPTION_ARTIFACT_ID)
await generateButton.click()
expect(generateRequestBody).toMatchObject({
video_s3_path: PRIMARY_FILE_KEY,
transcription_id: TRANSCRIPTION_ID,
project_id: PROJECT_ID,
preset_id: CAPTION_PRESET_ID,
expect(workflowActions).toHaveLength(1)
expect(workflowActions[0]).toMatchObject({
type: "START_CAPTION_RENDER",
revision: 1,
})
expect(generateRequestCount).toBe(1)
await expect
.poll(() => savedWizardState?.current_step ?? null)
.toBe("caption-processing")
await expect
.poll(() => savedWizardState?.active_job_id ?? null)
.toBe(CAPTION_JOB_ID)
await expect(page.locator("[data-testid='ProcessingStep']")).toBeVisible()
})
+257 -121
View File
@@ -2,7 +2,6 @@ import { expect, test } from "@playwright/test"
const USER_ID = "00000000-0000-0000-0000-000000000001"
const PROJECT_ID = "75df675b-013b-4b1f-ab2d-075dadbcd0d9"
const DETECT_JOB_ID = "00000000-0000-0000-0000-000000000050"
const APPLY_JOB_ID = "00000000-0000-0000-0000-000000000051"
const TRANSCRIPTION_JOB_ID = "00000000-0000-0000-0000-000000000052"
const ORIGINAL_FILE_ID = "00000000-0000-0000-0000-000000000060"
@@ -34,51 +33,50 @@ const MOCK_SEGMENTS = [
]
test.describe("Silence Apply Flow", () => {
test("should show processing for cut application and transcribe the processed video", async ({
test("should persist cuts via workflow actions and continue to transcription on processed source file", async ({
page,
}) => {
let project: Record<string, unknown> = {
id: PROJECT_ID,
owner_id: USER_ID,
name: "Тестовый проект",
description: null,
language: "auto",
folder: null,
status: "DRAFT",
workspace_state: {
wizard: {
current_step: "fragments",
completed_steps: [
"upload",
"verify",
"silence-settings",
"processing",
],
primary_file_id: ORIGINAL_FILE_ID,
primary_file_key: ORIGINAL_FILE_KEY,
original_file_name: "original-video.mp4",
silence_settings: {
min_silence_duration_ms: 200,
silence_threshold_db: 16,
padding_ms: 100,
},
active_job_id: null,
active_job_type: null,
silence_job_id: DETECT_JOB_ID,
transcription_artifact_id: null,
caption_preset_id: null,
caption_style_config: null,
captioned_video_path: null,
captioned_video_file_id: null,
},
let applyStatus: "RUNNING" | "DONE" = "RUNNING"
const workflowActions: Array<Record<string, unknown>> = []
let workspace: Record<string, any> = {
revision: 1,
phase: "SILENCE",
current_screen: "fragments",
active_job: null,
source_file_id: ORIGINAL_FILE_ID,
workspace_view: {
used_file_ids: [],
selected_file_id: null,
},
silence: {
status: "REVIEW_READY",
settings: {
min_silence_duration_ms: 200,
silence_threshold_db: 16,
padding_ms: 100,
},
detect_job_id: "00000000-0000-0000-0000-000000000050",
detected_segments: MOCK_SEGMENTS,
reviewed_cuts: [],
duration_ms: 30000,
applied_output_file_id: null,
},
transcription: {
status: "IDLE",
job_id: null,
request: null,
artifact_id: null,
transcription_id: null,
reviewed: false,
},
captions: {
status: "IDLE",
preset_id: null,
style_config: null,
render_job_id: null,
output_file_id: null,
},
is_active: true,
created_at: "2025-06-01T00:00:00Z",
updated_at: "2025-06-01T00:00:00Z",
}
let savedWizardState: Record<string, unknown> | null = null
let applyStatus = "RUNNING"
let transcriptionRequestBody: Record<string, unknown> | null = null
await page.context().addCookies([
{
@@ -104,35 +102,154 @@ test.describe("Silence Apply Flow", () => {
})
await page.route(`**/api/projects/${PROJECT_ID}/`, async (route) => {
if (route.request().method() === "GET") {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify(project),
})
return
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
id: PROJECT_ID,
owner_id: USER_ID,
name: "Тестовый проект",
description: null,
language: "auto",
folder: null,
status: "DRAFT",
workspace_state: null,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
updated_at: "2025-06-01T00:00:00Z",
}),
})
})
await page.route(`**/api/projects/${PROJECT_ID}/workspace*`, async (route) => {
if (
applyStatus === "DONE" &&
workspace.current_screen === "silence-apply-processing"
) {
workspace = {
...workspace,
revision: 4,
phase: "TRANSCRIPTION",
current_screen: "transcription-settings",
active_job: null,
source_file_id: CUT_FILE_ID,
silence: {
...workspace.silence,
status: "APPLIED",
applied_output_file_id: CUT_FILE_ID,
},
}
}
if (route.request().method() === "PATCH") {
const body = route.request().postDataJSON() as {
workspace_state?: { wizard?: Record<string, unknown> }
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify(workspace),
})
})
await page.route(
`**/api/projects/${PROJECT_ID}/workflow/actions*`,
async (route) => {
const action = route.request().postDataJSON() as Record<string, unknown>
workflowActions.push(action)
if (action.type === "SET_SILENCE_CUTS") {
workspace = {
...workspace,
revision: 2,
silence: {
...workspace.silence,
reviewed_cuts: action.cuts as typeof MOCK_SEGMENTS,
},
}
}
savedWizardState = body.workspace_state?.wizard ?? null
project = {
...project,
workspace_state: body.workspace_state ?? project.workspace_state,
if (action.type === "START_SILENCE_APPLY") {
workspace = {
...workspace,
revision: 3,
current_screen: "silence-apply-processing",
active_job: {
job_id: APPLY_JOB_ID,
job_type: "SILENCE_APPLY",
},
silence: {
...workspace.silence,
status: "APPLYING",
},
}
}
if (action.type === "START_TRANSCRIPTION") {
workspace = {
...workspace,
revision: 5,
current_screen: "transcription-processing",
active_job: {
job_id: TRANSCRIPTION_JOB_ID,
job_type: "TRANSCRIPTION_GENERATE",
},
transcription: {
...workspace.transcription,
status: "RUNNING",
job_id: TRANSCRIPTION_JOB_ID,
request: action.request as {
engine: "whisper"
language?: string
model: string
},
},
}
}
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify(project),
body: JSON.stringify(workspace),
})
return
}
},
)
await route.fallback()
await page.route("**/api/files/files/", async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify([
{
id: ORIGINAL_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: "original-video.mp4",
path: ORIGINAL_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
},
{
id: CUT_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: "cut-video.mp4",
path: CUT_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
},
]),
})
})
await page.route("**/api/files/files/*/resolve/", async (route) => {
@@ -146,30 +263,50 @@ test.describe("Silence Apply Flow", () => {
file_id: isCutFile ? CUT_FILE_ID : ORIGINAL_FILE_ID,
file_url: isCutFile ? CUT_FILE_URL : ORIGINAL_FILE_URL,
file_path: isCutFile ? CUT_FILE_KEY : ORIGINAL_FILE_KEY,
filename: isCutFile ? "cut-video.mp4" : "original-video.mp4",
}),
})
})
await page.route("**/api/files/files/*/", async (route) => {
const fileId = route.request().url().split("/files/")[1]?.split("/")[0]
const isCutFile = fileId === CUT_FILE_ID
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
id: isCutFile ? CUT_FILE_ID : ORIGINAL_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: isCutFile
? "cut-video.mp4"
: "original-video.mp4",
path: isCutFile ? CUT_FILE_KEY : ORIGINAL_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
}),
})
})
await page.route("**/api/media/artifacts/", async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify([]),
})
})
await page.route("**/api/tasks/status/**", async (route) => {
const url = route.request().url()
if (url.includes(DETECT_JOB_ID)) {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
status: "DONE",
job_type: "SILENCE_DETECT",
progress_pct: 100,
output_data: {
silent_segments: MOCK_SEGMENTS,
duration_ms: 30000,
},
}),
})
return
}
if (url.includes(APPLY_JOB_ID)) {
await route.fulfill({
status: 200,
@@ -190,6 +327,20 @@ test.describe("Silence Apply Flow", () => {
return
}
if (url.includes(TRANSCRIPTION_JOB_ID)) {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
status: "RUNNING",
job_type: "TRANSCRIPTION_GENERATE",
progress_pct: 10,
output_data: null,
}),
})
return
}
await route.fulfill({
status: 200,
contentType: "application/json",
@@ -201,60 +352,45 @@ test.describe("Silence Apply Flow", () => {
})
})
await page.route("**/api/tasks/silence-apply/", async (route) => {
await route.fulfill({
status: 202,
contentType: "application/json",
body: JSON.stringify({ job_id: APPLY_JOB_ID }),
})
})
await page.route("**/api/tasks/transcription-generate/", async (route) => {
transcriptionRequestBody = route.request().postDataJSON() as Record<
string,
unknown
>
await route.fulfill({
status: 202,
contentType: "application/json",
body: JSON.stringify({ job_id: TRANSCRIPTION_JOB_ID }),
})
})
await page.goto(`/projects/${PROJECT_ID}`)
const fragmentsStep = page.locator("[data-testid='FragmentsStep']")
await expect(fragmentsStep).toBeVisible()
await expect(page.locator("[data-testid='FragmentsStep']")).toBeVisible()
await expect(page.locator("[data-testid='cut-region']")).toHaveCount(2)
await fragmentsStep.getByRole("button", { name: "Применить" }).click()
await page.getByRole("button", { name: "Применить" }).click()
await expect.poll(() => workflowActions.length).toBe(2)
expect(workflowActions[0]).toMatchObject({
type: "SET_SILENCE_CUTS",
revision: 1,
})
expect(workflowActions[1]).toMatchObject({
type: "START_SILENCE_APPLY",
revision: 2,
})
await expect(page.locator("[data-testid='ProcessingStep']")).toBeVisible()
await expect
.poll(() => savedWizardState?.active_job_type ?? null)
.toBe("SILENCE_APPLY")
await expect
.poll(() => savedWizardState?.current_step ?? null)
.toBe("processing")
applyStatus = "DONE"
const transcriptionStep = page.locator(
"[data-testid='TranscriptionSettingsStep']",
)
await expect(transcriptionStep).toBeVisible({ timeout: 10_000 })
await expect(
page.locator("[data-testid='TranscriptionSettingsStep']"),
).toBeVisible()
await expect
.poll(() => savedWizardState?.primary_file_key ?? null)
.toBe(CUT_FILE_KEY)
await page.getByRole("button", { name: "Сгенерировать субтитры" }).click()
await transcriptionStep
.getByRole("button", { name: "Сгенерировать субтитры" })
.click()
expect(transcriptionRequestBody).toMatchObject({
file_key: CUT_FILE_KEY,
project_id: PROJECT_ID,
expect(workflowActions[2]).toMatchObject({
type: "START_TRANSCRIPTION",
revision: 4,
request: {
engine: "whisper",
model: "base",
},
})
await expect(page.locator("[data-testid='ProcessingStep']")).toContainText(
"ТРАНСКРИБАЦИЯ",
)
})
})