chore: something changed, commit before reorg

This commit is contained in:
Daniil
2026-04-27 23:28:28 +03:00
parent 46f34bdcac
commit 20928e9a60
16 changed files with 1967 additions and 1262 deletions
@@ -43,57 +43,16 @@ export const CaptionResultStep: FunctionComponent<ICaptionResultStepProps> = ({
className, className,
}): JSX.Element => { }): JSX.Element => {
const { const {
projectId,
captionedVideoFileId, captionedVideoFileId,
captionedVideoPath,
goToStep,
markStepCompleted, markStepCompleted,
setCaptionedVideoFileId, reopenCaptionConfig,
setCaptionedVideoPath,
} = useWizard() } = useWizard()
// Recovery: if wizard state lost the file data, look up the latest caption job
const needsRecovery = !captionedVideoFileId && !captionedVideoPath
const { data: jobs } = api.useQuery(
"get",
"/api/jobs/jobs/",
{},
{ enabled: needsRecovery },
)
const recoveredJob = useMemo(() => {
if (!needsRecovery || !jobs) return null
return jobs.find(
(j) =>
j.project_id === projectId &&
j.job_type === "CAPTIONS_GENERATE" &&
j.status === "DONE" &&
j.output_data?.file_id,
)
}, [needsRecovery, jobs, projectId])
const effectiveFileId =
captionedVideoFileId ??
(recoveredJob?.output_data?.file_id as string | undefined) ??
null
const effectivePath =
captionedVideoPath ??
(recoveredJob?.output_data?.output_path as string | undefined) ??
null
// Persist recovered values back to wizard state
if (recoveredJob && !captionedVideoFileId && effectiveFileId) {
setCaptionedVideoFileId(effectiveFileId)
}
if (recoveredJob && !captionedVideoPath && effectivePath) {
setCaptionedVideoPath(effectivePath)
}
const { data: fileInfo, isLoading } = api.useQuery( const { data: fileInfo, isLoading } = api.useQuery(
"get", "get",
"/api/files/files/{file_id}/resolve/", "/api/files/files/{file_id}/resolve/",
{ params: { path: { file_id: effectiveFileId ?? "" } } }, { params: { path: { file_id: captionedVideoFileId ?? "" } } },
{ enabled: !!effectiveFileId }, { enabled: !!captionedVideoFileId },
) )
const videoUrl = fileInfo?.file_url ?? "" const videoUrl = fileInfo?.file_url ?? ""
@@ -107,7 +66,7 @@ export const CaptionResultStep: FunctionComponent<ICaptionResultStepProps> = ({
} }
const handleRerender = () => { const handleRerender = () => {
goToStep("caption-settings") void reopenCaptionConfig()
} }
const handleFinish = () => { const handleFinish = () => {
@@ -6,142 +6,58 @@ import type { JSX } from "react"
import { import {
FunctionComponent, FunctionComponent,
useEffect,
useMemo,
useRef, useRef,
useState, useState,
} from "react" } from "react"
import cs from "classnames" import cs from "classnames"
import api from "@shared/api"
import { useWizard } from "@shared/context/WizardContext" import { useWizard } from "@shared/context/WizardContext"
import { Button } from "@shared/ui" import { Button } from "@shared/ui"
import { PresetGrid } from "./PresetGrid" import { PresetGrid } from "./PresetGrid"
import { StyleEditor } from "./StyleEditor" import { StyleEditor } from "./StyleEditor"
import { useSubmitCaptionGenerate } from "./useSubmitCaptionGenerate"
import styles from "./CaptionSettingsStep.module.scss" import styles from "./CaptionSettingsStep.module.scss"
type CaptionPresetRead = components["schemas"]["CaptionPresetRead"] type CaptionPresetRead = components["schemas"]["CaptionPresetRead"]
const ERROR_SUBMIT = "Не удалось запустить генерацию субтитров" const ERROR_SUBMIT = "Не удалось запустить генерацию субтитров"
const ERROR_MISSING_DATA =
"Для генерации субтитров необходимы видеофайл и транскрипция. Пройдите предыдущие шаги."
const TRANSCRIPTION_ARTIFACT_TYPE = "TRANSCRIPTION_JSON"
export const CaptionSettingsStep: FunctionComponent< export const CaptionSettingsStep: FunctionComponent<
ICaptionSettingsStepProps ICaptionSettingsStepProps
> = ({ className }): JSX.Element => { > = ({ className }): JSX.Element => {
const { const {
projectId,
primaryFileKey,
transcriptionArtifactId: contextArtifactId,
captionPresetId, captionPresetId,
setCaptionPresetId, selectCaptionPreset,
setTranscriptionArtifactId, startCaptionRender,
startProcessingJob,
goBack, goBack,
} = useWizard() } = useWizard()
const { data: artifacts, isLoading: isArtifactsLoading } = api.useQuery(
"get",
"/api/media/artifacts/",
{},
{ enabled: !contextArtifactId },
)
const transcriptionArtifactId = useMemo(() => {
if (contextArtifactId) return contextArtifactId
if (!artifacts) return null
const match = artifacts.find(
(artifact) =>
artifact.project_id === projectId &&
artifact.artifact_type === TRANSCRIPTION_ARTIFACT_TYPE &&
!artifact.is_deleted,
)
return match?.id ?? null
}, [artifacts, contextArtifactId, projectId])
useEffect(() => {
if (
!transcriptionArtifactId ||
transcriptionArtifactId === contextArtifactId
) {
return
}
setTranscriptionArtifactId(transcriptionArtifactId)
}, [
contextArtifactId,
setTranscriptionArtifactId,
transcriptionArtifactId,
])
const { data: transcriptionEntry, isLoading: isTranscriptionLoading } =
api.useQuery(
"get",
"/api/transcribe/transcriptions/by-artifact/{artifact_id}/",
{
params: {
path: { artifact_id: transcriptionArtifactId ?? "" },
},
},
{ enabled: !!transcriptionArtifactId },
)
const [activeTab, setActiveTab] = useState<"select" | "editor">("select") const [activeTab, setActiveTab] = useState<"select" | "editor">("select")
const [editingPreset, setEditingPreset] = useState<CaptionPresetRead | null>( const [editingPreset, setEditingPreset] = useState<CaptionPresetRead | null>(
null, null,
) )
const [submitError, setSubmitError] = useState<string | null>(null) const [submitError, setSubmitError] = useState<string | null>(null)
const [isSubmitting, setIsSubmitting] = useState(false)
const submitLockRef = useRef(false) const submitLockRef = useRef(false)
const isResolvingSourceData = isArtifactsLoading || isTranscriptionLoading
const { mutate, isPending } = useSubmitCaptionGenerate({ const handleGenerate = async () => {
onSuccess: (data) => { if (submitLockRef.current || isSubmitting) return
if (!data?.job_id) { if (!captionPresetId) return
submitLockRef.current = false
return
}
if (data?.job_id) {
startProcessingJob(
data.job_id,
"CAPTIONS_GENERATE",
"caption-processing",
"caption-settings",
)
}
},
onError: () => {
submitLockRef.current = false
setSubmitError(ERROR_SUBMIT)
},
})
const handleGenerate = () => {
if (submitLockRef.current || isPending) return
const transcriptionId = transcriptionEntry?.id
if (!primaryFileKey || !transcriptionId) {
setSubmitError(ERROR_MISSING_DATA)
return
}
submitLockRef.current = true submitLockRef.current = true
setSubmitError(null) setSubmitError(null)
mutate({ setIsSubmitting(true)
body: {
video_s3_path: primaryFileKey, try {
folder: "output_files", await startCaptionRender()
transcription_id: transcriptionId, submitLockRef.current = false
project_id: projectId, } catch {
preset_id: captionPresetId, submitLockRef.current = false
}, setSubmitError(ERROR_SUBMIT)
}) } finally {
setIsSubmitting(false)
}
} }
const handleEdit = (preset: CaptionPresetRead) => { const handleEdit = (preset: CaptionPresetRead) => {
@@ -155,10 +71,14 @@ export const CaptionSettingsStep: FunctionComponent<
} }
const handleSaved = (presetId: string) => { const handleSaved = (presetId: string) => {
setCaptionPresetId(presetId) void selectCaptionPreset(presetId)
setActiveTab("select") setActiveTab("select")
} }
const handleSelectPreset = (presetId: string | null) => {
void selectCaptionPreset(presetId)
}
if (activeTab === "editor") { if (activeTab === "editor") {
return ( return (
<div <div
@@ -187,7 +107,7 @@ export const CaptionSettingsStep: FunctionComponent<
<div className={styles.scrollArea}> <div className={styles.scrollArea}>
<PresetGrid <PresetGrid
selectedPresetId={captionPresetId} selectedPresetId={captionPresetId}
onSelect={setCaptionPresetId} onSelect={handleSelectPreset}
onEdit={handleEdit} onEdit={handleEdit}
onCreateNew={handleCreateNew} onCreateNew={handleCreateNew}
/> />
@@ -202,11 +122,9 @@ export const CaptionSettingsStep: FunctionComponent<
<Button <Button
variant="primary" variant="primary"
onClick={handleGenerate} onClick={handleGenerate}
disabled={ disabled={!captionPresetId || isSubmitting}
!captionPresetId || isPending || isResolvingSourceData
}
> >
{isPending ? "Запуск..." : "Генерировать"} {isSubmitting ? "Запуск..." : "Генерировать"}
</Button> </Button>
</div> </div>
</div> </div>
@@ -23,12 +23,11 @@ import {
import WaveSurfer from "wavesurfer.js" import WaveSurfer from "wavesurfer.js"
import api from "@shared/api" import api from "@shared/api"
import { useProjectWorkspaceQuery } from "@shared/api/projectWorkflow"
import { useWizard } from "@shared/context/WizardContext" import { useWizard } from "@shared/context/WizardContext"
import { useSegmentResize } from "@shared/hooks/useSegmentResize" import { useSegmentResize } from "@shared/hooks/useSegmentResize"
import { Button } from "@shared/ui" import { Button } from "@shared/ui"
import { useSubmitSilenceApply } from "../SilenceResultModal/useSubmitSilenceApply"
import styles from "./FragmentsStep.module.scss" import styles from "./FragmentsStep.module.scss"
const MIN_REGION_MS = 100 const MIN_REGION_MS = 100
@@ -71,14 +70,12 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
}): JSX.Element => { }): JSX.Element => {
const { const {
projectId, projectId,
silenceJobId,
primaryFileId, primaryFileId,
primaryFileKey, startSilenceApply,
startProcessingJob, skipSilenceApply,
goBack, goBack,
markStepCompleted,
goToStep,
} = useWizard() } = useWizard()
const { data: workspace } = useProjectWorkspaceQuery(projectId)
const [cutRegions, setCutRegions] = useState<CutRegion[]>([]) const [cutRegions, setCutRegions] = useState<CutRegion[]>([])
const [pixelsPerSecond, setPixelsPerSecond] = useState(DEFAULT_PPS) const [pixelsPerSecond, setPixelsPerSecond] = useState(DEFAULT_PPS)
@@ -94,16 +91,7 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
const waveformRef = useRef<HTMLDivElement>(null) const waveformRef = useRef<HTMLDivElement>(null)
const wsRef = useRef<WaveSurfer | null>(null) const wsRef = useRef<WaveSurfer | null>(null)
/* ---- Data loading ---- */ const silenceState = workspace?.silence
const { data: taskStatus } = api.useQuery(
"get",
"/api/tasks/status/{job_id}/",
{ params: { path: { job_id: silenceJobId ?? "" } } },
{ enabled: !!silenceJobId },
)
const outputData = taskStatus?.output_data as Record<string, unknown> | null
const fileKey = primaryFileKey ?? ((outputData?.file_key as string) ?? "")
const { data: fileInfo } = api.useQuery( const { data: fileInfo } = api.useQuery(
"get", "get",
@@ -116,11 +104,12 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
/* ---- Initialize cut regions from detection results ---- */ /* ---- Initialize cut regions from detection results ---- */
useEffect(() => { useEffect(() => {
if (!outputData) return if (!silenceState) return
const segments = outputData.silent_segments as const segments =
| { start_ms: number; end_ms: number }[] silenceState.reviewed_cuts.length > 0
| undefined ? silenceState.reviewed_cuts
const dur = outputData.duration_ms as number | undefined : silenceState.detected_segments
const dur = silenceState.duration_ms
if (segments && dur) { if (segments && dur) {
setDurationMs(dur) setDurationMs(dur)
@@ -132,8 +121,7 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
})), })),
) )
} }
// eslint-disable-next-line react-hooks/exhaustive-deps }, [silenceState])
}, [outputData])
/* ---- Timeline calculations ---- */ /* ---- Timeline calculations ---- */
const totalWidth = Math.max(1, (durationMs / 1000) * pixelsPerSecond) const totalWidth = Math.max(1, (durationMs / 1000) * pixelsPerSecond)
@@ -599,49 +587,30 @@ export const FragmentsStep: FunctionComponent<IFragmentsStepProps> = ({
} }
}, [drawRuler, drawFrames]) }, [drawRuler, drawFrames])
/* ---- Apply ---- */ const [isApplying, setIsApplying] = useState(false)
const { mutate: applyMutate, isPending: isApplying } =
useSubmitSilenceApply({
onSuccess: (data) => {
const result = data as { job_id?: string }
if (result?.job_id) {
startProcessingJob(
result.job_id,
"SILENCE_APPLY",
"silence-apply-processing",
"fragments",
)
}
},
onError: (error) => {
console.error("Silence apply failed:", error)
},
})
const handleApply = () => { const handleApply = async () => {
if (cutRegions.length === 0) { if (cutRegions.length === 0) {
markStepCompleted("fragments") setIsApplying(true)
goToStep("transcription-settings") try {
await skipSilenceApply()
} finally {
setIsApplying(false)
}
return return
} }
if (!fileKey) return setIsApplying(true)
try {
const fileName = fileKey.split("/").pop() ?? "video.mp4" await startSilenceApply(
const outputName = `Без тишины ${fileName}` cutRegions.map((r) => ({
;(applyMutate as (args: { body: Record<string, unknown> }) => void)({
body: {
file_key: fileKey,
out_folder: "",
project_id: projectId,
output_name: outputName,
cuts: cutRegions.map((r) => ({
start_ms: Math.round(r.startMs), start_ms: Math.round(r.startMs),
end_ms: Math.round(r.endMs), end_ms: Math.round(r.endMs),
})), })),
}, )
}) } finally {
setIsApplying(false)
}
} }
return ( return (
@@ -23,6 +23,7 @@ import {
import WaveSurfer from "wavesurfer.js" import WaveSurfer from "wavesurfer.js"
import api from "@shared/api" import api from "@shared/api"
import { useProjectWorkspaceQuery } from "@shared/api/projectWorkflow"
import { useSegmentResize } from "@shared/hooks/useSegmentResize" import { useSegmentResize } from "@shared/hooks/useSegmentResize"
import { Button, Modal } from "@shared/ui" import { Button, Modal } from "@shared/ui"
@@ -70,6 +71,7 @@ export const SilenceResultModal: FunctionComponent<ISilenceResultModalProps> = (
projectId, projectId,
jobId, jobId,
}): JSX.Element => { }): JSX.Element => {
const { data: workspace } = useProjectWorkspaceQuery(projectId)
const [cutRegions, setCutRegions] = useState<CutRegion[]>([]) const [cutRegions, setCutRegions] = useState<CutRegion[]>([])
const [pixelsPerSecond, setPixelsPerSecond] = useState(DEFAULT_PPS) const [pixelsPerSecond, setPixelsPerSecond] = useState(DEFAULT_PPS)
const [durationMs, setDurationMs] = useState(0) const [durationMs, setDurationMs] = useState(0)
@@ -95,16 +97,7 @@ export const SilenceResultModal: FunctionComponent<ISilenceResultModalProps> = (
const outputData = taskStatus?.output_data as Record<string, unknown> | null const outputData = taskStatus?.output_data as Record<string, unknown> | null
const fileKey = (outputData?.file_key as string) ?? "" const fileKey = (outputData?.file_key as string) ?? ""
const { data: project } = api.useQuery( const primaryFileId = workspace?.source_file_id ?? null
"get",
"/api/projects/{project_id}/",
{ params: { path: { project_id: projectId } } },
{ enabled: open },
)
const primaryFileId =
(project?.workspace_state as { wizard?: { primary_file_id?: string | null } } | null)
?.wizard?.primary_file_id ?? null
const { data: fileInfo } = api.useQuery( const { data: fileInfo } = api.useQuery(
"get", "get",
@@ -4,57 +4,32 @@ import type { ISilenceSettingsStepProps } from "./SilenceSettingsStep.d"
import type { JSX } from "react" import type { JSX } from "react"
import cs from "classnames" import cs from "classnames"
import { FunctionComponent, useCallback } from "react" import { FunctionComponent, useCallback, useEffect, useState } from "react"
import { useWizard } from "@shared/context/WizardContext" import { useWizard } from "@shared/context/WizardContext"
import { Button, Slider } from "@shared/ui" import { Button, Slider } from "@shared/ui"
import { useSubmitSilenceDetect } from "../SilenceSettingsModal/useSubmitSilenceDetect"
import styles from "./SilenceSettingsStep.module.scss" import styles from "./SilenceSettingsStep.module.scss"
export const SilenceSettingsStep: FunctionComponent< export const SilenceSettingsStep: FunctionComponent<
ISilenceSettingsStepProps ISilenceSettingsStepProps
> = ({ className }): JSX.Element => { > = ({ className }): JSX.Element => {
const { const {
projectId,
primaryFileKey, primaryFileKey,
silenceSettings, silenceSettings,
setSilenceSettings, startSilenceDetect,
startProcessingJob,
goBack, goBack,
} = useWizard() } = useWizard()
const [localSettings, setLocalSettings] = useState(silenceSettings)
const { mutate, isPending } = useSubmitSilenceDetect({ useEffect(() => {
onSuccess: (data) => { setLocalSettings(silenceSettings)
const result = data as { job_id?: string } }, [silenceSettings])
if (result?.job_id) {
startProcessingJob(
result.job_id,
"SILENCE_DETECT",
"processing",
"silence-settings",
)
}
},
onError: (error) => {
console.error("Silence detect submit failed:", error)
},
})
const handleSubmit = useCallback(() => { const handleSubmit = useCallback(() => {
if (!primaryFileKey) return if (!primaryFileKey) return
void startSilenceDetect(localSettings)
;(mutate as (args: { body: Record<string, unknown> }) => void)({ }, [localSettings, primaryFileKey, startSilenceDetect])
body: {
file_key: primaryFileKey,
project_id: projectId,
min_silence_duration_ms: silenceSettings.min_silence_duration_ms,
silence_threshold_db: silenceSettings.silence_threshold_db,
padding_ms: silenceSettings.padding_ms,
},
})
}, [mutate, primaryFileKey, projectId, silenceSettings])
return ( return (
<div <div
@@ -73,15 +48,15 @@ export const SilenceSettingsStep: FunctionComponent<
<div className={styles.fields}> <div className={styles.fields}>
<Slider <Slider
label="Мин. длительность тишины" label="Мин. длительность тишины"
value={silenceSettings.min_silence_duration_ms} value={localSettings.min_silence_duration_ms}
min={100} min={100}
max={2000} max={2000}
step={50} step={50}
unit="мс" unit="мс"
helpText="Минимальная длительность тихого участка для обнаружения" helpText="Минимальная длительность тихого участка для обнаружения"
onChange={(v) => onChange={(v) =>
setSilenceSettings({ setLocalSettings({
...silenceSettings, ...localSettings,
min_silence_duration_ms: v, min_silence_duration_ms: v,
}) })
} }
@@ -89,15 +64,15 @@ export const SilenceSettingsStep: FunctionComponent<
<Slider <Slider
label="Порог тишины" label="Порог тишины"
value={silenceSettings.silence_threshold_db} value={localSettings.silence_threshold_db}
min={6} min={6}
max={40} max={40}
step={2} step={2}
unit="дБ" unit="дБ"
helpText="Уровень громкости ниже которого звук считается тишиной" helpText="Уровень громкости ниже которого звук считается тишиной"
onChange={(v) => onChange={(v) =>
setSilenceSettings({ setLocalSettings({
...silenceSettings, ...localSettings,
silence_threshold_db: v, silence_threshold_db: v,
}) })
} }
@@ -105,15 +80,15 @@ export const SilenceSettingsStep: FunctionComponent<
<Slider <Slider
label="Отступ" label="Отступ"
value={silenceSettings.padding_ms} value={localSettings.padding_ms}
min={0} min={0}
max={500} max={500}
step={25} step={25}
unit="мс" unit="мс"
helpText="Дополнительный отступ по краям тихих участков" helpText="Дополнительный отступ по краям тихих участков"
onChange={(v) => onChange={(v) =>
setSilenceSettings({ setLocalSettings({
...silenceSettings, ...localSettings,
padding_ms: v, padding_ms: v,
}) })
} }
@@ -123,15 +98,15 @@ export const SilenceSettingsStep: FunctionComponent<
{/* Footer */} {/* Footer */}
<div className={styles.footer}> <div className={styles.footer}>
<Button variant="outline" onClick={goBack} disabled={isPending}> <Button variant="outline" onClick={goBack}>
Назад Назад
</Button> </Button>
<Button <Button
variant="primary" variant="primary"
onClick={handleSubmit} onClick={handleSubmit}
disabled={isPending || !primaryFileKey} disabled={!primaryFileKey}
> >
{isPending ? "Запуск..." : "Далее"} Далее
</Button> </Button>
</div> </div>
</div> </div>
@@ -15,13 +15,10 @@ import {
import "@vidstack/react/player/styles/default/theme.css" import "@vidstack/react/player/styles/default/theme.css"
import "@vidstack/react/player/styles/default/layouts/video.css" import "@vidstack/react/player/styles/default/layouts/video.css"
import cs from "classnames" import cs from "classnames"
import { FunctionComponent, useEffect, useMemo, useRef } from "react" import { FunctionComponent, useEffect, useRef } from "react"
import api from "@shared/api" import api from "@shared/api"
import { import { useWorkspaceFiles } from "@shared/context/WorkspaceContext"
StaticWorkspaceProvider,
useWorkspaceFiles,
} from "@shared/context/WorkspaceContext"
import { useWizard } from "@shared/context/WizardContext" import { useWizard } from "@shared/context/WizardContext"
import { Button } from "@shared/ui" import { Button } from "@shared/ui"
import { TranscriptionEditor } from "@features/project" import { TranscriptionEditor } from "@features/project"
@@ -29,8 +26,6 @@ import { TimelinePanel } from "@widgets/TimelinePanel"
import styles from "./SubtitleRevisionStep.module.scss" import styles from "./SubtitleRevisionStep.module.scss"
const TRANSCRIPTION_ARTIFACT_TYPE = "TRANSCRIPTION_JSON"
/** /**
* Auto-initializes WorkspaceContext with the video file * Auto-initializes WorkspaceContext with the video file
* and transcription artifact so TimelinePanel and * and transcription artifact so TimelinePanel and
@@ -87,51 +82,15 @@ const SubtitleRevisionContent: FunctionComponent<{
projectId, projectId,
videoUrl, videoUrl,
primaryFileKey, primaryFileKey,
transcriptionArtifactId: contextArtifactId, transcriptionArtifactId,
setTranscriptionArtifactId,
goBack, goBack,
goToStep, markTranscriptionReviewed,
markStepCompleted,
} = useWizard() } = useWizard()
const isArtifactResolving = false
const { data: artifacts, isLoading: isArtifactsLoading } = api.useQuery(
"get",
"/api/media/artifacts/",
{},
{ enabled: !contextArtifactId },
)
const transcriptionArtifactId = useMemo(() => {
if (contextArtifactId) return contextArtifactId
if (!artifacts) return null
const match = artifacts.find(
(a) =>
a.project_id === projectId &&
a.artifact_type === TRANSCRIPTION_ARTIFACT_TYPE &&
!a.is_deleted,
)
return match?.id ?? null
}, [contextArtifactId, artifacts, projectId])
const isArtifactResolving = !contextArtifactId && isArtifactsLoading
const isTranscriptionReady = Boolean(transcriptionArtifactId) const isTranscriptionReady = Boolean(transcriptionArtifactId)
const isTranscriptionUnavailable = const isTranscriptionUnavailable =
!isTranscriptionReady && !isArtifactResolving !isTranscriptionReady && !isArtifactResolving
useEffect(() => {
if (
!transcriptionArtifactId ||
transcriptionArtifactId === contextArtifactId
) {
return
}
setTranscriptionArtifactId(transcriptionArtifactId)
}, [
contextArtifactId,
setTranscriptionArtifactId,
transcriptionArtifactId,
])
// Auto-trigger frame extraction so video frames appear in timeline // Auto-trigger frame extraction so video frames appear in timeline
const frameExtractMutation = api.useMutation( const frameExtractMutation = api.useMutation(
"post", "post",
@@ -154,9 +113,7 @@ const SubtitleRevisionContent: FunctionComponent<{
const handleFinish = () => { const handleFinish = () => {
if (!isTranscriptionReady) return if (!isTranscriptionReady) return
void markTranscriptionReviewed()
markStepCompleted("subtitle-revision")
goToStep("caption-settings")
} }
return ( return (
@@ -280,9 +237,5 @@ const SubtitleRevisionContent: FunctionComponent<{
export const SubtitleRevisionStep: FunctionComponent< export const SubtitleRevisionStep: FunctionComponent<
ISubtitleRevisionStepProps ISubtitleRevisionStepProps
> = ({ className }): JSX.Element => { > = ({ className }): JSX.Element => {
return ( return <SubtitleRevisionContent className={className} />
<StaticWorkspaceProvider>
<SubtitleRevisionContent className={className} />
</StaticWorkspaceProvider>
)
} }
@@ -13,8 +13,6 @@ import api from "@shared/api"
import { useWizard } from "@shared/context/WizardContext" import { useWizard } from "@shared/context/WizardContext"
import { useAppSelector } from "@shared/hooks/useAppSelector" import { useAppSelector } from "@shared/hooks/useAppSelector"
import { Button, CircularProgress, Form, Select, SelectItem } from "@shared/ui" import { Button, CircularProgress, Form, Select, SelectItem } from "@shared/ui"
import { useSubmitTranscription } from "../TranscriptionModal/useSubmitTranscription"
import { buildCancelJobPayload, useCancelJob } from "../useCancelJob" import { buildCancelJobPayload, useCancelJob } from "../useCancelJob"
import styles from "./TranscriptionSettingsStep.module.scss" import styles from "./TranscriptionSettingsStep.module.scss"
@@ -53,12 +51,11 @@ export const TranscriptionSettingsStep: FunctionComponent<
ITranscriptionSettingsStepProps ITranscriptionSettingsStepProps
> = ({ className }): JSX.Element => { > = ({ className }): JSX.Element => {
const { const {
projectId,
primaryFileKey, primaryFileKey,
activeJobId, activeJobId,
activeJobType, activeJobType,
setActiveJob, setActiveJob,
startProcessingJob, startTranscription,
goToStep, goToStep,
} = useWizard() } = useWizard()
@@ -66,6 +63,7 @@ export const TranscriptionSettingsStep: FunctionComponent<
!!activeJobId && activeJobType === "TRANSCRIPTION_GENERATE" !!activeJobId && activeJobType === "TRANSCRIPTION_GENERATE"
const [submitError, setSubmitError] = useState<string | null>(null) const [submitError, setSubmitError] = useState<string | null>(null)
const [isSubmitting, setIsSubmitting] = useState(false)
const { mutate: cancelJob, isPending: isCancelling } = useCancelJob() const { mutate: cancelJob, isPending: isCancelling } = useCancelJob()
const { control, handleSubmit, watch, setValue } = const { control, handleSubmit, watch, setValue } =
@@ -87,36 +85,23 @@ export const TranscriptionSettingsStep: FunctionComponent<
} }
}, [engine, setValue]) }, [engine, setValue])
const { mutate, isPending } = useSubmitTranscription({ const onSubmit = async (data: ITranscriptionFormData): Promise<void> => {
onSuccess: (data) => {
if (data?.job_id) {
startProcessingJob(
data.job_id,
"TRANSCRIPTION_GENERATE",
"transcription-processing",
"transcription-settings",
)
}
},
onError: (error) => {
console.error("Transcription submit failed:", error)
setSubmitError("Не удалось запустить транскрипцию")
},
})
const onSubmit = (data: ITranscriptionFormData): void => {
if (!primaryFileKey) return if (!primaryFileKey) return
setSubmitError(null) setSubmitError(null)
setIsSubmitting(true)
mutate({ try {
body: { await startTranscription({
file_key: primaryFileKey,
project_id: projectId,
engine: data.engine, engine: data.engine,
language: data.language === "auto" ? undefined : data.language, language: data.language === "auto" ? undefined : data.language,
model: data.model, model: data.model,
},
}) })
} catch (error) {
console.error("Transcription submit failed:", error)
setSubmitError("Не удалось запустить транскрипцию")
} finally {
setIsSubmitting(false)
}
} }
/* ---- Processing state (inline) ---- */ /* ---- Processing state (inline) ---- */
@@ -309,7 +294,7 @@ export const TranscriptionSettingsStep: FunctionComponent<
<Button <Button
type="button" type="button"
variant="outline" variant="outline"
disabled={isPending} disabled={isSubmitting}
onClick={() => goToStep("fragments")} onClick={() => goToStep("fragments")}
> >
Назад Назад
@@ -317,9 +302,9 @@ export const TranscriptionSettingsStep: FunctionComponent<
<Button <Button
type="submit" type="submit"
variant="primary" variant="primary"
disabled={isPending || !primaryFileKey} disabled={isSubmitting || !primaryFileKey}
> >
{isPending ? "Запуск..." : "Сгенерировать субтитры"} {isSubmitting ? "Запуск..." : "Сгенерировать субтитры"}
</Button> </Button>
</div> </div>
</Form> </Form>
@@ -20,7 +20,7 @@ const ERROR_UPLOAD_FAILED = "Не удалось загрузить файл"
export const UploadStep: FunctionComponent<IUploadStepProps> = ({ export const UploadStep: FunctionComponent<IUploadStepProps> = ({
className, className,
}): JSX.Element => { }): JSX.Element => {
const { projectId, setFileKey, markStepCompleted, goNext } = useWizard() const { projectId, setFileKey } = useWizard()
const [isDragging, setIsDragging] = useState(false) const [isDragging, setIsDragging] = useState(false)
const [isUploading, setIsUploading] = useState(false) const [isUploading, setIsUploading] = useState(false)
const [progress, setProgress] = useState(0) const [progress, setProgress] = useState(0)
@@ -39,16 +39,18 @@ export const UploadStep: FunctionComponent<IUploadStepProps> = ({
`projects/${projectId}`, `projects/${projectId}`,
setProgress, setProgress,
) )
setFileKey(result.file_path, result.file_id, result.filename ?? null) await setFileKey(
markStepCompleted("upload") result.file_path,
goNext() result.file_id,
result.filename ?? null,
)
} catch { } catch {
setError(ERROR_UPLOAD_FAILED) setError(ERROR_UPLOAD_FAILED)
} finally { } finally {
setIsUploading(false) setIsUploading(false)
} }
}, },
[projectId, setFileKey, markStepCompleted, goNext], [projectId, setFileKey],
) )
const handleFileChange = useCallback( const handleFileChange = useCallback(
+11 -57
View File
@@ -32,7 +32,7 @@ import {
import cs from "classnames" import cs from "classnames"
import api, { fetchClient } from "@shared/api" import api from "@shared/api"
import { useWizard } from "@shared/context/WizardContext" import { useWizard } from "@shared/context/WizardContext"
import { useTaskProgressState } from "@shared/hooks/useTaskProgressState" import { useTaskProgressState } from "@shared/hooks/useTaskProgressState"
import { Badge, Button, CircularProgress } from "@shared/ui" import { Badge, Button, CircularProgress } from "@shared/ui"
@@ -55,19 +55,16 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
className, className,
}): JSX.Element => { }): JSX.Element => {
const { const {
projectId,
primaryFileKey, primaryFileKey,
videoUrl, videoUrl,
originalFileName, originalFileName,
activeJobId, activeJobId,
activeJobType, activeJobType,
goBack, goBack,
goNext, confirmVerify,
goToStep,
markStepCompleted,
setFileKey, setFileKey,
setActiveJob, setActiveJob,
startProcessingJob, startMediaConvert,
} = useWizard() } = useWizard()
const [convertError, setConvertError] = useState<string | null>(null) const [convertError, setConvertError] = useState<string | null>(null)
@@ -113,27 +110,11 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
/* ---- Conversion logic ---- */ /* ---- Conversion logic ---- */
const convertMutation = api.useMutation("post", "/api/tasks/media-convert/", {
onSuccess: (data) => {
startProcessingJob(data.job_id, "MEDIA_CONVERT", "verify")
setConvertError(null)
},
onError: () => {
setConvertError(ERROR_CONVERT_FAILED)
},
})
const handleConvert = useCallback(() => { const handleConvert = useCallback(() => {
if (!primaryFileKey) return void startMediaConvert().catch(() => {
convertMutation.mutate({ setConvertError(ERROR_CONVERT_FAILED)
body: {
file_key: primaryFileKey,
out_folder: `projects/${projectId}`,
output_format: "mp4",
project_id: projectId,
},
}) })
}, [convertMutation, primaryFileKey, projectId]) }, [startMediaConvert])
const { const {
progressPct: convertProgressPct, progressPct: convertProgressPct,
@@ -149,47 +130,20 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
useEffect(() => { useEffect(() => {
if (!convertJobId || convertStatus !== "converting") return if (!convertJobId || convertStatus !== "converting") return
if (convertTaskStatus === "DONE") {
fetchConvertedFileFromJob(convertJobId)
}
if (convertTaskStatus === "FAILED") { if (convertTaskStatus === "FAILED") {
setActiveJob(null) setActiveJob(null)
setConvertError(convertErrorMessage ?? "Ошибка конвертации") setConvertError(convertErrorMessage ?? "Ошибка конвертации")
} }
// eslint-disable-next-line react-hooks/exhaustive-deps }, [convertErrorMessage, convertJobId, convertStatus, convertTaskStatus, setActiveJob])
}, [convertErrorMessage, convertJobId, convertStatus, convertTaskStatus])
const fetchConvertedFileFromJob = useCallback(
async (jobId: string) => {
const { data: taskStatus } = await fetchClient.GET(
"/api/tasks/status/{job_id}/",
{ params: { path: { job_id: jobId } } },
)
const outputData = taskStatus?.output_data as {
file_id?: string
file_path?: string
} | null
if (outputData?.file_id && outputData?.file_path) {
const convertedName = outputData.file_path.split("/").pop() ?? null
setFileKey(outputData.file_path, outputData.file_id, convertedName)
setActiveJob(null)
}
},
[setFileKey, setActiveJob],
)
/* ---- Handlers ---- */ /* ---- Handlers ---- */
const handleReplace = () => { const handleReplace = () => {
setFileKey(null, null, null) void setFileKey(null, null, null)
goToStep("upload")
} }
const handleNext = () => { const handleNext = () => {
markStepCompleted("verify") void confirmVerify()
goNext()
} }
/* ---- Converting view ---- */ /* ---- Converting view ---- */
@@ -266,7 +220,7 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
variant="primary" variant="primary"
size="sm" size="sm"
onClick={handleConvert} onClick={handleConvert}
disabled={convertMutation.isPending} disabled={!primaryFileKey}
> >
Конвертировать в MP4 Конвертировать в MP4
</Button> </Button>
@@ -357,7 +311,7 @@ export const VerifyStep: FunctionComponent<IVerifyStepProps> = ({
variant="primary" variant="primary"
size="sm" size="sm"
onClick={handleConvert} onClick={handleConvert}
disabled={convertMutation.isPending} disabled={!primaryFileKey}
> >
Конвертировать в MP4 Конвертировать в MP4
</Button> </Button>
+477 -8
View File
@@ -200,6 +200,40 @@ export interface paths {
patch: operations["patch_project_api_projects__project_id___patch"]; patch: operations["patch_project_api_projects__project_id___patch"];
trace?: never; trace?: never;
}; };
"/api/projects/{project_id}/workspace": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
/** Get Project Workspace */
get: operations["get_project_workspace_api_projects__project_id__workspace_get"];
put?: never;
post?: never;
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
"/api/projects/{project_id}/workflow/actions": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
get?: never;
put?: never;
/** Dispatch Project Workflow Action */
post: operations["dispatch_project_workflow_action_api_projects__project_id__workflow_actions_post"];
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
"/api/files/upload/": { "/api/files/upload/": {
parameters: { parameters: {
query?: never; query?: never;
@@ -987,6 +1021,19 @@ export interface paths {
export type webhooks = Record<string, never>; export type webhooks = Record<string, never>;
export interface components { export interface components {
schemas: { schemas: {
/** ActiveJobState */
ActiveJobState: {
/**
* Job Id
* Format: uuid
*/
job_id: string;
/**
* Job Type
* @enum {string}
*/
job_type: "MEDIA_PROBE" | "SILENCE_REMOVE" | "SILENCE_DETECT" | "SILENCE_APPLY" | "MEDIA_CONVERT" | "TRANSCRIPTION_GENERATE" | "CAPTIONS_GENERATE" | "FRAME_EXTRACT";
};
/** ArtifactMediaFileCreate */ /** ArtifactMediaFileCreate */
ArtifactMediaFileCreate: { ArtifactMediaFileCreate: {
/** Project Id */ /** Project Id */
@@ -1286,6 +1333,43 @@ export interface components {
/** Result */ /** Result */
result: string; result: string;
}; };
/** CaptionsState */
CaptionsState: {
/** @default IDLE */
status: components["schemas"]["CaptionsWorkflowStatus"];
/** Preset Id */
preset_id?: string | null;
/** Style Config */
style_config?: {
[key: string]: unknown;
} | null;
/** Render Job Id */
render_job_id?: string | null;
/** Output File Id */
output_file_id?: string | null;
};
/**
* CaptionsWorkflowStatus
* @enum {string}
*/
CaptionsWorkflowStatus: "IDLE" | "CONFIGURED" | "PROCESSING" | "COMPLETED";
/** ConfirmVerifyAction */
ConfirmVerifyAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "CONFIRM_VERIFY";
/** Revision */
revision: number;
};
/** CutRegionState */
CutRegionState: {
/** Start Ms */
start_ms: number;
/** End Ms */
end_ms: number;
};
/** DispositionSchema */ /** DispositionSchema */
DispositionSchema: { DispositionSchema: {
/** Default */ /** Default */
@@ -1653,6 +1737,16 @@ export interface components {
/** Words */ /** Words */
words: components["schemas"]["WordNode"][]; words: components["schemas"]["WordNode"][];
}; };
/** MarkTranscriptionReviewedAction */
MarkTranscriptionReviewedAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "MARK_TRANSCRIPTION_REVIEWED";
/** Revision */
revision: number;
};
/** /**
* MediaConvertRequest * MediaConvertRequest
* @description Request to convert media file to different format. * @description Request to convert media file to different format.
@@ -1909,10 +2003,6 @@ export interface components {
* @enum {string} * @enum {string}
*/ */
status: "DRAFT" | "PROCESSING" | "DONE" | "FAILED"; status: "DRAFT" | "PROCESSING" | "DONE" | "FAILED";
/** Workspace State */
workspace_state: {
[key: string]: unknown;
} | null;
/** Is Active */ /** Is Active */
is_active: boolean; is_active: boolean;
/** /**
@@ -1938,10 +2028,71 @@ export interface components {
folder?: string | null; folder?: string | null;
/** Status */ /** Status */
status?: ("DRAFT" | "PROCESSING" | "DONE" | "FAILED") | null; status?: ("DRAFT" | "PROCESSING" | "DONE" | "FAILED") | null;
/** Workspace State */ };
workspace_state?: { /** ProjectWorkspaceRead */
[key: string]: unknown; ProjectWorkspaceRead: {
} | null; /**
* Project Id
* Format: uuid
*/
project_id: string;
/** Revision */
revision: number;
/** Version */
version: number;
phase: components["schemas"]["WorkflowPhase"];
/**
* Current Screen
* @enum {string}
*/
current_screen: "upload" | "verify" | "silence-settings" | "processing" | "fragments" | "silence-apply-processing" | "transcription-settings" | "transcription-processing" | "subtitle-revision" | "caption-settings" | "caption-processing" | "caption-result";
active_job: components["schemas"]["ActiveJobState"] | null;
/** Source File Id */
source_file_id: string | null;
workspace_view: components["schemas"]["WorkspaceViewState"];
silence: components["schemas"]["SilenceState"];
transcription: components["schemas"]["TranscriptionState"];
captions: components["schemas"]["CaptionsState"];
};
/** ReopenCaptionConfigAction */
ReopenCaptionConfigAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "REOPEN_CAPTION_CONFIG";
/** Revision */
revision: number;
};
/** ReopenSilenceReviewAction */
ReopenSilenceReviewAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "REOPEN_SILENCE_REVIEW";
/** Revision */
revision: number;
};
/** ReopenTranscriptionConfigAction */
ReopenTranscriptionConfigAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "REOPEN_TRANSCRIPTION_CONFIG";
/** Revision */
revision: number;
};
/** ResetSourceFileAction */
ResetSourceFileAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "RESET_SOURCE_FILE";
/** Revision */
revision: number;
}; };
/** SaluteSpeechParams */ /** SaluteSpeechParams */
SaluteSpeechParams: { SaluteSpeechParams: {
@@ -1979,6 +2130,71 @@ export interface components {
/** Lines */ /** Lines */
lines: components["schemas"]["LineNode-Output"][]; lines: components["schemas"]["LineNode-Output"][];
}; };
/** SelectCaptionPresetAction */
SelectCaptionPresetAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SELECT_CAPTION_PRESET";
/** Revision */
revision: number;
/** Preset Id */
preset_id?: string | null;
/** Style Config */
style_config?: {
[key: string]: unknown;
} | null;
};
/** SetSilenceCutsAction */
SetSilenceCutsAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SET_SILENCE_CUTS";
/** Revision */
revision: number;
/** Cuts */
cuts: components["schemas"]["CutRegionState"][];
};
/** SetSilenceSettingsAction */
SetSilenceSettingsAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SET_SILENCE_SETTINGS";
/** Revision */
revision: number;
settings?: components["schemas"]["SilenceSettingsState"];
};
/** SetSourceFileAction */
SetSourceFileAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SET_SOURCE_FILE";
/** Revision */
revision: number;
/**
* File Id
* Format: uuid
*/
file_id: string;
};
/** SetWorkspaceViewAction */
SetWorkspaceViewAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SET_WORKSPACE_VIEW";
/** Revision */
revision: number;
workspace_view: components["schemas"]["WorkspaceViewState"];
};
/** /**
* SilenceApplyRequest * SilenceApplyRequest
* @description Request to apply silence cuts to media file. * @description Request to apply silence cuts to media file.
@@ -2085,6 +2301,143 @@ export interface components {
*/ */
padding_ms: number; padding_ms: number;
}; };
/** SilenceSettingsState */
SilenceSettingsState: {
/**
* Min Silence Duration Ms
* @default 200
*/
min_silence_duration_ms: number;
/**
* Silence Threshold Db
* @default 16
*/
silence_threshold_db: number;
/**
* Padding Ms
* @default 100
*/
padding_ms: number;
};
/** SilenceState */
SilenceState: {
/** @default IDLE */
status: components["schemas"]["SilenceWorkflowStatus"];
settings?: components["schemas"]["SilenceSettingsState"];
/** Detect Job Id */
detect_job_id?: string | null;
/** Detected Segments */
detected_segments?: components["schemas"]["CutRegionState"][];
/** Reviewed Cuts */
reviewed_cuts?: components["schemas"]["CutRegionState"][];
/** Duration Ms */
duration_ms?: number | null;
/** Applied Output File Id */
applied_output_file_id?: string | null;
};
/**
* SilenceWorkflowStatus
* @enum {string}
*/
SilenceWorkflowStatus: "IDLE" | "CONFIGURED" | "DETECTING" | "REVIEWING" | "APPLYING" | "COMPLETED" | "SKIPPED";
/** SkipSilenceApplyAction */
SkipSilenceApplyAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "SKIP_SILENCE_APPLY";
/** Revision */
revision: number;
};
/** StartCaptionRenderAction */
StartCaptionRenderAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_CAPTION_RENDER";
/** Revision */
revision: number;
/**
* Folder
* @default output_files
*/
folder: string;
};
/** StartMediaConvertAction */
StartMediaConvertAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_MEDIA_CONVERT";
/** Revision */
revision: number;
/**
* Output Format
* @default mp4
*/
output_format: string;
/**
* Out Folder
* @default output_files
*/
out_folder: string;
};
/** StartSilenceApplyAction */
StartSilenceApplyAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_SILENCE_APPLY";
/** Revision */
revision: number;
/** Cuts */
cuts?: components["schemas"]["CutRegionState"][] | null;
/**
* Out Folder
* @default output_files
*/
out_folder: string;
/** Output Name */
output_name?: string | null;
};
/** StartSilenceDetectAction */
StartSilenceDetectAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_SILENCE_DETECT";
/** Revision */
revision: number;
};
/** StartTranscriptionAction */
StartTranscriptionAction: {
/**
* @description discriminator enum property added by openapi-typescript
* @enum {string}
*/
type: "START_TRANSCRIPTION";
/** Revision */
revision: number;
/**
* Engine
* @default whisper
* @enum {string}
*/
engine: "whisper" | "google" | "salutespeech";
/** Language */
language?: string | null;
/**
* Model
* @default base
*/
model: string;
request?: components["schemas"]["TranscriptionRequestState"] | null;
};
/** StreamSchema */ /** StreamSchema */
StreamSchema: { StreamSchema: {
/** Index */ /** Index */
@@ -2324,6 +2677,39 @@ export interface components {
*/ */
updated_at: string; updated_at: string;
}; };
/** TranscriptionRequestState */
TranscriptionRequestState: {
/**
* Engine
* @default whisper
* @enum {string}
*/
engine: "whisper" | "google" | "salutespeech";
/** Language */
language?: string | null;
/**
* Model
* @default base
*/
model: string;
};
/** TranscriptionState */
TranscriptionState: {
/** @default IDLE */
status: components["schemas"]["TranscriptionWorkflowStatus"];
request?: components["schemas"]["TranscriptionRequestState"];
/** Job Id */
job_id?: string | null;
/** Artifact Id */
artifact_id?: string | null;
/** Transcription Id */
transcription_id?: string | null;
/**
* Reviewed
* @default false
*/
reviewed: boolean;
};
/** TranscriptionUpdate */ /** TranscriptionUpdate */
TranscriptionUpdate: { TranscriptionUpdate: {
/** Document */ /** Document */
@@ -2335,6 +2721,11 @@ export interface components {
[key: string]: unknown; [key: string]: unknown;
} | null; } | null;
}; };
/**
* TranscriptionWorkflowStatus
* @enum {string}
*/
TranscriptionWorkflowStatus: "IDLE" | "PROCESSING" | "REVIEWING" | "COMPLETED";
/** UserCreate */ /** UserCreate */
UserCreate: { UserCreate: {
/** Username */ /** Username */
@@ -2542,6 +2933,18 @@ export interface components {
structure_tags: components["schemas"]["Tag"][]; structure_tags: components["schemas"]["Tag"][];
time: components["schemas"]["TimeRange"]; time: components["schemas"]["TimeRange"];
}; };
/**
* WorkflowPhase
* @enum {string}
*/
WorkflowPhase: "INGEST" | "VERIFY" | "SILENCE" | "TRANSCRIPTION" | "CAPTIONS" | "DONE";
/** WorkspaceViewState */
WorkspaceViewState: {
/** Used File Ids */
used_file_ids?: string[];
/** Selected File Id */
selected_file_id?: string | null;
};
}; };
responses: never; responses: never;
parameters: never; parameters: never;
@@ -3055,6 +3458,72 @@ export interface operations {
}; };
}; };
}; };
get_project_workspace_api_projects__project_id__workspace_get: {
parameters: {
query?: never;
header?: never;
path: {
project_id: string;
};
cookie?: never;
};
requestBody?: never;
responses: {
/** @description Successful Response */
200: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": components["schemas"]["ProjectWorkspaceRead"];
};
};
/** @description Validation Error */
422: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": components["schemas"]["HTTPValidationError"];
};
};
};
};
dispatch_project_workflow_action_api_projects__project_id__workflow_actions_post: {
parameters: {
query?: never;
header?: never;
path: {
project_id: string;
};
cookie?: never;
};
requestBody: {
content: {
"application/json": components["schemas"]["SetSourceFileAction"] | components["schemas"]["ResetSourceFileAction"] | components["schemas"]["StartMediaConvertAction"] | components["schemas"]["ConfirmVerifyAction"] | components["schemas"]["SetSilenceSettingsAction"] | components["schemas"]["StartSilenceDetectAction"] | components["schemas"]["SetSilenceCutsAction"] | components["schemas"]["SkipSilenceApplyAction"] | components["schemas"]["StartSilenceApplyAction"] | components["schemas"]["ReopenSilenceReviewAction"] | components["schemas"]["StartTranscriptionAction"] | components["schemas"]["ReopenTranscriptionConfigAction"] | components["schemas"]["MarkTranscriptionReviewedAction"] | components["schemas"]["SelectCaptionPresetAction"] | components["schemas"]["StartCaptionRenderAction"] | components["schemas"]["ReopenCaptionConfigAction"] | components["schemas"]["SetWorkspaceViewAction"];
};
};
responses: {
/** @description Successful Response */
200: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": components["schemas"]["ProjectWorkspaceRead"];
};
};
/** @description Validation Error */
422: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": components["schemas"]["HTTPValidationError"];
};
};
};
};
upload_file_api_files_upload__post: { upload_file_api_files_upload__post: {
parameters: { parameters: {
query?: never; query?: never;
+246
View File
@@ -0,0 +1,246 @@
"use client"
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"
import { ACCESS_TOKEN_REGEXP, API_URL } from "@shared/lib/constants"
export type WorkflowPhase =
| "INGEST"
| "VERIFY"
| "SILENCE"
| "TRANSCRIPTION"
| "CAPTIONS"
| "DONE"
export type WorkflowScreen =
| "upload"
| "verify"
| "silence-settings"
| "processing"
| "fragments"
| "silence-apply-processing"
| "transcription-settings"
| "transcription-processing"
| "subtitle-revision"
| "caption-settings"
| "caption-processing"
| "caption-result"
export interface SilenceSettingsPayload {
min_silence_duration_ms: number
silence_threshold_db: number
padding_ms: number
}
export interface WorkflowCutRegionPayload {
start_ms: number
end_ms: number
}
export interface WorkflowActiveJob {
job_id: string
job_type: string
}
export interface WorkflowWorkspaceView {
used_file_ids: string[]
selected_file_id: string | null
}
export interface WorkflowSilenceState {
status: string | null
settings: SilenceSettingsPayload | null
detect_job_id: string | null
detected_segments: WorkflowCutRegionPayload[]
reviewed_cuts: WorkflowCutRegionPayload[]
duration_ms: number | null
applied_output_file_id: string | null
}
export interface WorkflowTranscriptionRequest {
engine: "whisper" | "google" | "salutespeech"
language?: string
model: string
}
export interface WorkflowTranscriptionState {
status: string | null
job_id: string | null
request: WorkflowTranscriptionRequest | null
artifact_id: string | null
transcription_id: string | null
reviewed: boolean
}
export interface WorkflowCaptionsState {
status: string | null
preset_id: string | null
style_config: Record<string, unknown> | null
render_job_id: string | null
output_file_id: string | null
}
/**
 * Client-side snapshot of a project's workflow workspace, as returned by
 * GET /api/projects/{projectId}/workspace and by workflow action dispatch.
 */
export interface ProjectWorkspaceRead {
  // Optimistic-concurrency revision; echoed back in every WorkflowActionRequest.
  revision: number
  phase: WorkflowPhase
  current_screen: WorkflowScreen
  // Job currently running for this project, if any.
  active_job: WorkflowActiveJob | null
  source_file_id: string | null
  workspace_view: WorkflowWorkspaceView
  silence: WorkflowSilenceState
  transcription: WorkflowTranscriptionState
  captions: WorkflowCaptionsState
}
// Shared shape of every workflow action: a discriminator tag plus the
// workspace revision the client last observed. A stale revision is rejected
// by the server (the mutation hook refetches on 409).
type WorkflowActionBase<TActionType extends string> = {
  type: TActionType
  revision: number
}

/**
 * Discriminated union of all actions accepted by
 * POST /api/projects/{projectId}/workflow/actions.
 *
 * NOTE(review): some payload fields are stricter here than in the generated
 * OpenAPI schema (e.g. START_SILENCE_APPLY's `cuts` is required here but
 * optional/nullable server-side) — confirm against the server contract.
 */
export type WorkflowActionRequest =
  | (WorkflowActionBase<"SET_SOURCE_FILE"> & {
      file_id: string
    })
  | WorkflowActionBase<"RESET_SOURCE_FILE">
  | (WorkflowActionBase<"START_MEDIA_CONVERT"> & {
      output_format?: "mp4"
    })
  | WorkflowActionBase<"CONFIRM_VERIFY">
  | (WorkflowActionBase<"SET_SILENCE_SETTINGS"> & {
      settings: SilenceSettingsPayload
    })
  | WorkflowActionBase<"START_SILENCE_DETECT">
  | (WorkflowActionBase<"SET_SILENCE_CUTS"> & {
      cuts: WorkflowCutRegionPayload[]
    })
  | WorkflowActionBase<"SKIP_SILENCE_APPLY">
  | (WorkflowActionBase<"START_SILENCE_APPLY"> & {
      cuts: WorkflowCutRegionPayload[]
    })
  | WorkflowActionBase<"REOPEN_SILENCE_REVIEW">
  | (WorkflowActionBase<"START_TRANSCRIPTION"> & {
      request: WorkflowTranscriptionRequest
    })
  | WorkflowActionBase<"REOPEN_TRANSCRIPTION_CONFIG">
  | WorkflowActionBase<"MARK_TRANSCRIPTION_REVIEWED">
  | (WorkflowActionBase<"SELECT_CAPTION_PRESET"> & {
      preset_id: string | null
    })
  | WorkflowActionBase<"START_CAPTION_RENDER">
  | WorkflowActionBase<"REOPEN_CAPTION_CONFIG">
  | (WorkflowActionBase<"SET_WORKSPACE_VIEW"> & {
      workspace_view: WorkflowWorkspaceView
    })
/** Error carrying the HTTP status code of a failed workflow API request. */
class WorkflowApiError extends Error {
  constructor(
    public status: number,
    message: string,
  ) {
    super(message)
    this.name = "WorkflowApiError"
  }
}
/**
 * Resolve the API base URL: the configured API_URL when set, otherwise the
 * browser origin, otherwise an empty string (relative URLs) on the server.
 */
function getBaseApiUrl(): string {
  if (API_URL?.length) {
    return API_URL
  }
  return typeof window === "undefined" ? "" : window.location.origin
}
/**
 * Extract the access token from the document cookie.
 * Returns null outside the browser or when no token is present.
 */
function getAccessToken(): string | null {
  if (typeof document === "undefined") {
    return null
  }
  const extracted = document.cookie.replace(ACCESS_TOKEN_REGEXP, "$1")
  return extracted.length > 0 ? extracted : null
}
/**
 * Perform an authenticated JSON request against the API.
 * Attaches the bearer token from cookies when available, throws
 * WorkflowApiError on any non-OK response, and returns null for 204.
 */
async function requestJson<TResponse>(
  path: string,
  init?: RequestInit,
): Promise<TResponse> {
  const bearer = getAccessToken()
  // Caller-supplied headers win over the defaults below.
  const headers: HeadersInit = {
    "Content-Type": "application/json",
    ...(bearer ? { Authorization: `Bearer ${bearer}` } : {}),
    ...(init?.headers ?? {}),
  }

  const response = await fetch(`${getBaseApiUrl()}${path}`, {
    credentials: "include",
    ...init,
    headers,
  })

  if (!response.ok) {
    throw new WorkflowApiError(
      response.status,
      response.statusText || "Workflow request failed",
    )
  }

  // 204 No Content has no body to parse.
  return response.status === 204
    ? (null as TResponse)
    : ((await response.json()) as TResponse)
}
/** Stable React Query cache key for one project's workflow workspace. */
export function getProjectWorkspaceQueryKey(projectId: string) {
  const queryKey = ["project-workspace", projectId] as const
  return queryKey
}
/** GET the current workflow workspace snapshot for a project. */
export async function fetchProjectWorkspace(
  projectId: string,
): Promise<ProjectWorkspaceRead> {
  const workspacePath = `/api/projects/${projectId}/workspace`
  return await requestJson<ProjectWorkspaceRead>(workspacePath, {
    method: "GET",
  })
}
/**
 * POST a workflow action for a project.
 * Resolves with the updated workspace snapshot, or null when the server
 * responds without a body (204).
 */
export async function postWorkflowAction(
  projectId: string,
  action: WorkflowActionRequest,
): Promise<ProjectWorkspaceRead | null> {
  const actionsPath = `/api/projects/${projectId}/workflow/actions`
  return await requestJson<ProjectWorkspaceRead | null>(actionsPath, {
    method: "POST",
    body: JSON.stringify(action),
  })
}
/** Query hook for a project's workflow workspace; disabled for empty ids. */
export function useProjectWorkspaceQuery(projectId: string) {
  const queryKey = getProjectWorkspaceQueryKey(projectId)
  return useQuery({
    queryKey,
    queryFn: () => fetchProjectWorkspace(projectId),
    enabled: projectId.length > 0,
  })
}
/**
 * Mutation hook that dispatches a workflow action for a project and keeps
 * the workspace query cache in sync:
 * - success with a returned workspace: write it straight into the cache;
 * - success without a body: invalidate so the workspace is refetched;
 * - 409 conflict (stale revision): invalidate so the client picks up the
 *   latest server revision before retrying.
 */
export function useWorkflowAction(projectId: string) {
  const queryClient = useQueryClient()
  const queryKey = getProjectWorkspaceQueryKey(projectId)

  return useMutation({
    mutationFn: (action: WorkflowActionRequest) =>
      postWorkflowAction(projectId, action),
    onSuccess: (workspace) => {
      if (workspace) {
        queryClient.setQueryData(queryKey, workspace)
        return
      }
      // invalidateQueries returns a Promise; mark it as intentionally
      // fire-and-forget instead of leaving a floating promise.
      void queryClient.invalidateQueries({ queryKey })
    },
    onError: (error) => {
      if (
        error instanceof WorkflowApiError &&
        error.status === 409
      ) {
        void queryClient.invalidateQueries({ queryKey })
      }
    },
  })
}
/** True when the given error is a workflow API 409 (revision conflict). */
export function isWorkflowConflictError(error: unknown): boolean {
  if (!(error instanceof WorkflowApiError)) {
    return false
  }
  return error.status === 409
}
+7
View File
@@ -14,6 +14,7 @@ import {
NotificationItem, NotificationItem,
setNotifications, setNotifications,
} from "@shared/store/notifications" } from "@shared/store/notifications"
import { getProjectWorkspaceQueryKey } from "@shared/api/projectWorkflow"
interface SocketContextValue { interface SocketContextValue {
isConnected: boolean isConnected: boolean
@@ -246,6 +247,12 @@ export const SocketProvider = ({
queryKey: ["get", "/api/files/files/"], queryKey: ["get", "/api/files/files/"],
}) })
} }
if (data.project_id) {
queryClient.invalidateQueries({
queryKey: getProjectWorkspaceQueryKey(data.project_id),
})
}
} catch { } catch {
// Ignore malformed messages // Ignore malformed messages
} }
File diff suppressed because it is too large Load Diff
+155 -130
View File
@@ -13,12 +13,13 @@ import {
} from "react" } from "react"
import api from "@shared/api" import api from "@shared/api"
import {
type WorkflowWorkspaceView,
useProjectWorkspaceQuery,
useWorkflowAction,
} from "@shared/api/projectWorkflow"
import { useDebounce } from "@shared/hooks/useDebounce" import { useDebounce } from "@shared/hooks/useDebounce"
/* ------------------------------------------------------------------ */
/* Types */
/* ------------------------------------------------------------------ */
export type SelectedFile = { export type SelectedFile = {
id: string id: string
path: string path: string
@@ -43,98 +44,182 @@ interface WorkspaceFileContextValue {
isLoaded: boolean isLoaded: boolean
} }
/* ------------------------------------------------------------------ */
/* Context */
/* ------------------------------------------------------------------ */
const FileContext = createContext<WorkspaceFileContextValue | null>(null) const FileContext = createContext<WorkspaceFileContextValue | null>(null)
/* ------------------------------------------------------------------ */ const DEBOUNCE_MS = 300
/* Provider */
/* ------------------------------------------------------------------ */
const DEBOUNCE_MS = 1000 function getFileIconType(mimeType: string | null | undefined) {
if (!mimeType) return "other" as const
if (mimeType.startsWith("video/")) return "video" as const
if (mimeType.startsWith("audio/")) return "audio" as const
if (mimeType.includes("json") || mimeType.startsWith("text/")) {
return "text" as const
}
return "other" as const
}
function getArtifactDisplayName(artifactType: string | null | undefined): string {
switch (artifactType) {
case "TRANSCRIPTION_JSON":
return "Субтитры"
default:
return artifactType ?? "Артефакт"
}
}
export const WorkspaceProvider: FunctionComponent<{ export const WorkspaceProvider: FunctionComponent<{
projectId: string projectId: string
children: ReactNode children: ReactNode
}> = ({ projectId, children }) => { }> = ({ projectId, children }) => {
const [selectedFile, setSelectedFileState] = useState<SelectedFile | null>( const { data: workspace } = useProjectWorkspaceQuery(projectId)
const workflowAction = useWorkflowAction(projectId)
const [usedFileIds, setUsedFileIds] = useState<string[]>([])
const [selectedPersistedId, setSelectedPersistedId] = useState<string | null>(
null, null,
) )
const [usedFiles, setUsedFiles] = useState<UsedFile[]>([]) const [selectedFile, setSelectedFileState] = useState<SelectedFile | null>(null)
const isInitializedRef = useRef(false) const latestRevisionRef = useRef<number | null>(null)
const initialValueRef = useRef<string | null>(null)
/* ---- Load from server ---- */
const { data: project, isSuccess } = api.useQuery(
"get",
"/api/projects/{project_id}/",
{ params: { path: { project_id: projectId } } },
{ enabled: !!projectId },
)
useEffect(() => { useEffect(() => {
if (!isSuccess || isInitializedRef.current) return if (!workspace) return
const saved = project?.workspace_state as if (latestRevisionRef.current === workspace.revision) {
| { used_files?: UsedFile[] } return
| null }
| undefined
const loaded = saved?.used_files ?? []
setUsedFiles(loaded) latestRevisionRef.current = workspace.revision
initialValueRef.current = JSON.stringify(loaded) setUsedFileIds(workspace.workspace_view.used_file_ids)
isInitializedRef.current = true setSelectedPersistedId(workspace.workspace_view.selected_file_id)
}, [isSuccess, project]) }, [workspace])
/* ---- Save to server (debounced) ---- */ const { data: files } = api.useQuery("get", "/api/files/files/", {})
const { data: artifacts } = api.useQuery("get", "/api/media/artifacts/", {})
const debouncedUsedFiles = useDebounce(usedFiles, DEBOUNCE_MS) const fileMap = useMemo(() => {
const nextMap = new Map<string, UsedFile>()
const saveMutation = api.useMutation( for (const file of files ?? []) {
"patch", if (file.project_id !== projectId || file.is_deleted) continue
"/api/projects/{project_id}/",
)
useEffect(() => { nextMap.set(file.id, {
if (!isInitializedRef.current) return id: file.id,
path: file.path,
source: "file",
mimeType: file.mime_type,
displayName: file.original_filename,
iconType: getFileIconType(file.mime_type),
})
}
const serialized = JSON.stringify(debouncedUsedFiles) return nextMap
if (serialized === initialValueRef.current) return }, [files, projectId])
initialValueRef.current = serialized const artifactMap = useMemo(() => {
saveMutation.mutate({ const nextMap = new Map<string, UsedFile>()
params: { path: { project_id: projectId } },
body: { for (const artifact of artifacts ?? []) {
workspace_state: { used_files: debouncedUsedFiles }, if (artifact.project_id !== projectId || artifact.is_deleted) continue
nextMap.set(artifact.id, {
id: artifact.id,
path: "transcription",
source: "artifact",
artifactType: artifact.artifact_type,
displayName: getArtifactDisplayName(artifact.artifact_type),
iconType:
artifact.artifact_type === "TRANSCRIPTION_JSON" ? "text" : "other",
})
}
return nextMap
}, [artifacts, projectId])
const resolveUsedFile = useCallback(
(fileId: string, previous?: UsedFile | null): UsedFile | null => {
return fileMap.get(fileId) ?? artifactMap.get(fileId) ?? previous ?? null
}, },
}) [fileMap, artifactMap],
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [debouncedUsedFiles, projectId])
/* ---- Actions ---- */
const setSelectedFile = useCallback(
(file: SelectedFile | null) => setSelectedFileState(file),
[],
) )
const usedFiles = useMemo(
() =>
usedFileIds
.map((fileId) => resolveUsedFile(fileId))
.filter((file): file is UsedFile => file !== null),
[resolveUsedFile, usedFileIds],
)
useEffect(() => {
setSelectedFileState((prev) => {
if (!selectedPersistedId) return null
const resolved = resolveUsedFile(
selectedPersistedId,
prev as UsedFile | null,
)
if (!resolved) return prev
if (prev?.id === selectedPersistedId) {
return {
...resolved,
scrollToSegmentIndex: prev.scrollToSegmentIndex,
}
}
return resolved
})
}, [resolveUsedFile, selectedPersistedId])
const persistableWorkspaceView = useMemo<WorkflowWorkspaceView>(
() => ({
used_file_ids: usedFileIds,
selected_file_id: selectedPersistedId,
}),
[selectedPersistedId, usedFileIds],
)
const debouncedWorkspaceView = useDebounce(
persistableWorkspaceView,
DEBOUNCE_MS,
)
useEffect(() => {
if (!workspace) return
const localSignature = JSON.stringify(debouncedWorkspaceView)
const serverSignature = JSON.stringify(workspace.workspace_view)
if (localSignature === serverSignature) return
void workflowAction.mutateAsync({
type: "SET_WORKSPACE_VIEW",
revision: workspace.revision,
workspace_view: debouncedWorkspaceView,
})
}, [debouncedWorkspaceView, workflowAction, workspace])
const setSelectedFile = useCallback((file: SelectedFile | null) => {
setSelectedFileState(file)
setSelectedPersistedId(file?.id ?? null)
}, [])
const addUsedFile = useCallback((file: UsedFile) => { const addUsedFile = useCallback((file: UsedFile) => {
setUsedFiles((prev) => { setUsedFileIds((prev) => {
if (prev.some((f) => f.id === file.id)) return prev if (prev.includes(file.id)) return prev
return [...prev, file] return [...prev, file.id]
}) })
}, []) }, [])
const removeUsedFile = useCallback((id: string) => { const removeUsedFile = useCallback((id: string) => {
setUsedFiles((prev) => prev.filter((f) => f.id !== id)) setUsedFileIds((prev) => prev.filter((fileId) => fileId !== id))
setSelectedPersistedId((prev) => (prev === id ? null : prev))
setSelectedFileState((prev) => (prev?.id === id ? null : prev))
}, []) }, [])
const isFileUsed = useCallback( const isFileUsed = useCallback(
(id: string) => usedFiles.some((f) => f.id === id), (id: string) => usedFileIds.includes(id),
[usedFiles], [usedFileIds],
) )
const value = useMemo<WorkspaceFileContextValue>( const value = useMemo<WorkspaceFileContextValue>(
@@ -145,82 +230,22 @@ export const WorkspaceProvider: FunctionComponent<{
addUsedFile, addUsedFile,
removeUsedFile, removeUsedFile,
isFileUsed, isFileUsed,
isLoaded: isInitializedRef.current, isLoaded: Boolean(workspace),
}), }),
[ [
addUsedFile,
isFileUsed,
removeUsedFile,
selectedFile, selectedFile,
setSelectedFile, setSelectedFile,
usedFiles, usedFiles,
addUsedFile, workspace,
removeUsedFile,
isFileUsed,
], ],
) )
return <FileContext.Provider value={value}>{children}</FileContext.Provider> return <FileContext.Provider value={value}>{children}</FileContext.Provider>
} }
/* ------------------------------------------------------------------ */
/* Static provider (in-memory only, no server persistence) */
/* ------------------------------------------------------------------ */
export const StaticWorkspaceProvider: FunctionComponent<{
children: ReactNode
}> = ({ children }) => {
const [selectedFile, setSelectedFileState] = useState<SelectedFile | null>(
null,
)
const [usedFiles, setUsedFiles] = useState<UsedFile[]>([])
const setSelectedFile = useCallback(
(file: SelectedFile | null) => setSelectedFileState(file),
[],
)
const addUsedFile = useCallback((file: UsedFile) => {
setUsedFiles((prev) => {
if (prev.some((f) => f.id === file.id)) return prev
return [...prev, file]
})
}, [])
const removeUsedFile = useCallback((id: string) => {
setUsedFiles((prev) => prev.filter((f) => f.id !== id))
}, [])
const isFileUsed = useCallback(
(id: string) => usedFiles.some((f) => f.id === id),
[usedFiles],
)
const value = useMemo<WorkspaceFileContextValue>(
() => ({
selectedFile,
setSelectedFile,
usedFiles,
addUsedFile,
removeUsedFile,
isFileUsed,
isLoaded: true,
}),
[
selectedFile,
setSelectedFile,
usedFiles,
addUsedFile,
removeUsedFile,
isFileUsed,
],
)
return <FileContext.Provider value={value}>{children}</FileContext.Provider>
}
/* ------------------------------------------------------------------ */
/* Hook */
/* ------------------------------------------------------------------ */
/** File selection & used-files list — stable during playback */
export function useWorkspaceFiles(): WorkspaceFileContextValue { export function useWorkspaceFiles(): WorkspaceFileContextValue {
const ctx = useContext(FileContext) const ctx = useContext(FileContext)
if (!ctx) { if (!ctx) {
+148 -100
View File
@@ -2,12 +2,14 @@ import { expect, test } from "@playwright/test"
const USER_ID = "00000000-0000-0000-0000-000000000001" const USER_ID = "00000000-0000-0000-0000-000000000001"
const PROJECT_ID = "65df675b-013b-4b1f-ab2d-075dadbcd0d9" const PROJECT_ID = "65df675b-013b-4b1f-ab2d-075dadbcd0d9"
const SOURCE_FILE_ID = "00000000-0000-0000-0000-000000000011"
const CAPTION_PRESET_ID = "00000000-0000-0000-0000-000000000010" const CAPTION_PRESET_ID = "00000000-0000-0000-0000-000000000010"
const TRANSCRIPTION_ARTIFACT_ID = const TRANSCRIPTION_ARTIFACT_ID =
"00000000-0000-0000-0000-000000000020" "00000000-0000-0000-0000-000000000020"
const TRANSCRIPTION_ID = "00000000-0000-0000-0000-000000000030" const TRANSCRIPTION_ID = "00000000-0000-0000-0000-000000000030"
const CAPTION_JOB_ID = "00000000-0000-0000-0000-000000000040" const CAPTION_JOB_ID = "00000000-0000-0000-0000-000000000040"
const PRIMARY_FILE_KEY = "projects/test/video.mp4" const PRIMARY_FILE_KEY = "projects/test/video.mp4"
const PRIMARY_FILE_URL = "http://localhost:4444/files/video.mp4"
const DEFAULT_USER = { const DEFAULT_USER = {
id: USER_ID, id: USER_ID,
@@ -26,53 +28,49 @@ const DEFAULT_USER = {
} }
test.describe("Caption Settings Step", () => { test.describe("Caption Settings Step", () => {
test("should recover a missing transcription artifact from project data", async ({ test("should render from typed workspace and start caption render via workflow action", async ({
page, page,
}) => { }) => {
let project: Record<string, unknown> = { let workflowActions: Array<Record<string, unknown>> = []
id: PROJECT_ID, let workspace: Record<string, any> = {
owner_id: USER_ID, revision: 1,
name: "Тестовый проект", phase: "CAPTIONS",
description: null, current_screen: "caption-settings",
language: "auto", active_job: null,
folder: null, source_file_id: SOURCE_FILE_ID,
status: "DRAFT", workspace_view: {
workspace_state: { used_file_ids: [],
wizard: { selected_file_id: null,
current_step: "caption-settings", },
completed_steps: [ silence: {
"upload", status: "SKIPPED",
"verify", settings: {
"silence-settings",
"processing",
"fragments",
"transcription-settings",
"transcription-processing",
"subtitle-revision",
],
primary_file_key: PRIMARY_FILE_KEY,
video_url: "http://localhost:9000/projects/test/video.mp4",
silence_settings: {
min_silence_duration_ms: 200, min_silence_duration_ms: 200,
silence_threshold_db: 16, silence_threshold_db: 16,
padding_ms: 100, padding_ms: 100,
}, },
active_job_id: null, detect_job_id: null,
active_job_type: null, detected_segments: [],
silence_job_id: null, reviewed_cuts: [],
transcription_artifact_id: null, duration_ms: null,
caption_preset_id: CAPTION_PRESET_ID, applied_output_file_id: null,
caption_style_config: null,
captioned_video_path: null,
}, },
transcription: {
status: "REVIEW_READY",
job_id: null,
request: null,
artifact_id: TRANSCRIPTION_ARTIFACT_ID,
transcription_id: TRANSCRIPTION_ID,
reviewed: true,
},
captions: {
status: "CONFIG_READY",
preset_id: CAPTION_PRESET_ID,
style_config: null,
render_job_id: null,
output_file_id: null,
}, },
is_active: true,
created_at: "2025-06-01T00:00:00Z",
updated_at: "2025-06-01T00:00:00Z",
} }
let savedWizardState: Record<string, unknown> | null = null
let generateRequestBody: Record<string, unknown> | null = null
let generateRequestCount = 0
await page.context().addCookies([ await page.context().addCookies([
{ {
@@ -98,36 +96,127 @@ test.describe("Caption Settings Step", () => {
}) })
await page.route(`**/api/projects/${PROJECT_ID}/`, async (route) => { await page.route(`**/api/projects/${PROJECT_ID}/`, async (route) => {
if (route.request().method() === "GET") {
await route.fulfill({ await route.fulfill({
status: 200, status: 200,
contentType: "application/json", contentType: "application/json",
body: JSON.stringify(project), body: JSON.stringify({
id: PROJECT_ID,
owner_id: USER_ID,
name: "Тестовый проект",
description: null,
language: "auto",
folder: null,
status: "DRAFT",
workspace_state: null,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
updated_at: "2025-06-01T00:00:00Z",
}),
})
}) })
return
}
if (route.request().method() === "PATCH") { await page.route(`**/api/projects/${PROJECT_ID}/workspace*`, async (route) => {
const body = route.request().postDataJSON() as { await route.fulfill({
workspace_state?: { wizard?: Record<string, unknown> } status: 200,
} contentType: "application/json",
body: JSON.stringify(workspace),
})
})
savedWizardState = body.workspace_state?.wizard ?? null await page.route(
project = { `**/api/projects/${PROJECT_ID}/workflow/actions*`,
...project, async (route) => {
workspace_state: body.workspace_state ?? project.workspace_state, const action = route.request().postDataJSON() as Record<string, unknown>
workflowActions.push(action)
if (action.type === "START_CAPTION_RENDER") {
workspace = {
...workspace,
revision: 2,
current_screen: "caption-processing",
active_job: {
job_id: CAPTION_JOB_ID,
job_type: "CAPTIONS_GENERATE",
},
captions: {
...workspace.captions,
status: "RUNNING",
render_job_id: CAPTION_JOB_ID,
},
}
} }
await route.fulfill({ await route.fulfill({
status: 200, status: 200,
contentType: "application/json", contentType: "application/json",
body: JSON.stringify(project), body: JSON.stringify(workspace),
}) })
return },
} )
await route.fallback() await page.route("**/api/files/files/", async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify([
{
id: SOURCE_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: "video.mp4",
path: PRIMARY_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
},
]),
}) })
})
await page.route(`**/api/files/files/${SOURCE_FILE_ID}/`, async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
id: SOURCE_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: "video.mp4",
path: PRIMARY_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
}),
})
})
await page.route(
`**/api/files/files/${SOURCE_FILE_ID}/resolve/`,
async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
file_id: SOURCE_FILE_ID,
file_url: PRIMARY_FILE_URL,
file_path: PRIMARY_FILE_KEY,
filename: "video.mp4",
}),
})
},
)
await page.route("**/api/media/artifacts/", async (route) => { await page.route("**/api/media/artifacts/", async (route) => {
await route.fulfill({ await route.fulfill({
@@ -149,20 +238,6 @@ test.describe("Caption Settings Step", () => {
}) })
}) })
await page.route(
`**/api/transcribe/transcriptions/by-artifact/${TRANSCRIPTION_ARTIFACT_ID}/`,
async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
id: TRANSCRIPTION_ID,
artifact_id: TRANSCRIPTION_ARTIFACT_ID,
}),
})
},
)
await page.route("**/api/captions/presets/", async (route) => { await page.route("**/api/captions/presets/", async (route) => {
await route.fulfill({ await route.fulfill({
status: 200, status: 200,
@@ -183,27 +258,13 @@ test.describe("Caption Settings Step", () => {
}) })
}) })
await page.route("**/api/tasks/captions-generate/", async (route) => {
generateRequestCount += 1
generateRequestBody = route.request().postDataJSON() as Record<
string,
unknown
>
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({ job_id: CAPTION_JOB_ID }),
})
})
await page.route("**/api/tasks/status/**", async (route) => { await page.route("**/api/tasks/status/**", async (route) => {
await route.fulfill({ await route.fulfill({
status: 200, status: 200,
contentType: "application/json", contentType: "application/json",
body: JSON.stringify({ body: JSON.stringify({
status: "RUNNING", status: "RUNNING",
progress_pct: 0, progress_pct: 25,
output_data: null, output_data: null,
}), }),
}) })
@@ -220,26 +281,13 @@ test.describe("Caption Settings Step", () => {
await expect(captionStep.getByText("Системный пресет")).toBeVisible() await expect(captionStep.getByText("Системный пресет")).toBeVisible()
await expect(generateButton).toBeEnabled() await expect(generateButton).toBeEnabled()
await expect
.poll(() => savedWizardState?.transcription_artifact_id ?? null)
.toBe(TRANSCRIPTION_ARTIFACT_ID)
await generateButton.click() await generateButton.click()
expect(generateRequestBody).toMatchObject({ expect(workflowActions).toHaveLength(1)
video_s3_path: PRIMARY_FILE_KEY, expect(workflowActions[0]).toMatchObject({
transcription_id: TRANSCRIPTION_ID, type: "START_CAPTION_RENDER",
project_id: PROJECT_ID, revision: 1,
preset_id: CAPTION_PRESET_ID,
}) })
expect(generateRequestCount).toBe(1)
await expect
.poll(() => savedWizardState?.current_step ?? null)
.toBe("caption-processing")
await expect
.poll(() => savedWizardState?.active_job_id ?? null)
.toBe(CAPTION_JOB_ID)
await expect(page.locator("[data-testid='ProcessingStep']")).toBeVisible() await expect(page.locator("[data-testid='ProcessingStep']")).toBeVisible()
}) })
+251 -115
View File
@@ -2,7 +2,6 @@ import { expect, test } from "@playwright/test"
const USER_ID = "00000000-0000-0000-0000-000000000001" const USER_ID = "00000000-0000-0000-0000-000000000001"
const PROJECT_ID = "75df675b-013b-4b1f-ab2d-075dadbcd0d9" const PROJECT_ID = "75df675b-013b-4b1f-ab2d-075dadbcd0d9"
const DETECT_JOB_ID = "00000000-0000-0000-0000-000000000050"
const APPLY_JOB_ID = "00000000-0000-0000-0000-000000000051" const APPLY_JOB_ID = "00000000-0000-0000-0000-000000000051"
const TRANSCRIPTION_JOB_ID = "00000000-0000-0000-0000-000000000052" const TRANSCRIPTION_JOB_ID = "00000000-0000-0000-0000-000000000052"
const ORIGINAL_FILE_ID = "00000000-0000-0000-0000-000000000060" const ORIGINAL_FILE_ID = "00000000-0000-0000-0000-000000000060"
@@ -34,51 +33,50 @@ const MOCK_SEGMENTS = [
] ]
test.describe("Silence Apply Flow", () => { test.describe("Silence Apply Flow", () => {
test("should show processing for cut application and transcribe the processed video", async ({ test("should persist cuts via workflow actions and continue to transcription on processed source file", async ({
page, page,
}) => { }) => {
let project: Record<string, unknown> = { let applyStatus: "RUNNING" | "DONE" = "RUNNING"
id: PROJECT_ID, const workflowActions: Array<Record<string, unknown>> = []
owner_id: USER_ID, let workspace: Record<string, any> = {
name: "Тестовый проект", revision: 1,
description: null, phase: "SILENCE",
language: "auto", current_screen: "fragments",
folder: null, active_job: null,
status: "DRAFT", source_file_id: ORIGINAL_FILE_ID,
workspace_state: { workspace_view: {
wizard: { used_file_ids: [],
current_step: "fragments", selected_file_id: null,
completed_steps: [ },
"upload", silence: {
"verify", status: "REVIEW_READY",
"silence-settings", settings: {
"processing",
],
primary_file_id: ORIGINAL_FILE_ID,
primary_file_key: ORIGINAL_FILE_KEY,
original_file_name: "original-video.mp4",
silence_settings: {
min_silence_duration_ms: 200, min_silence_duration_ms: 200,
silence_threshold_db: 16, silence_threshold_db: 16,
padding_ms: 100, padding_ms: 100,
}, },
active_job_id: null, detect_job_id: "00000000-0000-0000-0000-000000000050",
active_job_type: null, detected_segments: MOCK_SEGMENTS,
silence_job_id: DETECT_JOB_ID, reviewed_cuts: [],
transcription_artifact_id: null, duration_ms: 30000,
caption_preset_id: null, applied_output_file_id: null,
caption_style_config: null,
captioned_video_path: null,
captioned_video_file_id: null,
}, },
transcription: {
status: "IDLE",
job_id: null,
request: null,
artifact_id: null,
transcription_id: null,
reviewed: false,
},
captions: {
status: "IDLE",
preset_id: null,
style_config: null,
render_job_id: null,
output_file_id: null,
}, },
is_active: true,
created_at: "2025-06-01T00:00:00Z",
updated_at: "2025-06-01T00:00:00Z",
} }
let savedWizardState: Record<string, unknown> | null = null
let applyStatus = "RUNNING"
let transcriptionRequestBody: Record<string, unknown> | null = null
await page.context().addCookies([ await page.context().addCookies([
{ {
@@ -104,35 +102,154 @@ test.describe("Silence Apply Flow", () => {
}) })
await page.route(`**/api/projects/${PROJECT_ID}/`, async (route) => { await page.route(`**/api/projects/${PROJECT_ID}/`, async (route) => {
if (route.request().method() === "GET") {
await route.fulfill({ await route.fulfill({
status: 200, status: 200,
contentType: "application/json", contentType: "application/json",
body: JSON.stringify(project), body: JSON.stringify({
id: PROJECT_ID,
owner_id: USER_ID,
name: "Тестовый проект",
description: null,
language: "auto",
folder: null,
status: "DRAFT",
workspace_state: null,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
updated_at: "2025-06-01T00:00:00Z",
}),
})
}) })
return
}
if (route.request().method() === "PATCH") { await page.route(`**/api/projects/${PROJECT_ID}/workspace*`, async (route) => {
const body = route.request().postDataJSON() as { if (
workspace_state?: { wizard?: Record<string, unknown> } applyStatus === "DONE" &&
workspace.current_screen === "silence-apply-processing"
) {
workspace = {
...workspace,
revision: 4,
phase: "TRANSCRIPTION",
current_screen: "transcription-settings",
active_job: null,
source_file_id: CUT_FILE_ID,
silence: {
...workspace.silence,
status: "APPLIED",
applied_output_file_id: CUT_FILE_ID,
},
} }
savedWizardState = body.workspace_state?.wizard ?? null
project = {
...project,
workspace_state: body.workspace_state ?? project.workspace_state,
} }
await route.fulfill({ await route.fulfill({
status: 200, status: 200,
contentType: "application/json", contentType: "application/json",
body: JSON.stringify(project), body: JSON.stringify(workspace),
}) })
return })
await page.route(
`**/api/projects/${PROJECT_ID}/workflow/actions*`,
async (route) => {
const action = route.request().postDataJSON() as Record<string, unknown>
workflowActions.push(action)
if (action.type === "SET_SILENCE_CUTS") {
workspace = {
...workspace,
revision: 2,
silence: {
...workspace.silence,
reviewed_cuts: action.cuts as typeof MOCK_SEGMENTS,
},
}
} }
await route.fallback() if (action.type === "START_SILENCE_APPLY") {
workspace = {
...workspace,
revision: 3,
current_screen: "silence-apply-processing",
active_job: {
job_id: APPLY_JOB_ID,
job_type: "SILENCE_APPLY",
},
silence: {
...workspace.silence,
status: "APPLYING",
},
}
}
if (action.type === "START_TRANSCRIPTION") {
workspace = {
...workspace,
revision: 5,
current_screen: "transcription-processing",
active_job: {
job_id: TRANSCRIPTION_JOB_ID,
job_type: "TRANSCRIPTION_GENERATE",
},
transcription: {
...workspace.transcription,
status: "RUNNING",
job_id: TRANSCRIPTION_JOB_ID,
request: action.request as {
engine: "whisper"
language?: string
model: string
},
},
}
}
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify(workspace),
})
},
)
await page.route("**/api/files/files/", async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify([
{
id: ORIGINAL_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: "original-video.mp4",
path: ORIGINAL_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
},
{
id: CUT_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: "cut-video.mp4",
path: CUT_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
},
]),
})
}) })
await page.route("**/api/files/files/*/resolve/", async (route) => { await page.route("**/api/files/files/*/resolve/", async (route) => {
@@ -146,30 +263,50 @@ test.describe("Silence Apply Flow", () => {
file_id: isCutFile ? CUT_FILE_ID : ORIGINAL_FILE_ID, file_id: isCutFile ? CUT_FILE_ID : ORIGINAL_FILE_ID,
file_url: isCutFile ? CUT_FILE_URL : ORIGINAL_FILE_URL, file_url: isCutFile ? CUT_FILE_URL : ORIGINAL_FILE_URL,
file_path: isCutFile ? CUT_FILE_KEY : ORIGINAL_FILE_KEY, file_path: isCutFile ? CUT_FILE_KEY : ORIGINAL_FILE_KEY,
filename: isCutFile ? "cut-video.mp4" : "original-video.mp4",
}), }),
}) })
}) })
await page.route("**/api/files/files/*/", async (route) => {
const fileId = route.request().url().split("/files/")[1]?.split("/")[0]
const isCutFile = fileId === CUT_FILE_ID
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
id: isCutFile ? CUT_FILE_ID : ORIGINAL_FILE_ID,
project_id: PROJECT_ID,
owner_id: USER_ID,
original_filename: isCutFile
? "cut-video.mp4"
: "original-video.mp4",
path: isCutFile ? CUT_FILE_KEY : ORIGINAL_FILE_KEY,
storage_backend: "S3",
mime_type: "video/mp4",
size_bytes: 1024,
checksum: null,
file_format: "mp4",
is_uploaded: true,
is_deleted: false,
is_active: true,
created_at: "2025-06-01T00:00:00Z",
}),
})
})
await page.route("**/api/media/artifacts/", async (route) => {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify([]),
})
})
await page.route("**/api/tasks/status/**", async (route) => { await page.route("**/api/tasks/status/**", async (route) => {
const url = route.request().url() const url = route.request().url()
if (url.includes(DETECT_JOB_ID)) {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
status: "DONE",
job_type: "SILENCE_DETECT",
progress_pct: 100,
output_data: {
silent_segments: MOCK_SEGMENTS,
duration_ms: 30000,
},
}),
})
return
}
if (url.includes(APPLY_JOB_ID)) { if (url.includes(APPLY_JOB_ID)) {
await route.fulfill({ await route.fulfill({
status: 200, status: 200,
@@ -190,6 +327,20 @@ test.describe("Silence Apply Flow", () => {
return return
} }
if (url.includes(TRANSCRIPTION_JOB_ID)) {
await route.fulfill({
status: 200,
contentType: "application/json",
body: JSON.stringify({
status: "RUNNING",
job_type: "TRANSCRIPTION_GENERATE",
progress_pct: 10,
output_data: null,
}),
})
return
}
await route.fulfill({ await route.fulfill({
status: 200, status: 200,
contentType: "application/json", contentType: "application/json",
@@ -201,60 +352,45 @@ test.describe("Silence Apply Flow", () => {
}) })
}) })
await page.route("**/api/tasks/silence-apply/", async (route) => {
await route.fulfill({
status: 202,
contentType: "application/json",
body: JSON.stringify({ job_id: APPLY_JOB_ID }),
})
})
await page.route("**/api/tasks/transcription-generate/", async (route) => {
transcriptionRequestBody = route.request().postDataJSON() as Record<
string,
unknown
>
await route.fulfill({
status: 202,
contentType: "application/json",
body: JSON.stringify({ job_id: TRANSCRIPTION_JOB_ID }),
})
})
await page.goto(`/projects/${PROJECT_ID}`) await page.goto(`/projects/${PROJECT_ID}`)
const fragmentsStep = page.locator("[data-testid='FragmentsStep']") await expect(page.locator("[data-testid='FragmentsStep']")).toBeVisible()
await expect(fragmentsStep).toBeVisible() await expect(page.locator("[data-testid='cut-region']")).toHaveCount(2)
await fragmentsStep.getByRole("button", { name: "Применить" }).click() await page.getByRole("button", { name: "Применить" }).click()
await expect.poll(() => workflowActions.length).toBe(2)
expect(workflowActions[0]).toMatchObject({
type: "SET_SILENCE_CUTS",
revision: 1,
})
expect(workflowActions[1]).toMatchObject({
type: "START_SILENCE_APPLY",
revision: 2,
})
await expect(page.locator("[data-testid='ProcessingStep']")).toBeVisible() await expect(page.locator("[data-testid='ProcessingStep']")).toBeVisible()
await expect
.poll(() => savedWizardState?.active_job_type ?? null)
.toBe("SILENCE_APPLY")
await expect
.poll(() => savedWizardState?.current_step ?? null)
.toBe("processing")
applyStatus = "DONE" applyStatus = "DONE"
const transcriptionStep = page.locator( await expect(
"[data-testid='TranscriptionSettingsStep']", page.locator("[data-testid='TranscriptionSettingsStep']"),
).toBeVisible()
await page.getByRole("button", { name: "Сгенерировать субтитры" }).click()
expect(workflowActions[2]).toMatchObject({
type: "START_TRANSCRIPTION",
revision: 4,
request: {
engine: "whisper",
model: "base",
},
})
await expect(page.locator("[data-testid='ProcessingStep']")).toContainText(
"ТРАНСКРИБАЦИЯ",
) )
await expect(transcriptionStep).toBeVisible({ timeout: 10_000 })
await expect
.poll(() => savedWizardState?.primary_file_key ?? null)
.toBe(CUT_FILE_KEY)
await transcriptionStep
.getByRole("button", { name: "Сгенерировать субтитры" })
.click()
expect(transcriptionRequestBody).toMatchObject({
file_key: CUT_FILE_KEY,
project_id: PROJECT_ID,
})
}) })
}) })