Compare commits
10 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| b9030a863e | |||
| 259d3da89f | |||
| 7d2f444e1c | |||
| 9f780f7f81 | |||
| 269724d553 | |||
| 7f7db41bc3 | |||
| c40aeab8be | |||
| 2c9c11fa17 | |||
| a8881e29be | |||
| 9a71fa9f25 |
@@ -0,0 +1,145 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIG6DCCBNCgAwIBAgICEAUwDQYJKoZIhvcNAQELBQAwcDELMAkGA1UEBhMCUlUx
|
||||
PzA9BgNVBAoMNlRoZSBNaW5pc3RyeSBvZiBEaWdpdGFsIERldmVsb3BtZW50IGFu
|
||||
ZCBDb21tdW5pY2F0aW9uczEgMB4GA1UEAwwXUnVzc2lhbiBUcnVzdGVkIFJvb3Qg
|
||||
Q0EwHhcNMjQwNzE1MTI1MDQxWhcNMjkwNzE5MTI1MDQxWjBvMQswCQYDVQQGEwJS
|
||||
VTE/MD0GA1UECgw2VGhlIE1pbmlzdHJ5IG9mIERpZ2l0YWwgRGV2ZWxvcG1lbnQg
|
||||
YW5kIENvbW11bmljYXRpb25zMR8wHQYDVQQDDBZSdXNzaWFuIFRydXN0ZWQgU3Vi
|
||||
IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA1j0rkZECOt1S8o7I
|
||||
JY+4YKAxuEa5xaHKHXT2EpkuC/0krqMOjUy2oPIRNgR5g8X0Jl6jamxeGLc4Q1tf
|
||||
ju6or9oSRYThIUhRsFDQNBiBBEXoBgWxTfiKB2eyT97+pz5TBtBiRCPaLGRHYLRb
|
||||
9Jz2HkJlxbtNPjtDrF5DPHym+mZ1M1z3hIQYAqJwLpsEBnsw/VxWMlxqHoeewd0h
|
||||
uJMd71KQ5vOKlz7KrIZ6EobNNa6wItuvsfj3kYCK7O78uLHGXXFxdr8Hae9lMUmC
|
||||
8F7AFwa+bO1LRlTlqW7rE3rLf+jj70N01N8T3o22v14YBaFBWQWncAVYD2JuL3tH
|
||||
252+kdNOERf1fLbLRigJAbd+hOhWYlNf963TFDgnNPliHNIW72SygVBnI2V3JwO1
|
||||
dp1hVKpK/zt8ziGdHW4gmOLTsH50YKdR4jNqUgQv4wASlKn9OpN6zHYc5G8h86fY
|
||||
BM+zxE5ikGI+I/vIqBuI0eaDU92AWN/YjFLpu8tMu9kLRSCf1vug6FIfDPWVo7iP
|
||||
ac/SI2v8jnnpaW7ph/Pz3WkzaG7ZZJsfFs+8dploWc6LOoDtbFBhMdGMxu024msC
|
||||
0PSjZb5ODXPIaO2NsA7fMiAtZcoK6anTUJh4zOP/stA9qsJGNxdrEmiPXSmBZY/N
|
||||
Y0wkZgZ6JTDhw7038bPvctkblJkCAwEAAaOCAYswggGHMB0GA1UdDgQWBBR3Pdk5
|
||||
r0K93FvKduru/c4+YSkwXzAfBgNVHSMEGDAWgBTh0YHlzlpfBKrS6badZrHF+qws
|
||||
hzAOBgNVHQ8BAf8EBAMCAYYwEgYDVR0TAQH/BAgwBgEB/wIBADCBmAYIKwYBBQUH
|
||||
AQEEgYswgYgwQAYIKwYBBQUHMAKGNGh0dHA6Ly9udWMtY2RwLnZvc2tob2QucnUv
|
||||
Y2RwL3Jvb3RjYV9zc2xfcnNhMjAyMi5jcnQwRAYIKwYBBQUHMAKGOGh0dHA6Ly9u
|
||||
dWMtY2RwLmRpZ2l0YWwuZ292LnJ1L2NkcC9yb290Y2Ffc3NsX3JzYTIwMjIuY3J0
|
||||
MIGFBgNVHR8EfjB8MDqgOKA2hjRodHRwOi8vbnVjLWNkcC52b3NraG9kLnJ1L2Nk
|
||||
cC9yb290Y2Ffc3NsX3JzYTIwMjIuY3JsMD6gPKA6hjhodHRwOi8vbnVjLWNkcC5k
|
||||
aWdpdGFsLmdvdi5ydS9jZHAvcm9vdGNhX3NzbF9yc2EyMDIyLmNybDANBgkqhkiG
|
||||
9w0BAQsFAAOCAgEAmsINXtQ7wwUWvIeOr80MdJS/5G4xhyZOVEmeUorThquT672y
|
||||
cCg3XCxc4fwbiZqSSbBqntQ7RtiTAKMYMvBageKoVHbzz+R4jX01tKcTx8cDePrz
|
||||
dJ73bLNUorE7RU9QsW4KyiUeRmjMDV23AUlEvuQFTwgkHXvbac1BBdPn9CrssQuF
|
||||
5EGohZKcQPFiAAc4SHbRNhlr7uAwgpc/erzI9EAcvA6BVAXcVKoeGpV01uexUgZ6
|
||||
St5RP9UmDWNA7T4yVXWJ233N0Q8bl+6AswINQ3PosPu6yQQHQjr65YS06epK+AeI
|
||||
6j+oGR4xI7EhTQhQvaobnGmX/8QQ7XDRYCP2HXYxiffnn/CfZ/BVyKLYeY1ZipjE
|
||||
nzqdQIC2+Q3WtY8jsVRQMP38WFRmtsIt5snehnPTs5bKGVIcYzj3o3Ex/K7agEz0
|
||||
zAJ0JR5ivXZOvNkT0g9x1v+S1IkU3e/nX1a+tpRquMtnHX0L2lXArNHUbaOO9EJt
|
||||
d57WaIpofV5cVhhwShOgAuBc9UMJF3/n4t4RKiPxtsK8P67gcmphMhslj7AMYrYM
|
||||
ej2NvQZY4m3ub3CPC/PrTjDONvb+8g5xrKtxBjYqC74HSB4dg9G3WimSDUuP2Su6
|
||||
G2y2TUeyJuCvCLz289VoO0vg7cNdMobE3KCqAiiNhN2VBFxHAUKmUoRcRdw=
|
||||
-----END CERTIFICATE-----
|
||||
2 s:C=RU, O=The Ministry of Digital Development and Communications, CN=Russian Trusted Sub CA
|
||||
i:C=RU, O=The Ministry of Digital Development and Communications, CN=Russian Trusted Root CA
|
||||
a:PKEY: RSA, 4096 (bit); sigalg: sha256WithRSAEncryption
|
||||
v:NotBefore: Mar 2 11:25:19 2022 GMT; NotAfter: Mar 6 11:25:19 2027 GMT
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIHQjCCBSqgAwIBAgICEAIwDQYJKoZIhvcNAQELBQAwcDELMAkGA1UEBhMCUlUx
|
||||
PzA9BgNVBAoMNlRoZSBNaW5pc3RyeSBvZiBEaWdpdGFsIERldmVsb3BtZW50IGFu
|
||||
ZCBDb21tdW5pY2F0aW9uczEgMB4GA1UEAwwXUnVzc2lhbiBUcnVzdGVkIFJvb3Qg
|
||||
Q0EwHhcNMjIwMzAyMTEyNTE5WhcNMjcwMzA2MTEyNTE5WjBvMQswCQYDVQQGEwJS
|
||||
VTE/MD0GA1UECgw2VGhlIE1pbmlzdHJ5IG9mIERpZ2l0YWwgRGV2ZWxvcG1lbnQg
|
||||
YW5kIENvbW11bmljYXRpb25zMR8wHQYDVQQDDBZSdXNzaWFuIFRydXN0ZWQgU3Vi
|
||||
IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9YPqBKOk19NFymrE
|
||||
wehzrhBEgT2atLezpduB24mQ7CiOa/HVpFCDRZzdxqlh8drku408/tTmWzlNH/br
|
||||
HuQhZ/miWKOf35lpKzjyBd6TPM23uAfJvEOQ2/dnKGGJbsUo1/udKSvxQwVHpVv3
|
||||
S80OlluKfhWPDEXQpgyFqIzPoxIQTLZ0deirZwMVHarZ5u8HqHetRuAtmO2ZDGQn
|
||||
vVOJYAjls+Hiueq7Lj7Oce7CQsTwVZeP+XQx28PAaEZ3y6sQEt6rL06ddpSdoTMp
|
||||
BnCqTbxW+eWMyjkIn6t9GBtUV45yB1EkHNnj2Ex4GwCiN9T84QQjKSr+8f0psGrZ
|
||||
vPbCbQAwNFJjisLixnjlGPLKa5vOmNwIh/LAyUW5DjpkCx004LPDuqPpFsKXNKpa
|
||||
L2Dm6uc0x4Jo5m+gUTVORB6hOSzWnWDj2GWfomLzzyjG81DRGFBpco/O93zecsIN
|
||||
3SL2Ysjpq1zdoS01CMYxie//9zWvYwzI25/OZigtnpCIrcd2j1Y6dMUFQAzAtHE+
|
||||
qsXflSL8HIS+IJEFIQobLlYhHkoE3avgNx5jlu+OLYe0dF0Ykx1PGNjbwqvTX37R
|
||||
Cn32NMjlotW2QcGEZhDKj+3urZizp5xdTPZitA+aEjZM/Ni71VOdiOP0igbw6asZ
|
||||
2fxdozZ1TnSSYNYvNATwthNmZysCAwEAAaOCAeUwggHhMBIGA1UdEwEB/wQIMAYB
|
||||
Af8CAQAwDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBTR4XENCy2BTm6KSo9MI7NM
|
||||
XqtpCzAfBgNVHSMEGDAWgBTh0YHlzlpfBKrS6badZrHF+qwshzCBxwYIKwYBBQUH
|
||||
AQEEgbowgbcwOwYIKwYBBQUHMAKGL2h0dHA6Ly9yb3N0ZWxlY29tLnJ1L2NkcC9y
|
||||
b290Y2Ffc3NsX3JzYTIwMjIuY3J0MDsGCCsGAQUFBzAChi9odHRwOi8vY29tcGFu
|
||||
eS5ydC5ydS9jZHAvcm9vdGNhX3NzbF9yc2EyMDIyLmNydDA7BggrBgEFBQcwAoYv
|
||||
aHR0cDovL3JlZXN0ci1wa2kucnUvY2RwL3Jvb3RjYV9zc2xfcnNhMjAyMi5jcnQw
|
||||
gbAGA1UdHwSBqDCBpTA1oDOgMYYvaHR0cDovL3Jvc3RlbGVjb20ucnUvY2RwL3Jv
|
||||
b3RjYV9zc2xfcnNhMjAyMi5jcmwwNaAzoDGGL2h0dHA6Ly9jb21wYW55LnJ0LnJ1
|
||||
L2NkcC9yb290Y2Ffc3NsX3JzYTIwMjIuY3JsMDWgM6Axhi9odHRwOi8vcmVlc3Ry
|
||||
LXBraS5ydS9jZHAvcm9vdGNhX3NzbF9yc2EyMDIyLmNybDANBgkqhkiG9w0BAQsF
|
||||
AAOCAgEARBVzZls79AdiSCpar15dA5Hr/rrT4WbrOfzlpI+xrLeRPrUG6eUWIW4v
|
||||
Sui1yx3iqGLCjPcKb+HOTwoRMbI6ytP/ndp3TlYua2advYBEhSvjs+4vDZNwXr/D
|
||||
anbwIWdurZmViQRBDFebpkvnIvru/RpWud/5r624Wp8voZMRtj/cm6aI9LtvBfT9
|
||||
cfzhOaexI/99c14dyiuk1+6QhdwKaCRTc1mdfNQmnfWNRbfWhWBlK3h4GGE9JK33
|
||||
Gk8ZS8DMrkdAh0xby4xAQ/mSWAfWrBmfzlOqGyoB1U47WTOeqNbWkkoAP2ys94+s
|
||||
Jg4NTkiDVtXRF6nr6fYi0bSOvOFg0IQrMXO2Y8gyg9ARdPJwKtvWX8VPADCYMiWH
|
||||
h4n8bZokIrImVKLDQKHY4jCsND2HHdJfnrdL2YJw1qFskNO4cSNmZydw0Wkgjv9k
|
||||
F+KxqrDKlB8MZu2Hclph6v/CZ0fQ9YuE8/lsHZ0Qc2HyiSMnvjgK5fDc3TD4fa8F
|
||||
E8gMNurM+kV8PT8LNIM+4Zs+LKEV8nqRWBaxkIVJGekkVKO8xDBOG/aN62AZKHOe
|
||||
GcyIdu7yNMMRihGVZCYr8rYiJoKiOzDqOkPkLOPdhtVlgnhowzHDxMHND/E2WA5p
|
||||
ZHuNM/m0TXt2wTTPL7JH2YC0gPz/BvvSzjksgzU5rLbRyUKQkgU=
|
||||
-----END CERTIFICATE-----
|
||||
3 s:C=RU, O=The Ministry of Digital Development and Communications, CN=Russian Trusted Root CA
|
||||
i:C=RU, O=The Ministry of Digital Development and Communications, CN=Russian Trusted Root CA
|
||||
a:PKEY: RSA, 4096 (bit); sigalg: sha256WithRSAEncryption
|
||||
v:NotBefore: Mar 1 21:04:15 2022 GMT; NotAfter: Feb 27 21:04:15 2032 GMT
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFwjCCA6qgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwcDELMAkGA1UEBhMCUlUx
|
||||
PzA9BgNVBAoMNlRoZSBNaW5pc3RyeSBvZiBEaWdpdGFsIERldmVsb3BtZW50IGFu
|
||||
ZCBDb21tdW5pY2F0aW9uczEgMB4GA1UEAwwXUnVzc2lhbiBUcnVzdGVkIFJvb3Qg
|
||||
Q0EwHhcNMjIwMzAxMjEwNDE1WhcNMzIwMjI3MjEwNDE1WjBwMQswCQYDVQQGEwJS
|
||||
VTE/MD0GA1UECgw2VGhlIE1pbmlzdHJ5IG9mIERpZ2l0YWwgRGV2ZWxvcG1lbnQg
|
||||
YW5kIENvbW11bmljYXRpb25zMSAwHgYDVQQDDBdSdXNzaWFuIFRydXN0ZWQgUm9v
|
||||
dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMfFOZ8pUAL3+r2n
|
||||
qqE0Zp52selXsKGFYoG0GM5bwz1bSFtCt+AZQMhkWQheI3poZAToYJu69pHLKS6Q
|
||||
XBiwBC1cvzYmUYKMYZC7jE5YhEU2bSL0mX7NaMxMDmH2/NwuOVRj8OImVa5s1F4U
|
||||
zn4Kv3PFlDBjjSjXKVY9kmjUBsXQrIHeaqmUIsPIlNWUnimXS0I0abExqkbdrXbX
|
||||
YwCOXhOO2pDUx3ckmJlCMUGacUTnylyQW2VsJIyIGA8V0xzdaeUXg0VZ6ZmNUr5Y
|
||||
Ber/EAOLPb8NYpsAhJe2mXjMB/J9HNsoFMBFJ0lLOT/+dQvjbdRZoOT8eqJpWnVD
|
||||
U+QL/qEZnz57N88OWM3rabJkRNdU/Z7x5SFIM9FrqtN8xewsiBWBI0K6XFuOBOTD
|
||||
4V08o4TzJ8+Ccq5XlCUW2L48pZNCYuBDfBh7FxkB7qDgGDiaftEkZZfApRg2E+M9
|
||||
G8wkNKTPLDc4wH0FDTijhgxR3Y4PiS1HL2Zhw7bD3CbslmEGgfnnZojNkJtcLeBH
|
||||
BLa52/dSwNU4WWLubaYSiAmA9IUMX1/RpfpxOxd4Ykmhz97oFbUaDJFipIggx5sX
|
||||
ePAlkTdWnv+RWBxlJwMQ25oEHmRguNYf4Zr/Rxr9cS93Y+mdXIZaBEE0KS2iLRqa
|
||||
OiWBki9IMQU4phqPOBAaG7A+eP8PAgMBAAGjZjBkMB0GA1UdDgQWBBTh0YHlzlpf
|
||||
BKrS6badZrHF+qwshzAfBgNVHSMEGDAWgBTh0YHlzlpfBKrS6badZrHF+qwshzAS
|
||||
BgNVHRMBAf8ECDAGAQH/AgEEMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF
|
||||
AAOCAgEAALIY1wkilt/urfEVM5vKzr6utOeDWCUczmWX/RX4ljpRdgF+5fAIS4vH
|
||||
tmXkqpSCOVeWUrJV9QvZn6L227ZwuE15cWi8DCDal3Ue90WgAJJZMfTshN4OI8cq
|
||||
W9E4EG9wglbEtMnObHlms8F3CHmrw3k6KmUkWGoa+/ENmcVl68u/cMRl1JbW2bM+
|
||||
/3A+SAg2c6iPDlehczKx2oa95QW0SkPPWGuNA/CE8CpyANIhu9XFrj3RQ3EqeRcS
|
||||
AQQod1RNuHpfETLU/A2gMmvn/w/sx7TB3W5BPs6rprOA37tutPq9u6FTZOcG1Oqj
|
||||
C/B7yTqgI7rbyvox7DEXoX7rIiEqyNNUguTk/u3SZ4VXE2kmxdmSh3TQvybfbnXV
|
||||
4JbCZVaqiZraqc7oZMnRoWrXRG3ztbnbes/9qhRGI7PqXqeKJBztxRTEVj8ONs1d
|
||||
WN5szTwaPIvhkhO3CO5ErU2rVdUr89wKpNXbBODFKRtgxUT70YpmJ46VVaqdAhOZ
|
||||
D9EUUn4YaeLaS8AjSF/h7UkjOibNc4qVDiPP+rkehFWM66PVnP1Msh93tc+taIfC
|
||||
EYVMxjh8zNbFuoc7fzvvrFILLe7ifvEIUqSVIC/AzplM/Jxw7buXFeGP1qVCBEHq
|
||||
391d/9RAfaZ12zkwFsl+IKwE/OZxW8AHa9i1p4GO0YSNuczzEm4=
|
||||
-----END CERTIFICATE-----
|
||||
---
|
||||
Server certificate
|
||||
subject=C=RU, ST=Moscow, L=Moscow, O=Public Joint-Stock Company Sberbank of Russia, OU=IT, CN=smartspeech.sber.ru
|
||||
issuer=C=RU, O=The Ministry of Digital Development and Communications, CN=Russian Trusted Sub CA
|
||||
---
|
||||
No client certificate CA names sent
|
||||
Peer signing digest: SHA256
|
||||
Peer signature type: rsa_pss_rsae_sha256
|
||||
Peer Temp Key: X25519, 253 bits
|
||||
---
|
||||
SSL handshake has read 7717 bytes and written 1614 bytes
|
||||
Verification: OK
|
||||
---
|
||||
New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256
|
||||
Protocol: TLSv1.3
|
||||
Server public key is 2048 bit
|
||||
This TLS version forbids renegotiation.
|
||||
Compression: NONE
|
||||
Expansion: NONE
|
||||
No ALPN negotiated
|
||||
Early data was not sent
|
||||
Verify return code: 0 (ok)
|
||||
---
|
||||
DONE
|
||||
@@ -1,10 +0,0 @@
|
||||
name: New MCP server
|
||||
version: 0.0.1
|
||||
schema: v1
|
||||
mcpServers:
|
||||
- name: New MCP server
|
||||
command: npx
|
||||
args:
|
||||
- -y
|
||||
- <your-mcp-server>
|
||||
env: {}
|
||||
+20
-1
@@ -1,2 +1,21 @@
|
||||
.env
|
||||
.env.*
|
||||
.git
|
||||
.gitignore
|
||||
.pytest_cache
|
||||
.mypy_cache
|
||||
.ruff_cache
|
||||
.venv
|
||||
.ruff_cache
|
||||
__pycache__
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
*.egg-info
|
||||
docs
|
||||
tests
|
||||
src
|
||||
.s_data
|
||||
.claude
|
||||
CODEX_REFACTOR_PLAN.md
|
||||
CLAUDE.md
|
||||
query_codex.md
|
||||
|
||||
@@ -1 +1,3 @@
|
||||
REMOTION_SERVICE_URL = http://remotion:3001
|
||||
REMOTION_SERVICE_URL = http://remotion:3001
|
||||
SALUTE_AUTH_KEY=MDE5ZDU0ZjEtODhlYS03MDk1LTkzYWYtZDM2MzU2MTUyYWVmOjg4NDlkNjZmLWMyY2UtNGFhZS1iYzY2LTQ2ZWRhMjNkYjgxYQ==
|
||||
SALUTE_CA_CERT_PATH=./.certs/russian_trusted_ca_bundle.pem
|
||||
@@ -10,6 +10,10 @@ __pycache__/
|
||||
build/
|
||||
dist/
|
||||
|
||||
# Environment
|
||||
.env
|
||||
.env.*
|
||||
|
||||
# OS / editor
|
||||
.DS_Store
|
||||
.vscode/
|
||||
|
||||
@@ -1,600 +1,15 @@
|
||||
# AGENTS.md - AI Coding Guidelines for CofeeProject Backend
|
||||
# AGENTS.md — Coffee Project Backend
|
||||
|
||||
This document provides guidelines and best practices for AI agents working with this codebase.
|
||||
Primary workflow guidance lives in `../AGENTS.md`.
|
||||
|
||||
---
|
||||
Use `./CLAUDE.md` as the service-specific source of truth for:
|
||||
|
||||
## Core Principles
|
||||
- backend commands
|
||||
- module architecture
|
||||
- backend patterns and gotchas
|
||||
|
||||
### 1. Code Should Be Simple, Readable, and Well Supported
|
||||
OpenCode/Codex notes:
|
||||
|
||||
- Write code that humans can understand at first glance
|
||||
- Prefer explicit over implicit behavior
|
||||
- Use clear control flow patterns (avoid deeply nested conditions)
|
||||
- Add docstrings for public functions, classes, and modules
|
||||
- Keep functions short and focused (ideally under 30 lines)
|
||||
|
||||
### 2. Less Overhead Is Better
|
||||
|
||||
- Avoid unnecessary abstractions and over-engineering
|
||||
- Don't add layers of indirection without clear benefit
|
||||
- Prefer direct solutions over clever ones
|
||||
- Minimize dependencies where possible
|
||||
- Use built-in Python features before reaching for external libraries
|
||||
|
||||
### 3. No Magic Values
|
||||
|
||||
- Define constants with meaningful names at module level
|
||||
- Use enums or `Literal` types for fixed sets of values (see `ArtifactTypeEnum` pattern)
|
||||
- Configuration values belong in `Settings` class with explicit defaults
|
||||
- Never hardcode timeouts, limits, or thresholds inline
|
||||
- Store user-facing error messages as module-level constants with `ERROR_` prefix
|
||||
- Example: `ERROR_NO_AUDIO_STREAM = "Файл не содержит аудиодорожки"`
|
||||
|
||||
```python
|
||||
# BAD
|
||||
if silence_db > 16:
|
||||
...
|
||||
|
||||
# GOOD
|
||||
SILENCE_THRESHOLD_DB = 16
|
||||
|
||||
if silence_db > SILENCE_THRESHOLD_DB:
|
||||
...
|
||||
```
|
||||
|
||||
### 4. One Function Should Implement One Purpose
|
||||
|
||||
- Each function should do exactly one thing
|
||||
- If a function needs "and" in its description, split it
|
||||
- Extract helper functions for distinct subtasks
|
||||
- Keep side effects isolated and predictable
|
||||
|
||||
```python
|
||||
# BAD
|
||||
async def get_and_validate_and_process_media(file_key: str) -> MediaResult:
|
||||
...
|
||||
|
||||
# GOOD
|
||||
async def download_media(file_key: str) -> TempFile:
|
||||
...
|
||||
|
||||
def validate_media_format(file_path: str) -> bool:
|
||||
...
|
||||
|
||||
async def process_media(file_path: str) -> MediaResult:
|
||||
...
|
||||
```
|
||||
|
||||
### 5. All Variable Names Should Have Meaning Based on Context
|
||||
|
||||
- Use descriptive names that explain purpose, not type
|
||||
- Avoid single-letter variables (except for trivial loops)
|
||||
- Prefix boolean variables with `is_`, `has_`, `can_`, `should_`
|
||||
- Use domain terminology consistently
|
||||
|
||||
```python
|
||||
# BAD
|
||||
x = await repo.get(id)
|
||||
flag = x.is_deleted
|
||||
|
||||
# GOOD
|
||||
media_file = await media_repository.get_by_id(media_file_id)
|
||||
is_soft_deleted = media_file.is_deleted
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Project Architecture
|
||||
|
||||
### Layer Structure
|
||||
|
||||
```
|
||||
cpv3/
|
||||
├── api/v1/ # API version routing
|
||||
├── common/ # Shared schemas and utilities
|
||||
├── db/ # Database base classes and session
|
||||
├── infrastructure/ # Cross-cutting concerns (auth, storage, settings)
|
||||
└── modules/ # Feature modules (domain logic)
|
||||
└── <module>/
|
||||
├── models.py # SQLAlchemy models
|
||||
├── schemas.py # Pydantic DTOs
|
||||
├── repository.py # Database access layer
|
||||
├── service.py # Business logic
|
||||
└── router.py # FastAPI endpoints
|
||||
```
|
||||
|
||||
### Module Responsibilities
|
||||
|
||||
| Layer | Responsibility | Dependencies |
|
||||
| --------------- | ------------------------------------------ | ----------------------------- |
|
||||
| `router.py` | HTTP request/response handling, validation | schemas, service, repository |
|
||||
| `service.py` | Business logic, orchestration | repository, external services |
|
||||
| `repository.py` | Database queries, CRUD operations | models, session |
|
||||
| `schemas.py` | Data transfer objects, validation | pydantic |
|
||||
| `models.py` | Database table definitions | SQLAlchemy |
|
||||
|
||||
---
|
||||
|
||||
## Coding Standards
|
||||
|
||||
### Python Version & Style
|
||||
|
||||
- **Python 3.11+** required
|
||||
- Use `from __future__ import annotations` for forward references
|
||||
- Line length: **100 characters** (configured in ruff)
|
||||
- Use type hints for all function signatures
|
||||
- Async-first approach for I/O operations
|
||||
|
||||
### Imports
|
||||
|
||||
```python
|
||||
# Standard library
|
||||
from __future__ import annotations
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import Literal
|
||||
|
||||
# Third-party
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
# Local imports (absolute paths)
|
||||
from cpv3.infrastructure.auth import get_current_user
|
||||
from cpv3.modules.media.schemas import MediaFileRead
|
||||
from cpv3.modules.media.repository import MediaFileRepository
|
||||
```
|
||||
|
||||
### Pydantic Schemas
|
||||
|
||||
- Inherit from `cpv3.common.schemas.Schema` for consistent config
|
||||
- Use `Literal` types for enums with string values
|
||||
- Suffix schema names: `*Create`, `*Update`, `*Read`
|
||||
|
||||
```python
|
||||
from cpv3.common.schemas import Schema
|
||||
|
||||
class MediaFileRead(Schema):
|
||||
id: UUID
|
||||
owner_id: UUID
|
||||
duration_seconds: float
|
||||
is_deleted: bool
|
||||
created_at: datetime
|
||||
```
|
||||
|
||||
### SQLAlchemy Models
|
||||
|
||||
- Inherit from `Base` and `BaseModelMixin`
|
||||
- Use explicit column types
|
||||
- Add indexes for frequently queried fields
|
||||
- Use soft deletes (`is_deleted` flag)
|
||||
|
||||
```python
|
||||
from cpv3.db.base import Base, BaseModelMixin
|
||||
|
||||
class MediaFile(Base, BaseModelMixin):
|
||||
__tablename__ = "media_files"
|
||||
|
||||
owner_id: Mapped[uuid.UUID] = mapped_column(
|
||||
UUID(as_uuid=True), ForeignKey("users.id", ondelete="RESTRICT"), index=True
|
||||
)
|
||||
is_deleted: Mapped[bool] = mapped_column(Boolean, default=False)
|
||||
```
|
||||
|
||||
### Repository Pattern
|
||||
|
||||
- One repository per model
|
||||
- Accept `AsyncSession` in constructor
|
||||
- Methods should be atomic and focused
|
||||
- Filter soft-deleted records by default
|
||||
|
||||
```python
|
||||
class MediaFileRepository:
|
||||
def __init__(self, session: AsyncSession) -> None:
|
||||
self._session = session
|
||||
|
||||
async def get_by_id(self, media_file_id: uuid.UUID) -> MediaFile | None:
|
||||
result = await self._session.execute(
|
||||
select(MediaFile).where(MediaFile.id == media_file_id)
|
||||
)
|
||||
media_file = result.scalar_one_or_none()
|
||||
if media_file is None or media_file.is_deleted:
|
||||
return None
|
||||
return media_file
|
||||
```
|
||||
|
||||
### FastAPI Endpoints
|
||||
|
||||
- Use dependency injection for DB session, auth, and services
|
||||
- Return typed response models
|
||||
- Use appropriate HTTP status codes
|
||||
- Handle errors with `HTTPException`
|
||||
|
||||
```python
|
||||
@router.get("/mediafiles/{media_file_id}", response_model=MediaFileRead)
|
||||
async def get_mediafile(
|
||||
media_file_id: uuid.UUID,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
) -> MediaFileRead:
|
||||
repo = MediaFileRepository(db)
|
||||
media_file = await repo.get_by_id(media_file_id)
|
||||
if media_file is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
return MediaFileRead.model_validate(media_file)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Configuration & Settings
|
||||
|
||||
### Environment Variables
|
||||
|
||||
- All configuration through `Settings` class in `infrastructure/settings.py`
|
||||
- Use `Field(default=..., alias="ENV_VAR_NAME")` pattern
|
||||
- Provide sensible defaults for local development
|
||||
- Never commit secrets to repository
|
||||
|
||||
```python
|
||||
class Settings(BaseSettings):
|
||||
jwt_secret_key: str = Field(default="dev-secret", alias="JWT_SECRET_KEY")
|
||||
jwt_algorithm: str = Field(default="HS256", alias="JWT_ALGORITHM")
|
||||
jwt_access_ttl_minutes: int = Field(default=60, alias="JWT_ACCESS_TTL_MINUTES")
|
||||
```
|
||||
|
||||
### Accessing Settings
|
||||
|
||||
```python
|
||||
from cpv3.infrastructure.settings import get_settings
|
||||
|
||||
settings = get_settings() # Cached via @lru_cache
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Testing Guidelines
|
||||
|
||||
### Test Structure
|
||||
|
||||
```
|
||||
tests/
|
||||
├── conftest.py # Shared fixtures
|
||||
├── unit/ # Unit tests (isolated)
|
||||
└── integration/ # Integration tests (with DB/services)
|
||||
```
|
||||
|
||||
### Fixtures
|
||||
|
||||
- Use `pytest-asyncio` for async tests
|
||||
- Create isolated database sessions per test
|
||||
- Mock external services (storage, APIs)
|
||||
|
||||
```python
|
||||
@pytest.fixture
|
||||
async def test_user(test_db_session: AsyncSession) -> User:
|
||||
user = User(
|
||||
id=uuid.uuid4(),
|
||||
username="testuser",
|
||||
email="test@example.com",
|
||||
password_hash=hash_password("testpassword"),
|
||||
is_active=True,
|
||||
)
|
||||
test_db_session.add(user)
|
||||
await test_db_session.commit()
|
||||
return user
|
||||
```
|
||||
|
||||
### Test Naming
|
||||
|
||||
```python
|
||||
# Pattern: test_<action>_<condition>_<expected_result>
|
||||
async def test_get_mediafile_when_not_found_returns_404():
|
||||
...
|
||||
|
||||
async def test_create_mediafile_with_valid_data_returns_201():
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Error Handling
|
||||
|
||||
```python
|
||||
# Use specific HTTP exceptions
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="Media file not found"
|
||||
)
|
||||
|
||||
# Re-raise with context
|
||||
try:
|
||||
result = await external_service.call()
|
||||
except ExternalError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_502_BAD_GATEWAY,
|
||||
detail="External service unavailable"
|
||||
) from e
|
||||
```
|
||||
|
||||
### Async Operations
|
||||
|
||||
```python
|
||||
# For CPU-bound work in async context
|
||||
import anyio
|
||||
|
||||
result = await anyio.to_thread.run_sync(cpu_intensive_function, arg1, arg2)
|
||||
|
||||
# For subprocess calls
|
||||
proc = await asyncio.create_subprocess_exec(
|
||||
"ffprobe", "-v", "error", file_path,
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.PIPE,
|
||||
)
|
||||
stdout, stderr = await proc.communicate()
|
||||
```
|
||||
|
||||
### Temporary Files
|
||||
|
||||
```python
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
with NamedTemporaryFile(suffix=".mp4", delete=False) as tmp:
|
||||
tmp_path = tmp.name
|
||||
try:
|
||||
# Use tmp_path
|
||||
...
|
||||
finally:
|
||||
# Clean up
|
||||
Path(tmp_path).unlink(missing_ok=True)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Do's and Don'ts
|
||||
|
||||
### ✅ DO
|
||||
|
||||
- Use type hints everywhere
|
||||
- Write async code for I/O operations
|
||||
- Use dependency injection
|
||||
- Keep modules self-contained
|
||||
- Write tests for new features
|
||||
- Use meaningful commit messages
|
||||
- Follow existing patterns in the codebase
|
||||
|
||||
### ❌ DON'T
|
||||
|
||||
- Use global mutable state
|
||||
- Put business logic in routers
|
||||
- Hardcode configuration values
|
||||
- Ignore type checker warnings
|
||||
- Write overly clever code
|
||||
- Skip error handling
|
||||
- Mix sync and async DB operations
|
||||
|
||||
---
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Task | Location |
|
||||
| --------------------- | ------------------------------------- |
|
||||
| Add new endpoint | `modules/<module>/router.py` |
|
||||
| Add database model | `modules/<module>/models.py` |
|
||||
| Add validation schema | `modules/<module>/schemas.py` |
|
||||
| Add business logic | `modules/<module>/service.py` |
|
||||
| Add database query | `modules/<module>/repository.py` |
|
||||
| Add configuration | `infrastructure/settings.py` |
|
||||
| Add shared utility | `common/` |
|
||||
| Add migration | Run `alembic revision --autogenerate` |
|
||||
|
||||
---
|
||||
|
||||
## Package Management
|
||||
|
||||
This project uses **[uv](https://docs.astral.sh/uv/)** as the package manager - a fast Python package installer and resolver written in Rust.
|
||||
|
||||
### Common Commands
|
||||
|
||||
```bash
|
||||
# Install all dependencies
|
||||
uv sync
|
||||
|
||||
# Add a new dependency
|
||||
uv add <package-name>
|
||||
|
||||
# Add a dev dependency
|
||||
uv add --group dev <package-name>
|
||||
|
||||
# Run a command in the virtual environment
|
||||
uv run <command>
|
||||
|
||||
# Run the development server
|
||||
uv run uvicorn cpv3.main:app --reload
|
||||
|
||||
# Run tests
|
||||
uv run pytest
|
||||
```
|
||||
|
||||
### Why uv?
|
||||
|
||||
- **Speed** - 10-100x faster than pip
|
||||
- **Reliable** - Deterministic dependency resolution
|
||||
- **Compatible** - Works with standard `pyproject.toml`
|
||||
|
||||
---
|
||||
|
||||
## Dependencies
|
||||
|
||||
Key dependencies used in this project:
|
||||
|
||||
- **FastAPI** - Web framework
|
||||
- **SQLAlchemy 2.0** - ORM (async mode)
|
||||
- **Pydantic 2.x** - Data validation
|
||||
- **asyncpg** - PostgreSQL async driver
|
||||
- **Alembic** - Database migrations
|
||||
- **pytest-asyncio** - Async testing
|
||||
- **boto3** - AWS S3 storage
|
||||
- **pydub** - Audio processing
|
||||
- **openai-whisper** - Transcription
|
||||
- **Dramatiq** - Background task queue (with Redis broker)
|
||||
|
||||
---
|
||||
|
||||
## Common AI Agent Mistakes to Avoid
|
||||
|
||||
This section documents real errors made during AI-assisted development sessions. Learn from these mistakes.
|
||||
|
||||
### 1. Over-Engineering and Breaking Module Structure
|
||||
|
||||
**What happened:** When asked to implement background tasks, the agent created excessive files:
|
||||
|
||||
```
|
||||
# BAD - What was created
|
||||
cpv3/modules/tasks/
|
||||
├── __init__.py
|
||||
├── actors.py # ❌ Non-standard
|
||||
├── base.py # ❌ Non-standard
|
||||
├── db_helpers.py # ❌ Non-standard
|
||||
├── webhook_dispatch.py # ❌ Non-standard
|
||||
├── handlers/ # ❌ Non-standard directory
|
||||
│ ├── __init__.py
|
||||
│ ├── base.py
|
||||
│ ├── media_probe.py
|
||||
│ ├── silence_remove.py
|
||||
│ └── ...
|
||||
├── schemas.py
|
||||
├── service.py
|
||||
└── router.py
|
||||
|
||||
# GOOD - Standard module structure
|
||||
cpv3/modules/tasks/
|
||||
├── __init__.py
|
||||
├── schemas.py # DTOs only
|
||||
├── service.py # All business logic including actors
|
||||
└── router.py # Endpoints only
|
||||
```
|
||||
|
||||
**Why it's wrong:**
|
||||
|
||||
- Ignored existing module patterns in the codebase
|
||||
- Added unnecessary abstraction layers (BaseTaskHandler, registry pattern)
|
||||
- Created cognitive overhead for maintainers
|
||||
|
||||
**Advice:**
|
||||
|
||||
- **ALWAYS examine existing modules first** before creating new ones
|
||||
- **Match the existing file naming conventions exactly**
|
||||
- Standard module files: `__init__.py`, `models.py`, `schemas.py`, `repository.py`, `service.py`, `router.py`
|
||||
- Only create files from this list; consolidate everything else into `service.py`
|
||||
|
||||
---
|
||||
|
||||
### 2. Misinterpreting "Make It Flexible" or "Apply SRP"
|
||||
|
||||
**What happened:** When asked to "make tasks module more flexible with SRP compliance", the agent interpreted this as creating:
|
||||
|
||||
- Abstract base classes (`BaseTaskHandler`, `BaseTaskSubmitter`)
|
||||
- A registry pattern with dynamic handler registration
|
||||
- Separate files for each handler implementation
|
||||
- Complex inheritance hierarchies
|
||||
|
||||
**Why it's wrong:**
|
||||
|
||||
- SRP doesn't mean "one class per file" or "maximum abstraction"
|
||||
- Flexibility doesn't mean "prepare for every possible future change"
|
||||
- This violates the project's core principle: **"Less Overhead Is Better"**
|
||||
|
||||
**Advice:**
|
||||
|
||||
- SRP = one function does one thing, NOT one file per concept
|
||||
- "Flexible" = easy to modify, NOT infinitely extensible
|
||||
- When in doubt, keep it in one file and refactor later if needed
|
||||
- Abstract base classes are rarely needed; prefer composition
|
||||
|
||||
```python
|
||||
# BAD - Over-abstracted
|
||||
class BaseTaskHandler(ABC):
|
||||
@abstractmethod
|
||||
async def validate(self, request): ...
|
||||
@abstractmethod
|
||||
async def execute(self, job_id): ...
|
||||
@abstractmethod
|
||||
async def on_complete(self, result): ...
|
||||
|
||||
class MediaProbeHandler(BaseTaskHandler):
|
||||
...
|
||||
|
||||
# GOOD - Simple and direct
|
||||
@dramatiq.actor
|
||||
def media_probe_actor(job_id: str, media_file_id: str) -> None:
|
||||
"""Probe media file for metadata."""
|
||||
# All logic here, no inheritance needed
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 3. Not Reading AGENTS.md Before Starting
|
||||
|
||||
**What happened:** The agent proceeded with implementation without fully considering the documented principles, particularly:
|
||||
|
||||
- "Avoid unnecessary abstractions and over-engineering"
|
||||
- "Don't add layers of indirection without clear benefit"
|
||||
- "Prefer direct solutions over clever ones"
|
||||
|
||||
**Advice:**
|
||||
|
||||
- **Read AGENTS.md completely before any implementation**
|
||||
- Re-read relevant sections when making architectural decisions
|
||||
- When the user's request conflicts with AGENTS.md principles, ask for clarification
|
||||
|
||||
---
|
||||
|
||||
### 4. Creating Files Without Checking Existing Patterns
|
||||
|
||||
**What happened:** The agent created `handlers/` subdirectory and multiple utility files without checking how other modules handle similar needs.
|
||||
|
||||
**Advice:**
|
||||
|
||||
- Before creating ANY new file, run: `ls cpv3/modules/<similar_module>/`
|
||||
- Check if the functionality can fit into existing standard files
|
||||
- If you need a helper function, put it in `service.py`, not a new file
|
||||
- Subdirectories within modules are almost never appropriate
|
||||
|
||||
---
|
||||
|
||||
### 5. Ignoring the "Quick Reference" Table
|
||||
|
||||
The AGENTS.md contains a clear reference:
|
||||
|
||||
| Task | Location |
|
||||
| --------------------- | -------------------------------- |
|
||||
| Add new endpoint | `modules/<module>/router.py` |
|
||||
| Add database model | `modules/<module>/models.py` |
|
||||
| Add validation schema | `modules/<module>/schemas.py` |
|
||||
| Add business logic | `modules/<module>/service.py` |
|
||||
| Add database query | `modules/<module>/repository.py` |
|
||||
|
||||
**Advice:**
|
||||
|
||||
- Use this table as the ONLY guide for file placement
|
||||
- If something doesn't fit these categories, it probably belongs in `service.py`
|
||||
- Cross-cutting concerns go in `infrastructure/`, not in module subdirectories
|
||||
|
||||
---
|
||||
|
||||
### Summary: The Golden Rules
|
||||
|
||||
1. **Check existing patterns first** - Look at 2-3 similar modules before creating anything
|
||||
2. **Standard files only** - `__init__.py`, `models.py`, `schemas.py`, `repository.py`, `service.py`, `router.py`
|
||||
3. **No subdirectories in modules** - Everything fits in the standard files
|
||||
4. **Consolidate, don't split** - When unsure, put it in `service.py`
|
||||
5. **Simple > Clever** - Direct code beats abstract patterns
|
||||
6. **YAGNI** - Don't build for hypothetical future requirements
|
||||
|
||||
---
|
||||
|
||||
_Last updated: February 2026_
|
||||
- Keep `../AGENTS.md` as the workflow and delegation source of truth.
|
||||
- Treat `CLAUDE.md` as architecture, commands, and conventions only.
|
||||
- Do not rely on `.claude/` directory contents.
|
||||
|
||||
+77
-16
@@ -1,31 +1,92 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
# syntax=docker/dockerfile:1.7
|
||||
|
||||
FROM python:3.11-slim
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stage 1: base — system dependencies shared by dev and prod
|
||||
# ---------------------------------------------------------------------------
|
||||
FROM python:3.11-slim AS base
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:0.8.15 /uv /uvx /bin/
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PATH="/app/.venv/bin:/root/.local/bin:${PATH}"
|
||||
PATH="/app/.venv/bin:${PATH}"
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
curl \
|
||||
ffmpeg \
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && apt-get install -y --no-install-recommends \
|
||||
ffmpeg \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install uv
|
||||
RUN curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stage 2: deps — install Python dependencies (no project code)
|
||||
# This layer is cached as long as pyproject.toml and uv.lock don't change.
|
||||
# build-essential is needed here for compiling C extensions (e.g. psycopg2)
|
||||
# but is NOT carried into the prod stage (prod inherits from base instead).
|
||||
# ---------------------------------------------------------------------------
|
||||
FROM base AS deps
|
||||
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install deps (expects uv.lock)
|
||||
COPY pyproject.toml uv.lock ./
|
||||
RUN uv sync --frozen --no-dev
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
uv sync --frozen --no-dev --no-install-project
|
||||
|
||||
# Copy source
|
||||
COPY cpv3 ./cpv3
|
||||
COPY alembic ./alembic
|
||||
COPY alembic.ini ./
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stage 3: dev — development target (used by docker-compose)
|
||||
#
|
||||
# Does NOT install the cpv3 package into site-packages at all. The source
|
||||
# code is bind-mounted from the host at /app/cpv3, and Python finds it via
|
||||
# sys.path (WORKDIR /app is on sys.path by default for scripts run from /app).
|
||||
# This means:
|
||||
# - No second `uv sync` step that could go stale or conflict with the mount
|
||||
# - Hot reload works because uvicorn watches the bind-mounted directory
|
||||
# - No risk of importing a stale copy from site-packages
|
||||
# ---------------------------------------------------------------------------
|
||||
FROM deps AS dev
|
||||
|
||||
# Without `uv sync` (which creates an editable .pth finder in site-packages),
|
||||
# Python cannot find the cpv3 package. PYTHONPATH=/app makes /app/cpv3
|
||||
# discoverable as a regular package. This is the standard approach for
|
||||
# development containers with bind-mounted source code.
|
||||
ENV PYTHONPATH=/app
|
||||
|
||||
# watchfiles is already included via uvicorn[standard] in main deps.
|
||||
# It is used by uvicorn --reload and by the worker auto-restart wrapper.
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["sh", "-c", "uv run alembic upgrade head && uv run uvicorn cpv3.main:app --host 0.0.0.0 --port 8000"]
|
||||
CMD ["sh", "-c", "alembic upgrade head && uvicorn cpv3.main:app --host 0.0.0.0 --port 8000 --reload --reload-dir /app/cpv3"]
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stage 4: prod — production target (used by CI/CD image builds)
|
||||
#
|
||||
# Inherits from base (not deps) so build-essential is excluded from the
|
||||
# final image (~200MB savings). Pre-compiled .venv is copied from deps.
|
||||
# Runs as non-root user for container security.
|
||||
# ---------------------------------------------------------------------------
|
||||
FROM base AS prod
|
||||
|
||||
ENV UV_LINK_MODE=copy
|
||||
|
||||
COPY --from=deps /app/.venv /app/.venv
|
||||
COPY pyproject.toml uv.lock ./
|
||||
COPY cpv3 ./cpv3
|
||||
COPY alembic ./alembic
|
||||
COPY alembic.ini ./
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
uv sync --frozen --no-dev
|
||||
|
||||
RUN groupadd --gid 1000 app && \
|
||||
useradd --uid 1000 --gid app --create-home app
|
||||
RUN chown -R app:app /app
|
||||
USER app
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["sh", "-c", "alembic upgrade head && uvicorn cpv3.main:app --host 0.0.0.0 --port 8000"]
|
||||
|
||||
@@ -0,0 +1,244 @@
|
||||
"""add project_workspaces table
|
||||
|
||||
Revision ID: e6f7a8b9c0d1
|
||||
Revises: d5e6f7a8b9c0
|
||||
Create Date: 2026-04-07 16:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
|
||||
revision: str = "e6f7a8b9c0d1"
|
||||
down_revision: Union[str, None] = "d5e6f7a8b9c0"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def _utc_now() -> datetime:
|
||||
return datetime.now(timezone.utc)
|
||||
|
||||
|
||||
def _parse_uuid(raw_value: object) -> str | None:
|
||||
if raw_value is None:
|
||||
return None
|
||||
try:
|
||||
return str(uuid.UUID(str(raw_value)))
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _default_state() -> dict[str, Any]:
|
||||
return {
|
||||
"version": 1,
|
||||
"phase": "INGEST",
|
||||
"active_job": None,
|
||||
"source_file_id": None,
|
||||
"workspace_view": {
|
||||
"used_file_ids": [],
|
||||
"selected_file_id": None,
|
||||
},
|
||||
"silence": {
|
||||
"status": "IDLE",
|
||||
"settings": {
|
||||
"min_silence_duration_ms": 200,
|
||||
"silence_threshold_db": 16,
|
||||
"padding_ms": 100,
|
||||
},
|
||||
"detect_job_id": None,
|
||||
"detected_segments": [],
|
||||
"reviewed_cuts": [],
|
||||
"duration_ms": None,
|
||||
"applied_output_file_id": None,
|
||||
},
|
||||
"transcription": {
|
||||
"status": "IDLE",
|
||||
"request": {
|
||||
"engine": "whisper",
|
||||
"language": None,
|
||||
"model": "base",
|
||||
},
|
||||
"job_id": None,
|
||||
"artifact_id": None,
|
||||
"transcription_id": None,
|
||||
"reviewed": False,
|
||||
},
|
||||
"captions": {
|
||||
"status": "IDLE",
|
||||
"preset_id": None,
|
||||
"style_config": None,
|
||||
"render_job_id": None,
|
||||
"output_file_id": None,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _backfill_state(legacy_workspace_state: dict | None) -> dict[str, Any]:
|
||||
state = _default_state()
|
||||
if not isinstance(legacy_workspace_state, dict):
|
||||
return state
|
||||
|
||||
wizard = legacy_workspace_state.get("wizard")
|
||||
if not isinstance(wizard, dict):
|
||||
wizard = {}
|
||||
|
||||
current_step = wizard.get("current_step")
|
||||
step_phase_map = {
|
||||
"upload": "INGEST",
|
||||
"verify": "VERIFY",
|
||||
"silence-settings": "SILENCE",
|
||||
"processing": "SILENCE",
|
||||
"fragments": "SILENCE",
|
||||
"silence-apply-processing": "SILENCE",
|
||||
"transcription-settings": "TRANSCRIPTION",
|
||||
"transcription-processing": "TRANSCRIPTION",
|
||||
"subtitle-revision": "TRANSCRIPTION",
|
||||
"caption-settings": "CAPTIONS",
|
||||
"caption-processing": "CAPTIONS",
|
||||
"caption-result": "DONE",
|
||||
}
|
||||
if current_step in step_phase_map:
|
||||
state["phase"] = step_phase_map[current_step]
|
||||
|
||||
source_file_id = _parse_uuid(wizard.get("primary_file_id"))
|
||||
if source_file_id is not None:
|
||||
state["source_file_id"] = source_file_id
|
||||
|
||||
silence_job_id = _parse_uuid(wizard.get("silence_job_id"))
|
||||
if silence_job_id is not None:
|
||||
state["silence"]["detect_job_id"] = silence_job_id
|
||||
|
||||
transcription_artifact_id = _parse_uuid(wizard.get("transcription_artifact_id"))
|
||||
if transcription_artifact_id is not None:
|
||||
state["transcription"]["artifact_id"] = transcription_artifact_id
|
||||
|
||||
caption_preset_id = _parse_uuid(wizard.get("caption_preset_id"))
|
||||
if caption_preset_id is not None:
|
||||
state["captions"]["preset_id"] = caption_preset_id
|
||||
|
||||
caption_style_config = wizard.get("caption_style_config")
|
||||
if isinstance(caption_style_config, dict):
|
||||
state["captions"]["style_config"] = caption_style_config
|
||||
|
||||
captioned_video_file_id = _parse_uuid(wizard.get("captioned_video_file_id"))
|
||||
if captioned_video_file_id is not None:
|
||||
state["captions"]["output_file_id"] = captioned_video_file_id
|
||||
state["captions"]["status"] = "COMPLETED"
|
||||
state["phase"] = "DONE"
|
||||
|
||||
active_job_id = _parse_uuid(wizard.get("active_job_id"))
|
||||
active_job_type = wizard.get("active_job_type")
|
||||
if active_job_id is not None and isinstance(active_job_type, str):
|
||||
state["active_job"] = {
|
||||
"job_id": active_job_id,
|
||||
"job_type": active_job_type,
|
||||
}
|
||||
if active_job_type == "TRANSCRIPTION_GENERATE":
|
||||
state["transcription"]["job_id"] = active_job_id
|
||||
if active_job_type == "CAPTIONS_GENERATE":
|
||||
state["captions"]["render_job_id"] = active_job_id
|
||||
|
||||
silence_settings = wizard.get("silence_settings")
|
||||
if isinstance(silence_settings, dict):
|
||||
state["silence"]["settings"] = {
|
||||
"min_silence_duration_ms": silence_settings.get("min_silence_duration_ms", 200),
|
||||
"silence_threshold_db": silence_settings.get("silence_threshold_db", 16),
|
||||
"padding_ms": silence_settings.get("padding_ms", 100),
|
||||
}
|
||||
state["silence"]["status"] = "CONFIGURED"
|
||||
|
||||
if current_step == "processing":
|
||||
state["silence"]["status"] = "DETECTING"
|
||||
elif current_step == "fragments":
|
||||
state["silence"]["status"] = "REVIEWING"
|
||||
elif current_step == "silence-apply-processing":
|
||||
state["silence"]["status"] = "APPLYING"
|
||||
elif current_step == "transcription-processing":
|
||||
state["transcription"]["status"] = "PROCESSING"
|
||||
elif current_step == "subtitle-revision":
|
||||
state["transcription"]["status"] = "REVIEWING"
|
||||
elif current_step == "caption-settings":
|
||||
state["captions"]["status"] = "CONFIGURED"
|
||||
elif current_step == "caption-processing":
|
||||
state["captions"]["status"] = "PROCESSING"
|
||||
elif current_step == "caption-result":
|
||||
state["captions"]["status"] = "COMPLETED"
|
||||
|
||||
used_files = legacy_workspace_state.get("used_files")
|
||||
if isinstance(used_files, list):
|
||||
parsed_ids: list[str] = []
|
||||
for item in used_files:
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
parsed_id = _parse_uuid(item.get("id"))
|
||||
if parsed_id is not None and parsed_id not in parsed_ids:
|
||||
parsed_ids.append(parsed_id)
|
||||
state["workspace_view"]["used_file_ids"] = parsed_ids
|
||||
if source_file_id in parsed_ids:
|
||||
state["workspace_view"]["selected_file_id"] = source_file_id
|
||||
|
||||
return state
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"project_workspaces",
|
||||
sa.Column(
|
||||
"project_id",
|
||||
postgresql.UUID(as_uuid=True),
|
||||
sa.ForeignKey("projects.id", ondelete="CASCADE"),
|
||||
primary_key=True,
|
||||
),
|
||||
sa.Column("revision", sa.Integer(), nullable=False, server_default=sa.text("0")),
|
||||
sa.Column("state", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
|
||||
)
|
||||
|
||||
connection = op.get_bind()
|
||||
projects_table = sa.table(
|
||||
"projects",
|
||||
sa.column("id", postgresql.UUID(as_uuid=True)),
|
||||
sa.column("workspace_state", sa.JSON()),
|
||||
sa.column("is_active", sa.Boolean()),
|
||||
)
|
||||
workspaces_table = sa.table(
|
||||
"project_workspaces",
|
||||
sa.column("project_id", postgresql.UUID(as_uuid=True)),
|
||||
sa.column("revision", sa.Integer()),
|
||||
sa.column("state", postgresql.JSONB(astext_type=sa.Text())),
|
||||
sa.column("created_at", sa.DateTime(timezone=True)),
|
||||
sa.column("updated_at", sa.DateTime(timezone=True)),
|
||||
)
|
||||
|
||||
rows = connection.execute(
|
||||
sa.select(projects_table.c.id, projects_table.c.workspace_state).where(
|
||||
projects_table.c.is_active.is_(True)
|
||||
)
|
||||
)
|
||||
|
||||
now = _utc_now()
|
||||
payloads = [
|
||||
{
|
||||
"project_id": row.id,
|
||||
"revision": 0,
|
||||
"state": _backfill_state(row.workspace_state),
|
||||
"created_at": now,
|
||||
"updated_at": now,
|
||||
}
|
||||
for row in rows
|
||||
]
|
||||
if payloads:
|
||||
connection.execute(sa.insert(workspaces_table), payloads)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table("project_workspaces")
|
||||
@@ -10,6 +10,7 @@ from cpv3.modules.captions.router import router as captions_router
|
||||
from cpv3.modules.files.router import router as files_router
|
||||
from cpv3.modules.jobs.router import events_router, jobs_router
|
||||
from cpv3.modules.media.router import artifacts_router, media_router, mediafiles_router
|
||||
from cpv3.modules.project_workspaces.router import router as project_workspaces_router
|
||||
from cpv3.modules.projects.router import router as projects_router
|
||||
from cpv3.modules.system.router import router as system_router
|
||||
from cpv3.modules.tasks.router import router as tasks_router
|
||||
@@ -29,6 +30,7 @@ api_router.include_router(users_router)
|
||||
|
||||
# Projects
|
||||
api_router.include_router(projects_router)
|
||||
api_router.include_router(project_workspaces_router)
|
||||
|
||||
# Files (storage module renamed)
|
||||
api_router.include_router(files_router)
|
||||
|
||||
@@ -2,6 +2,7 @@ from cpv3.db.base import Base
|
||||
from cpv3.modules.captions.models import CaptionPreset
|
||||
from cpv3.modules.jobs.models import Job, JobEvent
|
||||
from cpv3.modules.media.models import ArtifactMediaFile, MediaFile
|
||||
from cpv3.modules.project_workspaces.models import ProjectWorkspace
|
||||
from cpv3.modules.projects.models import Project
|
||||
from cpv3.modules.files.models import File
|
||||
from cpv3.modules.transcription.models import Transcription
|
||||
@@ -14,6 +15,7 @@ __all__ = [
|
||||
"CaptionPreset",
|
||||
"User",
|
||||
"Project",
|
||||
"ProjectWorkspace",
|
||||
"File",
|
||||
"MediaFile",
|
||||
"ArtifactMediaFile",
|
||||
|
||||
@@ -26,36 +26,36 @@ async def get_current_user(
|
||||
payload = decode_token(token)
|
||||
except ExpiredSignatureError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Token expired"
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Токен истёк"
|
||||
) from e
|
||||
except InvalidTokenError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Недействительный токен"
|
||||
) from e
|
||||
|
||||
if payload.get("type") != "access":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Недействительный токен"
|
||||
)
|
||||
|
||||
sub = payload.get("sub")
|
||||
if not sub:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Недействительный токен"
|
||||
)
|
||||
|
||||
try:
|
||||
user_id = uuid.UUID(str(sub))
|
||||
except ValueError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Недействительный токен"
|
||||
) from e
|
||||
|
||||
user_repo = UserRepository(db)
|
||||
user = await user_repo.get_by_id(user_id)
|
||||
if user is None or not user.is_active:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials"
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Неверные учётные данные"
|
||||
)
|
||||
|
||||
return user
|
||||
|
||||
@@ -98,6 +98,7 @@ class Settings(BaseSettings):
|
||||
|
||||
# SaluteSpeech
|
||||
salute_auth_key: str = Field(default="", alias="SALUTE_AUTH_KEY")
|
||||
salute_ssl_verify: bool = Field(default=True, alias="SALUTE_SSL_VERIFY")
|
||||
salute_ca_cert_path: Path | None = Field(
|
||||
default=None, alias="SALUTE_CA_CERT_PATH"
|
||||
)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import (
|
||||
APIRouter,
|
||||
@@ -42,8 +43,10 @@ MAX_MB_SIZE = 1024
|
||||
async def upload_file(
|
||||
file: UploadFile = FastAPIFile(...),
|
||||
folder: str = Form(default=""),
|
||||
project_id: uuid.UUID | None = Form(default=None),
|
||||
current_user: User = Depends(get_current_user),
|
||||
storage: StorageService = Depends(get_storage),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
) -> FileInfoResponse:
|
||||
# Validate max file size (matches old behavior).
|
||||
file.file.seek(0, 2)
|
||||
@@ -54,11 +57,18 @@ async def upload_file(
|
||||
if size_bytes > max_size:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"File size exceeds the maximum limit of {MAX_MB_SIZE} MB.",
|
||||
detail=f"Размер файла превышает допустимый лимит в {MAX_MB_SIZE} МБ.",
|
||||
)
|
||||
|
||||
user_folder = get_user_folder(current_user)
|
||||
resolved_folder = f"{user_folder}/{folder}" if folder else f"{user_folder}/user_upload"
|
||||
inferred_project_id = project_id
|
||||
if inferred_project_id is None and folder.startswith("projects/"):
|
||||
project_token = folder.removeprefix("projects/").split("/", 1)[0]
|
||||
try:
|
||||
inferred_project_id = uuid.UUID(project_token)
|
||||
except ValueError:
|
||||
inferred_project_id = None
|
||||
|
||||
key = await storage.upload_fileobj(
|
||||
fileobj=file.file,
|
||||
@@ -68,8 +78,23 @@ async def upload_file(
|
||||
content_type=file.content_type,
|
||||
)
|
||||
|
||||
service = FileService(db)
|
||||
file_entry = await service.create_file(
|
||||
requester=current_user,
|
||||
data=FileCreate(
|
||||
project_id=inferred_project_id,
|
||||
original_filename=file.filename or "upload.bin",
|
||||
path=key,
|
||||
storage_backend=get_settings().storage_backend.upper(),
|
||||
mime_type=file.content_type or "application/octet-stream",
|
||||
size_bytes=size_bytes,
|
||||
file_format=Path(file.filename or "upload.bin").suffix.lstrip(".") or None,
|
||||
is_uploaded=True,
|
||||
),
|
||||
)
|
||||
info = await storage.get_file_info(key)
|
||||
return FileInfoResponse(
|
||||
file_id=file_entry.id,
|
||||
file_path=info.file_path,
|
||||
file_url=info.file_url,
|
||||
file_size=info.file_size,
|
||||
@@ -82,17 +107,24 @@ async def get_file_info(
|
||||
file_path: str = Query(...),
|
||||
current_user: User = Depends(get_current_user),
|
||||
storage: StorageService = Depends(get_storage),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
) -> FileInfoResponse:
|
||||
if not current_user.is_staff:
|
||||
user_prefix = f"{get_user_folder(current_user)}/"
|
||||
if not file_path.startswith(user_prefix):
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
if not await storage.exists(file_path):
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
service = FileService(db)
|
||||
file = await service.get_file_by_path(file_path)
|
||||
if file is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
info = await storage.get_file_info(file_path)
|
||||
return FileInfoResponse(
|
||||
file_id=file.id,
|
||||
file_path=info.file_path,
|
||||
file_url=info.file_url,
|
||||
file_size=info.file_size,
|
||||
@@ -110,7 +142,7 @@ async def get_local_file(
|
||||
settings = get_settings()
|
||||
full_path = (settings.local_storage_dir / file_path).resolve()
|
||||
if not full_path.exists():
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
return FileResponse(full_path)
|
||||
|
||||
@@ -145,14 +177,42 @@ async def retrieve_file_entry(
|
||||
service = FileService(db)
|
||||
file = await service.get_file(file_id)
|
||||
if file is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and file.owner_id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
return FileRead.model_validate(file)
|
||||
|
||||
|
||||
@router.get("/files/{file_id}/resolve/", response_model=FileInfoResponse)
|
||||
async def resolve_file_entry(
|
||||
file_id: uuid.UUID,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
storage: StorageService = Depends(get_storage),
|
||||
) -> FileInfoResponse:
|
||||
service = FileService(db)
|
||||
file = await service.get_file(file_id)
|
||||
if file is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and file.owner_id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
if not await storage.exists(file.path):
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
info = await storage.get_file_info(file.path)
|
||||
return FileInfoResponse(
|
||||
file_id=file.id,
|
||||
file_path=file.path,
|
||||
file_url=info.file_url,
|
||||
file_size=info.file_size,
|
||||
filename=file.original_filename or info.filename,
|
||||
)
|
||||
|
||||
|
||||
@router.patch("/files/{file_id}/", response_model=FileRead)
|
||||
async def patch_file_entry(
|
||||
file_id: uuid.UUID,
|
||||
@@ -163,10 +223,10 @@ async def patch_file_entry(
|
||||
service = FileService(db)
|
||||
file = await service.get_file(file_id)
|
||||
if file is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and file.owner_id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
file = await service.update_file(file, body)
|
||||
return FileRead.model_validate(file)
|
||||
@@ -181,10 +241,10 @@ async def delete_file_entry(
|
||||
service = FileService(db)
|
||||
file = await service.get_file(file_id)
|
||||
if file is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and file.owner_id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
await service.delete_file(file)
|
||||
return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -51,6 +51,7 @@ class FileUpdate(Schema):
|
||||
|
||||
|
||||
class FileInfoResponse(Schema):
|
||||
file_id: UUID
|
||||
file_path: str
|
||||
file_url: str
|
||||
file_size: int | None = None
|
||||
|
||||
@@ -22,6 +22,9 @@ class FileService:
    async def get_file(self, file_id: uuid.UUID) -> File | None:
        return await self._repo.get_by_id(file_id)

    async def get_file_by_path(self, path: str) -> File | None:
        return await self._repo.get_by_path(path)

    async def create_file(self, *, requester: User, data: FileCreate) -> File:
        return await self._repo.create(requester=requester, data=data)


@@ -53,10 +53,10 @@ async def retrieve_job_endpoint(
    service = JobService(db)
    job = await service.get_job(job_id)
    if job is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and job.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    return JobRead.model_validate(job)

@@ -71,10 +71,10 @@ async def patch_job_endpoint(
    service = JobService(db)
    job = await service.get_job(job_id)
    if job is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and job.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    if body.status == "CANCELLED":
        task_service = TaskService(db)
@@ -94,10 +94,10 @@ async def delete_job_endpoint(
    service = JobService(db)
    job = await service.get_job(job_id)
    if job is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and job.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    await service.deactivate_job(job)
    return Response(status_code=status.HTTP_204_NO_CONTENT)
@@ -136,7 +136,7 @@ async def retrieve_event_endpoint(
    service = JobService(db)
    event = await service.get_job_event(event_id)
    if event is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    return JobEventRead.model_validate(event)

@@ -152,7 +152,7 @@ async def patch_event_endpoint(
    service = JobService(db)
    event = await service.get_job_event(event_id)
    if event is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    event = await service.update_job_event(event, body)
    return JobEventRead.model_validate(event)
@@ -168,7 +168,7 @@ async def delete_event_endpoint(
    service = JobService(db)
    event = await service.get_job_event(event_id)
    if event is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    await service.deactivate_job_event(event)
    return Response(status_code=status.HTTP_204_NO_CONTENT)

@@ -157,10 +157,10 @@ async def retrieve_mediafile(
    repo = MediaFileRepository(db)
    media_file = await repo.get_by_id(media_file_id)
    if media_file is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and media_file.owner_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    return MediaFileRead.model_validate(media_file)

@@ -175,10 +175,10 @@ async def patch_mediafile(
    repo = MediaFileRepository(db)
    media_file = await repo.get_by_id(media_file_id)
    if media_file is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and media_file.owner_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    media_file = await repo.update(media_file, body)
    return MediaFileRead.model_validate(media_file)
@@ -193,10 +193,10 @@ async def delete_mediafile(
    repo = MediaFileRepository(db)
    media_file = await repo.get_by_id(media_file_id)
    if media_file is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and media_file.owner_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    await repo.mark_deleted(media_file)
    return Response(status_code=status.HTTP_204_NO_CONTENT)
@@ -237,7 +237,7 @@ async def retrieve_artifact_mediafile(
    repo = ArtifactRepository(db)
    artifact = await repo.get_by_id(artifact_id)
    if artifact is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    return ArtifactMediaFileRead.model_validate(artifact)

@@ -253,7 +253,7 @@ async def patch_artifact_mediafile(
    repo = ArtifactRepository(db)
    artifact = await repo.get_by_id(artifact_id)
    if artifact is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    artifact = await repo.update(artifact, body)
    return ArtifactMediaFileRead.model_validate(artifact)
@@ -269,7 +269,7 @@ async def delete_artifact_mediafile(
    repo = ArtifactRepository(db)
    artifact = await repo.get_by_id(artifact_id)
    if artifact is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    await repo.mark_deleted(artifact)
    return Response(status_code=status.HTTP_204_NO_CONTENT)
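Reviewer note: the 404/403 ownership guard above is now duplicated across the file, job, media-file and artifact endpoints. A possible follow-up (not part of this diff) is a shared helper; a minimal sketch, assuming the existing User model and the Russian detail strings introduced here:

    # Sketch only — hypothetical shared guard, not present in this change
    def ensure_owner_or_staff(owner_id: uuid.UUID, current_user: User) -> None:
        # Staff can access any object; everyone else only their own.
        if not current_user.is_staff and owner_id != current_user.id:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")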
+227 -74
@@ -19,6 +19,9 @@ FRAME_WIDTH_PX = 128
FRAME_FPS = 1
FRAME_JPEG_QUALITY = 5
FRAMES_META_FILENAME = "meta.json"
FFMPEG_PROGRESS_DIVISOR = 1_000_000.0

MediaProgressCallback = Callable[[str, float | None], None]


def get_frames_folder(user_folder: str, file_key: str) -> str:
@@ -55,6 +58,123 @@ async def probe_media(storage: StorageService, *, file_key: str) -> MediaProbeSc
        tmp.cleanup()


def _parse_ffmpeg_timecode_seconds(value: str) -> float | None:
    parts = value.strip().split(":")
    if len(parts) != 3:
        return None

    try:
        hours = int(parts[0])
        minutes = int(parts[1])
        seconds = float(parts[2])
    except ValueError:
        return None

    return hours * 3600 + minutes * 60 + seconds


def _get_ffmpeg_output_time_seconds(progress_snapshot: dict[str, str]) -> float | None:
    timecode = progress_snapshot.get("out_time")
    if timecode:
        parsed = _parse_ffmpeg_timecode_seconds(timecode)
        if parsed is not None:
            return parsed

    for key in ("out_time_us", "out_time_ms"):
        raw_value = progress_snapshot.get(key)
        if raw_value is None:
            continue
        try:
            return max(float(raw_value), 0.0) / FFMPEG_PROGRESS_DIVISOR
        except ValueError:
            continue

    return None


def _extract_ffmpeg_out_time_ms(progress_snapshot: dict[str, str]) -> float | None:
    seconds = _get_ffmpeg_output_time_seconds(progress_snapshot)
    if seconds is None:
        return None
    return seconds * 1000.0

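For context on what these helpers parse: with `-progress pipe:1`, ffmpeg periodically emits key=value lines terminated by a `progress=` record. A small illustration of the parsing behaviour (snapshot values are made up):

    # Illustration only — a typical snapshot collected from `ffmpeg -progress pipe:1`
    snapshot = {
        "frame": "250",
        "out_time": "00:00:10.500000",
        "out_time_us": "10500000",
        "progress": "continue",
    }
    assert _parse_ffmpeg_timecode_seconds("00:00:10.500000") == 10.5
    # Prefers out_time, falls back to out_time_us / out_time_ms scaled by FFMPEG_PROGRESS_DIVISOR
    assert _get_ffmpeg_output_time_seconds(snapshot) == 10.5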
async def _probe_local_duration_seconds(input_path: str) -> float | None:
    proc = await asyncio.create_subprocess_exec(
        "ffprobe",
        "-v",
        "error",
        "-show_entries",
        "format=duration",
        "-of",
        "json",
        input_path,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, _ = await proc.communicate()
    if proc.returncode != 0:
        return None

    try:
        raw = json.loads(stdout.decode())
    except json.JSONDecodeError:
        return None
    duration = raw.get("format", {}).get("duration")
    if duration is None:
        return None

    try:
        value = float(duration)
    except (TypeError, ValueError):
        return None

    return value if value > 0 else None


async def _forward_ffmpeg_progress(
    stdout: asyncio.StreamReader | None,
    *,
    duration_seconds: float | None,
    on_progress: MediaProgressCallback | None,
    progress_stage: str,
) -> None:
    if stdout is None:
        return

    snapshot: dict[str, str] = {}
    last_pct = -1.0

    while True:
        line = await stdout.readline()
        if not line:
            break

        decoded = line.decode(errors="ignore").strip()
        if "=" not in decoded:
            continue

        key, value = decoded.split("=", 1)
        snapshot[key] = value

        if key != "progress":
            continue

        if (
            on_progress is not None
            and duration_seconds is not None
            and duration_seconds > 0
        ):
            output_time_seconds = _get_ffmpeg_output_time_seconds(snapshot)
            if output_time_seconds is not None:
                pct = min(max((output_time_seconds / duration_seconds) * 100.0, 0.0), 100.0)
                if pct > last_pct:
                    last_pct = pct
                    on_progress(progress_stage, pct)

        snapshot = {}


def _compute_non_silent_segments(
    *,
    local_audio_path: str,
@@ -83,6 +203,54 @@ def _compute_non_silent_segments(
    return segments


def _build_trim_concat_filter(segments: list[tuple[int, int]]) -> str:
    parts: list[str] = []
    concat_inputs: list[str] = []

    for index, (start_ms, end_ms) in enumerate(segments):
        start_s = start_ms / 1000.0
        end_s = end_ms / 1000.0
        video_label = f"v{index}"
        audio_label = f"a{index}"

        parts.append(
            f"[0:v:0]trim=start={start_s:.3f}:end={end_s:.3f},setpts=PTS-STARTPTS[{video_label}]"
        )
        parts.append(
            f"[0:a:0]atrim=start={start_s:.3f}:end={end_s:.3f},asetpts=PTS-STARTPTS[{audio_label}]"
        )
        concat_inputs.append(f"[{video_label}][{audio_label}]")

    return ";".join(parts + ["".join(concat_inputs) + f"concat=n={len(segments)}:v=1:a=1[v][a]"])

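To make the filter graph concrete: for two segments [(0, 1500), (3000, 4250)] the helper above returns the single-line string shown in the comment below (wrapped here only for readability):

    # Illustration only — _build_trim_concat_filter([(0, 1500), (3000, 4250)]) produces:
    #   [0:v:0]trim=start=0.000:end=1.500,setpts=PTS-STARTPTS[v0];
    #   [0:a:0]atrim=start=0.000:end=1.500,asetpts=PTS-STARTPTS[a0];
    #   [0:v:0]trim=start=3.000:end=4.250,setpts=PTS-STARTPTS[v1];
    #   [0:a:0]atrim=start=3.000:end=4.250,asetpts=PTS-STARTPTS[a1];
    #   [v0][a0][v1][a1]concat=n=2:v=1:a=1[v][a]
    example_filter = _build_trim_concat_filter([(0, 1500), (3000, 4250)])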
def _build_trim_concat_command(
    *,
    input_path: str,
    out_path: str,
    segments: list[tuple[int, int]],
) -> list[str]:
    return [
        "ffmpeg",
        "-y",
        "-i",
        input_path,
        "-filter_complex",
        _build_trim_concat_filter(segments),
        "-map",
        "[v]",
        "-map",
        "[a]",
        "-c:v",
        "libx264",
        "-c:a",
        "aac",
        "-preset",
        "medium",
        out_path,
    ]


async def detect_silence(
    storage: StorageService,
    *,
@@ -137,6 +305,7 @@ async def apply_silence_cuts(
    out_folder: str,
    cuts: list[dict],
    output_name: str | None = None,
    on_progress: MediaProgressCallback | None = None,
) -> FileInfo:
    """Apply explicit cut regions to a media file, concatenating the non-cut parts."""
    input_tmp = await storage.download_to_temp(file_key)
@@ -165,59 +334,47 @@ async def apply_silence_cuts(
    if not segments:
        return await storage.get_file_info(file_key)

    output_duration_seconds = sum(end - start for start, end in segments) / 1000.0

    with NamedTemporaryFile(
        suffix=path.splitext(file_key)[1] or ".mp4", delete=False
    ) as out:
        out_path = out.name

    try:
        cmd: list[str] = ["ffmpeg"]
        for start_ms, end_ms in segments:
            start_s = start_ms / 1000.0
            duration_s = (end_ms - start_ms) / 1000.0
            cmd.extend(
                [
                    "-ss",
                    f"{start_s:.3f}",
                    "-t",
                    f"{duration_s:.3f}",
                    "-y",
                    "-i",
                    input_tmp.path,
                ]
            )
        if on_progress is not None:
            on_progress("applying_cuts", 0.0)

        seg_count = len(segments)
        parts = [f"[{i}:v:0][{i}:a:0]" for i in range(seg_count)]
        filter_complex = "".join(parts) + f"concat=n={seg_count}:v=1:a=1[v][a]"

        cmd.extend(
            [
                "-filter_complex",
                filter_complex,
                "-map",
                "[v]",
                "-map",
                "[a]",
                "-c:v",
                "libx264",
                "-c:a",
                "aac",
                "-preset",
                "medium",
                out_path,
            ]
        cmd = _build_trim_concat_command(
            input_path=input_tmp.path,
            out_path=out_path,
            segments=segments,
        )

        proc = await asyncio.create_subprocess_exec(
            *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
        )
        _, stderr = await proc.communicate()
        progress_task = asyncio.create_task(
            _forward_ffmpeg_progress(
                proc.stdout,
                duration_seconds=output_duration_seconds,
                on_progress=on_progress,
                progress_stage="applying_cuts",
            )
        )
        stderr_task = asyncio.create_task(
            proc.stderr.read() if proc.stderr is not None else asyncio.sleep(0, result=b"")
        )
        await asyncio.gather(progress_task, stderr_task)
        await proc.wait()
        stderr = stderr_task.result()
        if proc.returncode != 0:
            raise RuntimeError(f"ffmpeg failed: {stderr.decode(errors='ignore')}")

        base_name = output_name or path.basename(file_key)
        output_key = path.join(out_folder or "", "silent", base_name)
        if on_progress is not None:
            on_progress("uploading", None)
        with open(out_path, "rb") as out_file:
            _ = await storage.upload_fileobj(
                fileobj=out_file,
@@ -267,42 +424,10 @@ async def remove_silence(
        out_path = out.name

    try:
        cmd: list[str] = ["ffmpeg"]
        for start_ms, end_ms in segments:
            start_s = start_ms / 1000.0
            duration_s = (end_ms - start_ms) / 1000.0
            cmd.extend(
                [
                    "-ss",
                    f"{start_s:.3f}",
                    "-t",
                    f"{duration_s:.3f}",
                    "-y",
                    "-i",
                    input_tmp.path,
                ]
            )

        seg_count = len(segments)
        parts = [f"[{i}:v:0][{i}:a:0]" for i in range(seg_count)]
        filter_complex = "".join(parts) + f"concat=n={seg_count}:v=1:a=1[v][a]"

        cmd.extend(
            [
                "-filter_complex",
                filter_complex,
                "-map",
                "[v]",
                "-map",
                "[a]",
                "-c:v",
                "libx264",
                "-c:a",
                "aac",
                "-preset",
                "medium",
                out_path,
            ]
        cmd = _build_trim_concat_command(
            input_path=input_tmp.path,
            out_path=out_path,
            segments=segments,
        )

        proc = await asyncio.create_subprocess_exec(
@@ -333,21 +458,34 @@ async def remove_silence(


async def convert_to_mp4(
    storage: StorageService, *, file_key: str, out_folder: str
    storage: StorageService,
    *,
    file_key: str,
    out_folder: str,
    on_progress: MediaProgressCallback | None = None,
) -> FileInfo:
    input_tmp = await storage.download_to_temp(file_key)

    try:
        filename_without_ext = path.splitext(path.basename(file_key))[0]
        mp4_filename = f"Конвертированое видео {filename_without_ext}.mp4"
        duration_seconds = await _probe_local_duration_seconds(input_tmp.path)

        with NamedTemporaryFile(suffix=".mp4", delete=False) as out:
            out_path = out.name

        try:
            if on_progress is not None:
                on_progress("converting", 0.0)

            cmd = [
                "ffmpeg",
                "-y",
                "-nostdin",
                "-v",
                "error",
                "-progress",
                "pipe:1",
                "-i",
                input_tmp.path,
                "-c:v",
@@ -364,11 +502,26 @@ async def convert_to_mp4(
            proc = await asyncio.create_subprocess_exec(
                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
            )
            _, stderr = await proc.communicate()
            progress_task = asyncio.create_task(
                _forward_ffmpeg_progress(
                    proc.stdout,
                    duration_seconds=duration_seconds,
                    on_progress=on_progress,
                    progress_stage="converting",
                )
            )
            stderr_task = asyncio.create_task(
                proc.stderr.read() if proc.stderr is not None else asyncio.sleep(0, result=b"")
            )
            await asyncio.gather(progress_task, stderr_task)
            await proc.wait()
            stderr = stderr_task.result()
            if proc.returncode != 0:
                raise RuntimeError(f"ffmpeg failed: {stderr.decode(errors='ignore')}")

            output_key = path.join(out_folder or "", "converted", mp4_filename)
            if on_progress is not None:
                on_progress("uploading", None)
            with open(out_path, "rb") as out_file:
                _ = await storage.upload_fileobj(
                    fileobj=out_file,
@@ -80,7 +80,7 @@ async def mark_notification_read(
    repo = NotificationRepository(db)
    found = await repo.mark_read(notification_id, current_user.id)
    if not found:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")


@router.post("/read-all/", status_code=status.HTTP_204_NO_CONTENT)

@@ -1,6 +1,5 @@
from __future__ import annotations

import json
import logging
import uuid

@@ -30,13 +29,6 @@ JOB_TYPE_LABELS: dict[str, str] = {
    "CAPTIONS_GENERATE": "Генерация субтитров",
}

STATUS_TITLES: dict[str, str] = {
    "RUNNING": "Задача запущена",
    "DONE": "Задача завершена",
    "FAILED": "Ошибка выполнения",
}


# ---------------------------------------------------------------------------
# ConnectionManager — singleton for WebSocket pub/sub via Redis
# ---------------------------------------------------------------------------
@@ -113,14 +105,13 @@ class NotificationService:
        # Only persist notifications on status changes (not progress-only updates)
        notification_id: uuid.UUID | None = None
        if notification_type is not None:
            title = STATUS_TITLES.get(event.status or "", job_type_label)
            notification = await self._repo.create(
                NotificationCreate(
                    user_id=user_id,
                    job_id=job.id,
                    project_id=job.project_id,
                    notification_type=notification_type,
                    title=title,
                    title=job_type_label,
                    message=event.error_message or event.current_message,
                    payload={
                        "job_type": job.job_type,
@@ -139,8 +130,8 @@ class NotificationService:
            job_id=job.id,
            project_id=job.project_id,
            job_type=job.job_type,
            status=event.status or job.status,
            progress_pct=event.progress_pct or job.project_pct,
            status=event.status if event.status is not None else job.status,
            progress_pct=job.project_pct if event.progress_pct is None else event.progress_pct,
            message=event.error_message or event.current_message or job.current_message,
            title=job_type_label,
            created_at=now,

@@ -0,0 +1 @@
"""Typed project workspace module."""
@@ -0,0 +1,30 @@
from __future__ import annotations

import uuid
from datetime import datetime

from sqlalchemy import DateTime, ForeignKey, Integer, JSON
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.orm import Mapped, mapped_column

from cpv3.db.base import Base, utcnow

STATE_JSON_TYPE = JSON().with_variant(JSONB(), "postgresql")


class ProjectWorkspace(Base):
    __tablename__ = "project_workspaces"

    project_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("projects.id", ondelete="CASCADE"),
        primary_key=True,
    )
    revision: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    state: Mapped[dict] = mapped_column(STATE_JSON_TYPE, default=dict, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=utcnow)
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=utcnow,
        onupdate=utcnow,
    )
@@ -0,0 +1,74 @@
from __future__ import annotations

import uuid

from sqlalchemy import select, update
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.db.base import utcnow
from cpv3.modules.project_workspaces.models import ProjectWorkspace


class WorkspaceRevisionConflictError(RuntimeError):
    """Raised when the optimistic workspace revision check fails."""


class ProjectWorkspaceRepository:
    def __init__(self, session: AsyncSession) -> None:
        self._session = session

    async def get_by_project_id(self, project_id: uuid.UUID) -> ProjectWorkspace | None:
        result = await self._session.execute(
            select(ProjectWorkspace).where(ProjectWorkspace.project_id == project_id)
        )
        return result.scalar_one_or_none()

    async def create(self, *, project_id: uuid.UUID, state: dict) -> ProjectWorkspace:
        workspace = ProjectWorkspace(project_id=project_id, revision=0, state=state)
        self._session.add(workspace)
        await self._session.commit()
        await self._session.refresh(workspace)
        return workspace

    async def get_or_create(self, *, project_id: uuid.UUID, state: dict) -> ProjectWorkspace:
        workspace = await self.get_by_project_id(project_id)
        if workspace is not None:
            return workspace

        try:
            return await self.create(project_id=project_id, state=state)
        except IntegrityError:
            await self._session.rollback()
            workspace = await self.get_by_project_id(project_id)
            if workspace is None:
                raise
            return workspace

    async def update_state(
        self,
        *,
        project_id: uuid.UUID,
        expected_revision: int,
        state: dict,
    ) -> ProjectWorkspace:
        stmt = (
            update(ProjectWorkspace)
            .where(ProjectWorkspace.project_id == project_id)
            .where(ProjectWorkspace.revision == expected_revision)
            .values(
                state=state,
                revision=expected_revision + 1,
                updated_at=utcnow(),
            )
        )
        result = await self._session.execute(stmt)
        if result.rowcount != 1:
            await self._session.rollback()
            raise WorkspaceRevisionConflictError

        await self._session.commit()
        workspace = await self.get_by_project_id(project_id)
        if workspace is None:
            raise RuntimeError("Workspace disappeared after update")
        return workspace
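Reviewer note on the optimistic-locking contract: update_state only succeeds while the caller's expected_revision still matches the stored row, so callers are expected to re-read on conflict. A minimal sketch under that assumption (session and project_id are placeholders):

    # Sketch only — retrying around the optimistic revision check
    repo = ProjectWorkspaceRepository(session)
    workspace = await repo.get_by_project_id(project_id)
    try:
        workspace = await repo.update_state(
            project_id=project_id,
            expected_revision=workspace.revision,
            state={**workspace.state, "phase": "VERIFY"},
        )
    except WorkspaceRevisionConflictError:
        # Someone else bumped the revision first; re-read and let the caller decide what to do.
        workspace = await repo.get_by_project_id(project_id)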
@@ -0,0 +1,78 @@
from __future__ import annotations

import uuid

from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.db.session import get_db
from cpv3.infrastructure.auth import get_current_user
from cpv3.modules.projects.service import ProjectService
from cpv3.modules.project_workspaces.schemas import (
    ProjectWorkspaceRead,
    WorkflowActionRequest,
)
from cpv3.modules.project_workspaces.service import (
    ProjectWorkspaceRevisionConflictError,
    ProjectWorkspaceService,
    ProjectWorkflowValidationError,
)
from cpv3.modules.users.models import User

router = APIRouter(prefix="/api/projects", tags=["Project Workspaces"])


@router.get("/{project_id}/workspace", response_model=ProjectWorkspaceRead)
@router.get(
    "/{project_id}/workspace/",
    response_model=ProjectWorkspaceRead,
    include_in_schema=False,
)
async def get_project_workspace(
    project_id: uuid.UUID,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> ProjectWorkspaceRead:
    project_service = ProjectService(db)
    project = await project_service.get_project(project_id)
    if project is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and project.owner_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    workspace_service = ProjectWorkspaceService(db)
    return await workspace_service.get_workspace(project=project)


@router.post("/{project_id}/workflow/actions", response_model=ProjectWorkspaceRead)
@router.post(
    "/{project_id}/workflow/actions/",
    response_model=ProjectWorkspaceRead,
    include_in_schema=False,
)
async def dispatch_project_workflow_action(
    project_id: uuid.UUID,
    body: WorkflowActionRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> ProjectWorkspaceRead:
    project_service = ProjectService(db)
    project = await project_service.get_project(project_id)
    if project is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and project.owner_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    workspace_service = ProjectWorkspaceService(db)
    try:
        return await workspace_service.apply_action(
            project=project,
            requester=current_user,
            action=body,
        )
    except ProjectWorkspaceRevisionConflictError as exc:
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(exc)) from exc
    except ProjectWorkflowValidationError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
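For orientation, dispatching a workflow action is a plain POST whose body carries the discriminator `type` plus the client's current `revision`. An illustrative client call (the host, token and revision are made up):

    # Illustration only — hypothetical client call against the new endpoint
    import httpx

    resp = httpx.post(
        f"https://api.example/api/projects/{project_id}/workflow/actions",
        json={"type": "CONFIRM_VERIFY", "revision": 3},
        headers={"Authorization": "Bearer <token>"},
    )
    # 200 -> updated workspace, 409 -> stale revision, 400 -> workflow validation error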
@@ -0,0 +1,471 @@
from __future__ import annotations

from enum import StrEnum
from typing import Annotated, Literal, get_args
from uuid import UUID

from pydantic import AliasChoices, Field, model_validator

from cpv3.common.schemas import Schema
from cpv3.modules.jobs.schemas import JobTypeEnum


WORKFLOW_VERSION = 1
VALID_JOB_TYPES = set(get_args(JobTypeEnum))

WorkspaceScreenEnum = Literal[
    "upload",
    "verify",
    "silence-settings",
    "processing",
    "fragments",
    "silence-apply-processing",
    "transcription-settings",
    "transcription-processing",
    "subtitle-revision",
    "caption-settings",
    "caption-processing",
    "caption-result",
]


class WorkflowPhase(StrEnum):
    INGEST = "INGEST"
    VERIFY = "VERIFY"
    SILENCE = "SILENCE"
    TRANSCRIPTION = "TRANSCRIPTION"
    CAPTIONS = "CAPTIONS"
    DONE = "DONE"


class SilenceWorkflowStatus(StrEnum):
    IDLE = "IDLE"
    CONFIGURED = "CONFIGURED"
    DETECTING = "DETECTING"
    REVIEWING = "REVIEWING"
    APPLYING = "APPLYING"
    COMPLETED = "COMPLETED"
    SKIPPED = "SKIPPED"


class TranscriptionWorkflowStatus(StrEnum):
    IDLE = "IDLE"
    PROCESSING = "PROCESSING"
    REVIEWING = "REVIEWING"
    COMPLETED = "COMPLETED"


class CaptionsWorkflowStatus(StrEnum):
    IDLE = "IDLE"
    CONFIGURED = "CONFIGURED"
    PROCESSING = "PROCESSING"
    COMPLETED = "COMPLETED"


class ActiveJobState(Schema):
    job_id: UUID
    job_type: JobTypeEnum


class WorkspaceViewState(Schema):
    used_file_ids: list[UUID] = Field(default_factory=list)
    selected_file_id: UUID | None = None


class SilenceSettingsState(Schema):
    min_silence_duration_ms: int = 200
    silence_threshold_db: int = 16
    padding_ms: int = 100


class CutRegionState(Schema):
    start_ms: int
    end_ms: int


class SilenceState(Schema):
    status: SilenceWorkflowStatus = SilenceWorkflowStatus.IDLE
    settings: SilenceSettingsState = Field(default_factory=SilenceSettingsState)
    detect_job_id: UUID | None = None
    detected_segments: list[CutRegionState] = Field(default_factory=list)
    reviewed_cuts: list[CutRegionState] = Field(
        default_factory=list,
        validation_alias=AliasChoices("reviewed_cuts", "cut_regions"),
        serialization_alias="reviewed_cuts",
    )
    duration_ms: int | None = None
    applied_output_file_id: UUID | None = Field(
        default=None,
        validation_alias=AliasChoices("applied_output_file_id", "output_file_id"),
        serialization_alias="applied_output_file_id",
    )


class TranscriptionRequestState(Schema):
    engine: Literal["whisper", "google", "salutespeech"] = "whisper"
    language: str | None = None
    model: str = "base"


class TranscriptionState(Schema):
    status: TranscriptionWorkflowStatus = TranscriptionWorkflowStatus.IDLE
    request: TranscriptionRequestState = Field(default_factory=TranscriptionRequestState)
    job_id: UUID | None = None
    artifact_id: UUID | None = None
    transcription_id: UUID | None = None
    reviewed: bool = False


class CaptionsState(Schema):
    status: CaptionsWorkflowStatus = CaptionsWorkflowStatus.IDLE
    preset_id: UUID | None = None
    style_config: dict | None = None
    render_job_id: UUID | None = Field(
        default=None,
        validation_alias=AliasChoices("render_job_id", "job_id"),
        serialization_alias="render_job_id",
    )
    output_file_id: UUID | None = None


class ProjectWorkspaceState(Schema):
    version: int = WORKFLOW_VERSION
    phase: WorkflowPhase = WorkflowPhase.INGEST
    active_job: ActiveJobState | None = None
    source_file_id: UUID | None = None
    workspace_view: WorkspaceViewState = Field(default_factory=WorkspaceViewState)
    silence: SilenceState = Field(default_factory=SilenceState)
    transcription: TranscriptionState = Field(default_factory=TranscriptionState)
    captions: CaptionsState = Field(default_factory=CaptionsState)


class ProjectWorkspaceRead(Schema):
    project_id: UUID
    revision: int
    version: int
    phase: WorkflowPhase
    current_screen: WorkspaceScreenEnum
    active_job: ActiveJobState | None
    source_file_id: UUID | None
    workspace_view: WorkspaceViewState
    silence: SilenceState
    transcription: TranscriptionState
    captions: CaptionsState


class WorkflowActionBase(Schema):
    type: str
    revision: int


class SetSourceFileAction(WorkflowActionBase):
    type: Literal["SET_SOURCE_FILE"]
    file_id: UUID = Field(
        validation_alias=AliasChoices("file_id", "source_file_id"),
        serialization_alias="file_id",
    )


class ResetSourceFileAction(WorkflowActionBase):
    type: Literal["RESET_SOURCE_FILE"]


class StartMediaConvertAction(WorkflowActionBase):
    type: Literal["START_MEDIA_CONVERT"]
    output_format: str = "mp4"
    out_folder: str = "output_files"


class ConfirmVerifyAction(WorkflowActionBase):
    type: Literal["CONFIRM_VERIFY"]


class SetSilenceSettingsAction(WorkflowActionBase):
    type: Literal["SET_SILENCE_SETTINGS"]
    settings: SilenceSettingsState = Field(default_factory=SilenceSettingsState)

    @model_validator(mode="before")
    @classmethod
    def normalize_settings(cls, data: object) -> object:
        if not isinstance(data, dict) or "settings" in data:
            return data

        return {
            **data,
            "settings": {
                "min_silence_duration_ms": data.get("min_silence_duration_ms", 200),
                "silence_threshold_db": data.get("silence_threshold_db", 16),
                "padding_ms": data.get("padding_ms", 100),
            },
        }

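Small behavioural note on the validator above: a flat legacy payload is lifted into the nested settings object before validation, so both shapes below produce the same action (numbers are sample values):

    # Illustration only — flat and nested payloads validate identically
    flat = {"type": "SET_SILENCE_SETTINGS", "revision": 2, "min_silence_duration_ms": 300}
    nested = {
        "type": "SET_SILENCE_SETTINGS",
        "revision": 2,
        "settings": {"min_silence_duration_ms": 300, "silence_threshold_db": 16, "padding_ms": 100},
    }
    assert (
        SetSilenceSettingsAction.model_validate(flat).settings
        == SetSilenceSettingsAction.model_validate(nested).settings
    )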
class StartSilenceDetectAction(WorkflowActionBase):
    type: Literal["START_SILENCE_DETECT"]


class SetSilenceCutsAction(WorkflowActionBase):
    type: Literal["SET_SILENCE_CUTS"]
    cuts: list[CutRegionState] = Field(
        validation_alias=AliasChoices("cuts", "reviewed_cuts", "cut_regions"),
    )


class SkipSilenceApplyAction(WorkflowActionBase):
    type: Literal["SKIP_SILENCE_APPLY"]


class StartSilenceApplyAction(WorkflowActionBase):
    type: Literal["START_SILENCE_APPLY"]
    cuts: list[CutRegionState] | None = None
    out_folder: str = "output_files"
    output_name: str | None = None


class ReopenSilenceReviewAction(WorkflowActionBase):
    type: Literal["REOPEN_SILENCE_REVIEW"]


class StartTranscriptionAction(WorkflowActionBase):
    type: Literal["START_TRANSCRIPTION"]
    engine: Literal["whisper", "google", "salutespeech"] = "whisper"
    language: str | None = None
    model: str = "base"
    request: TranscriptionRequestState | None = None

    @model_validator(mode="after")
    def normalize_request(self) -> "StartTranscriptionAction":
        if self.request is None:
            self.request = TranscriptionRequestState(
                engine=self.engine,
                language=self.language,
                model=self.model,
            )
            return self

        self.engine = self.request.engine
        self.language = self.request.language
        self.model = self.request.model
        return self


class ReopenTranscriptionConfigAction(WorkflowActionBase):
    type: Literal["REOPEN_TRANSCRIPTION_CONFIG"]


class MarkTranscriptionReviewedAction(WorkflowActionBase):
    type: Literal["MARK_TRANSCRIPTION_REVIEWED"]


class SelectCaptionPresetAction(WorkflowActionBase):
    type: Literal["SELECT_CAPTION_PRESET"]
    preset_id: UUID | None = None
    style_config: dict | None = None


class StartCaptionRenderAction(WorkflowActionBase):
    type: Literal["START_CAPTION_RENDER"]
    folder: str = "output_files"


class ReopenCaptionConfigAction(WorkflowActionBase):
    type: Literal["REOPEN_CAPTION_CONFIG"]


class SetWorkspaceViewAction(WorkflowActionBase):
    type: Literal["SET_WORKSPACE_VIEW"]
    workspace_view: WorkspaceViewState

    @model_validator(mode="before")
    @classmethod
    def normalize_workspace_view(cls, data: object) -> object:
        if not isinstance(data, dict) or "workspace_view" in data:
            return data

        return {
            **data,
            "workspace_view": {
                "used_file_ids": data.get("used_file_ids", []),
                "selected_file_id": data.get("selected_file_id"),
            },
        }


WorkflowActionRequest = Annotated[
    (
        SetSourceFileAction
        | ResetSourceFileAction
        | StartMediaConvertAction
        | ConfirmVerifyAction
        | SetSilenceSettingsAction
        | StartSilenceDetectAction
        | SetSilenceCutsAction
        | SkipSilenceApplyAction
        | StartSilenceApplyAction
        | ReopenSilenceReviewAction
        | StartTranscriptionAction
        | ReopenTranscriptionConfigAction
        | MarkTranscriptionReviewedAction
        | SelectCaptionPresetAction
        | StartCaptionRenderAction
        | ReopenCaptionConfigAction
        | SetWorkspaceViewAction
    ),
    Field(discriminator="type"),
]

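As a quick check of the discriminated union: pydantic resolves an incoming body to the concrete action class via the `type` field. An illustrative standalone parse (values made up; assumes pydantic v2, which the model_validator usage above implies):

    # Illustration only — resolving an action by its "type" discriminator
    from pydantic import TypeAdapter

    adapter = TypeAdapter(WorkflowActionRequest)
    action = adapter.validate_python(
        {"type": "SET_SILENCE_CUTS", "revision": 5, "cuts": [{"start_ms": 0, "end_ms": 1500}]}
    )
    assert isinstance(action, SetSilenceCutsAction)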
def build_default_workspace_state() -> ProjectWorkspaceState:
    return ProjectWorkspaceState()


def build_workspace_state_from_legacy(
    legacy_workspace_state: dict | None,
) -> ProjectWorkspaceState:
    state = build_default_workspace_state()
    if not isinstance(legacy_workspace_state, dict):
        return state

    wizard = legacy_workspace_state.get("wizard")
    if not isinstance(wizard, dict):
        wizard = {}

    source_file_id = _parse_uuid(wizard.get("primary_file_id"))
    if source_file_id is not None:
        state.source_file_id = source_file_id

    used_file_ids: list[UUID] = []
    used_files = legacy_workspace_state.get("used_files")
    if isinstance(used_files, list):
        for item in used_files:
            if not isinstance(item, dict):
                continue
            file_id = _parse_uuid(item.get("id"))
            if file_id is not None and file_id not in used_file_ids:
                used_file_ids.append(file_id)

    if source_file_id is not None and source_file_id not in used_file_ids:
        used_file_ids.insert(0, source_file_id)

    state.workspace_view.used_file_ids = used_file_ids
    if source_file_id is not None and source_file_id in used_file_ids:
        state.workspace_view.selected_file_id = source_file_id

    active_job_id = _parse_uuid(wizard.get("active_job_id"))
    active_job_type = wizard.get("active_job_type")
    if active_job_id is not None and active_job_type in VALID_JOB_TYPES:
        state.active_job = ActiveJobState(
            job_id=active_job_id,
            job_type=active_job_type,
        )

    silence_job_id = _parse_uuid(wizard.get("silence_job_id"))
    if silence_job_id is not None:
        state.silence.detect_job_id = silence_job_id

    transcription_artifact_id = _parse_uuid(wizard.get("transcription_artifact_id"))
    if transcription_artifact_id is not None:
        state.transcription.artifact_id = transcription_artifact_id

    caption_preset_id = _parse_uuid(wizard.get("caption_preset_id"))
    if caption_preset_id is not None:
        state.captions.preset_id = caption_preset_id

    caption_style_config = wizard.get("caption_style_config")
    if isinstance(caption_style_config, dict):
        state.captions.style_config = caption_style_config

    captioned_video_file_id = _parse_uuid(wizard.get("captioned_video_file_id"))
    if captioned_video_file_id is not None:
        state.captions.output_file_id = captioned_video_file_id

    silence_settings = wizard.get("silence_settings")
    if isinstance(silence_settings, dict):
        state.silence.settings = SilenceSettingsState.model_validate(silence_settings)
        state.silence.status = SilenceWorkflowStatus.CONFIGURED

    current_step = wizard.get("current_step")
    step_phase_map = {
        "upload": WorkflowPhase.INGEST,
        "verify": WorkflowPhase.VERIFY,
        "silence-settings": WorkflowPhase.SILENCE,
        "processing": WorkflowPhase.SILENCE,
        "fragments": WorkflowPhase.SILENCE,
        "silence-apply-processing": WorkflowPhase.SILENCE,
        "transcription-settings": WorkflowPhase.TRANSCRIPTION,
        "transcription-processing": WorkflowPhase.TRANSCRIPTION,
        "subtitle-revision": WorkflowPhase.TRANSCRIPTION,
        "caption-settings": WorkflowPhase.CAPTIONS,
        "caption-processing": WorkflowPhase.CAPTIONS,
        "caption-result": WorkflowPhase.DONE,
    }
    if current_step in step_phase_map:
        state.phase = step_phase_map[current_step]

    if current_step == "processing":
        state.silence.status = SilenceWorkflowStatus.DETECTING
    elif current_step == "fragments":
        state.silence.status = SilenceWorkflowStatus.REVIEWING
    elif current_step == "silence-apply-processing":
        state.silence.status = SilenceWorkflowStatus.APPLYING
    elif current_step == "transcription-processing":
        state.transcription.status = TranscriptionWorkflowStatus.PROCESSING
    elif current_step == "subtitle-revision":
        state.transcription.status = TranscriptionWorkflowStatus.REVIEWING
    elif current_step == "caption-settings":
        state.captions.status = CaptionsWorkflowStatus.CONFIGURED
    elif current_step == "caption-processing":
        state.captions.status = CaptionsWorkflowStatus.PROCESSING
    elif current_step == "caption-result":
        state.captions.status = CaptionsWorkflowStatus.COMPLETED

    if state.active_job is not None:
        if state.active_job.job_type == "MEDIA_CONVERT":
            state.phase = WorkflowPhase.VERIFY
        elif state.active_job.job_type == "SILENCE_DETECT":
            state.phase = WorkflowPhase.SILENCE
            state.silence.status = SilenceWorkflowStatus.DETECTING
            state.silence.detect_job_id = state.active_job.job_id
        elif state.active_job.job_type == "SILENCE_APPLY":
            state.phase = WorkflowPhase.SILENCE
            state.silence.status = SilenceWorkflowStatus.APPLYING
        elif state.active_job.job_type == "TRANSCRIPTION_GENERATE":
            state.phase = WorkflowPhase.TRANSCRIPTION
            state.transcription.status = TranscriptionWorkflowStatus.PROCESSING
            state.transcription.job_id = state.active_job.job_id
        elif state.active_job.job_type == "CAPTIONS_GENERATE":
            state.phase = WorkflowPhase.CAPTIONS
            state.captions.status = CaptionsWorkflowStatus.PROCESSING
            state.captions.render_job_id = state.active_job.job_id

    if captioned_video_file_id is not None:
        state.phase = WorkflowPhase.DONE
        state.captions.status = CaptionsWorkflowStatus.COMPLETED
    elif transcription_artifact_id is not None and (
        state.transcription.status == TranscriptionWorkflowStatus.IDLE
    ):
        state.phase = WorkflowPhase.TRANSCRIPTION
        state.transcription.status = TranscriptionWorkflowStatus.REVIEWING
    elif silence_job_id is not None and state.silence.status == SilenceWorkflowStatus.IDLE:
        state.phase = WorkflowPhase.SILENCE
        state.silence.status = SilenceWorkflowStatus.REVIEWING
    elif source_file_id is not None and state.phase == WorkflowPhase.INGEST:
        state.phase = WorkflowPhase.VERIFY

    return state


def _parse_uuid(value: object) -> UUID | None:
    if value is None:
        return None
    try:
        return UUID(str(value))
    except (TypeError, ValueError):
        return None


# Backward-compatible aliases used by existing tests and frontend hand-written types.
TaskWorkflowActiveJob = ActiveJobState
SilenceSettingsPayload = SilenceSettingsState
WorkflowSilenceState = SilenceState
WorkflowTranscriptionRequest = TranscriptionRequestState
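Reviewer note: the legacy converter is pure, so it is easy to exercise directly; the wizard payload shape below is assumed from the keys the function reads:

    # Illustration only — converting an old wizard-style blob into the typed state
    legacy = {
        "wizard": {
            "primary_file_id": "7a0b1c2d-3e4f-4a5b-8c6d-7e8f9a0b1c2d",  # hypothetical id
            "current_step": "fragments",
            "silence_settings": {"min_silence_duration_ms": 250},
        },
        "used_files": [{"id": "7a0b1c2d-3e4f-4a5b-8c6d-7e8f9a0b1c2d"}],
    }
    state = build_workspace_state_from_legacy(legacy)
    assert state.phase is WorkflowPhase.SILENCE
    assert state.silence.status is SilenceWorkflowStatus.REVIEWING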
@@ -0,0 +1,824 @@
from __future__ import annotations

from dataclasses import dataclass
from uuid import UUID

from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.modules.captions.models import CaptionPreset
from cpv3.modules.captions.repository import CaptionPresetRepository
from cpv3.modules.files.models import File
from cpv3.modules.files.repository import FileRepository
from cpv3.modules.jobs.models import Job
from cpv3.modules.media.repository import ArtifactRepository
from cpv3.modules.project_workspaces.models import ProjectWorkspace
from cpv3.modules.project_workspaces.repository import (
    ProjectWorkspaceRepository,
    WorkspaceRevisionConflictError as RepositoryWorkspaceRevisionConflictError,
)
from cpv3.modules.project_workspaces.schemas import (
    ActiveJobState,
    CaptionsState,
    CaptionsWorkflowStatus,
    ConfirmVerifyAction,
    CutRegionState,
    MarkTranscriptionReviewedAction,
    ProjectWorkspaceRead,
    ProjectWorkspaceState,
    ReopenCaptionConfigAction,
    ReopenSilenceReviewAction,
    ReopenTranscriptionConfigAction,
    ResetSourceFileAction,
    SelectCaptionPresetAction,
    SetSilenceCutsAction,
    SetSilenceSettingsAction,
    SetSourceFileAction,
    SetWorkspaceViewAction,
    SilenceState,
    SilenceWorkflowStatus,
    SkipSilenceApplyAction,
    StartCaptionRenderAction,
    StartMediaConvertAction,
    StartSilenceApplyAction,
    StartSilenceDetectAction,
    StartTranscriptionAction,
    TranscriptionState,
    TranscriptionWorkflowStatus,
    WorkflowActionRequest,
    WorkflowPhase,
    WorkspaceScreenEnum,
    WorkspaceViewState,
    build_workspace_state_from_legacy,
)
from cpv3.modules.projects.models import Project
from cpv3.modules.projects.repository import ProjectRepository
from cpv3.modules.tasks.schemas import (
    CaptionsGenerateRequest,
    MediaConvertRequest,
    SilenceApplyRequest,
    SilenceDetectRequest,
    TranscriptionGenerateRequest,
)
from cpv3.modules.transcription.repository import TranscriptionRepository
from cpv3.modules.users.models import User


class ProjectWorkspaceRevisionConflictError(RuntimeError):
    pass


class ProjectWorkflowValidationError(ValueError):
    pass


WorkspaceRevisionConflictError = ProjectWorkspaceRevisionConflictError


@dataclass(slots=True)
class _WorkspaceItemValidationResult:
    item_id: UUID
    file: File | None = None


class ProjectWorkspaceService:
    def __init__(self, session: AsyncSession) -> None:
        self._session = session
        self._repo = ProjectWorkspaceRepository(session)
        self._project_repo = ProjectRepository(session)
        self._file_repo = FileRepository(session)
        self._artifact_repo = ArtifactRepository(session)
        self._transcription_repo = TranscriptionRepository(session)
        self._caption_preset_repo = CaptionPresetRepository(session)
        self._task_service_factory = self._build_task_service

    async def create_for_project(self, project: Project) -> ProjectWorkspaceRead:
        workspace = await self._get_or_create_workspace(project=project)
        return self._to_read_model(workspace)

    async def get_workspace(self, *, project: Project) -> ProjectWorkspaceRead:
        workspace = await self._get_or_create_workspace(project=project)
        return self._to_read_model(workspace)

    async def apply_action(
        self,
        *,
        project: Project,
        requester: User,
        action: WorkflowActionRequest,
    ) -> ProjectWorkspaceRead:
        workspace = await self._get_or_create_workspace(project=project)
        if workspace.revision != action.revision:
            raise ProjectWorkspaceRevisionConflictError("Версия рабочего пространства устарела")

        state = ProjectWorkspaceState.model_validate(workspace.state)
        next_state = await self._apply_action_to_state(
            project=project,
            requester=requester,
            state=state,
            action=action,
        )
        workspace = await self._save_workspace_state(
            project_id=project.id,
            expected_revision=workspace.revision,
            state=next_state,
        )
        return self._to_read_model(workspace)

    async def handle_job_update(self, *, job: Job) -> ProjectWorkspaceRead | None:
        if job.project_id is None:
            return None

        project = await self._project_repo.get_by_id(job.project_id)
        if project is None:
            return None

        workspace = await self._get_or_create_workspace(project=project)
        state = ProjectWorkspaceState.model_validate(workspace.state)
        next_state = self._apply_job_event_to_state(state, job)

        if next_state.model_dump(mode="json") == state.model_dump(mode="json"):
            return self._to_read_model(workspace)

        try:
            workspace = await self._save_workspace_state(
                project_id=project.id,
                expected_revision=workspace.revision,
                state=next_state,
            )
        except ProjectWorkspaceRevisionConflictError:
            workspace = await self._get_or_create_workspace(project=project)

        return self._to_read_model(workspace)

    async def apply_job_update(
        self,
        *,
        project: Project,
        job: Job,
    ) -> ProjectWorkspaceRead | None:
        workspace = await self._get_or_create_workspace(project=project)
        state = ProjectWorkspaceState.model_validate(workspace.state)
        next_state = self._apply_job_event_to_state(state, job)

        if next_state.model_dump(mode="json") == state.model_dump(mode="json"):
            return self._to_read_model(workspace)

        try:
            workspace = await self._save_workspace_state(
                project_id=project.id,
                expected_revision=workspace.revision,
                state=next_state,
            )
        except ProjectWorkspaceRevisionConflictError:
            workspace = await self._get_or_create_workspace(project=project)

        return self._to_read_model(workspace)

    async def apply_job_event(self, job: Job) -> None:
        await self.handle_job_update(job=job)

async def _get_or_create_workspace(self, *, project: Project) -> ProjectWorkspace:
|
||||
workspace = await self._repo.get_by_project_id(project.id)
|
||||
if workspace is not None:
|
||||
return workspace
|
||||
|
||||
initial_state = build_workspace_state_from_legacy(getattr(project, "workspace_state", None))
|
||||
state_payload = initial_state.model_dump(mode="json")
|
||||
get_or_create = getattr(self._repo, "get_or_create", None)
|
||||
if callable(get_or_create):
|
||||
return await get_or_create(project_id=project.id, state=state_payload)
|
||||
return await self._repo.create(project_id=project.id, state=state_payload)
|
||||
|
||||
async def _save_workspace_state(
|
||||
self,
|
||||
*,
|
||||
project_id: UUID,
|
||||
expected_revision: int,
|
||||
state: ProjectWorkspaceState,
|
||||
) -> ProjectWorkspace:
|
||||
try:
|
||||
return await self._repo.update_state(
|
||||
project_id=project_id,
|
||||
expected_revision=expected_revision,
|
||||
state=state.model_dump(mode="json"),
|
||||
)
|
||||
except RepositoryWorkspaceRevisionConflictError as exc:
|
||||
raise ProjectWorkspaceRevisionConflictError(
|
||||
"Версия рабочего пространства устарела"
|
||||
) from exc
|
||||
|
||||
async def _apply_action_to_state(
|
||||
self,
|
||||
*,
|
||||
project: Project,
|
||||
requester: User,
|
||||
state: ProjectWorkspaceState,
|
||||
action: WorkflowActionRequest,
|
||||
) -> ProjectWorkspaceState:
|
||||
next_state = state.model_copy(deep=True)
|
||||
|
||||
if isinstance(action, SetSourceFileAction):
|
||||
file_result = await self._validate_accessible_file(
|
||||
requester=requester,
|
||||
project=project,
|
||||
file_id=action.file_id,
|
||||
)
|
||||
next_state.phase = WorkflowPhase.VERIFY
|
||||
next_state.active_job = None
|
||||
next_state.source_file_id = file_result.item_id
|
||||
next_state.silence = SilenceState()
|
||||
next_state.transcription = TranscriptionState()
|
||||
next_state.captions = CaptionsState()
|
||||
next_state.workspace_view = WorkspaceViewState(
|
||||
used_file_ids=[file_result.item_id],
|
||||
selected_file_id=file_result.item_id,
|
||||
)
|
||||
return next_state
|
||||
|
||||
if isinstance(action, ResetSourceFileAction):
|
||||
return ProjectWorkspaceState(version=state.version)
|
||||
|
||||
if isinstance(action, StartMediaConvertAction):
|
||||
self._require_phase(next_state, WorkflowPhase.VERIFY)
|
||||
source_file = await self._require_source_file(
|
||||
next_state,
|
||||
project=project,
|
||||
requester=requester,
|
||||
)
|
||||
task_service = self._task_service_factory()
|
||||
response = await task_service.submit_media_convert(
|
||||
requester=requester,
|
||||
request=MediaConvertRequest(
|
||||
file_key=source_file.path,
|
||||
out_folder=action.out_folder,
|
||||
output_format=action.output_format,
|
||||
project_id=project.id,
|
||||
),
|
||||
)
|
||||
next_state.active_job = ActiveJobState(
|
||||
job_id=response.job_id,
|
||||
job_type="MEDIA_CONVERT",
|
||||
)
|
||||
return next_state
|
||||
|
||||
if isinstance(action, ConfirmVerifyAction):
|
||||
self._require_phase(next_state, WorkflowPhase.VERIFY)
|
||||
await self._require_source_file(
|
||||
next_state,
|
||||
project=project,
|
||||
requester=requester,
|
||||
)
|
||||
next_state.phase = WorkflowPhase.SILENCE
|
||||
next_state.active_job = None
|
||||
next_state.silence.status = SilenceWorkflowStatus.CONFIGURED
|
||||
return next_state
|
||||
|
||||
if isinstance(action, SetSilenceSettingsAction):
|
||||
self._require_phase(next_state, WorkflowPhase.SILENCE)
|
||||
if next_state.silence.status in {
|
||||
SilenceWorkflowStatus.DETECTING,
|
||||
SilenceWorkflowStatus.APPLYING,
|
||||
}:
|
||||
raise ProjectWorkflowValidationError("Нельзя менять настройки во время обработки")
|
||||
next_state.silence.settings = action.settings
|
||||
next_state.silence.status = SilenceWorkflowStatus.CONFIGURED
|
||||
return next_state
|
||||
|
||||
if isinstance(action, StartSilenceDetectAction):
|
||||
self._require_phase(next_state, WorkflowPhase.SILENCE)
|
||||
source_file = await self._require_source_file(
|
||||
next_state,
|
||||
project=project,
|
||||
requester=requester,
|
||||
)
|
||||
task_service = self._task_service_factory()
|
||||
response = await task_service.submit_silence_detect(
|
||||
requester=requester,
|
||||
request=SilenceDetectRequest(
|
||||
file_key=source_file.path,
|
||||
project_id=project.id,
|
||||
min_silence_duration_ms=next_state.silence.settings.min_silence_duration_ms,
|
||||
silence_threshold_db=next_state.silence.settings.silence_threshold_db,
|
||||
padding_ms=next_state.silence.settings.padding_ms,
|
||||
),
|
||||
)
|
||||
next_state.active_job = ActiveJobState(
|
||||
job_id=response.job_id,
|
||||
job_type="SILENCE_DETECT",
|
||||
)
|
||||
next_state.silence.status = SilenceWorkflowStatus.DETECTING
|
||||
next_state.silence.detect_job_id = response.job_id
|
||||
next_state.silence.detected_segments = []
|
||||
next_state.silence.reviewed_cuts = []
|
||||
next_state.silence.duration_ms = None
|
||||
next_state.silence.applied_output_file_id = None
|
||||
return next_state
|
||||
|
||||
if isinstance(action, SetSilenceCutsAction):
|
||||
self._require_phase(next_state, WorkflowPhase.SILENCE)
|
||||
next_state.silence.reviewed_cuts = [
|
||||
CutRegionState.model_validate(cut) for cut in action.cuts
|
||||
]
|
||||
next_state.silence.status = SilenceWorkflowStatus.REVIEWING
|
||||
return next_state
|
||||
|
||||
if isinstance(action, SkipSilenceApplyAction):
|
||||
self._require_phase(next_state, WorkflowPhase.SILENCE)
|
||||
next_state.phase = WorkflowPhase.TRANSCRIPTION
|
||||
next_state.active_job = None
|
||||
next_state.silence.status = SilenceWorkflowStatus.SKIPPED
|
||||
return next_state
|
||||
|
||||
        if isinstance(action, StartSilenceApplyAction):
            self._require_phase(next_state, WorkflowPhase.SILENCE)
            source_file = await self._require_source_file(
                next_state,
                project=project,
                requester=requester,
            )
            if action.cuts is not None:
                next_state.silence.reviewed_cuts = [
                    CutRegionState.model_validate(cut) for cut in action.cuts
                ]
            if not next_state.silence.reviewed_cuts:
                raise ProjectWorkflowValidationError("Нет выбранных фрагментов для применения")
            task_service = self._task_service_factory()
            response = await task_service.submit_silence_apply(
                requester=requester,
                request=SilenceApplyRequest(
                    file_key=source_file.path,
                    out_folder=action.out_folder,
                    project_id=project.id,
                    output_name=action.output_name,
                    cuts=[cut.model_dump(mode="json") for cut in next_state.silence.reviewed_cuts],
                ),
            )
            next_state.active_job = ActiveJobState(
                job_id=response.job_id,
                job_type="SILENCE_APPLY",
            )
            next_state.silence.status = SilenceWorkflowStatus.APPLYING
            return next_state

        if isinstance(action, ReopenSilenceReviewAction):
            if not next_state.silence.detected_segments:
                raise ProjectWorkflowValidationError("Нет результатов детекции тишины")
            next_state.phase = WorkflowPhase.SILENCE
            next_state.active_job = None
            next_state.silence.status = SilenceWorkflowStatus.REVIEWING
            return next_state

        if isinstance(action, StartTranscriptionAction):
            if next_state.phase not in {WorkflowPhase.SILENCE, WorkflowPhase.TRANSCRIPTION}:
                raise ProjectWorkflowValidationError("Транскрибация пока недоступна")
            transcription_file = await self._resolve_transcription_input_file(
                next_state,
                project=project,
                requester=requester,
            )
            request_payload = action.request or TranscriptionGenerateRequest(
                engine=action.engine,
                language=action.language,
                model=action.model,
                file_key=transcription_file.path,
                project_id=project.id,
            )
            task_service = self._task_service_factory()
            response = await task_service.submit_transcription_generate(
                requester=requester,
                request=TranscriptionGenerateRequest(
                    file_key=transcription_file.path,
                    project_id=project.id,
                    engine=request_payload.engine,
                    language=request_payload.language,
                    model=request_payload.model,
                ),
            )
            next_state.phase = WorkflowPhase.TRANSCRIPTION
            next_state.active_job = ActiveJobState(
                job_id=response.job_id,
                job_type="TRANSCRIPTION_GENERATE",
            )
            next_state.transcription.status = TranscriptionWorkflowStatus.PROCESSING
            next_state.transcription.request = action.request or next_state.transcription.request
            next_state.transcription.job_id = response.job_id
            next_state.transcription.artifact_id = None
            next_state.transcription.transcription_id = None
            next_state.transcription.reviewed = False
            next_state.captions = CaptionsState()
            return next_state

        if isinstance(action, ReopenTranscriptionConfigAction):
            if next_state.phase not in {
                WorkflowPhase.TRANSCRIPTION,
                WorkflowPhase.CAPTIONS,
                WorkflowPhase.DONE,
            }:
                raise ProjectWorkflowValidationError("Нельзя вернуться к настройкам транскрибации")
            next_state.phase = WorkflowPhase.TRANSCRIPTION
            next_state.active_job = None
            next_state.transcription = TranscriptionState(
                request=next_state.transcription.request,
            )
            next_state.captions = CaptionsState()
            return next_state

        if isinstance(action, MarkTranscriptionReviewedAction):
            self._require_phase(next_state, WorkflowPhase.TRANSCRIPTION)
            if next_state.transcription.transcription_id is None:
                raise ProjectWorkflowValidationError("Сначала завершите транскрибацию")
            next_state.transcription.reviewed = True
            next_state.transcription.status = TranscriptionWorkflowStatus.COMPLETED
            next_state.phase = WorkflowPhase.CAPTIONS
            if next_state.captions.status == CaptionsWorkflowStatus.IDLE:
                next_state.captions.status = CaptionsWorkflowStatus.CONFIGURED
            return next_state

        if isinstance(action, SelectCaptionPresetAction):
            if next_state.phase not in {WorkflowPhase.CAPTIONS, WorkflowPhase.DONE}:
                raise ProjectWorkflowValidationError("Сначала завершите транскрибацию")
            preset = await self._validate_caption_preset(
                requester=requester,
                preset_id=action.preset_id,
            )
            next_state.phase = WorkflowPhase.CAPTIONS
            next_state.active_job = None
            next_state.captions.preset_id = action.preset_id
            next_state.captions.style_config = (
                action.style_config
                if action.style_config is not None
                else (preset.style_config if preset is not None else None)
            )
            next_state.captions.render_job_id = None
            next_state.captions.output_file_id = None
            next_state.captions.status = CaptionsWorkflowStatus.CONFIGURED
            return next_state

        if isinstance(action, StartCaptionRenderAction):
            if next_state.phase not in {WorkflowPhase.CAPTIONS, WorkflowPhase.DONE}:
                raise ProjectWorkflowValidationError("Рендер субтитров пока недоступен")
            if next_state.transcription.transcription_id is None:
                raise ProjectWorkflowValidationError("Сначала завершите транскрибацию")
            video_file = await self._resolve_caption_video_file(
                next_state,
                project=project,
                requester=requester,
            )
            transcription = await self._transcription_repo.get_by_id(
                next_state.transcription.transcription_id
            )
            if transcription is None:
                raise ProjectWorkflowValidationError("Транскрипция не найдена")

            task_service = self._task_service_factory()
            response = await task_service.submit_captions_generate(
                requester=requester,
                request=CaptionsGenerateRequest(
                    video_s3_path=video_file.path,
                    folder=action.folder,
                    transcription_id=transcription.id,
                    project_id=project.id,
                    preset_id=next_state.captions.preset_id,
                    style_config=next_state.captions.style_config,
                ),
            )
            next_state.phase = WorkflowPhase.CAPTIONS
            next_state.active_job = ActiveJobState(
                job_id=response.job_id,
                job_type="CAPTIONS_GENERATE",
            )
            next_state.captions.render_job_id = response.job_id
            next_state.captions.output_file_id = None
            next_state.captions.status = CaptionsWorkflowStatus.PROCESSING
            return next_state

        if isinstance(action, ReopenCaptionConfigAction):
            if next_state.phase not in {WorkflowPhase.CAPTIONS, WorkflowPhase.DONE}:
                raise ProjectWorkflowValidationError("Нельзя вернуться к настройкам рендера")
            next_state.phase = WorkflowPhase.CAPTIONS
            next_state.active_job = None
            next_state.captions.render_job_id = None
            next_state.captions.output_file_id = None
            next_state.captions.status = CaptionsWorkflowStatus.CONFIGURED
            return next_state

        if isinstance(action, SetWorkspaceViewAction):
            await self._validate_workspace_view_items(
                requester=requester,
                project=project,
                used_file_ids=action.workspace_view.used_file_ids,
                selected_file_id=action.workspace_view.selected_file_id,
            )
            next_state.workspace_view = action.workspace_view
            return next_state

        raise ProjectWorkflowValidationError("Неподдерживаемое действие")

    def _apply_job_event_to_state(
        self,
        state: ProjectWorkspaceState,
        job: Job,
    ) -> ProjectWorkspaceState:
        next_state = state.model_copy(deep=True)
        output_data = job.output_data or {}

        if next_state.active_job is not None and next_state.active_job.job_id == job.id:
            next_state.active_job = None

        if job.status in {"FAILED", "CANCELLED"}:
            if job.job_type == "MEDIA_CONVERT":
                next_state.phase = WorkflowPhase.VERIFY
            elif job.job_type == "SILENCE_DETECT":
                next_state.phase = WorkflowPhase.SILENCE
                next_state.silence.status = SilenceWorkflowStatus.CONFIGURED
                next_state.silence.detect_job_id = None
            elif job.job_type == "SILENCE_APPLY":
                next_state.phase = WorkflowPhase.SILENCE
                next_state.silence.status = (
                    SilenceWorkflowStatus.REVIEWING
                    if next_state.silence.detected_segments
                    else SilenceWorkflowStatus.CONFIGURED
                )
            elif job.job_type == "TRANSCRIPTION_GENERATE":
                next_state.phase = WorkflowPhase.TRANSCRIPTION
                next_state.transcription.status = TranscriptionWorkflowStatus.IDLE
                next_state.transcription.job_id = None
            elif job.job_type == "CAPTIONS_GENERATE":
                next_state.phase = WorkflowPhase.CAPTIONS
                next_state.captions.status = CaptionsWorkflowStatus.CONFIGURED
                next_state.captions.render_job_id = None
            return next_state

        if job.status != "DONE":
            return next_state

        if job.job_type == "MEDIA_CONVERT":
            converted_file_id = self._parse_uuid(output_data.get("file_id"))
            if converted_file_id is None:
                return next_state
            next_state.source_file_id = converted_file_id
            next_state.phase = WorkflowPhase.VERIFY
            self._append_used_file(next_state, converted_file_id)
            next_state.workspace_view.selected_file_id = converted_file_id
            return next_state

        if job.job_type == "SILENCE_DETECT":
            silent_segments = output_data.get("silent_segments")
            if not isinstance(silent_segments, list):
                return next_state
            cut_regions = [CutRegionState.model_validate(item) for item in silent_segments]
            next_state.phase = WorkflowPhase.SILENCE
            next_state.silence.detect_job_id = job.id
            next_state.silence.detected_segments = cut_regions
            next_state.silence.reviewed_cuts = cut_regions
            next_state.silence.duration_ms = self._parse_int(output_data.get("duration_ms"))
            next_state.silence.status = SilenceWorkflowStatus.REVIEWING
            return next_state

        if job.job_type == "SILENCE_APPLY":
            output_file_id = self._parse_uuid(output_data.get("file_id"))
            if output_file_id is None:
                return next_state
            next_state.phase = WorkflowPhase.TRANSCRIPTION
            next_state.silence.applied_output_file_id = output_file_id
            next_state.silence.status = SilenceWorkflowStatus.COMPLETED
            self._append_used_file(next_state, output_file_id)
            next_state.workspace_view.selected_file_id = output_file_id
            return next_state

        if job.job_type == "TRANSCRIPTION_GENERATE":
            artifact_id = self._parse_uuid(output_data.get("artifact_id"))
            transcription_id = self._parse_uuid(output_data.get("transcription_id"))
            if artifact_id is None or transcription_id is None:
                return next_state
            next_state.phase = WorkflowPhase.TRANSCRIPTION
            next_state.transcription.status = TranscriptionWorkflowStatus.REVIEWING
            next_state.transcription.job_id = job.id
            next_state.transcription.artifact_id = artifact_id
            next_state.transcription.transcription_id = transcription_id
            next_state.transcription.reviewed = False
            return next_state

        if job.job_type == "CAPTIONS_GENERATE":
            output_file_id = self._parse_uuid(output_data.get("file_id"))
            if output_file_id is None:
                return next_state
            next_state.phase = WorkflowPhase.DONE
            next_state.captions.status = CaptionsWorkflowStatus.COMPLETED
            next_state.captions.render_job_id = job.id
            next_state.captions.output_file_id = output_file_id
            self._append_used_file(next_state, output_file_id)
            next_state.workspace_view.selected_file_id = output_file_id
            return next_state

        return next_state

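    # Illustrative sketch (not from the repo): the SILENCE_DETECT branch above reads
    # only two keys from a finished job's output_data, roughly:
    #
    #     {
    #         "silent_segments": [...],   # items validated by CutRegionState.model_validate(...)
    #         "duration_ms": 93000,       # optional; parsed defensively via _parse_int
    #     }
    #
    # The exact per-segment fields are defined by CutRegionState and are not shown
    # in this listing.
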
    def _to_read_model(self, workspace: ProjectWorkspace) -> ProjectWorkspaceRead:
        state = ProjectWorkspaceState.model_validate(workspace.state)
        return ProjectWorkspaceRead(
            project_id=workspace.project_id,
            revision=workspace.revision,
            version=state.version,
            phase=state.phase,
            current_screen=self._derive_current_screen(state),
            active_job=state.active_job,
            source_file_id=state.source_file_id,
            workspace_view=state.workspace_view,
            silence=state.silence,
            transcription=state.transcription,
            captions=state.captions,
        )

    def _derive_current_screen(self, state: ProjectWorkspaceState) -> WorkspaceScreenEnum:
        if state.phase == WorkflowPhase.INGEST:
            return "upload"
        if state.phase == WorkflowPhase.VERIFY:
            return "verify"
        if state.phase == WorkflowPhase.SILENCE:
            if state.silence.status == SilenceWorkflowStatus.DETECTING:
                return "processing"
            if state.silence.status == SilenceWorkflowStatus.REVIEWING:
                return "fragments"
            if state.silence.status == SilenceWorkflowStatus.APPLYING:
                return "silence-apply-processing"
            return "silence-settings"
        if state.phase == WorkflowPhase.TRANSCRIPTION:
            if state.transcription.status == TranscriptionWorkflowStatus.PROCESSING:
                return "transcription-processing"
            if state.transcription.status in {
                TranscriptionWorkflowStatus.REVIEWING,
                TranscriptionWorkflowStatus.COMPLETED,
            }:
                return "subtitle-revision"
            return "transcription-settings"
        if state.phase == WorkflowPhase.CAPTIONS:
            if state.captions.status == CaptionsWorkflowStatus.PROCESSING:
                return "caption-processing"
            if state.captions.status == CaptionsWorkflowStatus.COMPLETED:
                return "caption-result"
            return "caption-settings"
        return "caption-result"

    async def _validate_accessible_file(
        self,
        *,
        requester: User,
        project: Project,
        file_id: UUID,
    ) -> _WorkspaceItemValidationResult:
        file = await self._file_repo.get_by_id(file_id)
        if file is None:
            raise ProjectWorkflowValidationError("Файл не найден")
        if file.project_id not in {None, project.id}:
            raise ProjectWorkflowValidationError("Файл не относится к текущему проекту")
        if not requester.is_staff and file.owner_id not in {None, requester.id}:
            raise ProjectWorkflowValidationError("Файл недоступен")
        return _WorkspaceItemValidationResult(item_id=file.id, file=file)

    async def _require_source_file(
        self,
        state: ProjectWorkspaceState,
        *,
        project: Project,
        requester: User,
    ) -> File:
        if state.source_file_id is None:
            raise ProjectWorkflowValidationError("Сначала выберите исходный файл")
        validation = await self._validate_accessible_file(
            requester=requester,
            project=project,
            file_id=state.source_file_id,
        )
        if validation.file is None:
            raise ProjectWorkflowValidationError("Исходный файл не найден")
        return validation.file

    async def _resolve_transcription_input_file(
        self,
        state: ProjectWorkspaceState,
        *,
        project: Project,
        requester: User,
    ) -> File:
        if state.silence.applied_output_file_id is not None:
            validation = await self._validate_accessible_file(
                requester=requester,
                project=project,
                file_id=state.silence.applied_output_file_id,
            )
            if validation.file is not None:
                return validation.file
        return await self._require_source_file(
            state,
            project=project,
            requester=requester,
        )

    async def _resolve_caption_video_file(
        self,
        state: ProjectWorkspaceState,
        *,
        project: Project,
        requester: User,
    ) -> File:
        return await self._resolve_transcription_input_file(
            state,
            project=project,
            requester=requester,
        )

    async def _validate_caption_preset(
        self,
        *,
        requester: User,
        preset_id: UUID | None,
    ) -> CaptionPreset | None:
        if preset_id is None:
            return None
        preset = await self._caption_preset_repo.get_by_id(preset_id)
        if preset is None:
            raise ProjectWorkflowValidationError("Пресет субтитров не найден")
        if not requester.is_staff and not preset.is_system and preset.user_id != requester.id:
            raise ProjectWorkflowValidationError("Пресет субтитров недоступен")
        return preset

    async def _validate_workspace_view_items(
        self,
        *,
        requester: User,
        project: Project,
        used_file_ids: list[UUID],
        selected_file_id: UUID | None,
    ) -> None:
        if selected_file_id is not None and selected_file_id not in used_file_ids:
            raise ProjectWorkflowValidationError(
                "Выбранный файл должен входить в список используемых файлов"
            )
        seen: set[UUID] = set()
        for item_id in used_file_ids:
            if item_id in seen:
                continue
            seen.add(item_id)
            await self._validate_workspace_item(
                requester=requester,
                project=project,
                item_id=item_id,
            )

    async def _validate_workspace_item(
        self,
        *,
        requester: User,
        project: Project,
        item_id: UUID,
    ) -> None:
        file = await self._file_repo.get_by_id(item_id)
        if file is not None:
            if file.project_id not in {None, project.id}:
                raise ProjectWorkflowValidationError("Файл не относится к текущему проекту")
            if not requester.is_staff and file.owner_id not in {None, requester.id}:
                raise ProjectWorkflowValidationError("Файл недоступен")
            return

        artifact = await self._artifact_repo.get_by_id(item_id)
        if artifact is None or artifact.project_id not in {None, project.id}:
            raise ProjectWorkflowValidationError("Элемент рабочего пространства не найден")

    def _build_task_service(self):
        from cpv3.modules.tasks.service import TaskService

        return TaskService(self._session)

    def _require_phase(
        self,
        state: ProjectWorkspaceState,
        required_phase: WorkflowPhase,
    ) -> None:
        if state.phase != required_phase:
            raise ProjectWorkflowValidationError(
                f"Ожидалась фаза {required_phase}, текущая фаза {state.phase}"
            )

    def _append_used_file(
        self,
        state: ProjectWorkspaceState,
        file_id: UUID,
    ) -> None:
        if file_id not in state.workspace_view.used_file_ids:
            state.workspace_view.used_file_ids.append(file_id)

    def _parse_int(self, value: object) -> int | None:
        if value is None:
            return None
        try:
            return int(value)
        except (TypeError, ValueError):
            return None

    def _parse_uuid(self, value: object) -> UUID | None:
        if value is None:
            return None
        try:
            return UUID(str(value))
        except (TypeError, ValueError):
            return None
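
A standalone sketch of the defensive parsing pattern used by _parse_uuid above (stdlib only; the asserts are illustrative, not taken from the repo's tests):

from uuid import UUID, uuid4

def parse_uuid(value: object) -> UUID | None:
    # Mirrors the method above: accept str or UUID, swallow malformed input.
    if value is None:
        return None
    try:
        return UUID(str(value))
    except (TypeError, ValueError):
        return None

assert parse_uuid(str(uuid4())) is not None  # string form, as found in output_data
assert parse_uuid(uuid4()) is not None       # already a UUID instance
assert parse_uuid("not-a-uuid") is None      # malformed webhook payloads degrade to None
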
@@ -50,10 +50,10 @@ async def retrieve_project(
    service = ProjectService(db)
    project = await service.get_project(project_id)
    if project is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and project.owner_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    return ProjectRead.model_validate(project)

@@ -68,10 +68,10 @@ async def patch_project(
    service = ProjectService(db)
    project = await service.get_project(project_id)
    if project is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and project.owner_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    project = await service.update_project(project, body)
    return ProjectRead.model_validate(project)
@@ -86,10 +86,10 @@ async def delete_project(
    service = ProjectService(db)
    project = await service.get_project(project_id)
    if project is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")

    if not current_user.is_staff and project.owner_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")

    await service.deactivate_project(project)
    return Response(status_code=status.HTTP_204_NO_CONTENT)

@@ -19,8 +19,6 @@ class ProjectRead(Schema):
    folder: str | None
    status: ProjectStatusEnum

    workspace_state: dict | None

    is_active: bool
    created_at: datetime
    updated_at: datetime
@@ -38,4 +36,3 @@ class ProjectUpdate(Schema):
    language: str | None = None
    folder: str | None = None
    status: ProjectStatusEnum | None = None
    workspace_state: dict | None = None

@@ -14,6 +14,7 @@ class ProjectService:
    """Service for project business logic and orchestration."""

    def __init__(self, session: AsyncSession) -> None:
        self._session = session
        self._repo = ProjectRepository(session)

    async def list_projects(
@@ -32,9 +33,13 @@ class ProjectService:

    async def create_project(self, *, requester: User, data: ProjectCreate) -> Project:
        folder = f"/{requester.username}/{data.name}"
        return await self._repo.create(
        project = await self._repo.create(
            requester=requester, data=data, folder=folder, status="DRAFT",
        )
        from cpv3.modules.project_workspaces.service import ProjectWorkspaceService

        await ProjectWorkspaceService(self._session).create_for_project(project)
        return project

    async def update_project(self, project: Project, data: ProjectUpdate) -> Project:
        return await self._repo.update(project, data)

@@ -1,6 +1,10 @@
from __future__ import annotations

from fastapi import APIRouter
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.db.session import get_db

router = APIRouter(prefix="/api", tags=["System"])

@@ -8,3 +12,16 @@ router = APIRouter(prefix="/api", tags=["System"])
@router.get("/ping/")
async def ping() -> dict[str, str]:
    return {"status": "ok"}


@router.get("/health/")
async def health(db: AsyncSession = Depends(get_db)) -> dict[str, str]:
    """Health check for Docker/K8s probes. Verifies DB connectivity."""
    try:
        await db.execute(text("SELECT 1"))
        db_status = "connected"
    except Exception:
        db_status = "disconnected"

    status = "ok" if db_status == "connected" else "degraded"
    return {"status": status, "database": db_status}

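A quick way to exercise the new probe locally; the host and port are assumptions for a dev setup, while the route itself comes from the router above:

import httpx

# Assumed local dev address; adjust to the actual deployment.
response = httpx.get("http://localhost:8000/api/health/")
print(response.status_code, response.json())
# Expected shape: {"status": "ok", "database": "connected"}
# or {"status": "degraded", "database": "disconnected"} when the DB is unreachable.
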
@@ -173,11 +173,11 @@ async def get_task_status(
|
||||
|
||||
if job is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND, detail="Job not found"
|
||||
status_code=status.HTTP_404_NOT_FOUND, detail="Задача не найдена"
|
||||
)
|
||||
|
||||
if not current_user.is_staff and job.user_id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
return TaskStatusResponse(
|
||||
job_id=job.id,
|
||||
|
||||
@@ -83,7 +83,7 @@ class TranscriptionGenerateRequest(Schema):
|
||||
|
||||
file_key: str = Field(..., description="Storage key of the input file")
|
||||
project_id: UUID | None = Field(default=None, description="Associated project ID")
|
||||
engine: Literal["whisper", "google"] = Field(
|
||||
engine: Literal["whisper", "google", "salutespeech"] = Field(
|
||||
default="whisper", description="Transcription engine to use"
|
||||
)
|
||||
language: str | None = Field(default=None, description="Language code (e.g., 'en')")
|
||||
@@ -164,5 +164,5 @@ class TaskWebhookEvent(Schema):
|
||||
)
|
||||
)
|
||||
if not has_update:
|
||||
raise ValueError("Webhook event must include at least one update field.")
|
||||
raise ValueError("Событие вебхука должно содержать хотя бы одно обновляемое поле.")
|
||||
return self
|
||||
|
||||
+262
-36
@@ -36,6 +36,7 @@ from cpv3.modules.jobs.schemas import (
|
||||
)
|
||||
from cpv3.modules.media.repository import ArtifactRepository
|
||||
from cpv3.modules.media.schemas import ArtifactMediaFileCreate
|
||||
from cpv3.modules.project_workspaces.service import ProjectWorkspaceService
|
||||
from cpv3.modules.tasks.schemas import (
|
||||
CaptionsGenerateRequest,
|
||||
FrameExtractRequest,
|
||||
@@ -88,14 +89,19 @@ ERROR_UNKNOWN_ENGINE = "Неизвестный движок транскрипц
|
||||
ENGINE_MAP: dict[str, str] = {
|
||||
"whisper": "LOCAL_WHISPER",
|
||||
"google": "GOOGLE_SPEECH_CLOUD",
|
||||
"salutespeech": "SALUTE_SPEECH",
|
||||
}
|
||||
|
||||
MESSAGE_STARTING = "Starting"
|
||||
MESSAGE_COMPLETED = "Completed"
|
||||
MESSAGE_PROBING_MEDIA = "Probing media"
|
||||
MESSAGE_PROCESSING = "Processing"
|
||||
MESSAGE_CONVERTING = "Converting"
|
||||
MESSAGE_RENDERING_CAPTIONS = "Rendering captions"
|
||||
MESSAGE_STARTING = "Запуск"
|
||||
MESSAGE_COMPLETED = "Завершено"
|
||||
MESSAGE_PROBING_MEDIA = "Анализ медиафайла"
|
||||
MESSAGE_PROCESSING = "Обработка"
|
||||
MESSAGE_CONVERTING = "Конвертация"
|
||||
MESSAGE_PREPARING_FILE = "Подготовка файла"
|
||||
MESSAGE_CONVERTING_VIDEO = "Конвертация видео"
|
||||
MESSAGE_UPLOADING_RESULT = "Загрузка результата"
|
||||
MESSAGE_SAVING_RESULT = "Сохранение результата"
|
||||
MESSAGE_RENDERING_CAPTIONS = "Рендеринг субтитров"
|
||||
MESSAGE_CANCELLED = "Отменено пользователем"
|
||||
MESSAGE_EXTRACTING_FRAMES = "Извлечение кадров"
|
||||
MESSAGE_UPLOADING_FRAMES = "Загрузка кадров"
|
||||
@@ -105,6 +111,14 @@ PROGRESS_COMPLETE = 100.0
|
||||
PROGRESS_MEDIA_PROBE = 50.0
|
||||
PROGRESS_SILENCE_REMOVE = 30.0
|
||||
PROGRESS_MEDIA_CONVERT = 30.0
|
||||
PROGRESS_MEDIA_CONVERT_PREPARING = 5.0
|
||||
PROGRESS_MEDIA_CONVERT_START = 10.0
|
||||
PROGRESS_MEDIA_CONVERT_END = 95.0
|
||||
PROGRESS_MEDIA_CONVERT_SAVING = 99.0
|
||||
PROGRESS_SILENCE_APPLY_PREPARING = 5.0
|
||||
PROGRESS_SILENCE_APPLY_START = 10.0
|
||||
PROGRESS_SILENCE_APPLY_END = 95.0
|
||||
PROGRESS_SILENCE_APPLY_SAVING = 99.0
|
||||
PROGRESS_TRANSCRIPTION_START = 20.0
|
||||
PROGRESS_TRANSCRIPTION_END = 95.0
|
||||
PROGRESS_CAPTIONS = 30.0
|
||||
@@ -118,6 +132,7 @@ MESSAGE_DETECTING_SILENCE = "Обнаружение тишины"
|
||||
MESSAGE_APPLYING_CUTS = "Применение вырезок"
|
||||
|
||||
PROGRESS_THROTTLE_SECONDS = 3.0
|
||||
PROGRESS_CONVERT_THROTTLE_SECONDS = 1.0
|
||||
|
||||
ACTIVE_JOB_STATUSES = (JOB_STATUS_PENDING, JOB_STATUS_RUNNING)
|
||||
DRAMATIQ_BROKER_REF_SEPARATOR = ":"
|
||||
@@ -126,6 +141,7 @@ DRAMATIQ_BROKER_REF_SEPARATOR = ":"
|
||||
class JobCancelledError(RuntimeError):
|
||||
"""Raised when a job was cancelled before completion."""
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Dramatiq broker setup
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -480,20 +496,56 @@ def silence_apply_actor(
|
||||
webhook_url,
|
||||
TaskWebhookEvent(
|
||||
status=JOB_STATUS_RUNNING,
|
||||
current_message=MESSAGE_STARTING,
|
||||
current_message=MESSAGE_PREPARING_FILE,
|
||||
progress_pct=PROGRESS_SILENCE_APPLY_PREPARING,
|
||||
started_at=_utc_now(),
|
||||
),
|
||||
)
|
||||
|
||||
try:
|
||||
storage = _get_storage_service()
|
||||
_send_webhook_event(
|
||||
webhook_url,
|
||||
TaskWebhookEvent(
|
||||
current_message=MESSAGE_APPLYING_CUTS,
|
||||
progress_pct=PROGRESS_SILENCE_APPLY,
|
||||
),
|
||||
)
|
||||
last_report_time = 0.0
|
||||
last_progress = PROGRESS_SILENCE_APPLY_PREPARING
|
||||
|
||||
def _emit_silence_apply_progress(stage: str, pct: float | None) -> None:
|
||||
nonlocal last_report_time, last_progress
|
||||
|
||||
if stage == "applying_cuts":
|
||||
raw_pct = min(max(pct or 0.0, 0.0), 100.0)
|
||||
if raw_pct >= 100.0:
|
||||
return
|
||||
mapped = PROGRESS_SILENCE_APPLY_START + (raw_pct / 100.0) * (
|
||||
PROGRESS_SILENCE_APPLY_END - PROGRESS_SILENCE_APPLY_START
|
||||
)
|
||||
message = MESSAGE_APPLYING_CUTS
|
||||
force = raw_pct == 0.0
|
||||
elif stage == "uploading":
|
||||
mapped = PROGRESS_SILENCE_APPLY_END
|
||||
message = MESSAGE_UPLOADING_RESULT
|
||||
force = True
|
||||
else:
|
||||
return
|
||||
|
||||
mapped = round(mapped, 1)
|
||||
now = time.monotonic()
|
||||
if not force:
|
||||
if mapped <= last_progress:
|
||||
return
|
||||
if mapped - last_progress < 1.0:
|
||||
return
|
||||
if now - last_report_time < PROGRESS_CONVERT_THROTTLE_SECONDS:
|
||||
return
|
||||
|
||||
last_report_time = now
|
||||
last_progress = max(last_progress, mapped)
|
||||
_send_webhook_event(
|
||||
webhook_url,
|
||||
TaskWebhookEvent(
|
||||
current_message=message,
|
||||
progress_pct=last_progress,
|
||||
),
|
||||
)
|
||||
|
||||
result = _run_async(
|
||||
apply_silence_cuts(
|
||||
storage,
|
||||
@@ -501,8 +553,16 @@ def silence_apply_actor(
|
||||
out_folder=out_folder,
|
||||
cuts=cuts,
|
||||
output_name=output_name,
|
||||
on_progress=_emit_silence_apply_progress,
|
||||
)
|
||||
)
|
||||
_send_webhook_event(
|
||||
webhook_url,
|
||||
TaskWebhookEvent(
|
||||
current_message=MESSAGE_SAVING_RESULT,
|
||||
progress_pct=PROGRESS_SILENCE_APPLY_SAVING,
|
||||
),
|
||||
)
|
||||
_send_webhook_event(
|
||||
webhook_url,
|
||||
TaskWebhookEvent(
|
||||
@@ -553,24 +613,74 @@ def media_convert_actor(
|
||||
webhook_url,
|
||||
TaskWebhookEvent(
|
||||
status=JOB_STATUS_RUNNING,
|
||||
current_message=MESSAGE_STARTING,
|
||||
current_message=MESSAGE_PREPARING_FILE,
|
||||
progress_pct=PROGRESS_MEDIA_CONVERT_PREPARING,
|
||||
started_at=_utc_now(),
|
||||
),
|
||||
)
|
||||
|
||||
try:
|
||||
if output_format.lower() != "mp4":
|
||||
raise ValueError(f"Unsupported format: {output_format}")
|
||||
raise ValueError(f"Неподдерживаемый формат: {output_format}")
|
||||
|
||||
storage = _get_storage_service()
|
||||
last_report_time = 0.0
|
||||
last_progress = PROGRESS_MEDIA_CONVERT_PREPARING
|
||||
|
||||
def _emit_convert_progress(stage: str, pct: float | None) -> None:
|
||||
nonlocal last_report_time, last_progress
|
||||
|
||||
if stage == "converting":
|
||||
raw_pct = min(max(pct or 0.0, 0.0), 100.0)
|
||||
if raw_pct >= 100.0:
|
||||
return
|
||||
mapped = PROGRESS_MEDIA_CONVERT_START + (raw_pct / 100.0) * (
|
||||
PROGRESS_MEDIA_CONVERT_END - PROGRESS_MEDIA_CONVERT_START
|
||||
)
|
||||
message = MESSAGE_CONVERTING_VIDEO
|
||||
force = raw_pct == 0.0
|
||||
elif stage == "uploading":
|
||||
mapped = PROGRESS_MEDIA_CONVERT_END
|
||||
message = MESSAGE_UPLOADING_RESULT
|
||||
force = True
|
||||
else:
|
||||
return
|
||||
|
||||
mapped = round(mapped, 1)
|
||||
now = time.monotonic()
|
||||
if not force:
|
||||
if mapped <= last_progress:
|
||||
return
|
||||
if mapped - last_progress < 1.0:
|
||||
return
|
||||
if now - last_report_time < PROGRESS_CONVERT_THROTTLE_SECONDS:
|
||||
return
|
||||
|
||||
last_report_time = now
|
||||
last_progress = max(last_progress, mapped)
|
||||
_send_webhook_event(
|
||||
webhook_url,
|
||||
TaskWebhookEvent(
|
||||
current_message=message,
|
||||
progress_pct=last_progress,
|
||||
),
|
||||
)
|
||||
|
||||
result = _run_async(
|
||||
convert_to_mp4(
|
||||
storage,
|
||||
file_key=file_key,
|
||||
out_folder=out_folder,
|
||||
on_progress=_emit_convert_progress,
|
||||
)
|
||||
)
|
||||
_send_webhook_event(
|
||||
webhook_url,
|
||||
TaskWebhookEvent(
|
||||
current_message=MESSAGE_CONVERTING,
|
||||
progress_pct=PROGRESS_MEDIA_CONVERT,
|
||||
current_message=MESSAGE_SAVING_RESULT,
|
||||
progress_pct=PROGRESS_MEDIA_CONVERT_SAVING,
|
||||
),
|
||||
)
|
||||
result = _run_async(convert_to_mp4(storage, file_key=file_key, out_folder=out_folder))
|
||||
_send_webhook_event(
|
||||
webhook_url,
|
||||
TaskWebhookEvent(
|
||||
@@ -612,6 +722,7 @@ def transcription_generate_actor(
|
||||
"""Generate transcription from audio/video file."""
|
||||
from cpv3.modules.transcription.service import (
|
||||
transcribe_with_google_speech,
|
||||
transcribe_with_salute_speech,
|
||||
transcribe_with_whisper,
|
||||
)
|
||||
|
||||
@@ -698,6 +809,24 @@ def transcription_generate_actor(
|
||||
storage, file_key=file_key, language_codes=language_codes
|
||||
)
|
||||
)
|
||||
elif engine == "salutespeech":
|
||||
audio_stream = next((s for s in probe.streams if s.codec_type == "audio"), None)
|
||||
sr = (
|
||||
int(audio_stream.sample_rate)
|
||||
if audio_stream and audio_stream.sample_rate
|
||||
else 16000
|
||||
)
|
||||
document = _run_async(
|
||||
transcribe_with_salute_speech(
|
||||
storage,
|
||||
file_key=file_key,
|
||||
language=language,
|
||||
model=model,
|
||||
sample_rate=sr,
|
||||
job_id=job_uuid,
|
||||
on_progress=_on_whisper_progress,
|
||||
)
|
||||
)
|
||||
else:
|
||||
raise ValueError(ERROR_UNKNOWN_ENGINE.format(engine=engine))
|
||||
|
||||
@@ -1000,6 +1129,9 @@ class TaskService:
|
||||
self._event_repo = JobEventRepository(session)
|
||||
self._webhook_repo = WebhookRepository(session)
|
||||
|
||||
def _get_project_workspace_service(self):
|
||||
return ProjectWorkspaceService(self._session)
|
||||
|
||||
async def _update_job_broker_reference(self, job: Job, broker_reference: str) -> Job:
|
||||
"""Persist the transport-specific broker reference after enqueueing."""
|
||||
job.broker_id = broker_reference
|
||||
@@ -1159,7 +1291,7 @@ class TaskService:
|
||||
"""Apply a webhook event to the job and store a job event record."""
|
||||
job = await self._job_repo.get_by_id(job_id)
|
||||
if job is None:
|
||||
raise ValueError(f"Job {job_id} not found")
|
||||
raise ValueError(f"Задача {job_id} не найдена")
|
||||
|
||||
if job.status in (JOB_STATUS_DONE, JOB_STATUS_FAILED, JOB_STATUS_CANCELLED):
|
||||
logger.info("Ignoring webhook for terminal job %s (status=%s)", job_id, job.status)
|
||||
@@ -1185,22 +1317,33 @@ class TaskService:
|
||||
# Save artifacts BEFORE sending notifications so data exists when frontend refetches
|
||||
if job.job_type == JOB_TYPE_TRANSCRIPTION_GENERATE and event.status == JOB_STATUS_DONE:
|
||||
try:
|
||||
await self._save_transcription_artifacts(job)
|
||||
job = await self._save_transcription_artifacts(job)
|
||||
except Exception:
|
||||
logger.exception("Failed to save transcription artifacts for job %s", job_id)
|
||||
|
||||
if job.job_type == JOB_TYPE_MEDIA_CONVERT and event.status == JOB_STATUS_DONE:
|
||||
try:
|
||||
await self._save_convert_artifacts(job)
|
||||
job = await self._save_convert_artifacts(job)
|
||||
except Exception:
|
||||
logger.exception("Failed to save convert artifacts for job %s", job_id)
|
||||
|
||||
if job.job_type == JOB_TYPE_SILENCE_APPLY and event.status == JOB_STATUS_DONE:
|
||||
try:
|
||||
job = await self._save_silence_apply_artifacts(job)
|
||||
except Exception:
|
||||
logger.exception("Failed to save silence apply artifacts for job %s", job_id)
|
||||
|
||||
if job.job_type == JOB_TYPE_CAPTIONS_GENERATE and event.status == JOB_STATUS_DONE:
|
||||
try:
|
||||
await self._save_captions_artifacts(job)
|
||||
job = await self._save_captions_artifacts(job)
|
||||
except Exception:
|
||||
logger.exception("Failed to save captions artifacts for job %s", job_id)
|
||||
|
||||
try:
|
||||
await self._sync_project_workspace_after_webhook(job)
|
||||
except Exception:
|
||||
logger.exception("Failed to project workspace state for job %s", job_id)
|
||||
|
||||
# Push real-time notification via WebSocket (after artifacts are persisted)
|
||||
if job.user_id is not None:
|
||||
try:
|
||||
@@ -1213,6 +1356,11 @@ class TaskService:
|
||||
|
||||
return job
|
||||
|
||||
async def _sync_project_workspace_after_webhook(self, job: Job) -> None:
|
||||
if job.project_id is None:
|
||||
return
|
||||
await self._get_project_workspace_service().handle_job_update(job=job)
|
||||
|
||||
async def cancel_job(self, job: Job) -> Job:
|
||||
"""Cancel a job, clean queued transport state and ignore late webhooks."""
|
||||
if job.status in (JOB_STATUS_DONE, JOB_STATUS_FAILED, JOB_STATUS_CANCELLED):
|
||||
@@ -1255,9 +1403,14 @@ class TaskService:
|
||||
except Exception:
|
||||
logger.exception("Failed to create cancellation notification for job %s", job.id)
|
||||
|
||||
try:
|
||||
await self._sync_project_workspace_after_webhook(job)
|
||||
except Exception:
|
||||
logger.exception("Failed to project workspace state for cancelled job %s", job.id)
|
||||
|
||||
return job
|
||||
|
||||
async def _save_transcription_artifacts(self, job: Job) -> None:
|
||||
async def _save_transcription_artifacts(self, job: Job) -> Job:
|
||||
"""Create Transcription, ArtifactMediaFile and File records."""
|
||||
input_data = job.input_data or {}
|
||||
output_data = job.output_data or {}
|
||||
@@ -1276,7 +1429,7 @@ class TaskService:
|
||||
user = await user_repo.get_by_id(job.user_id) # type: ignore[arg-type]
|
||||
if user is None:
|
||||
logger.warning("User %s not found, skipping artifact save", job.user_id)
|
||||
return
|
||||
return job
|
||||
|
||||
# Find or create source File record
|
||||
file_repo = FileRepository(self._session)
|
||||
@@ -1341,7 +1494,7 @@ class TaskService:
|
||||
# Create Transcription record
|
||||
transcription_repo = TranscriptionRepository(self._session)
|
||||
engine_db = ENGINE_MAP.get(engine_raw, "LOCAL_WHISPER")
|
||||
await transcription_repo.create(
|
||||
transcription = await transcription_repo.create(
|
||||
data=TranscriptionCreate(
|
||||
project_id=project_id,
|
||||
source_file_id=source_file.id,
|
||||
@@ -1352,9 +1505,16 @@ class TaskService:
|
||||
),
|
||||
)
|
||||
|
||||
logger.info("Saved transcription artifacts for job %s", job.id)
|
||||
updated_output = dict(output_data)
|
||||
updated_output["artifact_id"] = str(artifact.id)
|
||||
updated_output["transcription_id"] = str(transcription.id)
|
||||
updated_output["source_file_id"] = str(source_file.id)
|
||||
job = await self._job_repo.update(job, JobUpdate(output_data=updated_output))
|
||||
|
||||
async def _save_convert_artifacts(self, job: Job) -> None:
|
||||
logger.info("Saved transcription artifacts for job %s", job.id)
|
||||
return job
|
||||
|
||||
async def _save_convert_artifacts(self, job: Job) -> Job:
|
||||
"""Create File and ArtifactMediaFile records for converted MP4."""
|
||||
input_data = job.input_data or {}
|
||||
output_data = job.output_data or {}
|
||||
@@ -1372,7 +1532,7 @@ class TaskService:
|
||||
user = await user_repo.get_by_id(job.user_id) # type: ignore[arg-type]
|
||||
if user is None:
|
||||
logger.warning("User %s not found, skipping convert artifact save", job.user_id)
|
||||
return
|
||||
return job
|
||||
|
||||
# Derive output filename from source file
|
||||
file_repo = FileRepository(self._session)
|
||||
@@ -1409,9 +1569,76 @@ class TaskService:
|
||||
),
|
||||
)
|
||||
|
||||
logger.info("Saved convert artifacts for job %s", job.id)
|
||||
updated_output = dict(output_data)
|
||||
updated_output["file_id"] = str(converted_file.id)
|
||||
job = await self._job_repo.update(job, JobUpdate(output_data=updated_output))
|
||||
|
||||
async def _save_captions_artifacts(self, job: Job) -> None:
|
||||
logger.info("Saved convert artifacts for job %s", job.id)
|
||||
return job
|
||||
|
||||
async def _save_silence_apply_artifacts(self, job: Job) -> Job:
|
||||
"""Create File and ArtifactMediaFile records for silence-applied video."""
|
||||
input_data = job.input_data or {}
|
||||
output_data = job.output_data or {}
|
||||
|
||||
file_key: str = input_data["file_key"]
|
||||
project_id: uuid.UUID | None = (
|
||||
uuid.UUID(input_data["project_id"]) if input_data.get("project_id") else None
|
||||
)
|
||||
|
||||
file_path: str = output_data["file_path"]
|
||||
file_size: int = output_data.get("file_size", 0)
|
||||
|
||||
user_repo = UserRepository(self._session)
|
||||
user = await user_repo.get_by_id(job.user_id) # type: ignore[arg-type]
|
||||
if user is None:
|
||||
logger.warning("User %s not found, skipping silence apply artifact save", job.user_id)
|
||||
return job
|
||||
|
||||
file_repo = FileRepository(self._session)
|
||||
source_file = await file_repo.get_by_path(file_key)
|
||||
if source_file is not None:
|
||||
stem = Path(source_file.original_filename).stem
|
||||
else:
|
||||
stem = Path(file_key).stem
|
||||
processed_filename = f"Видео без тишины {stem}.mp4"
|
||||
|
||||
processed_file = await file_repo.create(
|
||||
requester=user,
|
||||
data=FileCreate(
|
||||
project_id=project_id,
|
||||
original_filename=processed_filename,
|
||||
path=file_path,
|
||||
storage_backend="S3",
|
||||
mime_type="video/mp4",
|
||||
size_bytes=file_size,
|
||||
file_format="mp4",
|
||||
is_uploaded=True,
|
||||
),
|
||||
)
|
||||
|
||||
artifact_repo = ArtifactRepository(self._session)
|
||||
await artifact_repo.create(
|
||||
data=ArtifactMediaFileCreate(
|
||||
project_id=project_id,
|
||||
file_id=processed_file.id,
|
||||
media_file_id=None,
|
||||
artifact_type="SILENCE_REMOVED_VIDEO",
|
||||
),
|
||||
)
|
||||
|
||||
updated_output = dict(output_data)
|
||||
updated_output["file_id"] = str(processed_file.id)
|
||||
job = await self._job_repo.update(job, JobUpdate(output_data=updated_output))
|
||||
|
||||
logger.info(
|
||||
"Saved silence apply artifacts for job %s (file_id=%s)",
|
||||
job.id,
|
||||
processed_file.id,
|
||||
)
|
||||
return job
|
||||
|
||||
async def _save_captions_artifacts(self, job: Job) -> Job:
|
||||
"""Create File and ArtifactMediaFile records for captioned video."""
|
||||
input_data = job.input_data or {}
|
||||
output_data = job.output_data or {}
|
||||
@@ -1428,7 +1655,7 @@ class TaskService:
|
||||
user = await user_repo.get_by_id(job.user_id) # type: ignore[arg-type]
|
||||
if user is None:
|
||||
logger.warning("User %s not found, skipping captions artifact save", job.user_id)
|
||||
return
|
||||
return job
|
||||
|
||||
# Get file size from S3
|
||||
storage = _get_storage_service()
|
||||
@@ -1472,11 +1699,10 @@ class TaskService:
|
||||
# Update job output_data with file_id so frontend can reference it
|
||||
updated_output = dict(output_data)
|
||||
updated_output["file_id"] = str(captioned_file.id)
|
||||
job = await self._job_repo.update(
|
||||
job, JobUpdate(output_data=updated_output)
|
||||
)
|
||||
job = await self._job_repo.update(job, JobUpdate(output_data=updated_output))
|
||||
|
||||
logger.info("Saved captions artifacts for job %s (file_id=%s)", job.id, captioned_file.id)
|
||||
return job
|
||||
|
||||
async def submit_media_probe(
|
||||
self, *, requester: User, request: MediaProbeRequest
|
||||
@@ -1644,7 +1870,7 @@ class TaskService:
|
||||
transcription_repo = TranscriptionRepository(self._session)
|
||||
transcription = await transcription_repo.get_by_id(request.transcription_id)
|
||||
if transcription is None:
|
||||
raise ValueError(f"Transcription {request.transcription_id} not found")
|
||||
raise ValueError(f"Транскрипция {request.transcription_id} не найдена")
|
||||
|
||||
user_folder = get_user_folder(requester)
|
||||
resolved_folder = (
|
||||
|
||||
@@ -12,6 +12,7 @@ from cpv3.db.session import get_db
|
||||
from cpv3.modules.transcription.schemas import (
|
||||
Document,
|
||||
GoogleSpeechParams,
|
||||
SaluteSpeechParams,
|
||||
TranscriptionCreate,
|
||||
TranscriptionRead,
|
||||
TranscriptionUpdate,
|
||||
@@ -19,6 +20,7 @@ from cpv3.modules.transcription.schemas import (
|
||||
)
|
||||
from cpv3.modules.transcription.service import (
|
||||
transcribe_with_google_speech,
|
||||
transcribe_with_salute_speech,
|
||||
transcribe_with_whisper,
|
||||
)
|
||||
from cpv3.modules.transcription.repository import TranscriptionRepository
|
||||
@@ -62,7 +64,7 @@ async def retrieve_transcription_entry(
|
||||
repo = TranscriptionRepository(db)
|
||||
transcription = await repo.get_by_id(transcription_id)
|
||||
if transcription is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
return TranscriptionRead.model_validate(transcription)
|
||||
|
||||
@@ -77,7 +79,7 @@ async def retrieve_transcription_by_artifact(
|
||||
repo = TranscriptionRepository(db)
|
||||
transcription = await repo.get_by_artifact_id(artifact_id)
|
||||
if transcription is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
return TranscriptionRead.model_validate(transcription)
|
||||
|
||||
@@ -93,7 +95,7 @@ async def patch_transcription_entry(
|
||||
repo = TranscriptionRepository(db)
|
||||
transcription = await repo.get_by_id(transcription_id)
|
||||
if transcription is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
transcription = await repo.update(transcription, body)
|
||||
return TranscriptionRead.model_validate(transcription)
|
||||
@@ -109,7 +111,7 @@ async def delete_transcription_entry(
|
||||
repo = TranscriptionRepository(db)
|
||||
transcription = await repo.get_by_id(transcription_id)
|
||||
if transcription is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
await repo.deactivate(transcription)
|
||||
return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||
@@ -142,3 +144,18 @@ async def google_speech_transcribe(
|
||||
file_key=body.file_path,
|
||||
language_codes=body.language_codes,
|
||||
)
|
||||
|
||||
|
||||
@router.post("/salute-speech/", response_model=Document)
|
||||
async def salute_speech_transcribe(
|
||||
body: SaluteSpeechParams,
|
||||
current_user: User = Depends(get_current_user),
|
||||
storage: StorageService = Depends(get_storage),
|
||||
) -> Document:
|
||||
_ = current_user
|
||||
return await transcribe_with_salute_speech(
|
||||
storage,
|
||||
file_key=body.file_path,
|
||||
language=body.language,
|
||||
model=body.model,
|
||||
)
|
||||
|
||||
@@ -1,10 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import ssl
|
||||
import threading
|
||||
import time
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import Callable, cast
|
||||
|
||||
import anyio
|
||||
import httpx
|
||||
|
||||
from cpv3.infrastructure.settings import get_settings
|
||||
from cpv3.infrastructure.storage.base import StorageService
|
||||
@@ -28,6 +35,8 @@ from cpv3.modules.transcription.schemas import (
|
||||
GoogleSpeechSegment,
|
||||
GoogleSpeechWord,
|
||||
LineNode,
|
||||
SaluteSpeechSegment,
|
||||
SaluteSpeechWord,
|
||||
SegmentNode,
|
||||
Tag,
|
||||
TimeRange,
|
||||
@@ -39,9 +48,58 @@ from cpv3.modules.transcription.schemas import (
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------- SaluteSpeech Constants ----------------------------------
|
||||
|
||||
SALUTE_AUTH_URL = "https://ngw.devices.sberbank.ru:9443/api/v2/oauth"
|
||||
SALUTE_API_BASE = "https://smartspeech.sber.ru/rest/v1"
|
||||
SALUTE_POLL_INTERVAL_SECONDS = 5.0
|
||||
SALUTE_POLL_TIMEOUT_SECONDS = 600
|
||||
SALUTE_TOKEN_REFRESH_MARGIN_SECONDS = 60
|
||||
|
||||
SALUTE_ENCODING_MAP: dict[str, str] = {
|
||||
".mp3": "MP3",
|
||||
".wav": "PCM_S16LE",
|
||||
".ogg": "opus",
|
||||
".flac": "FLAC",
|
||||
}
|
||||
|
||||
SALUTE_CONTENT_TYPE_MAP: dict[str, str] = {
|
||||
".mp3": "audio/mpeg",
|
||||
".wav": "audio/wav",
|
||||
".ogg": "audio/ogg",
|
||||
".flac": "audio/flac",
|
||||
}
|
||||
|
||||
SALUTE_LANGUAGE_MAP: dict[str, str] = {
|
||||
"ru": "ru-RU",
|
||||
"en": "en-US",
|
||||
}
|
||||
|
||||
ERROR_SALUTE_AUTH_FAILED = "Ошибка авторизации SaluteSpeech: {detail}"
|
||||
ERROR_SALUTE_UPLOAD_FAILED = "Ошибка загрузки файла в SaluteSpeech: {detail}"
|
||||
ERROR_SALUTE_TASK_FAILED = "Ошибка распознавания SaluteSpeech: {detail}"
|
||||
ERROR_SALUTE_TIMEOUT = "Превышено время ожидания распознавания SaluteSpeech"
|
||||
ERROR_SALUTE_UNSUPPORTED_FORMAT = "Неподдерживаемый формат аудио для SaluteSpeech: {ext}"
|
||||
ERROR_SALUTE_AUTH_KEY_MISSING = "Не задан SALUTE_AUTH_KEY для авторизации SaluteSpeech"
|
||||
ERROR_SALUTE_SSL_FAILED = (
|
||||
"SSL ошибка при обращении к SaluteSpeech: {detail}. "
|
||||
"Если используется корпоративный или локальный сертификат, "
|
||||
"укажите путь в SALUTE_CA_CERT_PATH. "
|
||||
"Для локальной отладки можно отключить проверку через SALUTE_SSL_VERIFY=false."
|
||||
)
|
||||
|
||||
_salute_token_lock = threading.Lock()
|
||||
_salute_token: str | None = None
|
||||
_salute_token_expires_at: float = 0.0
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DocumentBuilder:
|
||||
def compute_segment_lines(
|
||||
self, segment: WhisperSegment | GoogleSpeechSegment, max_chars_per_line: int
|
||||
self,
|
||||
segment: WhisperSegment | GoogleSpeechSegment | SaluteSpeechSegment,
|
||||
max_chars_per_line: int,
|
||||
) -> list[LineNode]:
|
||||
words = segment.words or []
|
||||
lines: list[list[WhisperWord | GoogleSpeechWord]] = []
|
||||
@@ -219,7 +277,7 @@ async def _convert_local_to_ogg(input_path: str) -> tuple[str, Callable[[], None
|
||||
|
||||
def _make_document_from_segments(
|
||||
builder: DocumentBuilder,
|
||||
segments: list[WhisperSegment] | list[GoogleSpeechSegment],
|
||||
segments: list[WhisperSegment] | list[GoogleSpeechSegment] | list[SaluteSpeechSegment],
|
||||
*,
|
||||
max_line_width: int,
|
||||
) -> Document:
|
||||
@@ -427,3 +485,331 @@ async def transcribe_with_google_speech(
|
||||
ogg_cleanup()
|
||||
finally:
|
||||
input_tmp.cleanup()
|
||||
|
||||
|
||||
# ---------------------------------- SaluteSpeech Engine ----------------------------------
|
||||
|
||||
|
||||
def _parse_salute_time(s: str) -> float:
|
||||
"""Parse SaluteSpeech timestamp string '0.480s' → 0.48."""
|
||||
return float(s.rstrip("s"))
|
||||
|
||||
|
||||
def _build_salute_ssl_context() -> ssl.SSLContext:
|
||||
"""Build SSL context for SaluteSpeech using system trust plus optional custom CA."""
|
||||
settings = get_settings()
|
||||
if not settings.salute_ssl_verify:
|
||||
return ssl._create_unverified_context()
|
||||
|
||||
ssl_context = ssl.create_default_context()
|
||||
if settings.salute_ca_cert_path is not None:
|
||||
ssl_context.load_verify_locations(cafile=str(settings.salute_ca_cert_path))
|
||||
return ssl_context
|
||||
|
||||
|
||||
def _get_salute_auth_header_value() -> str:
|
||||
"""Build Basic auth header for SaluteSpeech from settings."""
|
||||
settings = get_settings()
|
||||
auth_key = settings.salute_auth_key.strip()
|
||||
if not auth_key:
|
||||
raise RuntimeError(ERROR_SALUTE_AUTH_KEY_MISSING)
|
||||
return f"Basic {auth_key}"
|
||||
|
||||
|
||||
def _get_salute_access_token(client: httpx.Client) -> str:
|
||||
"""Get or refresh SaluteSpeech OAuth token. Thread-safe."""
|
||||
global _salute_token, _salute_token_expires_at
|
||||
with _salute_token_lock:
|
||||
if _salute_token and time.monotonic() < (
|
||||
_salute_token_expires_at - SALUTE_TOKEN_REFRESH_MARGIN_SECONDS
|
||||
):
|
||||
return _salute_token
|
||||
|
||||
settings = get_settings()
|
||||
response = client.post(
|
||||
SALUTE_AUTH_URL,
|
||||
headers={
|
||||
"Authorization": _get_salute_auth_header_value(),
|
||||
"RqUID": str(uuid.uuid4()),
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
},
|
||||
content=f"scope={settings.salute_scope}",
|
||||
)
|
||||
if response.status_code != 200:
|
||||
raise RuntimeError(
|
||||
ERROR_SALUTE_AUTH_FAILED.format(detail=response.text[:200])
|
||||
)
|
||||
data = response.json()
|
||||
_salute_token = data["access_token"]
|
||||
expires_in_seconds = (data["expires_at"] / 1000) - time.time()
|
||||
_salute_token_expires_at = time.monotonic() + expires_in_seconds
|
||||
return _salute_token
|
||||
|
||||
|
||||
def _upload_salute_audio(
|
||||
client: httpx.Client, token: str, audio_data: bytes, content_type: str
|
||||
) -> str:
|
||||
"""Upload audio to SaluteSpeech, return request_file_id."""
|
||||
response = client.post(
|
||||
f"{SALUTE_API_BASE}/data:upload",
|
||||
headers={
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Content-Type": content_type,
|
||||
},
|
||||
content=audio_data,
|
||||
timeout=120.0,
|
||||
)
|
||||
if response.status_code != 200:
|
||||
raise RuntimeError(
|
||||
ERROR_SALUTE_UPLOAD_FAILED.format(detail=response.text[:200])
|
||||
)
|
||||
return response.json()["result"]["request_file_id"]
|
||||
|
||||
|
||||
def _create_salute_task(
|
||||
client: httpx.Client,
|
||||
token: str,
|
||||
file_id: str,
|
||||
*,
|
||||
language: str,
|
||||
model: str,
|
||||
audio_encoding: str,
|
||||
sample_rate: int,
|
||||
) -> str:
|
||||
"""Create async recognition task, return task_id."""
|
||||
body = {
|
||||
"options": {
|
||||
"audio_encoding": audio_encoding,
|
||||
"sample_rate": sample_rate,
|
||||
"language": language,
|
||||
"model": model,
|
||||
"channels_count": 1,
|
||||
"hypotheses_count": 1,
|
||||
},
|
||||
"request_file_id": file_id,
|
||||
}
|
||||
response = client.post(
|
||||
f"{SALUTE_API_BASE}/speech:async_recognize",
|
||||
headers={
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
json=body,
|
||||
)
|
||||
if response.status_code != 200:
|
||||
raise RuntimeError(
|
||||
ERROR_SALUTE_TASK_FAILED.format(detail=response.text[:200])
|
||||
)
|
||||
return response.json()["result"]["id"]
|
||||
|
||||
|
||||
def _poll_salute_task(
|
||||
client: httpx.Client,
|
||||
token: str,
|
||||
task_id: str,
|
||||
job_uuid: uuid.UUID | None,
|
||||
on_progress: ProgressCallback | None,
|
||||
) -> str:
|
||||
"""Poll task until DONE, return response_file_id."""
|
||||
start = time.monotonic()
|
||||
while True:
|
||||
elapsed = time.monotonic() - start
|
||||
if elapsed > SALUTE_POLL_TIMEOUT_SECONDS:
|
||||
raise TimeoutError(ERROR_SALUTE_TIMEOUT)
|
||||
|
||||
if job_uuid is not None:
|
||||
from cpv3.modules.tasks.service import _raise_if_job_cancelled
|
||||
|
||||
_raise_if_job_cancelled(job_uuid)
|
||||
|
||||
response = client.get(
|
||||
f"{SALUTE_API_BASE}/task:get",
|
||||
params={"id": task_id},
|
||||
headers={"Authorization": f"Bearer {token}"},
|
||||
)
|
||||
response.raise_for_status()
|
||||
result = response.json()["result"]
|
||||
status = result["status"]
|
||||
|
||||
if status == "DONE":
|
||||
return result["response_file_id"]
|
||||
if status == "ERROR":
|
||||
error_msg = result.get("error", "unknown error")
|
||||
raise RuntimeError(
|
||||
ERROR_SALUTE_TASK_FAILED.format(detail=error_msg)
|
||||
)
|
||||
|
||||
if on_progress is not None:
|
||||
pct = min(elapsed / SALUTE_POLL_TIMEOUT_SECONDS * 100, 95.0)
|
||||
on_progress(pct)
|
||||
|
||||
time.sleep(SALUTE_POLL_INTERVAL_SECONDS)
|
||||
|
||||
|
||||
def _download_salute_result(
|
||||
client: httpx.Client, token: str, response_file_id: str
|
||||
) -> list[dict]:
|
||||
"""Download recognition result JSON."""
|
||||
response = client.get(
|
||||
f"{SALUTE_API_BASE}/data:download",
|
||||
params={"response_file_id": response_file_id},
|
||||
headers={"Authorization": f"Bearer {token}"},
|
||||
timeout=60.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
|
||||
def _build_document_from_salute_result(
|
||||
raw_channels: list[dict], *, language: str
|
||||
) -> Document:
|
||||
"""Convert SaluteSpeech result JSON to Document."""
|
||||
builder = DocumentBuilder()
|
||||
words_options = WordOptions()
|
||||
|
||||
all_segments: list[SaluteSpeechSegment] = []
|
||||
|
||||
for channel_data in raw_channels:
|
||||
for result_item in channel_data.get("results", []):
|
||||
word_alignments = result_item.get("word_alignments", [])
|
||||
words = [
|
||||
SaluteSpeechWord(
|
||||
word=w["word"],
|
||||
start=_parse_salute_time(w["start"]),
|
||||
end=_parse_salute_time(w["end"]),
|
||||
)
|
||||
for w in word_alignments
|
||||
]
|
||||
|
||||
text = result_item.get("text", "")
|
||||
seg_start = _parse_salute_time(result_item["start"])
|
||||
seg_end = _parse_salute_time(result_item["end"])
|
||||
|
||||
all_segments.append(
|
||||
SaluteSpeechSegment(
|
||||
text=text,
|
||||
start=seg_start,
|
||||
end=seg_end,
|
||||
words=words,
|
||||
)
|
||||
)
|
||||
|
||||
document = _make_document_from_segments(
|
||||
builder, all_segments, max_line_width=words_options.max_line_width
|
||||
)
|
||||
return builder.process_document(document)
|
||||
|
||||
|
||||
def _convert_to_wav_sync(input_path: str, sample_rate: int = 16000) -> tuple[str, Callable[[], None]]:
|
||||
"""Convert any audio/video to WAV (PCM signed 16-bit LE) using ffmpeg. Sync version."""
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
with NamedTemporaryFile(suffix=".wav", delete=False) as out:
|
||||
out_path = out.name
|
||||
|
||||
result = subprocess.run(
|
||||
[
|
||||
"ffmpeg", "-y", "-i", input_path,
|
||||
"-vn", "-ac", "1", "-ar", str(sample_rate),
|
||||
"-acodec", "pcm_s16le",
|
||||
out_path,
|
||||
],
|
||||
capture_output=True,
|
||||
)
|
||||
if result.returncode != 0:
|
||||
raise RuntimeError(f"ffmpeg failed: {result.stderr.decode(errors='ignore')}")
|
||||
|
||||
def _cleanup() -> None:
|
||||
if os.path.exists(out_path):
|
||||
os.remove(out_path)
|
||||
|
||||
return out_path, _cleanup
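# For reference, the subprocess call above is roughly equivalent to running
#   ffmpeg -y -i <input> -vn -ac 1 -ar <sample_rate> -acodec pcm_s16le <out.wav>
# i.e. drop the video stream, downmix to mono, resample (16 kHz by default) and
# encode as signed 16-bit little-endian PCM.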
|
||||
|
||||
|
||||
def _salute_transcribe_sync(
|
||||
*,
|
||||
local_file_path: str,
|
||||
language: str | None,
|
||||
model: str,
|
||||
sample_rate: int,
|
||||
job_id: uuid.UUID | None = None,
|
||||
on_progress: ProgressCallback | None = None,
|
||||
) -> Document:
|
||||
"""Synchronous SaluteSpeech transcription (runs in Dramatiq worker thread)."""
|
||||
ext = Path(local_file_path).suffix.lower()
|
||||
audio_encoding = SALUTE_ENCODING_MAP.get(ext)
|
||||
content_type = SALUTE_CONTENT_TYPE_MAP.get(ext)
|
||||
|
||||
# Convert unsupported formats (mp4, webm, m4a, etc.) to WAV via ffmpeg
|
||||
cleanup_fn: Callable[[], None] | None = None
|
||||
if not audio_encoding or not content_type:
|
||||
wav_path, cleanup_fn = _convert_to_wav_sync(local_file_path, sample_rate)
|
||||
local_file_path = wav_path
|
||||
audio_encoding = "PCM_S16LE"
|
||||
content_type = "audio/wav"
|
||||
|
||||
salute_language = SALUTE_LANGUAGE_MAP.get(language or "", "ru-RU")
|
||||
|
||||
try:
|
||||
ssl_context = _build_salute_ssl_context()
|
||||
with httpx.Client(verify=ssl_context, timeout=30.0) as client:
|
||||
token = _get_salute_access_token(client)
|
||||
|
||||
with open(local_file_path, "rb") as f:
|
||||
audio_data = f.read()
|
||||
|
||||
file_id = _upload_salute_audio(client, token, audio_data, content_type)
|
||||
task_id = _create_salute_task(
|
||||
client,
|
||||
token,
|
||||
file_id,
|
||||
language=salute_language,
|
||||
model=model,
|
||||
audio_encoding=audio_encoding,
|
||||
sample_rate=sample_rate,
|
||||
)
|
||||
response_file_id = _poll_salute_task(
|
||||
client, token, task_id, job_id, on_progress
|
||||
)
|
||||
raw_result = _download_salute_result(client, token, response_file_id)
|
||||
|
||||
return _build_document_from_salute_result(raw_result, language=salute_language)
|
||||
except ssl.SSLError as exc:
|
||||
raise RuntimeError(ERROR_SALUTE_SSL_FAILED.format(detail=str(exc))) from exc
|
||||
except httpx.ConnectError as exc:
|
||||
if isinstance(exc.__cause__, ssl.SSLError):
|
||||
raise RuntimeError(
|
||||
ERROR_SALUTE_SSL_FAILED.format(detail=str(exc.__cause__))
|
||||
) from exc
|
||||
raise
|
||||
finally:
|
||||
if cleanup_fn is not None:
|
||||
cleanup_fn()
|
||||
|
||||
|
||||
async def transcribe_with_salute_speech(
|
||||
storage: StorageService,
|
||||
*,
|
||||
file_key: str,
|
||||
language: str | None = None,
|
||||
model: str = "general",
|
||||
sample_rate: int = 16000,
|
||||
job_id: uuid.UUID | None = None,
|
||||
on_progress: ProgressCallback | None = None,
|
||||
) -> Document:
|
||||
"""Async wrapper for SaluteSpeech transcription."""
|
||||
tmp = await storage.download_to_temp(file_key)
|
||||
try:
|
||||
return await anyio.to_thread.run_sync(
|
||||
lambda: _salute_transcribe_sync(
|
||||
local_file_path=tmp.path,
|
||||
language=language,
|
||||
model=model,
|
||||
sample_rate=sample_rate,
|
||||
job_id=job_id,
|
||||
on_progress=on_progress,
|
||||
)
|
||||
)
|
||||
finally:
|
||||
tmp.cleanup()
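# Minimal usage sketch (assumes a configured StorageService and SaluteSpeech
# credentials; the file key below is illustrative):
#   document = await transcribe_with_salute_speech(
#       storage, file_key="users/demo/recording.wav", language="ru"
#   )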
|
||||
|
||||
@@ -51,7 +51,7 @@ class UserRepository:
|
||||
await self._session.commit()
|
||||
except IntegrityError as e:
|
||||
await self._session.rollback()
|
||||
raise ValueError("User already exists or violates constraints") from e
|
||||
raise ValueError("Пользователь уже существует или нарушены ограничения") from e
|
||||
|
||||
await self._session.refresh(user)
|
||||
return user
|
||||
@@ -66,7 +66,7 @@ class UserRepository:
|
||||
await self._session.commit()
|
||||
except IntegrityError as e:
|
||||
await self._session.rollback()
|
||||
raise ValueError("Update violates constraints") from e
|
||||
raise ValueError("Обновление нарушает ограничения") from e
|
||||
|
||||
await self._session.refresh(user)
|
||||
return user
|
||||
|
||||
@@ -123,10 +123,10 @@ async def retrieve_user(
|
||||
service = UserService(db)
|
||||
user = await service.get_user_by_id(user_id)
|
||||
if user is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and user.id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
return await _resolve_avatar(user, storage)
|
||||
|
||||
@@ -142,10 +142,10 @@ async def patch_user(
|
||||
service = UserService(db)
|
||||
user = await service.get_user_by_id(user_id)
|
||||
if user is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and user.id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
try:
|
||||
user = await service.update_user(user, body)
|
||||
@@ -164,10 +164,10 @@ async def delete_user(
|
||||
service = UserService(db)
|
||||
user = await service.get_user_by_id(user_id)
|
||||
if user is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and user.id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
await service.deactivate_user(user)
|
||||
return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||
@@ -201,7 +201,7 @@ async def login(
|
||||
service = UserService(db)
|
||||
user = await service.authenticate(body.username, body.password)
|
||||
if user is None:
|
||||
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")
|
||||
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Неверные учётные данные")
|
||||
|
||||
access, refresh = _issue_tokens(user)
|
||||
user_read = await _resolve_avatar(user, storage)
|
||||
@@ -226,5 +226,5 @@ async def refresh(body: TokenRefresh) -> TokenRefreshResponse:
|
||||
return TokenRefreshResponse(access=access, refresh=body.refresh)
|
||||
except (ExpiredSignatureError, InvalidTokenError, ValueError):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token"
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail="Недействительный токен обновления"
|
||||
)
|
||||
|
||||
@@ -28,7 +28,7 @@ class UserService:
|
||||
async def create_user(self, data: UserCreate, *, requester: User | None) -> User:
|
||||
# Keep Django behavior: any authenticated user can create via this endpoint.
|
||||
if requester is None:
|
||||
raise ValueError("Authentication required")
|
||||
raise ValueError("Требуется авторизация")
|
||||
return await self._repo.create(data=data)
|
||||
|
||||
async def register_user(self, data: UserRegister) -> User:
|
||||
@@ -42,7 +42,7 @@ class UserService:
|
||||
|
||||
async def change_password(self, user: User, current_password: str, new_password: str) -> None:
|
||||
if not verify_password(current_password, user.password_hash):
|
||||
raise ValueError("Current password is incorrect")
|
||||
raise ValueError("Текущий пароль неверен")
|
||||
new_hash = hash_password(new_password)
|
||||
await self._repo.update_password(user, new_hash)
|
||||
|
||||
|
||||
@@ -44,10 +44,10 @@ async def retrieve_webhook_endpoint(
|
||||
service = WebhookService(db)
|
||||
webhook = await service.get_webhook(webhook_id)
|
||||
if webhook is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and webhook.user_id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
return WebhookRead.model_validate(webhook)
|
||||
|
||||
@@ -62,10 +62,10 @@ async def patch_webhook_endpoint(
|
||||
service = WebhookService(db)
|
||||
webhook = await service.get_webhook(webhook_id)
|
||||
if webhook is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and webhook.user_id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
webhook = await service.update_webhook(webhook, body)
|
||||
return WebhookRead.model_validate(webhook)
|
||||
@@ -80,10 +80,10 @@ async def delete_webhook_endpoint(
|
||||
service = WebhookService(db)
|
||||
webhook = await service.get_webhook(webhook_id)
|
||||
if webhook is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Не найдено")
|
||||
|
||||
if not current_user.is_staff and webhook.user_id != current_user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Доступ запрещён")
|
||||
|
||||
await service.deactivate_webhook(webhook)
|
||||
return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -1,13 +1,49 @@
|
||||
x-backend-image: &backend-image
|
||||
image: cpv3-backend:dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
target: dev
|
||||
|
||||
x-backend-env: &backend-env
|
||||
DEBUG: ${DEBUG:-1}
|
||||
JWT_SECRET_KEY: ${JWT_SECRET_KEY:-dev-secret}
|
||||
|
||||
POSTGRES_USER: ${POSTGRES_USER:-postgres}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
|
||||
POSTGRES_HOST: db
|
||||
POSTGRES_PORT: 5432
|
||||
POSTGRES_DATABASE: ${POSTGRES_DATABASE:-coffee_project_db}
|
||||
|
||||
STORAGE_BACKEND: ${STORAGE_BACKEND:-S3}
|
||||
|
||||
S3_ACCESS_KEY: ${MINIO_ROOT_USER:-minioadmin}
|
||||
S3_SECRET_KEY: ${MINIO_ROOT_PASSWORD:-minioadmin}
|
||||
S3_BUCKET_NAME: ${S3_BUCKET_NAME:-coffee-bucket}
|
||||
S3_ENDPOINT_URL_INTERNAL: http://minio:9000
|
||||
# Used only for generated browser links (presigned URLs)
|
||||
S3_ENDPOINT_URL_PUBLIC: http://localhost:9000
|
||||
|
||||
REDIS_URL: redis://redis:6379/0
|
||||
WEBHOOK_BASE_URL: http://api:8000
|
||||
|
||||
REMOTION_SERVICE_URL: ${REMOTION_SERVICE_URL:-http://remotion:3001}
|
||||
SALUTE_AUTH_KEY: ${SALUTE_AUTH_KEY:-}
|
||||
SALUTE_CA_CERT_PATH: ${SALUTE_CA_CERT_PATH:-./.certs/russian_trusted_ca_bundle.pem}
|
||||
SALUTE_SSL_VERIFY: ${SALUTE_SSL_VERIFY:-true}
|
||||
SALUTE_SCOPE: ${SALUTE_SCOPE:-SALUTE_SPEECH_PERS}
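# The two anchors above are merged into the api and worker services below via
# "<<: *backend-image" and "<<: *backend-env", so the shared image definition and
# environment are declared once.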
|
||||
|
||||
services:
|
||||
db:
|
||||
container_name: cpv3_postgres
|
||||
image: postgres:16
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_USER: ${POSTGRES_USER:-postgres}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
|
||||
POSTGRES_DB: ${POSTGRES_DATABASE:-coffee_project_db}
|
||||
ports:
|
||||
- "5332:5432"
|
||||
- "127.0.0.1:5332:5432"
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_DB:-coffee_project_db}"]
|
||||
interval: 5s
|
||||
@@ -15,25 +51,37 @@ services:
|
||||
retries: 20
|
||||
volumes:
|
||||
- cpv3_db:/var/lib/postgresql/data
|
||||
networks:
|
||||
- db-net
|
||||
|
||||
minio:
|
||||
container_name: cpv3_minio
|
||||
image: minio/minio
|
||||
image: minio/minio:RELEASE.2025-09-07T16-13-09Z
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "9000:9000"
|
||||
- "9001:9001"
|
||||
- "0.0.0.0:9000:9000"
|
||||
- "0.0.0.0:9001:9001"
|
||||
environment:
|
||||
MINIO_ROOT_USER: ${MINIO_ROOT_USER:-minioadmin}
|
||||
MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD:-minioadmin}
|
||||
command: server /data --console-address ":9001"
|
||||
healthcheck:
|
||||
test: ["CMD", "mc", "ready", "local"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
volumes:
|
||||
- cpv3_minio:/data
|
||||
networks:
|
||||
- db-net
|
||||
- app-net
|
||||
|
||||
redis:
|
||||
container_name: cpv3_redis
|
||||
image: redis:7-alpine
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "6379:6379"
|
||||
- "127.0.0.1:6379:6379"
|
||||
healthcheck:
|
||||
test: ["CMD", "redis-cli", "ping"]
|
||||
interval: 5s
|
||||
@@ -41,83 +89,84 @@ services:
|
||||
retries: 10
|
||||
volumes:
|
||||
- cpv3_redis:/data
|
||||
networks:
|
||||
- db-net
|
||||
- app-net
|
||||
|
||||
api:
|
||||
container_name: cpv3_api
|
||||
build: .
|
||||
<<: *backend-image
|
||||
restart: unless-stopped
|
||||
env_file: .env
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
redis:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/api/health/')"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
start_period: 30s
|
||||
environment:
|
||||
DEBUG: ${DEBUG:-1}
|
||||
JWT_SECRET_KEY: ${JWT_SECRET_KEY:-dev-secret}
|
||||
|
||||
POSTGRES_USER: ${POSTGRES_USER:-postgres}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
|
||||
POSTGRES_HOST: db
|
||||
POSTGRES_PORT: 5432
|
||||
POSTGRES_DATABASE: ${POSTGRES_DATABASE:-coffee_project_db}
|
||||
|
||||
STORAGE_BACKEND: ${STORAGE_BACKEND:-S3}
|
||||
|
||||
S3_ACCESS_KEY: ${MINIO_ROOT_USER:-minioadmin}
|
||||
S3_SECRET_KEY: ${MINIO_ROOT_PASSWORD:-minioadmin}
|
||||
S3_BUCKET_NAME: ${S3_BUCKET_NAME:-coffee-bucket}
|
||||
S3_ENDPOINT_URL_INTERNAL: http://minio:9000
|
||||
# Used only for generated browser links (presigned URLs)
|
||||
S3_ENDPOINT_URL_PUBLIC: http://localhost:9000
|
||||
|
||||
REDIS_URL: redis://redis:6379/0
|
||||
WEBHOOK_BASE_URL: http://api:8000
|
||||
|
||||
REMOTION_SERVICE_URL: ${REMOTION_SERVICE_URL:-http://remotion:3001}
|
||||
<<: *backend-env
|
||||
ports:
|
||||
- "8000:8000"
|
||||
command: >
|
||||
sh -c "uv run alembic upgrade head &&
|
||||
uv run uvicorn cpv3.main:app --host 0.0.0.0 --port 8000 --reload --reload-dir /app/src"
|
||||
- "0.0.0.0:8000:8000"
|
||||
volumes:
|
||||
- ./src:/app/src
|
||||
- ./cpv3:/app/cpv3
|
||||
- ./alembic:/app/alembic
|
||||
- ./alembic.ini:/app/alembic.ini
|
||||
- ./.certs:/app/.certs:ro
|
||||
networks:
|
||||
- db-net
|
||||
- app-net
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 512m
|
||||
cpus: "1"
|
||||
|
||||
worker:
|
||||
container_name: cpv3_worker
|
||||
build: .
|
||||
env_file: .env
|
||||
<<: *backend-image
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
redis:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pgrep -f dramatiq || exit 1"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
environment:
|
||||
DEBUG: ${DEBUG:-1}
|
||||
|
||||
POSTGRES_USER: ${POSTGRES_USER:-postgres}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
|
||||
POSTGRES_HOST: db
|
||||
POSTGRES_PORT: 5432
|
||||
POSTGRES_DATABASE: ${POSTGRES_DATABASE:-coffee_project_db}
|
||||
|
||||
STORAGE_BACKEND: ${STORAGE_BACKEND:-S3}
|
||||
|
||||
S3_ACCESS_KEY: ${MINIO_ROOT_USER:-minioadmin}
|
||||
S3_SECRET_KEY: ${MINIO_ROOT_PASSWORD:-minioadmin}
|
||||
S3_BUCKET_NAME: ${S3_BUCKET_NAME:-coffee-bucket}
|
||||
S3_ENDPOINT_URL_INTERNAL: http://minio:9000
|
||||
S3_ENDPOINT_URL_PUBLIC: http://localhost:9000
|
||||
|
||||
REDIS_URL: redis://redis:6379/0
|
||||
WEBHOOK_BASE_URL: http://api:8000
|
||||
|
||||
REMOTION_SERVICE_URL: ${REMOTION_SERVICE_URL:-http://localhost:8001}
|
||||
<<: *backend-env
|
||||
# watchfiles restarts dramatiq whenever Python files in /app/cpv3 change.
|
||||
# This gives the worker the same hot-reload experience as the API.
|
||||
command: >
|
||||
uv run dramatiq cpv3.modules.tasks.service --processes 1 --threads 2
|
||||
watchfiles --filter python 'dramatiq cpv3.modules.tasks.service --processes 1 --threads 2' /app/cpv3
|
||||
volumes:
|
||||
- ./src:/app/src
|
||||
- ./cpv3:/app/cpv3
|
||||
- ./.certs:/app/.certs:ro
|
||||
networks:
|
||||
- db-net
|
||||
- app-net
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 1g
|
||||
cpus: "1"
|
||||
|
||||
volumes:
|
||||
cpv3_db:
|
||||
cpv3_minio:
|
||||
cpv3_redis:
|
||||
|
||||
networks:
|
||||
db-net:
|
||||
driver: bridge
|
||||
app-net:
|
||||
driver: bridge
|
||||
|
||||
@@ -25,6 +25,8 @@ dependencies = [
|
||||
"dramatiq[redis]>=1.17.0",
|
||||
"redis>=5.0.0",
|
||||
"psycopg2-binary>=2.9.9",
|
||||
"tiktoken>=0.3.3",
|
||||
"greenlet>=3.3.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
@@ -35,6 +37,12 @@ dev = [
|
||||
"pytest-asyncio>=0.23.0",
|
||||
"aiosqlite>=0.20.0",
|
||||
]
|
||||
tools = [
|
||||
"bandit",
|
||||
"pip-audit",
|
||||
"schemathesis",
|
||||
"radon",
|
||||
]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = "auto"
|
||||
|
||||
@@ -6,11 +6,14 @@ from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import AsyncGenerator
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
import pytest
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||
|
||||
from cpv3.db.base import Base
|
||||
@@ -22,20 +25,23 @@ from cpv3.main import app
|
||||
from cpv3.modules.users.models import User
|
||||
|
||||
|
||||
# Use in-memory SQLite for tests (or configure a test database)
|
||||
TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def test_engine():
|
||||
"""Create a test database engine with tables."""
|
||||
engine = create_async_engine(TEST_DATABASE_URL, echo=False)
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.create_all)
|
||||
yield engine
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.drop_all)
|
||||
await engine.dispose()
|
||||
with NamedTemporaryFile(suffix=".sqlite3", delete=False) as tmp_db:
|
||||
db_path = Path(tmp_db.name)
|
||||
|
||||
sync_engine = create_engine(f"sqlite:///{db_path}", echo=False)
|
||||
Base.metadata.create_all(bind=sync_engine)
|
||||
|
||||
engine = create_async_engine(f"sqlite+aiosqlite:///{db_path}", echo=False)
|
||||
try:
|
||||
yield engine
|
||||
finally:
|
||||
await engine.dispose()
|
||||
Base.metadata.drop_all(bind=sync_engine)
|
||||
sync_engine.dispose()
|
||||
db_path.unlink(missing_ok=True)
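# Presumably a file-backed SQLite database is used (instead of ":memory:") so the
# sync engine that creates the schema and the async engine under test see the same
# data; an in-memory SQLite database is private to a single connection.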
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
||||
@@ -68,6 +68,7 @@ class TestUploadFileEndpoint:
|
||||
|
||||
assert response.status_code == 201
|
||||
data = response.json()
|
||||
assert "file_id" in data
|
||||
assert "file_path" in data
|
||||
assert "file_url" in data
|
||||
|
||||
@@ -209,6 +210,30 @@ class TestRetrieveFileEntryEndpoint:
|
||||
assert response.status_code == 403
|
||||
|
||||
|
||||
class TestResolveFileEntryEndpoint:
|
||||
"""Tests for GET /api/files/files/{file_id}/resolve/."""
|
||||
|
||||
async def test_resolve_own_file_entry(
|
||||
self, auth_client: AsyncClient, test_file: File
|
||||
):
|
||||
"""Test resolving own file entry into downloadable URL."""
|
||||
response = await auth_client.get(f"/api/files/files/{test_file.id}/resolve/")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["file_id"] == str(test_file.id)
|
||||
assert data["file_path"] == test_file.path
|
||||
assert "file_url" in data
|
||||
|
||||
async def test_resolve_other_file_forbidden(
|
||||
self, auth_client: AsyncClient, other_file: File
|
||||
):
|
||||
"""Test regular user cannot resolve another user's file entry."""
|
||||
response = await auth_client.get(f"/api/files/files/{other_file.id}/resolve/")
|
||||
|
||||
assert response.status_code == 403
|
||||
|
||||
|
||||
class TestPatchFileEntryEndpoint:
|
||||
"""Tests for PATCH /api/files/files/{file_id}/."""
|
||||
|
||||
|
||||
@@ -0,0 +1,212 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from cpv3.modules.files.models import File
|
||||
from cpv3.modules.projects.models import Project
|
||||
from cpv3.modules.tasks.schemas import TaskSubmitResponse
|
||||
from cpv3.modules.tasks.service import TaskService
|
||||
from cpv3.modules.users.models import User
|
||||
|
||||
pytest.importorskip("greenlet")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def workflow_project(test_db_session: AsyncSession, test_user: User) -> Project:
|
||||
project = Project(
|
||||
id=uuid.uuid4(),
|
||||
owner_id=test_user.id,
|
||||
name="Workflow Project",
|
||||
description="Typed workflow test project",
|
||||
language="ru",
|
||||
status="DRAFT",
|
||||
is_active=True,
|
||||
)
|
||||
test_db_session.add(project)
|
||||
await test_db_session.commit()
|
||||
await test_db_session.refresh(project)
|
||||
return project
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def source_file(
|
||||
test_db_session: AsyncSession,
|
||||
test_user: User,
|
||||
workflow_project: Project,
|
||||
) -> File:
|
||||
file = File(
|
||||
id=uuid.uuid4(),
|
||||
owner_id=test_user.id,
|
||||
project_id=workflow_project.id,
|
||||
original_filename="source.mp4",
|
||||
path="users/test/source.mp4",
|
||||
storage_backend="S3",
|
||||
mime_type="video/mp4",
|
||||
size_bytes=1024,
|
||||
file_format="mp4",
|
||||
is_uploaded=True,
|
||||
is_deleted=False,
|
||||
)
|
||||
test_db_session.add(file)
|
||||
await test_db_session.commit()
|
||||
await test_db_session.refresh(file)
|
||||
return file
|
||||
|
||||
|
||||
class TestProjectWorkspaceEndpoints:
|
||||
async def test_get_workspace_returns_default_state(
|
||||
self,
|
||||
auth_client: AsyncClient,
|
||||
workflow_project: Project,
|
||||
) -> None:
|
||||
response = await auth_client.get(f"/api/projects/{workflow_project.id}/workspace")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["project_id"] == str(workflow_project.id)
|
||||
assert data["revision"] == 0
|
||||
assert data["version"] == 1
|
||||
assert data["phase"] == "INGEST"
|
||||
assert data["current_screen"] == "upload"
|
||||
assert data["source_file_id"] is None
|
||||
assert data["active_job"] is None
|
||||
assert data["workspace_view"] == {
|
||||
"used_file_ids": [],
|
||||
"selected_file_id": None,
|
||||
}
|
||||
|
||||
async def test_get_workspace_forbidden_for_other_users_project(
|
||||
self,
|
||||
auth_client: AsyncClient,
|
||||
test_db_session: AsyncSession,
|
||||
other_user: User,
|
||||
) -> None:
|
||||
foreign_project = Project(
|
||||
id=uuid.uuid4(),
|
||||
owner_id=other_user.id,
|
||||
name="Other Project",
|
||||
description=None,
|
||||
language="ru",
|
||||
status="DRAFT",
|
||||
is_active=True,
|
||||
)
|
||||
test_db_session.add(foreign_project)
|
||||
await test_db_session.commit()
|
||||
|
||||
response = await auth_client.get(f"/api/projects/{foreign_project.id}/workspace")
|
||||
|
||||
assert response.status_code == 403
|
||||
|
||||
async def test_set_source_file_action_updates_workspace(
|
||||
self,
|
||||
auth_client: AsyncClient,
|
||||
workflow_project: Project,
|
||||
source_file: File,
|
||||
) -> None:
|
||||
response = await auth_client.post(
|
||||
f"/api/projects/{workflow_project.id}/workflow/actions",
|
||||
json={
|
||||
"type": "SET_SOURCE_FILE",
|
||||
"revision": 0,
|
||||
"file_id": str(source_file.id),
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["revision"] == 1
|
||||
assert data["phase"] == "VERIFY"
|
||||
assert data["current_screen"] == "verify"
|
||||
assert data["source_file_id"] == str(source_file.id)
|
||||
assert data["workspace_view"] == {
|
||||
"used_file_ids": [str(source_file.id)],
|
||||
"selected_file_id": str(source_file.id),
|
||||
}
|
||||
|
||||
async def test_action_returns_conflict_on_stale_revision(
|
||||
self,
|
||||
auth_client: AsyncClient,
|
||||
workflow_project: Project,
|
||||
source_file: File,
|
||||
) -> None:
|
||||
first_response = await auth_client.post(
|
||||
f"/api/projects/{workflow_project.id}/workflow/actions",
|
||||
json={
|
||||
"type": "SET_SOURCE_FILE",
|
||||
"revision": 0,
|
||||
"file_id": str(source_file.id),
|
||||
},
|
||||
)
|
||||
assert first_response.status_code == 200
|
||||
|
||||
response = await auth_client.post(
|
||||
f"/api/projects/{workflow_project.id}/workflow/actions",
|
||||
json={
|
||||
"type": "RESET_SOURCE_FILE",
|
||||
"revision": 0,
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == 409
|
||||
|
||||
async def test_start_media_convert_action_sets_active_job(
|
||||
self,
|
||||
auth_client: AsyncClient,
|
||||
workflow_project: Project,
|
||||
source_file: File,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
async def fake_submit_media_convert(
|
||||
self,
|
||||
*,
|
||||
requester: User,
|
||||
request,
|
||||
) -> TaskSubmitResponse:
|
||||
assert requester.id == workflow_project.owner_id
|
||||
assert request.file_key == source_file.path
|
||||
assert request.project_id == workflow_project.id
|
||||
return TaskSubmitResponse(
|
||||
job_id=uuid.UUID("00000000-0000-4000-a000-000000000123"),
|
||||
webhook_url=("http://test/api/tasks/webhook/00000000-0000-4000-a000-000000000123/"),
|
||||
status="PENDING",
|
||||
)
|
||||
|
||||
monkeypatch.setattr(
|
||||
TaskService,
|
||||
"submit_media_convert",
|
||||
fake_submit_media_convert,
|
||||
)
|
||||
|
||||
set_source_response = await auth_client.post(
|
||||
f"/api/projects/{workflow_project.id}/workflow/actions",
|
||||
json={
|
||||
"type": "SET_SOURCE_FILE",
|
||||
"revision": 0,
|
||||
"file_id": str(source_file.id),
|
||||
},
|
||||
)
|
||||
assert set_source_response.status_code == 200
|
||||
|
||||
response = await auth_client.post(
|
||||
f"/api/projects/{workflow_project.id}/workflow/actions",
|
||||
json={
|
||||
"type": "START_MEDIA_CONVERT",
|
||||
"revision": 1,
|
||||
"output_format": "mp4",
|
||||
"out_folder": "output_files",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["revision"] == 2
|
||||
assert data["phase"] == "VERIFY"
|
||||
assert data["current_screen"] == "verify"
|
||||
assert data["active_job"] == {
|
||||
"job_id": "00000000-0000-4000-a000-000000000123",
|
||||
"job_type": "MEDIA_CONVERT",
|
||||
}
|
||||
@@ -0,0 +1,99 @@
|
||||
"""Tests for SaluteSpeech result parsing and document building."""
|
||||
|
||||
from cpv3.modules.transcription.service import (
|
||||
_build_document_from_salute_result,
|
||||
_parse_salute_time,
|
||||
)
|
||||
|
||||
|
||||
class TestParseSaluteTime:
|
||||
def test_simple_timestamp(self):
|
||||
assert _parse_salute_time("0.480s") == 0.48
|
||||
|
||||
def test_zero(self):
|
||||
assert _parse_salute_time("0.000s") == 0.0
|
||||
|
||||
def test_large_timestamp(self):
|
||||
assert _parse_salute_time("123.456s") == 123.456
|
||||
|
||||
def test_integer_timestamp(self):
|
||||
assert _parse_salute_time("5s") == 5.0
|
||||
|
||||
|
||||
class TestBuildDocumentFromSaluteResult:
|
||||
def _make_raw_result(self):
|
||||
"""Minimal SaluteSpeech API response for testing."""
|
||||
return [
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"text": "привет мир",
|
||||
"normalized_text": "Привет мир.",
|
||||
"start": "0.480s",
|
||||
"end": "1.200s",
|
||||
"word_alignments": [
|
||||
{"word": "привет", "start": "0.480s", "end": "0.840s"},
|
||||
{"word": "мир", "start": "0.960s", "end": "1.200s"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"text": "это тест",
|
||||
"normalized_text": "Это тест.",
|
||||
"start": "1.500s",
|
||||
"end": "2.100s",
|
||||
"word_alignments": [
|
||||
{"word": "это", "start": "1.500s", "end": "1.700s"},
|
||||
{"word": "тест", "start": "1.800s", "end": "2.100s"},
|
||||
],
|
||||
},
|
||||
],
|
||||
"channel": 0,
|
||||
}
|
||||
]
|
||||
|
||||
def test_returns_document_with_segments(self):
|
||||
raw = self._make_raw_result()
|
||||
doc = _build_document_from_salute_result(raw, language="ru-RU")
|
||||
assert len(doc.segments) == 2
|
||||
|
||||
def test_segment_text(self):
|
||||
raw = self._make_raw_result()
|
||||
doc = _build_document_from_salute_result(raw, language="ru-RU")
|
||||
assert doc.segments[0].lines[0].text == "привет мир"
|
||||
|
||||
def test_word_timestamps(self):
|
||||
raw = self._make_raw_result()
|
||||
doc = _build_document_from_salute_result(raw, language="ru-RU")
|
||||
first_word = doc.segments[0].lines[0].words[0]
|
||||
assert first_word.text == "привет"
|
||||
assert first_word.time.start == 0.48
|
||||
assert first_word.time.end == 0.84
|
||||
|
||||
def test_segment_time_range(self):
|
||||
raw = self._make_raw_result()
|
||||
doc = _build_document_from_salute_result(raw, language="ru-RU")
|
||||
assert doc.segments[0].time.start == 0.48
|
||||
assert doc.segments[0].time.end == 1.2
|
||||
|
||||
def test_empty_results(self):
|
||||
raw = [{"results": [], "channel": 0}]
|
||||
doc = _build_document_from_salute_result(raw, language="ru-RU")
|
||||
assert len(doc.segments) == 0
|
||||
|
||||
def test_missing_word_alignments(self):
|
||||
raw = [
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"text": "привет",
|
||||
"normalized_text": "Привет.",
|
||||
"start": "0.000s",
|
||||
"end": "0.500s",
|
||||
}
|
||||
],
|
||||
"channel": 0,
|
||||
}
|
||||
]
|
||||
doc = _build_document_from_salute_result(raw, language="ru-RU")
|
||||
assert len(doc.segments) == 1
|
||||
assert doc.segments[0].time.start == 0.0
|
||||
@@ -0,0 +1,182 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from itertools import count
|
||||
from types import SimpleNamespace
|
||||
import uuid
|
||||
|
||||
from cpv3.modules.media import service as media_service
|
||||
from cpv3.modules.tasks import service as task_service
|
||||
|
||||
|
||||
def test_parse_ffmpeg_progress_time_seconds_from_timecode() -> None:
|
||||
assert media_service._get_ffmpeg_output_time_seconds(
|
||||
{
|
||||
"out_time": "00:00:12.500000",
|
||||
"progress": "continue",
|
||||
}
|
||||
) == 12.5
|
||||
|
||||
|
||||
def test_parse_ffmpeg_progress_time_seconds_returns_none_for_invalid_snapshot() -> None:
|
||||
assert media_service._get_ffmpeg_output_time_seconds(
|
||||
{
|
||||
"out_time": "not-a-timecode",
|
||||
"progress": "continue",
|
||||
}
|
||||
) is None
|
||||
|
||||
|
||||
def test_media_convert_actor_emits_intermediate_progress_events(monkeypatch) -> None:
|
||||
sent_events: list[task_service.TaskWebhookEvent] = []
|
||||
monotonic_values = count(step=2)
|
||||
|
||||
async def fake_convert_to_mp4(
|
||||
_storage: object,
|
||||
*,
|
||||
file_key: str,
|
||||
out_folder: str,
|
||||
on_progress: object | None = None,
|
||||
) -> SimpleNamespace:
|
||||
assert file_key == "uploads/source.mov"
|
||||
assert out_folder == "projects/1"
|
||||
assert callable(on_progress)
|
||||
|
||||
on_progress("converting", 0.0)
|
||||
on_progress("converting", 50.0)
|
||||
on_progress("converting", 100.0)
|
||||
on_progress("uploading", None)
|
||||
|
||||
return SimpleNamespace(
|
||||
file_path="projects/1/converted/video.mp4",
|
||||
file_url="https://example.com/video.mp4",
|
||||
file_size=123,
|
||||
)
|
||||
|
||||
monkeypatch.setattr(task_service, "_run_async", asyncio.run)
|
||||
monkeypatch.setattr(task_service, "_raise_if_job_cancelled", lambda _job_id: None)
|
||||
monkeypatch.setattr(task_service, "_get_storage_service", lambda: object())
|
||||
monkeypatch.setattr(
|
||||
task_service,
|
||||
"_send_webhook_event",
|
||||
lambda _url, event: sent_events.append(event),
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
task_service.time,
|
||||
"monotonic",
|
||||
lambda: float(next(monotonic_values)),
|
||||
)
|
||||
monkeypatch.setattr(media_service, "convert_to_mp4", fake_convert_to_mp4)
|
||||
|
||||
task_service.media_convert_actor.fn(
|
||||
job_id=str(uuid.uuid4()),
|
||||
webhook_url="http://backend.test/api/tasks/webhook/job-1/",
|
||||
file_key="uploads/source.mov",
|
||||
out_folder="projects/1",
|
||||
output_format="mp4",
|
||||
)
|
||||
|
||||
progress_events = [event for event in sent_events if event.progress_pct is not None]
|
||||
|
||||
assert [event.progress_pct for event in progress_events] == [
|
||||
5.0,
|
||||
10.0,
|
||||
52.5,
|
||||
95.0,
|
||||
99.0,
|
||||
100.0,
|
||||
]
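# 52.5 is consistent with the 50% conversion snapshot being mapped into the 10-95%
# band reserved for conversion (10 + 0.85 * 50); the remaining values appear to be
# fixed checkpoints for preparation, upload, save and completion.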
|
||||
assert [event.current_message for event in progress_events] == [
|
||||
"Подготовка файла",
|
||||
"Конвертация видео",
|
||||
"Конвертация видео",
|
||||
"Загрузка результата",
|
||||
"Сохранение результата",
|
||||
"Завершено",
|
||||
]
|
||||
assert sent_events[-1].status == task_service.JOB_STATUS_DONE
|
||||
assert sent_events[-1].output_data == {
|
||||
"file_path": "projects/1/converted/video.mp4",
|
||||
"file_url": "https://example.com/video.mp4",
|
||||
"file_size": 123,
|
||||
}
|
||||
|
||||
|
||||
def test_silence_apply_actor_emits_intermediate_progress_events(monkeypatch) -> None:
|
||||
sent_events: list[task_service.TaskWebhookEvent] = []
|
||||
monotonic_values = count(step=2)
|
||||
|
||||
async def fake_apply_silence_cuts(
|
||||
_storage: object,
|
||||
*,
|
||||
file_key: str,
|
||||
out_folder: str,
|
||||
cuts: list[dict],
|
||||
output_name: str | None = None,
|
||||
on_progress: object | None = None,
|
||||
) -> SimpleNamespace:
|
||||
assert file_key == "uploads/source.mp4"
|
||||
assert out_folder == "projects/1"
|
||||
assert cuts == [{"start_ms": 100, "end_ms": 200}]
|
||||
assert output_name == "edited.mp4"
|
||||
assert callable(on_progress)
|
||||
|
||||
on_progress("applying_cuts", 0.0)
|
||||
on_progress("applying_cuts", 50.0)
|
||||
on_progress("applying_cuts", 100.0)
|
||||
on_progress("uploading", None)
|
||||
|
||||
return SimpleNamespace(
|
||||
file_path="projects/1/silent/edited.mp4",
|
||||
file_url="https://example.com/edited.mp4",
|
||||
file_size=456,
|
||||
)
|
||||
|
||||
monkeypatch.setattr(task_service, "_run_async", asyncio.run)
|
||||
monkeypatch.setattr(task_service, "_raise_if_job_cancelled", lambda _job_id: None)
|
||||
monkeypatch.setattr(task_service, "_get_storage_service", lambda: object())
|
||||
monkeypatch.setattr(
|
||||
task_service,
|
||||
"_send_webhook_event",
|
||||
lambda _url, event: sent_events.append(event),
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
task_service.time,
|
||||
"monotonic",
|
||||
lambda: float(next(monotonic_values)),
|
||||
)
|
||||
monkeypatch.setattr(media_service, "apply_silence_cuts", fake_apply_silence_cuts)
|
||||
|
||||
task_service.silence_apply_actor.fn(
|
||||
job_id=str(uuid.uuid4()),
|
||||
webhook_url="http://backend.test/api/tasks/webhook/job-2/",
|
||||
file_key="uploads/source.mp4",
|
||||
out_folder="projects/1",
|
||||
cuts=[{"start_ms": 100, "end_ms": 200}],
|
||||
output_name="edited.mp4",
|
||||
)
|
||||
|
||||
progress_events = [event for event in sent_events if event.progress_pct is not None]
|
||||
|
||||
assert [event.progress_pct for event in progress_events] == [
|
||||
5.0,
|
||||
10.0,
|
||||
52.5,
|
||||
95.0,
|
||||
99.0,
|
||||
100.0,
|
||||
]
|
||||
assert [event.current_message for event in progress_events] == [
|
||||
"Подготовка файла",
|
||||
"Применение вырезок",
|
||||
"Применение вырезок",
|
||||
"Загрузка результата",
|
||||
"Сохранение результата",
|
||||
"Завершено",
|
||||
]
|
||||
assert sent_events[-1].status == task_service.JOB_STATUS_DONE
|
||||
assert sent_events[-1].output_data == {
|
||||
"file_path": "projects/1/silent/edited.mp4",
|
||||
"file_url": "https://example.com/edited.mp4",
|
||||
"file_size": 456,
|
||||
}
|
||||
@@ -0,0 +1,93 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import uuid
|
||||
from types import SimpleNamespace
|
||||
|
||||
from cpv3.infrastructure.storage.types import FileInfo
|
||||
from cpv3.modules.media import service as media_service
|
||||
from cpv3.modules.tasks import service as task_service
|
||||
|
||||
|
||||
def test_extract_ffmpeg_out_time_ms_prefers_out_time_us() -> None:
|
||||
assert media_service._extract_ffmpeg_out_time_ms(
|
||||
{
|
||||
"out_time_us": "2500000",
|
||||
"out_time_ms": "1000000",
|
||||
}
|
||||
) == 2500.0
|
||||
|
||||
|
||||
def test_extract_ffmpeg_out_time_ms_supports_legacy_out_time_ms_field() -> None:
|
||||
assert media_service._extract_ffmpeg_out_time_ms(
|
||||
{
|
||||
"out_time_ms": "1250000",
|
||||
}
|
||||
) == 1250.0
|
||||
|
||||
|
||||
def test_extract_ffmpeg_out_time_ms_returns_none_for_invalid_values() -> None:
|
||||
assert media_service._extract_ffmpeg_out_time_ms(
|
||||
{
|
||||
"out_time_us": "N/A",
|
||||
}
|
||||
) is None
|
||||
|
||||
|
||||
def test_media_convert_actor_emits_precise_progress_updates(monkeypatch) -> None:
|
||||
sent_events: list[task_service.TaskWebhookEvent] = []
|
||||
|
||||
async def fake_convert_to_mp4(
|
||||
_storage: object,
|
||||
*,
|
||||
file_key: str,
|
||||
out_folder: str,
|
||||
on_progress,
|
||||
) -> FileInfo:
|
||||
assert file_key == "uploads/source.mov"
|
||||
assert out_folder == "projects/1"
|
||||
|
||||
on_progress("converting", 0.0)
|
||||
on_progress("converting", 50.0)
|
||||
on_progress("uploading", None)
|
||||
|
||||
return FileInfo(
|
||||
file_path="projects/1/converted/output.mp4",
|
||||
file_url="https://example.com/output.mp4",
|
||||
file_size=1234,
|
||||
filename="output.mp4",
|
||||
)
|
||||
|
||||
monkeypatch.setattr(media_service, "convert_to_mp4", fake_convert_to_mp4)
|
||||
monkeypatch.setattr(task_service, "_run_async", asyncio.run)
|
||||
monkeypatch.setattr(task_service, "_raise_if_job_cancelled", lambda _job_id: None)
|
||||
monkeypatch.setattr(task_service, "_get_storage_service", lambda: object())
|
||||
monkeypatch.setattr(task_service, "PROGRESS_CONVERT_THROTTLE_SECONDS", 0.0)
|
||||
monkeypatch.setattr(
|
||||
task_service,
|
||||
"_send_webhook_event",
|
||||
lambda _url, event: sent_events.append(event),
|
||||
)
|
||||
|
||||
task_service.media_convert_actor.fn(
|
||||
job_id=str(uuid.uuid4()),
|
||||
webhook_url="http://backend.test/api/tasks/webhook/job-1/",
|
||||
file_key="uploads/source.mov",
|
||||
out_folder="projects/1",
|
||||
output_format="mp4",
|
||||
)
|
||||
|
||||
progress_events = [
|
||||
(event.progress_pct, event.current_message)
|
||||
for event in sent_events
|
||||
if event.progress_pct is not None
|
||||
]
|
||||
|
||||
assert progress_events == [
|
||||
(5.0, "Подготовка файла"),
|
||||
(10.0, "Конвертация видео"),
|
||||
(52.5, "Конвертация видео"),
|
||||
(95.0, "Загрузка результата"),
|
||||
(99.0, "Сохранение результата"),
|
||||
(100.0, "Завершено"),
|
||||
]
|
||||
@@ -0,0 +1,137 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from cpv3.modules.media.service import apply_silence_cuts, remove_silence
|
||||
|
||||
|
||||
class _TempDownload:
|
||||
def __init__(self, path: str) -> None:
|
||||
self.path = path
|
||||
|
||||
def cleanup(self) -> None:
|
||||
return None
|
||||
|
||||
|
||||
class _FakeProcess:
|
||||
returncode = 0
|
||||
|
||||
async def communicate(self) -> tuple[bytes, bytes]:
|
||||
return b"", b""
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remove_silence_uses_single_input_trim_filter(tmp_path) -> None:
|
||||
input_path = tmp_path / "input.mp4"
|
||||
input_path.write_bytes(b"video")
|
||||
|
||||
file_info = SimpleNamespace(
|
||||
file_path="uploads/test-file.txt",
|
||||
file_url="http://example.com/uploads/test-file.txt",
|
||||
file_size=1024,
|
||||
filename="test-file.txt",
|
||||
)
|
||||
storage = SimpleNamespace(
|
||||
download_to_temp=AsyncMock(return_value=_TempDownload(str(input_path))),
|
||||
upload_fileobj=AsyncMock(return_value="uploads/test-file.txt"),
|
||||
get_file_info=AsyncMock(return_value=file_info),
|
||||
)
|
||||
|
||||
captured: dict[str, tuple[str, ...]] = {}
|
||||
|
||||
async def fake_create_subprocess_exec(
|
||||
*cmd: str, stdout=None, stderr=None
|
||||
) -> _FakeProcess:
|
||||
captured["cmd"] = cmd
|
||||
return _FakeProcess()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"cpv3.modules.media.service._compute_non_silent_segments",
|
||||
return_value=[(0, 1000), (2000, 3000)],
|
||||
),
|
||||
patch(
|
||||
"cpv3.modules.media.service.asyncio.create_subprocess_exec",
|
||||
side_effect=fake_create_subprocess_exec,
|
||||
),
|
||||
):
|
||||
await remove_silence(
|
||||
storage,
|
||||
file_key="uploads/input.mp4",
|
||||
out_folder="processed",
|
||||
)
|
||||
|
||||
cmd = list(captured["cmd"])
|
||||
assert cmd.count("-i") == 1
|
||||
|
||||
filter_complex = cmd[cmd.index("-filter_complex") + 1]
|
||||
assert "trim=start=0.000:end=1.000" in filter_complex
|
||||
assert "atrim=start=0.000:end=1.000" in filter_complex
|
||||
assert "trim=start=2.000:end=3.000" in filter_complex
|
||||
assert "atrim=start=2.000:end=3.000" in filter_complex
|
||||
|
||||
|
||||
async def _run_sync_immediately(func):
|
||||
return func()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_apply_silence_cuts_uses_single_input_trim_filter(tmp_path) -> None:
|
||||
input_path = tmp_path / "input.mp4"
|
||||
input_path.write_bytes(b"video")
|
||||
|
||||
file_info = SimpleNamespace(
|
||||
file_path="uploads/test-file.txt",
|
||||
file_url="http://example.com/uploads/test-file.txt",
|
||||
file_size=1024,
|
||||
filename="test-file.txt",
|
||||
)
|
||||
storage = SimpleNamespace(
|
||||
download_to_temp=AsyncMock(return_value=_TempDownload(str(input_path))),
|
||||
upload_fileobj=AsyncMock(return_value="uploads/test-file.txt"),
|
||||
get_file_info=AsyncMock(return_value=file_info),
|
||||
)
|
||||
|
||||
captured: dict[str, tuple[str, ...]] = {}
|
||||
|
||||
async def fake_create_subprocess_exec(
|
||||
*cmd: str, stdout=None, stderr=None
|
||||
) -> _FakeProcess:
|
||||
captured["cmd"] = cmd
|
||||
return _FakeProcess()
|
||||
|
||||
fake_audio = MagicMock()
|
||||
fake_audio.__len__.return_value = 5000
|
||||
|
||||
with (
|
||||
patch(
|
||||
"cpv3.modules.media.service.anyio.to_thread.run_sync",
|
||||
side_effect=_run_sync_immediately,
|
||||
),
|
||||
patch("pydub.AudioSegment.from_file", return_value=fake_audio),
|
||||
patch(
|
||||
"cpv3.modules.media.service.asyncio.create_subprocess_exec",
|
||||
side_effect=fake_create_subprocess_exec,
|
||||
),
|
||||
):
|
||||
await apply_silence_cuts(
|
||||
storage,
|
||||
file_key="uploads/input.mp4",
|
||||
out_folder="processed",
|
||||
cuts=[
|
||||
{"start_ms": 1000, "end_ms": 2000},
|
||||
{"start_ms": 3000, "end_ms": 3500},
|
||||
],
|
||||
)
|
||||
|
||||
cmd = list(captured["cmd"])
|
||||
assert cmd.count("-i") == 1
|
||||
|
||||
filter_complex = cmd[cmd.index("-filter_complex") + 1]
|
||||
assert "trim=start=0.000:end=1.000" in filter_complex
|
||||
assert "atrim=start=0.000:end=1.000" in filter_complex
|
||||
assert "trim=start=2.000:end=3.000" in filter_complex
|
||||
assert "atrim=start=3.500:end=5.000" in filter_complex
|
||||
@@ -0,0 +1,102 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
import pytest
|
||||
|
||||
from cpv3.modules.notifications.service import NotificationService
|
||||
from cpv3.modules.tasks.schemas import TaskWebhookEvent
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_task_notification_persists_operation_title() -> None:
|
||||
service = NotificationService(session=AsyncMock())
|
||||
service._repo = SimpleNamespace(
|
||||
create=AsyncMock(return_value=SimpleNamespace(id=uuid.uuid4()))
|
||||
)
|
||||
|
||||
job = SimpleNamespace(
|
||||
id=uuid.uuid4(),
|
||||
project_id=uuid.uuid4(),
|
||||
job_type="SILENCE_APPLY",
|
||||
status="DONE",
|
||||
project_pct=100,
|
||||
current_message="Завершено",
|
||||
)
|
||||
|
||||
event = TaskWebhookEvent(
|
||||
status="DONE",
|
||||
progress_pct=100,
|
||||
current_message="Завершено",
|
||||
)
|
||||
|
||||
publish_mock = AsyncMock()
|
||||
|
||||
from cpv3.modules import notifications as notifications_module
|
||||
|
||||
original_publish = notifications_module.service.publish_to_user
|
||||
notifications_module.service.publish_to_user = publish_mock
|
||||
try:
|
||||
await service.create_task_notification(
|
||||
user_id=uuid.uuid4(),
|
||||
job=job,
|
||||
event=event,
|
||||
)
|
||||
finally:
|
||||
notifications_module.service.publish_to_user = original_publish
|
||||
|
||||
create_call = service._repo.create.await_args
|
||||
notification = create_call.args[0]
|
||||
|
||||
assert notification.title == "Применение вырезок"
|
||||
assert notification.message == "Завершено"
|
||||
assert notification.payload == {
|
||||
"job_type": "SILENCE_APPLY",
|
||||
"progress_pct": 100,
|
||||
"status": "DONE",
|
||||
}
|
||||
publish_mock.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_task_notification_preserves_zero_progress_for_websocket() -> None:
|
||||
service = NotificationService(session=AsyncMock())
|
||||
service._repo = SimpleNamespace(
|
||||
create=AsyncMock(return_value=SimpleNamespace(id=uuid.uuid4()))
|
||||
)
|
||||
|
||||
job = SimpleNamespace(
|
||||
id=uuid.uuid4(),
|
||||
project_id=uuid.uuid4(),
|
||||
job_type="MEDIA_CONVERT",
|
||||
status="RUNNING",
|
||||
project_pct=35.0,
|
||||
current_message="Конвертация видео",
|
||||
)
|
||||
|
||||
event = TaskWebhookEvent(
|
||||
progress_pct=0.0,
|
||||
current_message="Подготовка файла",
|
||||
)
|
||||
|
||||
publish_mock = AsyncMock()
|
||||
|
||||
from cpv3.modules import notifications as notifications_module
|
||||
|
||||
original_publish = notifications_module.service.publish_to_user
|
||||
notifications_module.service.publish_to_user = publish_mock
|
||||
try:
|
||||
await service.create_task_notification(
|
||||
user_id=uuid.uuid4(),
|
||||
job=job,
|
||||
event=event,
|
||||
)
|
||||
finally:
|
||||
notifications_module.service.publish_to_user = original_publish
|
||||
|
||||
published_message = publish_mock.await_args.args[1]
|
||||
|
||||
assert published_message.progress_pct == 0.0
|
||||
assert published_message.message == "Подготовка файла"
|
||||
@@ -0,0 +1,210 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
|
||||
from cpv3.modules.project_workspaces.schemas import (
|
||||
ProjectWorkspaceState,
|
||||
build_workspace_state_from_legacy,
|
||||
)
|
||||
from cpv3.modules.project_workspaces.service import ProjectWorkspaceService
|
||||
|
||||
|
||||
def test_build_workspace_state_from_legacy_maps_known_fields() -> None:
|
||||
source_file_id = uuid.uuid4()
|
||||
active_job_id = uuid.uuid4()
|
||||
silence_job_id = uuid.uuid4()
|
||||
artifact_id = uuid.uuid4()
|
||||
|
||||
workspace = build_workspace_state_from_legacy(
|
||||
{
|
||||
"wizard": {
|
||||
"current_step": "subtitle-revision",
|
||||
"primary_file_id": str(source_file_id),
|
||||
"active_job_id": str(active_job_id),
|
||||
"active_job_type": "TRANSCRIPTION_GENERATE",
|
||||
"silence_job_id": str(silence_job_id),
|
||||
"transcription_artifact_id": str(artifact_id),
|
||||
"silence_settings": {
|
||||
"min_silence_duration_ms": 350,
|
||||
"silence_threshold_db": 21,
|
||||
"padding_ms": 180,
|
||||
},
|
||||
"unknown_field": "ignored",
|
||||
},
|
||||
"used_files": [
|
||||
{"id": str(source_file_id), "path": "users/test/source.mp4"},
|
||||
{"id": "not-a-uuid", "path": "broken"},
|
||||
],
|
||||
"unknown_root": {"ignored": True},
|
||||
}
|
||||
)
|
||||
|
||||
assert workspace.phase == "TRANSCRIPTION"
|
||||
assert workspace.source_file_id == source_file_id
|
||||
assert workspace.active_job is not None
|
||||
assert workspace.active_job.job_id == active_job_id
|
||||
assert workspace.active_job.job_type == "TRANSCRIPTION_GENERATE"
|
||||
assert workspace.workspace_view.used_file_ids == [source_file_id]
|
||||
assert workspace.workspace_view.selected_file_id == source_file_id
|
||||
assert workspace.silence.detect_job_id == silence_job_id
|
||||
assert workspace.silence.settings.min_silence_duration_ms == 350
|
||||
assert workspace.silence.settings.silence_threshold_db == 21
|
||||
assert workspace.silence.settings.padding_ms == 180
|
||||
assert workspace.transcription.artifact_id == artifact_id
|
||||
assert workspace.captions.output_file_id is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
("job_type", "output_data", "initial_state", "expected"),
|
||||
[
|
||||
(
|
||||
"MEDIA_CONVERT",
|
||||
{"file_id": "00000000-0000-4000-a000-000000000101"},
|
||||
{
|
||||
"phase": "VERIFY",
|
||||
"source_file_id": "00000000-0000-4000-a000-000000000001",
|
||||
"active_job": {
|
||||
"job_id": "00000000-0000-4000-a000-000000000010",
|
||||
"job_type": "MEDIA_CONVERT",
|
||||
},
|
||||
},
|
||||
{
|
||||
"phase": "VERIFY",
|
||||
"source_file_id": "00000000-0000-4000-a000-000000000101",
|
||||
"active_job": None,
|
||||
"current_screen": "verify",
|
||||
},
|
||||
),
|
||||
(
|
||||
"SILENCE_DETECT",
|
||||
{
|
||||
"silent_segments": [{"start_ms": 100, "end_ms": 220}],
|
||||
"duration_ms": 1000,
|
||||
},
|
||||
{
|
||||
"phase": "SILENCE",
|
||||
"source_file_id": "00000000-0000-4000-a000-000000000001",
|
||||
"active_job": {
|
||||
"job_id": "00000000-0000-4000-a000-000000000011",
|
||||
"job_type": "SILENCE_DETECT",
|
||||
},
|
||||
"silence": {"status": "DETECTING"},
|
||||
},
|
||||
{
|
||||
"phase": "SILENCE",
|
||||
"active_job": None,
|
||||
"silence_status": "REVIEWING",
|
||||
"current_screen": "fragments",
|
||||
},
|
||||
),
|
||||
(
|
||||
"SILENCE_APPLY",
|
||||
{"file_id": "00000000-0000-4000-a000-000000000102"},
|
||||
{
|
||||
"phase": "SILENCE",
|
||||
"source_file_id": "00000000-0000-4000-a000-000000000001",
|
||||
"active_job": {
|
||||
"job_id": "00000000-0000-4000-a000-000000000012",
|
||||
"job_type": "SILENCE_APPLY",
|
||||
},
|
||||
"silence": {"status": "APPLYING"},
|
||||
},
|
||||
{
|
||||
"phase": "TRANSCRIPTION",
|
||||
"active_job": None,
|
||||
"silence_status": "COMPLETED",
|
||||
"current_screen": "transcription-settings",
|
||||
},
|
||||
),
|
||||
(
|
||||
"TRANSCRIPTION_GENERATE",
|
||||
{
|
||||
"artifact_id": "00000000-0000-4000-a000-000000000103",
|
||||
"transcription_id": "00000000-0000-4000-a000-000000000104",
|
||||
},
|
||||
{
|
||||
"phase": "TRANSCRIPTION",
|
||||
"source_file_id": "00000000-0000-4000-a000-000000000001",
|
||||
"active_job": {
|
||||
"job_id": "00000000-0000-4000-a000-000000000013",
|
||||
"job_type": "TRANSCRIPTION_GENERATE",
|
||||
},
|
||||
"transcription": {"status": "PROCESSING"},
|
||||
},
|
||||
{
|
||||
"phase": "TRANSCRIPTION",
|
||||
"active_job": None,
|
||||
"transcription_status": "REVIEWING",
|
||||
"current_screen": "subtitle-revision",
|
||||
},
|
||||
),
|
||||
(
|
||||
"CAPTIONS_GENERATE",
|
||||
{"file_id": "00000000-0000-4000-a000-000000000105"},
|
||||
{
|
||||
"phase": "CAPTIONS",
|
||||
"source_file_id": "00000000-0000-4000-a000-000000000001",
|
||||
"active_job": {
|
||||
"job_id": "00000000-0000-4000-a000-000000000014",
|
||||
"job_type": "CAPTIONS_GENERATE",
|
||||
},
|
||||
"captions": {"status": "PROCESSING"},
|
||||
},
|
||||
{
|
||||
"phase": "DONE",
|
||||
"active_job": None,
|
||||
"captions_status": "COMPLETED",
|
||||
"current_screen": "caption-result",
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_apply_job_event_advances_workspace_for_done_jobs(
|
||||
job_type: str,
|
||||
output_data: dict[str, object],
|
||||
initial_state: dict[str, object],
|
||||
expected: dict[str, object],
|
||||
) -> None:
|
||||
service = ProjectWorkspaceService(session=SimpleNamespace())
|
||||
|
||||
state = ProjectWorkspaceState.model_validate(
|
||||
{
|
||||
"version": 1,
|
||||
"phase": "INGEST",
|
||||
"active_job": None,
|
||||
"source_file_id": None,
|
||||
"workspace_view": {"used_file_ids": [], "selected_file_id": None},
|
||||
"silence": {},
|
||||
"transcription": {},
|
||||
"captions": {},
|
||||
**initial_state,
|
||||
}
|
||||
)
|
||||
|
||||
job = SimpleNamespace(
|
||||
id=uuid.UUID(str(state.active_job.job_id)) if state.active_job else uuid.uuid4(),
|
||||
project_id=uuid.uuid4(),
|
||||
job_type=job_type,
|
||||
status="DONE",
|
||||
output_data=output_data,
|
||||
)
|
||||
|
||||
next_state = service._apply_job_event_to_state(state, job)
|
||||
current_screen = service._derive_current_screen(next_state)
|
||||
|
||||
assert next_state.phase == expected["phase"]
|
||||
assert next_state.active_job == expected["active_job"]
|
||||
assert current_screen == expected["current_screen"]
|
||||
|
||||
if "source_file_id" in expected:
|
||||
assert str(next_state.source_file_id) == expected["source_file_id"]
|
||||
if "silence_status" in expected:
|
||||
assert next_state.silence.status == expected["silence_status"]
|
||||
if "transcription_status" in expected:
|
||||
assert next_state.transcription.status == expected["transcription_status"]
|
||||
if "captions_status" in expected:
|
||||
assert next_state.captions.status == expected["captions_status"]
|
||||
@@ -0,0 +1,331 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
import pytest
|
||||
|
||||
from cpv3.modules.project_workspaces.schemas import (
|
||||
ActiveJobState,
|
||||
ProjectWorkspaceState,
|
||||
SetSourceFileAction,
|
||||
SilenceSettingsState,
|
||||
SilenceState,
|
||||
StartSilenceDetectAction,
|
||||
StartTranscriptionAction,
|
||||
TranscriptionRequestState,
|
||||
)
|
||||
from cpv3.modules.project_workspaces.service import (
|
||||
ProjectWorkspaceService,
|
||||
WorkspaceRevisionConflictError,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_workspace_returns_default_state_when_workspace_missing() -> None:
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)

    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=None),
        create=AsyncMock(
            return_value=SimpleNamespace(
                project_id=project.id,
                revision=0,
                state=ProjectWorkspaceState().model_dump(mode="json"),
            )
        ),
    )

    workspace = await service.get_workspace(project=project)

    assert workspace.revision == 0
    assert workspace.phase == "INGEST"
    assert workspace.current_screen == "upload"
    assert workspace.active_job is None
    assert workspace.source_file_id is None
    assert workspace.workspace_view.used_file_ids == []
    assert workspace.workspace_view.selected_file_id is None


@pytest.mark.asyncio
async def test_apply_action_set_source_file_moves_workspace_to_verify() -> None:
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    requester = SimpleNamespace(id=uuid.uuid4(), is_staff=False)
    file_id = uuid.uuid4()
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=0,
        state=ProjectWorkspaceState().model_dump(mode="json"),
    )
    saved_state: dict[str, object] = {}

    async def update_state(*, project_id, expected_revision, state):
        saved_state.update(state)
        return SimpleNamespace(
            project_id=project_id,
            revision=expected_revision + 1,
            state=state,
        )

    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(side_effect=update_state),
    )
    service._file_repo = SimpleNamespace(
        get_by_id=AsyncMock(
            return_value=SimpleNamespace(
                id=file_id,
                owner_id=requester.id,
                project_id=project.id,
                path="users/test/source.mp4",
            )
        )
    )

    workspace = await service.apply_action(
        requester=requester,
        project=project,
        action=SetSourceFileAction(
            type="SET_SOURCE_FILE",
            revision=0,
            file_id=file_id,
        ),
    )

    assert workspace.revision == 1
    assert workspace.phase == "VERIFY"
    assert workspace.current_screen == "verify"
    assert workspace.source_file_id == file_id
    assert saved_state["source_file_id"] == str(file_id)
    assert saved_state["workspace_view"] == {
        "used_file_ids": [str(file_id)],
        "selected_file_id": str(file_id),
    }


@pytest.mark.asyncio
async def test_apply_action_rejects_stale_revision() -> None:
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=2,
        state=ProjectWorkspaceState().model_dump(mode="json"),
    )

    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(),
    )

    with pytest.raises(WorkspaceRevisionConflictError):
        await service.apply_action(
            requester=SimpleNamespace(id=uuid.uuid4(), is_staff=False),
            project=project,
            action=SetSourceFileAction(
                type="SET_SOURCE_FILE",
                revision=1,
                file_id=uuid.uuid4(),
            ),
        )

@pytest.mark.asyncio
async def test_start_silence_detect_submits_task_and_tracks_active_job() -> None:
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    requester = SimpleNamespace(id=uuid.uuid4(), is_staff=False)
    source_file_id = uuid.uuid4()
    workspace_state = ProjectWorkspaceState(
        phase="SILENCE",
        source_file_id=source_file_id,
        silence=SilenceState(
            status="CONFIGURED",
            settings=SilenceSettingsState(
                min_silence_duration_ms=250,
                silence_threshold_db=18,
                padding_ms=125,
            ),
        ),
    )
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=0,
        state=workspace_state.model_dump(mode="json"),
    )
    submitted_response = SimpleNamespace(job_id=uuid.uuid4(), status="PENDING")

    async def update_state(*, project_id, expected_revision, state):
        return SimpleNamespace(
            project_id=project_id,
            revision=expected_revision + 1,
            state=state,
        )

    task_service = SimpleNamespace(
        submit_silence_detect=AsyncMock(return_value=submitted_response),
    )

    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(side_effect=update_state),
    )
    service._file_repo = SimpleNamespace(
        get_by_id=AsyncMock(
            return_value=SimpleNamespace(
                id=source_file_id,
                owner_id=requester.id,
                project_id=project.id,
                path="projects/test/video.mp4",
            )
        )
    )
    service._task_service_factory = lambda: task_service

    workspace = await service.apply_action(
        requester=requester,
        project=project,
        action=StartSilenceDetectAction(
            type="START_SILENCE_DETECT",
            revision=0,
        ),
    )

    task_service.submit_silence_detect.assert_awaited_once()
    assert workspace.current_screen == "processing"
    assert workspace.active_job == ActiveJobState(
        job_id=submitted_response.job_id,
        job_type="SILENCE_DETECT",
    )
    assert workspace.silence.detect_job_id == submitted_response.job_id


@pytest.mark.asyncio
async def test_start_transcription_persists_request_and_processing_job() -> None:
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    requester = SimpleNamespace(id=uuid.uuid4(), is_staff=False)
    source_file_id = uuid.uuid4()
    workspace_state = ProjectWorkspaceState(
        phase="TRANSCRIPTION",
        source_file_id=source_file_id,
    )
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=3,
        state=workspace_state.model_dump(mode="json"),
    )
    submitted_response = SimpleNamespace(job_id=uuid.uuid4(), status="PENDING")

    async def update_state(*, project_id, expected_revision, state):
        return SimpleNamespace(
            project_id=project_id,
            revision=expected_revision + 1,
            state=state,
        )

    task_service = SimpleNamespace(
        submit_transcription_generate=AsyncMock(return_value=submitted_response),
    )

    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(side_effect=update_state),
    )
    service._file_repo = SimpleNamespace(
        get_by_id=AsyncMock(
            return_value=SimpleNamespace(
                id=source_file_id,
                owner_id=requester.id,
                project_id=project.id,
                path="projects/test/video.mp4",
            )
        )
    )
    service._task_service_factory = lambda: task_service

    request = TranscriptionRequestState(engine="whisper", language="ru", model="base")
    workspace = await service.apply_action(
        requester=requester,
        project=project,
        action=StartTranscriptionAction(
            type="START_TRANSCRIPTION",
            revision=3,
            request=request,
        ),
    )

    assert workspace.current_screen == "transcription-processing"
    assert workspace.transcription.request == request
    assert workspace.active_job == ActiveJobState(
        job_id=submitted_response.job_id,
        job_type="TRANSCRIPTION_GENERATE",
    )


@pytest.mark.asyncio
async def test_apply_job_update_moves_transcription_job_to_review() -> None:
    project = SimpleNamespace(id=uuid.uuid4(), workspace_state=None)
    job_id = uuid.uuid4()
    transcription_id = uuid.uuid4()
    artifact_id = uuid.uuid4()
    workspace_state = ProjectWorkspaceState(
        phase="TRANSCRIPTION",
        active_job=ActiveJobState(job_id=job_id, job_type="TRANSCRIPTION_GENERATE"),
        transcription={
            "status": "PROCESSING",
            "job_id": job_id,
            "artifact_id": None,
            "transcription_id": None,
            "reviewed": False,
        },
    )
    workspace_row = SimpleNamespace(
        project_id=project.id,
        revision=4,
        state=workspace_state.model_dump(mode="json"),
    )

    async def update_state(*, project_id, expected_revision, state):
        return SimpleNamespace(
            project_id=project_id,
            revision=expected_revision + 1,
            state=state,
        )

    service = ProjectWorkspaceService(session=AsyncMock())
    service._repo = SimpleNamespace(
        get_by_project_id=AsyncMock(return_value=workspace_row),
        create=AsyncMock(),
        update_state=AsyncMock(side_effect=update_state),
    )

    workspace = await service.apply_job_update(
        project=project,
        job=SimpleNamespace(
            id=job_id,
            project_id=project.id,
            job_type="TRANSCRIPTION_GENERATE",
            status="DONE",
            output_data={
                "transcription_id": str(transcription_id),
                "artifact_id": str(artifact_id),
            },
        ),
    )

    assert workspace is not None
    assert workspace.revision == 5
    assert workspace.phase == "TRANSCRIPTION"
    assert workspace.current_screen == "subtitle-revision"
    assert workspace.active_job is None
    assert workspace.transcription.transcription_id == transcription_id
    assert workspace.transcription.artifact_id == artifact_id
    assert workspace.transcription.reviewed is False

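test_apply_action_rejects_stale_revision in the file above pins down the optimistic-concurrency rule for workspace actions: the client echoes the revision it last read, and the service rejects the action when the stored row has already moved past it, rather than overwriting state the client never saw. A minimal standalone sketch of that rule, reusing only the exception name that appears in these tests; it illustrates the check and is not the service's actual code:

    class WorkspaceRevisionConflictError(Exception):
        """A client acted on an outdated workspace revision."""


    def check_revision(stored_revision: int, action_revision: int) -> None:
        # Reject the action when another writer already bumped the row;
        # the client has to re-read the workspace and retry.
        if action_revision != stored_revision:
            raise WorkspaceRevisionConflictError(
                f"expected revision {stored_revision}, got {action_revision}"
            )


    check_revision(stored_revision=2, action_revision=2)  # accepted
    # check_revision(stored_revision=2, action_revision=1) would raise,
    # matching the stale-revision test above.
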
@@ -0,0 +1,114 @@
from __future__ import annotations

from pathlib import Path
from types import SimpleNamespace

from cpv3.modules.transcription import service as transcription_service


class _FakeSSLContext:
    def __init__(self) -> None:
        self.loaded_cafile: str | None = None

    def load_verify_locations(self, *, cafile: str) -> None:
        self.loaded_cafile = cafile


def test_build_salute_ssl_context_uses_default_context(monkeypatch) -> None:
    fake_context = _FakeSSLContext()

    monkeypatch.setattr(
        transcription_service,
        "get_settings",
        lambda: SimpleNamespace(
            salute_ssl_verify=True,
            salute_ca_cert_path=None,
        ),
    )
    monkeypatch.setattr(
        transcription_service.ssl,
        "create_default_context",
        lambda: fake_context,
    )

    ssl_context = transcription_service._build_salute_ssl_context()

    assert ssl_context is fake_context
    assert fake_context.loaded_cafile is None


def test_build_salute_ssl_context_loads_custom_ca(monkeypatch) -> None:
    fake_context = _FakeSSLContext()
    custom_ca_path = Path("/tmp/salute-ca.pem")

    monkeypatch.setattr(
        transcription_service,
        "get_settings",
        lambda: SimpleNamespace(
            salute_ssl_verify=True,
            salute_ca_cert_path=custom_ca_path,
        ),
    )
    monkeypatch.setattr(
        transcription_service.ssl,
        "create_default_context",
        lambda: fake_context,
    )

    ssl_context = transcription_service._build_salute_ssl_context()

    assert ssl_context is fake_context
    assert fake_context.loaded_cafile == str(custom_ca_path)


def test_build_salute_ssl_context_disables_verification(monkeypatch) -> None:
    unverified_context = object()

    monkeypatch.setattr(
        transcription_service,
        "get_settings",
        lambda: SimpleNamespace(
            salute_ssl_verify=False,
            salute_ca_cert_path=None,
        ),
    )
    monkeypatch.setattr(
        transcription_service.ssl,
        "_create_unverified_context",
        lambda: unverified_context,
    )

    ssl_context = transcription_service._build_salute_ssl_context()

    assert ssl_context is unverified_context


def test_get_salute_auth_header_value_returns_basic_header(monkeypatch) -> None:
    monkeypatch.setattr(
        transcription_service,
        "get_settings",
        lambda: SimpleNamespace(
            salute_auth_key=" encoded-credentials ",
        ),
    )

    header_value = transcription_service._get_salute_auth_header_value()

    assert header_value == "Basic encoded-credentials"


def test_get_salute_auth_header_value_raises_when_missing(monkeypatch) -> None:
    monkeypatch.setattr(
        transcription_service,
        "get_settings",
        lambda: SimpleNamespace(
            salute_auth_key=" ",
        ),
    )

    try:
        transcription_service._get_salute_auth_header_value()
    except RuntimeError as exc:
        assert str(exc) == transcription_service.ERROR_SALUTE_AUTH_KEY_MISSING
    else:
        raise AssertionError("Expected RuntimeError for missing SaluteSpeech auth key")

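Taken together, these tests describe the contract for the two SaluteSpeech helpers: with verification enabled and no CA file configured, the default context is returned untouched; a configured CA path is loaded into that context via load_verify_locations; with verification disabled, an unverified context is returned; and the auth key is stripped of surrounding whitespace and prefixed with "Basic ", raising RuntimeError when blank. A self-contained sketch of that behaviour using only the standard-library ssl module; the function names, settings shape, and error wording here are illustrative, not the module's actual definitions:

    import ssl
    from types import SimpleNamespace


    def build_salute_ssl_context(settings) -> ssl.SSLContext:
        # settings mirrors what the tests stub: salute_ssl_verify plus an
        # optional salute_ca_cert_path.
        if not settings.salute_ssl_verify:
            return ssl._create_unverified_context()
        context = ssl.create_default_context()
        if settings.salute_ca_cert_path is not None:
            context.load_verify_locations(cafile=str(settings.salute_ca_cert_path))
        return context


    def salute_auth_header_value(auth_key: str) -> str:
        key = auth_key.strip()
        if not key:
            raise RuntimeError("SaluteSpeech auth key is not configured")  # wording illustrative
        return f"Basic {key}"


    context = build_salute_ssl_context(
        SimpleNamespace(salute_ssl_verify=True, salute_ca_cert_path=None)
    )
    assert salute_auth_header_value(" encoded-credentials ") == "Basic encoded-credentials"
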
@@ -12,97 +12,218 @@ from cpv3.modules.tasks.service import TaskService
@pytest.mark.asyncio
async def test_submit_captions_generate_reuses_existing_active_job() -> None:
    service = TaskService(session=AsyncMock())
    existing_job_id = uuid.uuid4()
    existing_job = SimpleNamespace(
        id=existing_job_id,
        status="RUNNING",
    )

    service._find_duplicate_active_job = AsyncMock(return_value=existing_job)
    service._submit_task = AsyncMock()

    response = await service.submit_captions_generate(
        requester=SimpleNamespace(id=uuid.uuid4()),
        request=CaptionsGenerateRequest(
            video_s3_path="projects/test/video.mp4",
            folder="output_files",
            transcription_id=uuid.uuid4(),
            project_id=uuid.uuid4(),
            preset_id=uuid.uuid4(),
        ),
    )

    assert response.job_id == existing_job_id
    assert response.status == "RUNNING"
    assert response.webhook_url.endswith(f"/api/tasks/webhook/{existing_job_id}/")
    service._submit_task.assert_not_awaited()


@pytest.mark.asyncio
async def test_record_webhook_event_ignores_cancelled_job() -> None:
    cancelled_job = SimpleNamespace(
        id=uuid.uuid4(),
        status="CANCELLED",
    )
    job_repo = SimpleNamespace(
        get_by_id=AsyncMock(return_value=cancelled_job),
        update=AsyncMock(),
    )
    event_repo = SimpleNamespace(create=AsyncMock())

    service = TaskService(session=AsyncMock())
    service._job_repo = job_repo
    service._event_repo = event_repo

    result = await service.record_webhook_event(
        job_id=cancelled_job.id,
        event=TaskWebhookEvent(
            status="DONE",
            current_message="Готово",
            output_data={"output_path": "projects/test/output.mp4"},
        ),
    )

    assert result is cancelled_job
    job_repo.update.assert_not_awaited()
    event_repo.create.assert_not_awaited()


@pytest.mark.asyncio
async def test_cancel_job_marks_job_cancelled_and_keeps_record() -> None:
    job_id = uuid.uuid4()
    user_id = uuid.uuid4()
    job = SimpleNamespace(
        id=job_id,
        status="PENDING",
        broker_id="default:redis-message-id",
        job_type="CAPTIONS_GENERATE",
        user_id=user_id,
    )
    cancelled_job = SimpleNamespace(
        id=job_id,
        status="CANCELLED",
        broker_id="default:redis-message-id",
        job_type="CAPTIONS_GENERATE",
        user_id=user_id,
        current_message="Отменено пользователем",
    )

    service = TaskService(session=AsyncMock())
    service._job_repo = SimpleNamespace(update=AsyncMock(return_value=cancelled_job))
    service._event_repo = SimpleNamespace(create=AsyncMock())
    service._cancel_dramatiq_message = AsyncMock()
    service._cancel_caption_render = AsyncMock()
    service._create_cancellation_notification = AsyncMock()
    service._sync_project_workspace_after_webhook = AsyncMock()

    result = await service.cancel_job(job)

    assert result is cancelled_job
    service._job_repo.update.assert_awaited_once()
    service._event_repo.create.assert_awaited_once()
    service._cancel_dramatiq_message.assert_awaited_once_with(job.broker_id)
    service._cancel_caption_render.assert_awaited_once_with(job)
    service._create_cancellation_notification.assert_awaited_once_with(cancelled_job)
    service._sync_project_workspace_after_webhook.assert_awaited_once_with(cancelled_job)

@pytest.mark.asyncio
async def test_record_webhook_event_updates_progress_for_conversion_job() -> None:
    job = SimpleNamespace(
        id=uuid.uuid4(),
        status="RUNNING",
        job_type="MEDIA_CONVERT",
        project_id=uuid.uuid4(),
        user_id=None,
    )
    updated_job = SimpleNamespace(**job.__dict__, project_pct=52.5)
    job_repo = SimpleNamespace(
        get_by_id=AsyncMock(return_value=job),
        update=AsyncMock(return_value=updated_job),
    )
    event_repo = SimpleNamespace(create=AsyncMock())

    service = TaskService(session=AsyncMock())
    service._job_repo = job_repo
    service._event_repo = event_repo
    service._sync_project_workspace_after_webhook = AsyncMock()

    result = await service.record_webhook_event(
        job_id=job.id,
        event=TaskWebhookEvent(
            progress_pct=52.5,
            current_message="Конвертация видео",
        ),
    )

    update_call = job_repo.update.await_args.args[1]
    event_call = event_repo.create.await_args.args[0]

    assert result is updated_job
    assert update_call.project_pct == 52.5
    assert update_call.current_message == "Конвертация видео"
    assert event_call.event_type == "progress"
    assert event_call.payload["progress_pct"] == 52.5


@pytest.mark.asyncio
async def test_record_webhook_event_syncs_workspace_for_completed_supported_job() -> None:
    job = SimpleNamespace(
        id=uuid.uuid4(),
        status="RUNNING",
        job_type="SILENCE_DETECT",
        user_id=None,
        project_id=uuid.uuid4(),
        output_data={"silent_segments": []},
    )
    updated_job = SimpleNamespace(**{**job.__dict__, "status": "DONE"})
    job_repo = SimpleNamespace(
        get_by_id=AsyncMock(return_value=job),
        update=AsyncMock(return_value=updated_job),
    )
    event_repo = SimpleNamespace(create=AsyncMock())

    service = TaskService(session=AsyncMock())
    service._job_repo = job_repo
    service._event_repo = event_repo
    service._sync_project_workspace_after_webhook = AsyncMock()

    result = await service.record_webhook_event(
        job_id=job.id,
        event=TaskWebhookEvent(
            status="DONE",
            current_message="Готово",
            output_data={"silent_segments": []},
        ),
    )

    assert result is updated_job
    service._sync_project_workspace_after_webhook.assert_awaited_once_with(updated_job)


@pytest.mark.asyncio
async def test_record_webhook_event_projects_workspace_after_done_job() -> None:
    job = SimpleNamespace(
        id=uuid.uuid4(),
        status="RUNNING",
        job_type="MEDIA_CONVERT",
        project_id=uuid.uuid4(),
        user_id=None,
        output_data={"file_path": "users/test/converted.mp4"},
    )
    updated_job = SimpleNamespace(
        **{
            **job.__dict__,
            "status": "DONE",
            "output_data": {
                "file_path": "users/test/converted.mp4",
                "file_id": "00000000-0000-4000-a000-000000000777",
            },
        }
    )
    job_repo = SimpleNamespace(
        get_by_id=AsyncMock(return_value=job),
        update=AsyncMock(return_value=updated_job),
    )
    event_repo = SimpleNamespace(create=AsyncMock())
    workspace_service = SimpleNamespace(handle_job_update=AsyncMock())

    service = TaskService(session=AsyncMock())
    service._job_repo = job_repo
    service._event_repo = event_repo
    service._save_convert_artifacts = AsyncMock(return_value=updated_job)
    service._get_project_workspace_service = lambda: workspace_service

    result = await service.record_webhook_event(
        job_id=job.id,
        event=TaskWebhookEvent(
            status="DONE",
            current_message="Готово",
            output_data={"file_path": "users/test/converted.mp4"},
        ),
    )

    assert result is updated_job
    service._save_convert_artifacts.assert_awaited_once_with(updated_job)
    workspace_service.handle_job_update.assert_awaited_once_with(job=updated_job)

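The webhook tests above fix the ordering inside record_webhook_event: events arriving for a cancelled job are dropped without touching the job row or the event log, progress events update the job and append a "progress" event, and a terminal DONE status additionally hands the refreshed job to the project-workspace sync. A tiny standalone sketch of that decision, written only from what the tests assert (the real method also saves conversion artifacts and creates notifications):

    def webhook_disposition(job_status: str, event_status: str | None) -> str:
        # Cancelled jobs swallow late webhooks entirely; DONE events trigger the
        # workspace sync on top of the normal update-and-record path.
        if job_status == "CANCELLED":
            return "ignore"
        if event_status == "DONE":
            return "update job, record event, sync workspace"
        return "update job, record event"


    assert webhook_disposition("CANCELLED", "DONE") == "ignore"
    assert webhook_disposition("RUNNING", None) == "update job, record event"
    assert webhook_disposition("RUNNING", "DONE") == "update job, record event, sync workspace"
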
@@ -119,6 +119,30 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "26.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bandit"
|
||||
version = "1.9.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "rich" },
|
||||
{ name = "stevedore" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/aa/c3/0cb80dfe0f3076e5da7e4c5ad8e57bac6ac357ff4a6406205501cade4965/bandit-1.9.4.tar.gz", hash = "sha256:b589e5de2afe70bd4d53fa0c1da6199f4085af666fde00e8a034f152a52cd628", size = 4242677, upload-time = "2026-02-25T06:44:15.503Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/05/a4/a26d5b25671d27e03afb5401a0be5899d94ff8fab6a698b1ac5be3ec29ef/bandit-1.9.4-py3-none-any.whl", hash = "sha256:f89ffa663767f5a0585ea075f01020207e966a9c0f2b9ef56a57c7963a3f6f8e", size = 134741, upload-time = "2026-02-25T06:44:13.694Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bcrypt"
|
||||
version = "3.2.2"
|
||||
@@ -140,6 +164,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/37/7cd297ff571c4d86371ff024c0e008b37b59e895b28f69444a9b6f94ca1a/bcrypt-3.2.2-cp36-abi3-win_amd64.whl", hash = "sha256:7ff2069240c6bbe49109fe84ca80508773a904f5a8cb960e02a977f7f519b129", size = 29581, upload-time = "2022-05-01T18:05:57.878Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "boolean-py"
|
||||
version = "5.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c4/cf/85379f13b76f3a69bca86b60237978af17d6aa0bc5998978c3b8cf05abb2/boolean_py-5.0.tar.gz", hash = "sha256:60cbc4bad079753721d32649545505362c754e121570ada4658b852a3a318d95", size = 37047, upload-time = "2025-04-03T10:39:49.734Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/ca/78d423b324b8d77900030fa59c4aa9054261ef0925631cd2501dd015b7b7/boolean_py-5.0-py3-none-any.whl", hash = "sha256:ef28a70bd43115208441b53a045d1549e2f0ec6e3d08a9d142cbc41c1938e8d9", size = 26577, upload-time = "2025-04-03T10:39:48.449Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "boto3"
|
||||
version = "1.42.27"
|
||||
@@ -168,6 +201,24 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/32/8a4a0447432425cd2f772c757d988742685f46796cf5d68aeaf6bcb6bc37/botocore-1.42.27-py3-none-any.whl", hash = "sha256:d51fb3b8dd1a944c8d238d2827a0dd6e5528d6da49a3bd9eccad019c533e4c9c", size = 14555236, upload-time = "2026-01-13T20:34:55.918Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cachecontrol"
|
||||
version = "0.14.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "msgpack" },
|
||||
{ name = "requests" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/2d/f6/c972b32d80760fb79d6b9eeb0b3010a46b89c0b23cf6329417ff7886cd22/cachecontrol-0.14.4.tar.gz", hash = "sha256:e6220afafa4c22a47dd0badb319f84475d79108100d04e26e8542ef7d3ab05a1", size = 16150, upload-time = "2025-11-14T04:32:13.138Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/79/c45f2d53efe6ada1110cf6f9fca095e4ff47a0454444aefdde6ac4789179/cachecontrol-0.14.4-py3-none-any.whl", hash = "sha256:b7ac014ff72ee199b5f8af1de29d60239954f223e948196fa3d84adaffc71d2b", size = 22247, upload-time = "2025-11-14T04:32:11.733Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
filecache = [
|
||||
{ name = "filelock" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2026.1.4"
|
||||
@@ -353,6 +404,7 @@ dependencies = [
|
||||
{ name = "dramatiq", extra = ["redis"] },
|
||||
{ name = "fastapi" },
|
||||
{ name = "google-cloud-speech" },
|
||||
{ name = "greenlet" },
|
||||
{ name = "httpx" },
|
||||
{ name = "openai-whisper" },
|
||||
{ name = "passlib", extra = ["bcrypt"] },
|
||||
@@ -365,6 +417,7 @@ dependencies = [
|
||||
{ name = "python-multipart" },
|
||||
{ name = "redis" },
|
||||
{ name = "sqlalchemy" },
|
||||
{ name = "tiktoken" },
|
||||
{ name = "uvicorn", extra = ["standard"] },
|
||||
]
|
||||
|
||||
@@ -376,6 +429,12 @@ dev = [
|
||||
{ name = "pytest-asyncio" },
|
||||
{ name = "ruff" },
|
||||
]
|
||||
tools = [
|
||||
{ name = "bandit" },
|
||||
{ name = "pip-audit" },
|
||||
{ name = "radon" },
|
||||
{ name = "schemathesis" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
@@ -386,6 +445,7 @@ requires-dist = [
|
||||
{ name = "dramatiq", extras = ["redis"], specifier = ">=1.17.0" },
|
||||
{ name = "fastapi", specifier = ">=0.115.0" },
|
||||
{ name = "google-cloud-speech", specifier = ">=2.34.0" },
|
||||
{ name = "greenlet", specifier = ">=3.3.0" },
|
||||
{ name = "httpx", specifier = ">=0.27.0" },
|
||||
{ name = "openai-whisper", specifier = ">=20250625" },
|
||||
{ name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" },
|
||||
@@ -398,6 +458,7 @@ requires-dist = [
|
||||
{ name = "python-multipart", specifier = ">=0.0.9" },
|
||||
{ name = "redis", specifier = ">=5.0.0" },
|
||||
{ name = "sqlalchemy", specifier = ">=2.0.30" },
|
||||
{ name = "tiktoken", specifier = ">=0.3.3" },
|
||||
{ name = "uvicorn", extras = ["standard"], specifier = ">=0.30.0" },
|
||||
]
|
||||
|
||||
@@ -409,6 +470,37 @@ dev = [
|
||||
{ name = "pytest-asyncio", specifier = ">=0.23.0" },
|
||||
{ name = "ruff", specifier = ">=0.6.0" },
|
||||
]
|
||||
tools = [
|
||||
{ name = "bandit" },
|
||||
{ name = "pip-audit" },
|
||||
{ name = "radon" },
|
||||
{ name = "schemathesis" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cyclonedx-python-lib"
|
||||
version = "11.7.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "license-expression" },
|
||||
{ name = "packageurl-python" },
|
||||
{ name = "py-serializable" },
|
||||
{ name = "sortedcontainers" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/21/0d/64f02d3fd9c116d6f50a540d04d1e4f2e3c487f5062d2db53733ddb25917/cyclonedx_python_lib-11.7.0.tar.gz", hash = "sha256:fb1bc3dedfa31208444dbd743007f478ab6984010a184e5bd466bffd969e936e", size = 1411174, upload-time = "2026-03-17T15:19:16.606Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/30/09/fe0e3bc32bd33707c519b102fc064ad2a2ce5a1b53e2be38b86936b476b1/cyclonedx_python_lib-11.7.0-py3-none-any.whl", hash = "sha256:02fa4f15ddbba21ac9093039f8137c0d1813af7fe88b760c5dcd3311a8da2178", size = 513041, upload-time = "2026-03-17T15:19:14.369Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "defusedxml"
|
||||
version = "0.7.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dramatiq"
|
||||
@@ -520,6 +612,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "graphql-core"
|
||||
version = "3.2.8"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/68/c5/36aa96205c3ecbb3d34c7c24189e4553c7ca2ebc7e1dd07432339b980272/graphql_core-3.2.8.tar.gz", hash = "sha256:015457da5d996c924ddf57a43f4e959b0b94fb695b85ed4c29446e508ed65cf3", size = 513181, upload-time = "2026-03-05T19:55:37.332Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/86/41/cb887d9afc5dabd78feefe6ccbaf83ff423c206a7a1b7aeeac05120b2125/graphql_core-3.2.8-py3-none-any.whl", hash = "sha256:cbee07bee1b3ed5e531723685369039f32ff815ef60166686e0162f540f1520c", size = 207349, upload-time = "2026-03-05T19:55:35.911Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "greenlet"
|
||||
version = "3.3.0"
|
||||
@@ -529,6 +630,7 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/cb/48e964c452ca2b92175a9b2dca037a553036cb053ba69e284650ce755f13/greenlet-3.3.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e29f3018580e8412d6aaf5641bb7745d38c85228dacf51a73bd4e26ddf2a6a8e", size = 274908, upload-time = "2025-12-04T14:23:26.435Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/da/38d7bff4d0277b594ec557f479d65272a893f1f2a716cad91efeb8680953/greenlet-3.3.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a687205fb22794e838f947e2194c0566d3812966b41c78709554aa883183fb62", size = 577113, upload-time = "2025-12-04T14:50:05.493Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/f2/89c5eb0faddc3ff014f1c04467d67dee0d1d334ab81fadbf3744847f8a8a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4243050a88ba61842186cb9e63c7dfa677ec146160b0efd73b855a3d9c7fcf32", size = 590338, upload-time = "2025-12-04T14:57:41.136Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/d7/db0a5085035d05134f8c089643da2b44cc9b80647c39e93129c5ef170d8f/greenlet-3.3.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:670d0f94cd302d81796e37299bcd04b95d62403883b24225c6b5271466612f45", size = 601098, upload-time = "2025-12-04T15:07:11.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/a6/e959a127b630a58e23529972dbc868c107f9d583b5a9f878fb858c46bc1a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cb3a8ec3db4a3b0eb8a3c25436c2d49e3505821802074969db017b87bc6a948", size = 590206, upload-time = "2025-12-04T14:26:01.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/60/29035719feb91798693023608447283b266b12efc576ed013dd9442364bb/greenlet-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2de5a0b09eab81fc6a382791b995b1ccf2b172a9fec934747a7a23d2ff291794", size = 1550668, upload-time = "2025-12-04T15:04:22.439Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/5f/783a23754b691bfa86bd72c3033aa107490deac9b2ef190837b860996c9f/greenlet-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4449a736606bd30f27f8e1ff4678ee193bc47f6ca810d705981cfffd6ce0d8c5", size = 1615483, upload-time = "2025-12-04T14:27:28.083Z" },
|
||||
@@ -536,6 +638,7 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297, upload-time = "2025-12-04T15:07:13.552Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" },
|
||||
@@ -543,6 +646,7 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211, upload-time = "2025-12-04T14:57:43.968Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/07/c47a82d881319ec18a4510bb30463ed6891f2ad2c1901ed5ec23d3de351f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492", size = 624311, upload-time = "2025-12-04T15:07:14.697Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" },
|
||||
@@ -550,6 +654,7 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/79/d2c70cae6e823fac36c3bbc9077962105052b7ef81db2f01ec3b9bf17e2b/greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45", size = 671388, upload-time = "2025-12-04T15:07:15.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" },
|
||||
@@ -557,6 +662,7 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/cc/1e4bae2e45ca2fa55299f4e85854606a78ecc37fead20d69322f96000504/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221", size = 662506, upload-time = "2025-12-04T15:07:16.906Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783, upload-time = "2025-12-04T14:26:06.225Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857, upload-time = "2025-12-04T15:04:28.484Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" },
|
||||
@@ -636,6 +742,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "harfile"
|
||||
version = "0.4.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/88/56/06ebfce8ee11b906db9984d7442edfb05e8eb495ed2f553857c1c793dbd5/harfile-0.4.0.tar.gz", hash = "sha256:34e2d9ef34101d769566bffab3c420e147776174308bed1a036ed8db600cabde", size = 10055, upload-time = "2025-09-24T09:12:42.202Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/97/b7/aff025c4b69bd611f1594b22e4793ee0ac68600d12c687d09f665c40f88e/harfile-0.4.0-py3-none-any.whl", hash = "sha256:ddb1483cb30f7549ddc67c0b7fdc6424f1feb19373b67e33e429b02f09bf43a8", size = 6935, upload-time = "2025-09-24T09:12:40.886Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpcore"
|
||||
version = "1.0.9"
|
||||
@@ -700,6 +815,44 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hypothesis"
|
||||
version = "6.151.9"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "sortedcontainers" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/19/e1/ef365ff480903b929d28e057f57b76cae51a30375943e33374ec9a165d9c/hypothesis-6.151.9.tar.gz", hash = "sha256:2f284428dda6c3c48c580de0e18470ff9c7f5ef628a647ee8002f38c3f9097ca", size = 463534, upload-time = "2026-02-16T22:59:23.09Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/f7/5cc291d701094754a1d327b44d80a44971e13962881d9a400235726171da/hypothesis-6.151.9-py3-none-any.whl", hash = "sha256:7b7220585c67759b1b1ef839b1e6e9e3d82ed468cfc1ece43c67184848d7edd9", size = 529307, upload-time = "2026-02-16T22:59:20.443Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hypothesis-graphql"
|
||||
version = "0.12.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "graphql-core" },
|
||||
{ name = "hypothesis" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/47/d7/aa6d3cacb0fa7ae02fe7810c05dad025ce2fef88c817d959a862aab3ed4a/hypothesis_graphql-0.12.0.tar.gz", hash = "sha256:15f5f69b6e0b9ad889f59d340e091d7d481471373eb6a8a8591d126aa56e7700", size = 747809, upload-time = "2026-02-04T21:32:05.296Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/92/9c/e6baef1c1188d2d12dcd2b344a166cbe5b220db215c6177bedcf0fa8cac7/hypothesis_graphql-0.12.0-py3-none-any.whl", hash = "sha256:d200d3d4320e772248075f13c656f4b1de01e7f0f5e7d9fd6fea7da759b325f3", size = 20320, upload-time = "2026-02-04T21:32:03.398Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hypothesis-jsonschema"
|
||||
version = "0.23.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "hypothesis" },
|
||||
{ name = "jsonschema" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4f/ad/2073dd29d8463a92c243d0c298370e50e0d4082bc67f156dc613634d0ec4/hypothesis-jsonschema-0.23.1.tar.gz", hash = "sha256:f4ac032024342a4149a10253984f5a5736b82b3fe2afb0888f3834a31153f215", size = 42896, upload-time = "2024-02-28T20:33:50.209Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/17/44/635a8d2add845c9a2d99a93a379df77f7e70829f0a1d7d5a6998b61f9d01/hypothesis_jsonschema-0.23.1-py3-none-any.whl", hash = "sha256:a4d74d9516dd2784fbbae82e009f62486c9104ac6f4e3397091d98a1d5ee94a2", size = 29200, upload-time = "2024-02-28T20:33:48.744Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.11"
|
||||
@@ -739,6 +892,80 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema"
|
||||
version = "4.26.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "attrs" },
|
||||
{ name = "jsonschema-specifications" },
|
||||
{ name = "referencing" },
|
||||
{ name = "rpds-py" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema-rs"
|
||||
version = "0.45.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d2/7b/40ed0aa40ff2f3bc9bfccf20ea29d9a99199a8eefda26ae9d65601c144fa/jsonschema_rs-0.45.0.tar.gz", hash = "sha256:897deffee817fe0f493710221e19bc4d9fedabdba121d9f8e0aa824460d2498d", size = 1984183, upload-time = "2026-03-08T20:27:16.907Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/5b/36ee0e9dead2a17162343d666c73de1d7aaefbd264124137453d85bacab0/jsonschema_rs-0.45.0-cp310-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:74f81e45ff0ce0354cb717092407faaf275bedb5564c3c32b556cb9e06df6755", size = 7384904, upload-time = "2026-03-08T20:26:33.97Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/58/6c44f32751fe4b6a4f80aeaf5744209a6dba6d7253ff876cf08be1a14f75/jsonschema_rs-0.45.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:a8f47574214aee3bab3cf7457819d1aead6f27673602ea533a9bb95f432e4ef3", size = 3837625, upload-time = "2026-03-08T20:26:36.068Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/09/f19cb90eb3b961eff3a338e15473e649d7201ffca1d8a74b8b72c1f247b7/jsonschema_rs-0.45.0-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9cf9dd9853ce52dc2d0aa94f7e672bcaf62d170f4d0754bd29416b74b0573fd", size = 3585920, upload-time = "2026-03-08T20:26:37.666Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/98/5d152fba7206d714eae5dc5d7bc4b9948d870bbbd84e62aeb50da5ab4e22/jsonschema_rs-0.45.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad438babc0867b80a042a4a5126fc6798ab559db974f080590a7ddef255a8c4a", size = 3935976, upload-time = "2026-03-08T20:26:39.023Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/eb/9daf5a29c5092537a176e2211ccef357d88a07ba75212ec226384e3382b9/jsonschema_rs-0.45.0-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c7fb155f0ca377b16e0e67668a2285f135448f0ddccbcad7d81b147604bc6309", size = 3597750, upload-time = "2026-03-08T20:26:40.682Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/94/0f00bc81769bfcc0b4f284508f72a41818ecc8d8a6a266df0ce3041df306/jsonschema_rs-0.45.0-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0c0dd401f2853cd8696fb1d1a54db2e3ddae0fd7e7a702e5318f6442bae37151", size = 3791976, upload-time = "2026-03-08T20:26:42.372Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/ce/f186a2101aaac0003b70fed37c3da7b637e894d166e645d3d4abfe28cfbb/jsonschema_rs-0.45.0-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:35da28a23adcbdb96ce52817e14f4f628cd4c60d6b062af95fee970a13262406", size = 4160234, upload-time = "2026-03-08T20:26:44.805Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/fa/4c535db47b2e766b8e08059d949139a278bc9610be1416e04a9d35cc0ba4/jsonschema_rs-0.45.0-cp310-abi3-win32.whl", hash = "sha256:304bebe2750c558c1d77c8c47030ee7161cb8dd9d5943494511977c37e37d999", size = 3204196, upload-time = "2026-03-08T20:26:46.157Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/84/cd9e3ba627a2f91e350c51e04d18382da35f77a5bfbac204d0eb0dc371e8/jsonschema_rs-0.45.0-cp310-abi3-win_amd64.whl", hash = "sha256:76144a8d83e159480079728b1e61a5376f35954cb271d45c9ba36f6f1292acd8", size = 3754631, upload-time = "2026-03-08T20:26:47.723Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/31/cb7451c916fe56e950b2028443d331fe8dd1cf0f1630bc77a862b82735a2/jsonschema_rs-0.45.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d0d03159c9fcdfed67b63ac9653402a23199716688a4cb6287d95223bc88cab1", size = 3827150, upload-time = "2026-03-08T20:26:49.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/40/31d602081082f3db92e0be0c9969f1362159c6b72e9f292c8781b8f30e16/jsonschema_rs-0.45.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b1f6cf46599e77a92db9335ab8cb7bb04de64ef47d90ea32e3293a0da114d76d", size = 3580038, upload-time = "2026-03-08T20:26:50.724Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/b7/d67c9f759a72b55a45750c21c26ebd1eecaac1b250fea497b57271d8531c/jsonschema_rs-0.45.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3833af7cee66947ce43587cf364c2297412fd755c87b9a84539c148cbd499ac", size = 3929132, upload-time = "2026-03-08T20:26:52.348Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/02/9d64ea087dc78debdcd2c7cf42f23a6dfb2657c840e7b58342a12ca359b4/jsonschema_rs-0.45.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:be778354a4065b221e92af40fd930de50fd38e820c93fbd4d3fcb5dcdb5df421", size = 3591851, upload-time = "2026-03-08T20:26:54.339Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/61/14fad5456f149c5fa1a1e20413f7bbbe7409fdf4cc1f5d8e891ac24802bf/jsonschema_rs-0.45.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:09d938187c22c440b1a1bb0820ba577ee11a7555b0d1a494e7bcbb7884eb2117", size = 3786803, upload-time = "2026-03-08T20:26:55.969Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/c9/0cb4b5683a47cee7e216110de3942437afd7e34ac78d53235cd1358e7fe1/jsonschema_rs-0.45.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:caf5c88d0be4b78bdbf5ce62201822abf085f27ae4eb4b719a97f70364c215ce", size = 4153160, upload-time = "2026-03-08T20:26:57.338Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/4f/d4a13529fd127a51067d910eb03016c1c5a0b2da39f4c6445f6b70bd1e94/jsonschema_rs-0.45.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5dd9e3ab439d7bbad574a97459d5f2acc0bd8cf4e7b212c40851becabdb73aa5", size = 3745439, upload-time = "2026-03-08T20:26:58.997Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/df/d9942538a5a8eb4d1a9df2e476ed577589c2b9036c5182fafeda0e606378/jsonschema_rs-0.45.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:df8845f61f70ecdb4fe55469062d7bdcd327e8e23ca8dc07a79704ea14ab92df", size = 3824981, upload-time = "2026-03-08T20:27:00.397Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/cb/b538b4c954ce1c7f507d655882256a5aae4439b34802c497d718960dffd0/jsonschema_rs-0.45.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:dcb38db7b839e013c0667f78a724078aaaa22ebdc66badbdc0c1e1c327fc3120", size = 3577402, upload-time = "2026-03-08T20:27:02.128Z" },
{ url = "https://files.pythonhosted.org/packages/0d/93/9cc9ece5f7263255b1865b15c873d6743afc0556362b0db4ca5c73af3295/jsonschema_rs-0.45.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60f2257b12a6af6e2475ce036dd05cac48caa282329359f587fc84340108c572", size = 3927594, upload-time = "2026-03-08T20:27:03.434Z" },
{ url = "https://files.pythonhosted.org/packages/cb/33/6cbfc1128da7d7aa30513c22e6bdd9eb103f34d19a1ee2a9d3e0ebdf5cd4/jsonschema_rs-0.45.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3cb0b522167291d6c96df67d2b1b20f173cfb1a0ba0d6540be78dda5e2d65f81", size = 3590774, upload-time = "2026-03-08T20:27:04.71Z" },
{ url = "https://files.pythonhosted.org/packages/64/79/c389778a38ecb35836d566559582b298853d37baffb2d84271bd9c5e43a0/jsonschema_rs-0.45.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c1bd20b65ae142e52c8e54a3c9386f4049ab435ae8b7a05e9096f0ad90c2ea0f", size = 3785723, upload-time = "2026-03-08T20:27:05.985Z" },
{ url = "https://files.pythonhosted.org/packages/50/6c/e88b24beeb1050a7c171eedd2964f3c8299c5d711be2dee81eaa5b7a70c7/jsonschema_rs-0.45.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32dffd5bffd2a2f6b118d848017bb53b2fbc9e70cbbe6ef5af9dceaed5c9010e", size = 4151676, upload-time = "2026-03-08T20:27:07.422Z" },
{ url = "https://files.pythonhosted.org/packages/e0/e2/2288a685a502de79fe130e8db9e6ae3d304e02eb893f93e2414691965fe8/jsonschema_rs-0.45.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92ce7ae2b562154a1df524a268552439125c346c1190e5d3978370b941181422", size = 3743734, upload-time = "2026-03-08T20:27:08.715Z" },
{ url = "https://files.pythonhosted.org/packages/3a/c3/2e9d2e287ed67ee2e94cf37c6807aa2d803ee60e4437db1a7ef588579b1e/jsonschema_rs-0.45.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:222561b2df0912c021e0b0148e96b39e135ed63ce5526eedf2d15b6e67b43183", size = 3834545, upload-time = "2026-03-08T20:27:10.489Z" },
{ url = "https://files.pythonhosted.org/packages/00/ec/2700770ca22125a77d74d64692e65518813a4771c1ff64f3f88e9a44ca2a/jsonschema_rs-0.45.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a53ec771a15f18e5946ad63ad1daefd720de73b9c7dcf1ffe75eebe6b77d2be", size = 3932850, upload-time = "2026-03-08T20:27:11.88Z" },
{ url = "https://files.pythonhosted.org/packages/cd/07/42c31267a2ffdb8eae5fdc800937bdf4af5844876710fe51bad52053305b/jsonschema_rs-0.45.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:84424e519ec4cb6c0f6cfd83b07a8932742b63319ae0d9d948f548dbb0659ae7", size = 3595779, upload-time = "2026-03-08T20:27:13.555Z" },
{ url = "https://files.pythonhosted.org/packages/ae/68/c25d699d7bc087e735acc80ad192a2f4c05927f6e5165872596aad784813/jsonschema_rs-0.45.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:0c2da9d3134d0f5ad13ced36aac2a692da181aeb181d729c341388a487003815", size = 3751135, upload-time = "2026-03-08T20:27:15.162Z" },
]

[[package]]
name = "jsonschema-specifications"
version = "2025.9.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "referencing" },
]
sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
]

[[package]]
name = "junit-xml"
version = "1.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/98/af/bc988c914dd1ea2bc7540ecc6a0265c2b6faccc6d9cdb82f20e2094a8229/junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f", size = 7349, upload-time = "2023-01-24T18:42:00.836Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2a/93/2d896b5fd3d79b4cadd8882c06650e66d003f465c9d12c488d92853dff78/junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732", size = 7130, upload-time = "2020-02-22T20:41:37.661Z" },
]

[[package]]
name = "librt"
version = "0.7.8"
@@ -802,6 +1029,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/85/69f92b2a7b3c0f88ffe107c86b952b397004b5b8ea5a81da3d9c04c04422/librt-0.7.8-cp314-cp314t-win_arm64.whl", hash = "sha256:8766ece9de08527deabcd7cb1b4f1a967a385d26e33e536d6d8913db6ef74f06", size = 40550, upload-time = "2026-01-14T12:56:01.542Z" },
]

[[package]]
name = "license-expression"
version = "30.4.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "boolean-py" },
]
sdist = { url = "https://files.pythonhosted.org/packages/40/71/d89bb0e71b1415453980fd32315f2a037aad9f7f70f695c7cec7035feb13/license_expression-30.4.4.tar.gz", hash = "sha256:73448f0aacd8d0808895bdc4b2c8e01a8d67646e4188f887375398c761f340fd", size = 186402, upload-time = "2025-07-22T11:13:32.17Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/af/40/791891d4c0c4dab4c5e187c17261cedc26285fd41541577f900470a45a4d/license_expression-30.4.4-py3-none-any.whl", hash = "sha256:421788fdcadb41f049d2dc934ce666626265aeccefddd25e162a26f23bcbf8a4", size = 120615, upload-time = "2025-07-22T11:13:31.217Z" },
]

[[package]]
name = "llvmlite"
version = "0.46.0"
@@ -838,6 +1077,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" },
]

[[package]]
name = "mando"
version = "0.7.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/35/24/cd70d5ae6d35962be752feccb7dca80b5e0c2d450e995b16abd6275f3296/mando-0.7.1.tar.gz", hash = "sha256:18baa999b4b613faefb00eac4efadcf14f510b59b924b66e08289aa1de8c3500", size = 37868, upload-time = "2022-02-24T08:12:27.316Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/f0/834e479e47e499b6478e807fb57b31cc2db696c4db30557bb6f5aea4a90b/mando-0.7.1-py2.py3-none-any.whl", hash = "sha256:26ef1d70928b6057ee3ca12583d73c63e05c49de8972d620c278a7b206581a8a", size = 28149, upload-time = "2022-02-24T08:12:25.24Z" },
]

[[package]]
name = "markdown-it-py"
version = "4.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mdurl" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
]

[[package]]
name = "markupsafe"
version = "3.0.3"
@@ -912,6 +1175,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" },
]

[[package]]
name = "mdurl"
version = "0.1.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]

[[package]]
name = "more-itertools"
version = "10.8.0"
@@ -930,6 +1202,59 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" },
]

[[package]]
name = "msgpack"
version = "1.1.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c", size = 82271, upload-time = "2025-10-08T09:14:49.967Z" },
{ url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0", size = 84914, upload-time = "2025-10-08T09:14:50.958Z" },
{ url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", size = 416962, upload-time = "2025-10-08T09:14:51.997Z" },
{ url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", size = 426183, upload-time = "2025-10-08T09:14:53.477Z" },
{ url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", size = 411454, upload-time = "2025-10-08T09:14:54.648Z" },
{ url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", size = 422341, upload-time = "2025-10-08T09:14:56.328Z" },
{ url = "https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e", size = 64747, upload-time = "2025-10-08T09:14:57.882Z" },
{ url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68", size = 71633, upload-time = "2025-10-08T09:14:59.177Z" },
{ url = "https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406", size = 64755, upload-time = "2025-10-08T09:15:00.48Z" },
{ url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" },
{ url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" },
{ url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" },
{ url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" },
{ url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" },
{ url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" },
{ url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" },
{ url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" },
{ url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" },
{ url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" },
{ url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" },
{ url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" },
{ url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" },
{ url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" },
{ url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" },
{ url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" },
{ url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" },
{ url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" },
{ url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" },
{ url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" },
{ url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" },
{ url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" },
{ url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" },
{ url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" },
{ url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" },
{ url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" },
{ url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" },
{ url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" },
{ url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" },
{ url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" },
{ url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" },
{ url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" },
{ url = "https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" },
{ url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" },
{ url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" },
{ url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" },
]

[[package]]
name = "mypy"
version = "1.19.1"
@@ -1245,6 +1570,15 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/35/8e/d36f8880bcf18ec026a55807d02fe4c7357da9f25aebd92f85178000c0dc/openai_whisper-20250625.tar.gz", hash = "sha256:37a91a3921809d9f44748ffc73c0a55c9f366c85a3ef5c2ae0cc09540432eb96", size = 803191, upload-time = "2025-06-26T01:06:13.34Z" }

[[package]]
name = "packageurl-python"
version = "0.17.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f5/d6/3b5a4e3cfaef7a53869a26ceb034d1ff5e5c27c814ce77260a96d50ab7bb/packageurl_python-0.17.6.tar.gz", hash = "sha256:1252ce3a102372ca6f86eb968e16f9014c4ba511c5c37d95a7f023e2ca6e5c25", size = 50618, upload-time = "2025-11-24T15:20:17.998Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b1/2f/c7277b7615a93f51b5fbc1eacfc1b75e8103370e786fd8ce2abf6e5c04ab/packageurl_python-0.17.6-py3-none-any.whl", hash = "sha256:31a85c2717bc41dd818f3c62908685ff9eebcb68588213745b14a6ee9e7df7c9", size = 36776, upload-time = "2025-11-24T15:20:16.962Z" },
]

[[package]]
name = "packaging"
version = "26.0"
@@ -1277,6 +1611,70 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" },
]

[[package]]
name = "pip"
version = "26.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/48/83/0d7d4e9efe3344b8e2fe25d93be44f64b65364d3c8d7bc6dc90198d5422e/pip-26.0.1.tar.gz", hash = "sha256:c4037d8a277c89b320abe636d59f91e6d0922d08a05b60e85e53b296613346d8", size = 1812747, upload-time = "2026-02-05T02:20:18.702Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/de/f0/c81e05b613866b76d2d1066490adf1a3dbc4ee9d9c839961c3fc8a6997af/pip-26.0.1-py3-none-any.whl", hash = "sha256:bdb1b08f4274833d62c1aa29e20907365a2ceb950410df15fc9521bad440122b", size = 1787723, upload-time = "2026-02-05T02:20:16.416Z" },
]

[[package]]
name = "pip-api"
version = "0.0.34"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pip" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b9/f1/ee85f8c7e82bccf90a3c7aad22863cc6e20057860a1361083cd2adacb92e/pip_api-0.0.34.tar.gz", hash = "sha256:9b75e958f14c5a2614bae415f2adf7eeb54d50a2cfbe7e24fd4826471bac3625", size = 123017, upload-time = "2024-07-09T20:32:30.641Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/91/f7/ebf5003e1065fd00b4cbef53bf0a65c3d3e1b599b676d5383ccb7a8b88ba/pip_api-0.0.34-py3-none-any.whl", hash = "sha256:8b2d7d7c37f2447373aa2cf8b1f60a2f2b27a84e1e9e0294a3f6ef10eb3ba6bb", size = 120369, upload-time = "2024-07-09T20:32:29.099Z" },
]

[[package]]
name = "pip-audit"
version = "2.10.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cachecontrol", extra = ["filecache"] },
{ name = "cyclonedx-python-lib" },
{ name = "packaging" },
{ name = "pip-api" },
{ name = "pip-requirements-parser" },
{ name = "platformdirs" },
{ name = "requests" },
{ name = "rich" },
{ name = "tomli" },
{ name = "tomli-w" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bd/89/0e999b413facab81c33d118f3ac3739fd02c0622ccf7c4e82e37cebd8447/pip_audit-2.10.0.tar.gz", hash = "sha256:427ea5bf61d1d06b98b1ae29b7feacc00288a2eced52c9c58ceed5253ef6c2a4", size = 53776, upload-time = "2025-12-01T23:42:40.612Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/be/f3/4888f895c02afa085630a3a3329d1b18b998874642ad4c530e9a4d7851fe/pip_audit-2.10.0-py3-none-any.whl", hash = "sha256:16e02093872fac97580303f0848fa3ad64f7ecf600736ea7835a2b24de49613f", size = 61518, upload-time = "2025-12-01T23:42:39.193Z" },
]

[[package]]
name = "pip-requirements-parser"
version = "32.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "packaging" },
{ name = "pyparsing" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5e/2a/63b574101850e7f7b306ddbdb02cb294380d37948140eecd468fae392b54/pip-requirements-parser-32.0.1.tar.gz", hash = "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3", size = 209359, upload-time = "2022-12-21T15:25:22.732Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/d0/d04f1d1e064ac901439699ee097f58688caadea42498ec9c4b4ad2ef84ab/pip_requirements_parser-32.0.1-py3-none-any.whl", hash = "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526", size = 35648, upload-time = "2022-12-21T15:25:21.046Z" },
]

[[package]]
name = "platformdirs"
version = "4.9.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/19/56/8d4c30c8a1d07013911a8fdbd8f89440ef9f08d07a1b50ab8ca8be5a20f9/platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934", size = 28737, upload-time = "2026-03-05T18:34:13.271Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" },
]

[[package]]
name = "pluggy"
version = "1.6.0"
@@ -1365,6 +1763,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/36/9c0c326fe3a4227953dfb29f5d0c8ae3b8eb8c1cd2967aa569f50cb3c61f/psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316", size = 2803913, upload-time = "2025-10-10T11:13:57.058Z" },
]

[[package]]
name = "py-serializable"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "defusedxml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/73/21/d250cfca8ff30c2e5a7447bc13861541126ce9bd4426cd5d0c9f08b5547d/py_serializable-2.1.0.tar.gz", hash = "sha256:9d5db56154a867a9b897c0163b33a793c804c80cee984116d02d49e4578fc103", size = 52368, upload-time = "2025-07-21T09:56:48.07Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9b/bf/7595e817906a29453ba4d99394e781b6fabe55d21f3c15d240f85dd06bb1/py_serializable-2.1.0-py3-none-any.whl", hash = "sha256:b56d5d686b5a03ba4f4db5e769dc32336e142fc3bd4d68a8c25579ebb0a67304", size = 23045, upload-time = "2025-07-21T09:56:46.848Z" },
]

[[package]]
name = "pyasn1"
version = "0.6.1"
@@ -1548,6 +1958,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" },
]

[[package]]
name = "pyparsing"
version = "3.3.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
]

[[package]]
name = "pyrate-limiter"
version = "4.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/03/98/2b3dc1ba6bdf2efaeaa3e102124cbd2636a4ccec241ffeb8a1de207f5cd4/pyrate_limiter-4.0.2.tar.gz", hash = "sha256:b678841e2215f114ef6f98c7093755ca3b466de83cb5a881231fd6e321fa14b5", size = 301304, upload-time = "2026-01-23T09:37:33.612Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/b9/80ffe3f2c34d3247186d74b1d08c1fed1e3ad4127ff6a8a5501b7bf16a97/pyrate_limiter-4.0.2-py3-none-any.whl", hash = "sha256:35ec42b9bb9cfabcafab14d0c5c6523f48378c3da2949e534ce3cbdfea71eadd", size = 36439, upload-time = "2026-01-23T09:37:32.097Z" },
]

[[package]]
name = "pytest"
version = "9.0.2"
@@ -1662,6 +2090,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
]

[[package]]
name = "radon"
version = "6.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama" },
{ name = "mando" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/6d/98e61600febf6bd929cf04154537c39dc577ce414bafbfc24a286c4fa76d/radon-6.0.1.tar.gz", hash = "sha256:d1ac0053943a893878940fedc8b19ace70386fc9c9bf0a09229a44125ebf45b5", size = 1874992, upload-time = "2023-03-26T06:24:38.868Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/93/f7/d00d9b4a0313a6be3a3e0818e6375e15da6d7076f4ae47d1324e7ca986a1/radon-6.0.1-py2.py3-none-any.whl", hash = "sha256:632cc032364a6f8bb1010a2f6a12d0f14bc7e5ede76585ef29dc0cecf4cd8859", size = 52784, upload-time = "2023-03-26T06:24:33.949Z" },
]

[[package]]
name = "redis"
version = "6.4.0"
@@ -1674,6 +2115,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" },
]

[[package]]
name = "referencing"
version = "0.37.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "attrs" },
{ name = "rpds-py" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" },
]

[[package]]
name = "regex"
version = "2025.11.3"
@@ -1781,6 +2236,127 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]

[[package]]
name = "rich"
version = "14.3.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markdown-it-py" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" },
]

[[package]]
name = "rpds-py"
version = "0.30.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4d/6e/f964e88b3d2abee2a82c1ac8366da848fce1c6d834dc2132c3fda3970290/rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425", size = 370157, upload-time = "2025-11-30T20:21:53.789Z" },
{ url = "https://files.pythonhosted.org/packages/94/ba/24e5ebb7c1c82e74c4e4f33b2112a5573ddc703915b13a073737b59b86e0/rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d", size = 359676, upload-time = "2025-11-30T20:21:55.475Z" },
{ url = "https://files.pythonhosted.org/packages/84/86/04dbba1b087227747d64d80c3b74df946b986c57af0a9f0c98726d4d7a3b/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4", size = 389938, upload-time = "2025-11-30T20:21:57.079Z" },
{ url = "https://files.pythonhosted.org/packages/42/bb/1463f0b1722b7f45431bdd468301991d1328b16cffe0b1c2918eba2c4eee/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f", size = 402932, upload-time = "2025-11-30T20:21:58.47Z" },
{ url = "https://files.pythonhosted.org/packages/99/ee/2520700a5c1f2d76631f948b0736cdf9b0acb25abd0ca8e889b5c62ac2e3/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4", size = 525830, upload-time = "2025-11-30T20:21:59.699Z" },
{ url = "https://files.pythonhosted.org/packages/e0/ad/bd0331f740f5705cc555a5e17fdf334671262160270962e69a2bdef3bf76/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97", size = 412033, upload-time = "2025-11-30T20:22:00.991Z" },
{ url = "https://files.pythonhosted.org/packages/f8/1e/372195d326549bb51f0ba0f2ecb9874579906b97e08880e7a65c3bef1a99/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89", size = 390828, upload-time = "2025-11-30T20:22:02.723Z" },
{ url = "https://files.pythonhosted.org/packages/ab/2b/d88bb33294e3e0c76bc8f351a3721212713629ffca1700fa94979cb3eae8/rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d", size = 404683, upload-time = "2025-11-30T20:22:04.367Z" },
{ url = "https://files.pythonhosted.org/packages/50/32/c759a8d42bcb5289c1fac697cd92f6fe01a018dd937e62ae77e0e7f15702/rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038", size = 421583, upload-time = "2025-11-30T20:22:05.814Z" },
{ url = "https://files.pythonhosted.org/packages/2b/81/e729761dbd55ddf5d84ec4ff1f47857f4374b0f19bdabfcf929164da3e24/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7", size = 572496, upload-time = "2025-11-30T20:22:07.713Z" },
{ url = "https://files.pythonhosted.org/packages/14/f6/69066a924c3557c9c30baa6ec3a0aa07526305684c6f86c696b08860726c/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed", size = 598669, upload-time = "2025-11-30T20:22:09.312Z" },
{ url = "https://files.pythonhosted.org/packages/5f/48/905896b1eb8a05630d20333d1d8ffd162394127b74ce0b0784ae04498d32/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85", size = 561011, upload-time = "2025-11-30T20:22:11.309Z" },
{ url = "https://files.pythonhosted.org/packages/22/16/cd3027c7e279d22e5eb431dd3c0fbc677bed58797fe7581e148f3f68818b/rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c", size = 221406, upload-time = "2025-11-30T20:22:13.101Z" },
{ url = "https://files.pythonhosted.org/packages/fa/5b/e7b7aa136f28462b344e652ee010d4de26ee9fd16f1bfd5811f5153ccf89/rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825", size = 236024, upload-time = "2025-11-30T20:22:14.853Z" },
{ url = "https://files.pythonhosted.org/packages/14/a6/364bba985e4c13658edb156640608f2c9e1d3ea3c81b27aa9d889fff0e31/rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229", size = 229069, upload-time = "2025-11-30T20:22:16.577Z" },
{ url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, upload-time = "2025-11-30T20:22:17.93Z" },
{ url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" },
{ url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" },
{ url = "https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951, upload-time = "2025-11-30T20:22:23.408Z" },
{ url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622, upload-time = "2025-11-30T20:22:25.16Z" },
{ url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492, upload-time = "2025-11-30T20:22:26.505Z" },
{ url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080, upload-time = "2025-11-30T20:22:27.934Z" },
{ url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680, upload-time = "2025-11-30T20:22:29.341Z" },
{ url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589, upload-time = "2025-11-30T20:22:31.469Z" },
{ url = "https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289, upload-time = "2025-11-30T20:22:32.997Z" },
{ url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737, upload-time = "2025-11-30T20:22:34.419Z" },
{ url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120, upload-time = "2025-11-30T20:22:35.903Z" },
{ url = "https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782, upload-time = "2025-11-30T20:22:37.271Z" },
{ url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463, upload-time = "2025-11-30T20:22:39.021Z" },
{ url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868, upload-time = "2025-11-30T20:22:40.493Z" },
{ url = "https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887, upload-time = "2025-11-30T20:22:41.812Z" },
{ url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904, upload-time = "2025-11-30T20:22:43.479Z" },
{ url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945, upload-time = "2025-11-30T20:22:44.819Z" },
{ url = "https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783, upload-time = "2025-11-30T20:22:46.103Z" },
{ url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021, upload-time = "2025-11-30T20:22:47.458Z" },
{ url = "https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589, upload-time = "2025-11-30T20:22:48.872Z" },
{ url = "https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025, upload-time = "2025-11-30T20:22:50.196Z" },
{ url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895, upload-time = "2025-11-30T20:22:51.87Z" },
{ url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799, upload-time = "2025-11-30T20:22:53.341Z" },
{ url = "https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731, upload-time = "2025-11-30T20:22:54.778Z" },
{ url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027, upload-time = "2025-11-30T20:22:56.212Z" },
{ url = "https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020, upload-time = "2025-11-30T20:22:58.2Z" },
{ url = "https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139, upload-time = "2025-11-30T20:23:00.209Z" },
{ url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224, upload-time = "2025-11-30T20:23:02.008Z" },
{ url = "https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645, upload-time = "2025-11-30T20:23:03.43Z" },
{ url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443, upload-time = "2025-11-30T20:23:04.878Z" },
{ url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375, upload-time = "2025-11-30T20:23:06.342Z" },
{ url = "https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850, upload-time = "2025-11-30T20:23:07.825Z" },
{ url = "https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812, upload-time = "2025-11-30T20:23:09.228Z" },
{ url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841, upload-time = "2025-11-30T20:23:11.186Z" },
{ url = "https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149, upload-time = "2025-11-30T20:23:12.864Z" },
{ url = "https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843, upload-time = "2025-11-30T20:23:14.638Z" },
{ url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507, upload-time = "2025-11-30T20:23:16.105Z" },
{ url = "https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949, upload-time = "2025-11-30T20:23:17.539Z" },
{ url = "https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790, upload-time = "2025-11-30T20:23:19.029Z" },
{ url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217, upload-time = "2025-11-30T20:23:20.885Z" },
{ url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806, upload-time = "2025-11-30T20:23:22.488Z" },
{ url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341, upload-time = "2025-11-30T20:23:24.449Z" },
{ url = "https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768, upload-time = "2025-11-30T20:23:25.908Z" },
{ url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" },
{ url = "https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" },
{ url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" },
{ url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" },
{ url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" },
{ url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" },
{ url = "https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" },
{ url = "https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" },
{ url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" },
{ url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" },
{ url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" },
{ url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" },
{ url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" },
{ url = "https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" },
{ url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" },
{ url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" },
{ url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" },
{ url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" },
{ url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" },
{ url = "https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" },
{ url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" },
{ url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" },
{ url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" },
{ url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" },
{ url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" },
{ url = "https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" },
{ url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" },
{ url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" },
{ url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" },
{ url = "https://files.pythonhosted.org/packages/69/71/3f34339ee70521864411f8b6992e7ab13ac30d8e4e3309e07c7361767d91/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58", size = 372292, upload-time = "2025-11-30T20:24:16.537Z" },
{ url = "https://files.pythonhosted.org/packages/57/09/f183df9b8f2d66720d2ef71075c59f7e1b336bec7ee4c48f0a2b06857653/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a", size = 362128, upload-time = "2025-11-30T20:24:18.086Z" },
{ url = "https://files.pythonhosted.org/packages/7a/68/5c2594e937253457342e078f0cc1ded3dd7b2ad59afdbf2d354869110a02/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb", size = 391542, upload-time = "2025-11-30T20:24:20.092Z" },
{ url = "https://files.pythonhosted.org/packages/49/5c/31ef1afd70b4b4fbdb2800249f34c57c64beb687495b10aec0365f53dfc4/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c", size = 404004, upload-time = "2025-11-30T20:24:22.231Z" },
{ url = "https://files.pythonhosted.org/packages/e3/63/0cfbea38d05756f3440ce6534d51a491d26176ac045e2707adc99bb6e60a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3", size = 527063, upload-time = "2025-11-30T20:24:24.302Z" },
{ url = "https://files.pythonhosted.org/packages/42/e6/01e1f72a2456678b0f618fc9a1a13f882061690893c192fcad9f2926553a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5", size = 413099, upload-time = "2025-11-30T20:24:25.916Z" },
{ url = "https://files.pythonhosted.org/packages/b8/25/8df56677f209003dcbb180765520c544525e3ef21ea72279c98b9aa7c7fb/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738", size = 392177, upload-time = "2025-11-30T20:24:27.834Z" },
{ url = "https://files.pythonhosted.org/packages/4a/b4/0a771378c5f16f8115f796d1f437950158679bcd2a7c68cf251cfb00ed5b/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f", size = 406015, upload-time = "2025-11-30T20:24:29.457Z" },
{ url = "https://files.pythonhosted.org/packages/36/d8/456dbba0af75049dc6f63ff295a2f92766b9d521fa00de67a2bd6427d57a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877", size = 423736, upload-time = "2025-11-30T20:24:31.22Z" },
{ url = "https://files.pythonhosted.org/packages/13/64/b4d76f227d5c45a7e0b796c674fd81b0a6c4fbd48dc29271857d8219571c/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a", size = 573981, upload-time = "2025-11-30T20:24:32.934Z" },
{ url = "https://files.pythonhosted.org/packages/20/91/092bacadeda3edf92bf743cc96a7be133e13a39cdbfd7b5082e7ab638406/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4", size = 599782, upload-time = "2025-11-30T20:24:35.169Z" },
{ url = "https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" },
]

[[package]]
name = "rsa"
version = "4.9.1"
@@ -1831,6 +2407,35 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" },
]

[[package]]
name = "schemathesis"
version = "4.12.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "harfile" },
{ name = "httpx" },
{ name = "hypothesis" },
{ name = "hypothesis-graphql" },
{ name = "hypothesis-jsonschema" },
{ name = "jsonschema" },
{ name = "jsonschema-rs" },
{ name = "junit-xml" },
{ name = "pyrate-limiter" },
{ name = "pytest" },
{ name = "pyyaml" },
{ name = "requests" },
{ name = "rich" },
{ name = "starlette-testclient" },
{ name = "tenacity" },
{ name = "typing-extensions" },
{ name = "werkzeug" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0b/f2/24df7e625ffeb52beb55047bff0eee89729482a0d54113bd9859e6f1fa68/schemathesis-4.12.2.tar.gz", hash = "sha256:cf7ba3d2ed26d3df441e8282c05008e6c9fa0c619c8a89ca080a9ff135216f0f", size = 59342738, upload-time = "2026-03-19T07:38:51.914Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/43/f7/6e414d3fd456a6686e814b2c2b98921296336606dbb64e72797c9d0acb9e/schemathesis-4.12.2-py3-none-any.whl", hash = "sha256:435eb16623f1d7e916d9366e5da17096fc307d2129cd502820c8f7f1270aeeed", size = 464819, upload-time = "2026-03-19T07:38:49.761Z" },
]

[[package]]
name = "setuptools"
version = "80.9.0"
@@ -1849,6 +2454,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]

[[package]]
name = "sortedcontainers"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" },
]

[[package]]
name = "sqlalchemy"
version = "2.0.45"
@@ -1904,6 +2518,28 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" },
]

[[package]]
name = "starlette-testclient"
version = "0.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "requests" },
{ name = "starlette" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cd/64/6debec8fc6e9abde0c7042145dc27a562bd1cd79350a55b80bf612a10ccb/starlette_testclient-0.4.1.tar.gz", hash = "sha256:9e993ffe12fab45606116257813986612262fe15c1bb6dc9e39cc68693ac1fc5", size = 12480, upload-time = "2024-04-29T10:54:28.503Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/25/44/f5209b889a344b1331a103aec4e9f906c7f67f9295fd287fdaa818179d95/starlette_testclient-0.4.1-py3-none-any.whl", hash = "sha256:dcf0eb237dc47f062ef5925f98330af46f67e547cb587119c9ae78c17ae6c1d1", size = 8143, upload-time = "2024-04-29T10:54:25.728Z" },
]

[[package]]
name = "stevedore"
version = "5.7.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/6d/90764092216fa560f6587f83bb70113a8ba510ba436c6476a2b47359057c/stevedore-5.7.0.tar.gz", hash = "sha256:31dd6fe6b3cbe921e21dcefabc9a5f1cf848cf538a1f27543721b8ca09948aa3", size = 516200, upload-time = "2026-02-20T13:27:06.765Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/69/06/36d260a695f383345ab5bbc3fd447249594ae2fa8dfd19c533d5ae23f46b/stevedore-5.7.0-py3-none-any.whl", hash = "sha256:fd25efbb32f1abb4c9e502f385f0018632baac11f9ee5d1b70f88cc5e22ad4ed", size = 54483, upload-time = "2026-02-20T13:27:05.561Z" },
]

[[package]]
name = "sympy"
version = "1.14.0"
@@ -1916,6 +2552,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" },
]

[[package]]
name = "tenacity"
version = "9.1.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/47/c6/ee486fd809e357697ee8a44d3d69222b344920433d3b6666ccd9b374630c/tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a", size = 49413, upload-time = "2026-02-07T10:45:33.841Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/c1/eb8f9debc45d3b7918a32ab756658a0904732f75e555402972246b0b8e71/tenacity-9.1.4-py3-none-any.whl", hash = "sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55", size = 28926, upload-time = "2026-02-07T10:45:32.24Z" },
]

[[package]]
name = "tiktoken"
version = "0.3.3"
@@ -1935,6 +2580,69 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/16/8c/63ae6e51c5db57d1c3a0d039eec9ba083d433dfad946098b2c80aed3e26d/tiktoken-0.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:5dca434c8680b987eacde2dbc449e9ea4526574dbf9f3d8938665f638095be82", size = 579436, upload-time = "2023-03-28T23:38:47.402Z" },
]

[[package]]
name = "tomli"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" },
{ url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" },
{ url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" },
{ url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" },
{ url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" },
{ url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" },
{ url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" },
{ url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" },
{ url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" },
{ url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" },
{ url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" },
{ url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" },
{ url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" },
{ url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" },
{ url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" },
{ url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" },
{ url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" },
{ url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" },
{ url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" },
{ url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" },
{ url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" },
{ url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" },
{ url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" },
{ url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" },
{ url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" },
{ url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" },
{ url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" },
{ url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" },
{ url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" },
{ url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" },
{ url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" },
{ url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" },
{ url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" },
{ url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" },
{ url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" },
{ url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" },
{ url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" },
{ url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" },
{ url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" },
{ url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" },
{ url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" },
{ url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" },
{ url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" },
{ url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" },
{ url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" },
{ url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" },
]

[[package]]
name = "tomli-w"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/19/75/241269d1da26b624c0d5e110e8149093c759b7a286138f4efd61a60e75fe/tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021", size = 7184, upload-time = "2025-01-15T12:07:24.262Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/18/c86eb8e0202e32dd3df50d43d7ff9854f8e0603945ff398974c1d91ac1ef/tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90", size = 6675, upload-time = "2025-01-15T12:07:22.074Z" },
]

[[package]]
name = "torch"
version = "2.9.1"
@@ -2253,3 +2961,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735, upload-time = "2026-01-10T09:23:42.259Z" },
{ url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" },
]

[[package]]
name = "werkzeug"
version = "3.1.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markupsafe" },
]
sdist = { url = "https://files.pythonhosted.org/packages/61/f1/ee81806690a87dab5f5653c1f146c92bc066d7f4cebc603ef88eb9e13957/werkzeug-3.1.6.tar.gz", hash = "sha256:210c6bede5a420a913956b4791a7f4d6843a43b6fcee4dfa08a65e93007d0d25", size = 864736, upload-time = "2026-02-19T15:17:18.884Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4d/ec/d58832f89ede95652fd01f4f24236af7d32b70cab2196dfcc2d2fd13c5c2/werkzeug-3.1.6-py3-none-any.whl", hash = "sha256:7ddf3357bb9564e407607f988f683d72038551200c704012bb9a4c523d42f131", size = 225166, upload-time = "2026-02-19T15:17:17.475Z" },
]