New features: notifications table, transcription-artifact media-file detachment, project workspace state, and avatar URL cleanup migrations

This commit is contained in:
Daniil
2026-02-27 23:33:56 +03:00
parent 937e58859a
commit dc04efe0fb
41 changed files with 2067 additions and 141 deletions
+26
View File
@@ -0,0 +1,26 @@
## Mako template used by Alembic to generate new migration scripts.
## ``${...}`` expressions are substituted at generation time; ``##`` lines
## are Mako comments and do not appear in the rendered migration file.
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
## Extra imports requested by autogenerate, if any.
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
## Autogenerated operations are injected below; "pass" keeps each function
## syntactically valid when there is nothing to do.
def upgrade() -> None:
    ${upgrades if upgrades else "pass"}
def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,55 @@
"""add notifications table
Revision ID: 6a41fa07bd94
Revises: 0001
Create Date: 2026-02-20 19:07:40.634385
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '6a41fa07bd94'
# Chains onto the initial schema migration.
down_revision: Union[str, None] = '0001'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the ``notifications`` table and make ``users.username`` unique via its index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('notifications',
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('job_id', sa.UUID(), nullable=True),
        sa.Column('project_id', sa.UUID(), nullable=True),
        sa.Column('notification_type', sa.String(length=32), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('message', sa.Text(), nullable=True),
        sa.Column('payload', sa.JSON(), nullable=True),
        sa.Column('is_read', sa.Boolean(), nullable=False),
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        # Deleting a job/project detaches the notification (link goes NULL);
        # deleting a user removes the user's notifications entirely.
        sa.ForeignKeyConstraint(['job_id'], ['jobs.id'], ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_notifications_user_id'), 'notifications', ['user_id'], unique=False)
    # Replace the separate unique constraint on users.username with a unique
    # index: drop the constraint and the old non-unique index, then recreate
    # the index with unique=True.  Order matters here.
    op.drop_constraint(op.f('uq_users_username'), 'users', type_='unique')
    op.drop_index(op.f('ix_users_username'), table_name='users')
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert: restore the non-unique username index plus unique constraint, drop notifications."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Mirror of upgrade(): recreate the non-unique index and the standalone
    # unique constraint before removing the notifications objects.
    op.drop_index(op.f('ix_users_username'), table_name='users')
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=False)
    op.create_unique_constraint(op.f('uq_users_username'), 'users', ['username'], postgresql_nulls_not_distinct=False)
    op.drop_index(op.f('ix_notifications_user_id'), table_name='notifications')
    op.drop_table('notifications')
    # ### end Alembic commands ###
@@ -0,0 +1,71 @@
"""make artifact media_file_id nullable and fix transcription data
Revision ID: b3c4d5e6f7a8
Revises: a1b2c3d4e5f6
Create Date: 2026-02-21 18:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "b3c4d5e6f7a8"
# Chains onto the avatar-URL cleanup migration.
down_revision: Union[str, None] = "a1b2c3d4e5f6"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Make ``artifact_media_files.media_file_id`` nullable and detach transcription artifacts.

    Statement order is load-bearing: MediaFiles are soft-deleted while the
    link column still points at them, then the link is nulled, then the
    backing File rows lose their project association.
    """
    # Schema change: make media_file_id nullable
    op.alter_column(
        "artifact_media_files",
        "media_file_id",
        existing_type=sa.UUID(),
        nullable=True,
    )
    conn = op.get_bind()
    # Soft-delete MediaFiles that back transcription artifacts (BEFORE nulling the link)
    conn.execute(
        sa.text(
            "UPDATE media_files SET is_deleted = true "
            "WHERE id IN ("
            " SELECT amf.media_file_id FROM artifact_media_files amf "
            " WHERE amf.artifact_type = 'TRANSCRIPTION_JSON' "
            " AND amf.media_file_id IS NOT NULL"
            ")"
        )
    )
    # Null out media_file_id on transcription artifacts
    conn.execute(
        sa.text(
            "UPDATE artifact_media_files "
            "SET media_file_id = NULL "
            "WHERE artifact_type = 'TRANSCRIPTION_JSON'"
        )
    )
    # Null out project_id on File records backing transcription artifacts
    # (PostgreSQL-style UPDATE ... FROM join syntax).
    conn.execute(
        sa.text(
            "UPDATE files SET project_id = NULL "
            "FROM artifact_media_files "
            "WHERE artifact_media_files.file_id = files.id "
            " AND artifact_media_files.artifact_type = 'TRANSCRIPTION_JSON'"
        )
    )
def downgrade() -> None:
    """Restore NOT NULL on ``media_file_id`` only; the upgrade's data rewrites are one-way."""
    # Restore NOT NULL constraint (data changes are not reversible)
    op.alter_column(
        "artifact_media_files",
        "media_file_id",
        existing_type=sa.UUID(),
        nullable=False,
    )
@@ -0,0 +1,26 @@
"""add workspace_state to projects
Revision ID: c4d5e6f7a8b9
Revises: b3c4d5e6f7a8
Create Date: 2026-02-22 12:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "c4d5e6f7a8b9"
# Chains onto the media_file_id-nullable migration.
down_revision: Union[str, None] = "b3c4d5e6f7a8"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add a nullable JSON ``workspace_state`` column to ``projects``."""
    op.add_column("projects", sa.Column("workspace_state", sa.JSON(), nullable=True))
def downgrade() -> None:
    """Drop the ``workspace_state`` column; any stored state is lost."""
    op.drop_column("projects", "workspace_state")
@@ -0,0 +1,58 @@
"""strip presigned query params from avatar URLs
Revision ID: a1b2c3d4e5f6
Revises: 6a41fa07bd94
Create Date: 2026-02-21 12:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "a1b2c3d4e5f6"
# Chains onto the notifications-table migration.
down_revision: Union[str, None] = "6a41fa07bd94"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Rewrite ``users.avatar`` from full presigned URLs to bare S3 object keys.

    Example: ``http://localhost:9000/coffee-bucket/avatars/abc.jpg?X-Amz-...``
    becomes ``avatars/abc.jpg``.  Only rows whose avatar contains ``://`` are
    touched; rows already storing a bare key are left alone.
    """
    conn = op.get_bind()
    # Materialize the SELECT first so the UPDATEs below are not issued on a
    # connection that is still streaming rows from the same cursor.
    rows = conn.execute(
        sa.text("SELECT id, avatar FROM users WHERE avatar IS NOT NULL AND avatar LIKE :pattern"),
        {"pattern": "%://%"},
    ).fetchall()
    for user_id, avatar_url in rows:
        # Strip the presigned query string, then split the path on '/':
        #   ['http:', '', 'localhost:9000', 'coffee-bucket', 'avatars', 'abc.jpg']
        # The object key is everything from index 4 onward.  NOTE: slicing a
        # short list yields [] rather than raising, so the original
        # try/except IndexError here was dead code and has been removed.
        parts = avatar_url.split("?", 1)[0].split("/")
        key = "/".join(parts[4:])
        if key:  # skip malformed URLs that produce an empty key
            conn.execute(
                sa.text("UPDATE users SET avatar = :key WHERE id = :id"),
                {"key": key, "id": user_id},
            )
def downgrade() -> None:
    """Intentional no-op: the original presigned URLs cannot be reconstructed
    from the bare S3 keys left behind by upgrade()."""