init: new structure + fix lint errors

Daniil
2026-02-03 02:15:07 +03:00
commit 67e0f22b4f
89 changed files with 7654 additions and 0 deletions
+3
@@ -0,0 +1,3 @@
"""
Infrastructure layer - app bootstrapping, configuration, security, and external integrations.
"""
+60
@@ -0,0 +1,60 @@
from __future__ import annotations

import uuid

from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from jwt import ExpiredSignatureError, InvalidTokenError
from sqlalchemy.ext.asyncio import AsyncSession

from cpv3.db.session import get_db
from cpv3.infrastructure.security import decode_token
from cpv3.modules.users.models import User
from cpv3.modules.users.repository import UserRepository

_bearer = HTTPBearer(auto_error=True)


async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(_bearer),
    db: AsyncSession = Depends(get_db),
) -> User:
    token = credentials.credentials
    try:
        payload = decode_token(token)
    except ExpiredSignatureError as e:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Token expired"
        ) from e
    except InvalidTokenError as e:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
        ) from e

    if payload.get("type") != "access":
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
        )

    sub = payload.get("sub")
    if not sub:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
        )

    try:
        user_id = uuid.UUID(str(sub))
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
        ) from e

    user_repo = UserRepository(db)
    user = await user_repo.get_by_id(user_id)
    if user is None or not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials"
        )
    return user
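For orientation, a minimal usage sketch (not part of this commit) of a route consuming this dependency; the router module, route path, and the User attribute shown are assumptions:

# Hypothetical usage sketch -- route path and User attributes are assumptions.
from fastapi import APIRouter, Depends

router = APIRouter()


@router.get("/me")
async def read_me(current_user: User = Depends(get_current_user)) -> dict:
    # By the time this body runs, the bearer token has been validated,
    # the "sub" claim parsed as a UUID, and an active user loaded from the DB.
    return {"id": str(current_user.id)}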
+43
@@ -0,0 +1,43 @@
"""
Infrastructure-level dependencies for FastAPI dependency injection.
"""

from __future__ import annotations

from functools import lru_cache

from cpv3.infrastructure.settings import get_settings
from cpv3.infrastructure.storage.base import StorageBackend, StorageService
from cpv3.infrastructure.storage.local import LocalConfig, LocalStorageBackend
from cpv3.infrastructure.storage.s3 import S3Config, S3StorageBackend


@lru_cache
def _get_storage_service() -> StorageService:
    settings = get_settings()
    backend: StorageBackend
    if settings.storage_backend.upper() == "LOCAL":
        backend = LocalStorageBackend(LocalConfig(root_dir=settings.local_storage_dir))
    else:
        if not settings.s3_access_key or not settings.s3_secret_key:
            raise RuntimeError(
                "S3_ACCESS_KEY and S3_SECRET_KEY are required for S3 storage"
            )
        backend = S3StorageBackend(
            S3Config(
                access_key=settings.s3_access_key,
                secret_key=settings.s3_secret_key,
                bucket_name=settings.s3_bucket_name,
                endpoint_url_internal=settings.s3_endpoint_url_internal,
                endpoint_url_public=settings.s3_endpoint_url_public,
                presign_expires_seconds=settings.s3_presign_expires_seconds,
            )
        )
    return StorageService(backend)


async def get_storage() -> StorageService:
    return _get_storage_service()
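A hedged sketch of an upload endpoint built on this dependency (the route path and folder name are illustrative, not part of the commit):

# Hypothetical usage sketch -- endpoint and folder name are illustrative.
from fastapi import APIRouter, Depends, UploadFile

from cpv3.infrastructure.storage.base import StorageService

router = APIRouter()


@router.post("/files")
async def upload(
    file: UploadFile,
    storage: StorageService = Depends(get_storage),
) -> dict:
    # StorageService generates a UUID key and offloads the blocking I/O to a thread.
    key = await storage.upload_fileobj(
        fileobj=file.file,
        file_name=file.filename or "upload.bin",
        folder="uploads",
        content_type=file.content_type,
    )
    return {"key": key, "url": await storage.url(key)}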
+55
@@ -0,0 +1,55 @@
from __future__ import annotations

from datetime import datetime, timedelta, timezone
from typing import Any, Literal

import jwt
from passlib.context import CryptContext  # type: ignore[import-untyped]

from cpv3.infrastructure.settings import get_settings

# Use bcrypt_sha256 to lift the 72-byte password limit while still verifying legacy bcrypt hashes.
pwd_context = CryptContext(schemes=["bcrypt_sha256", "bcrypt"], deprecated="auto")


def hash_password(password: str) -> str:
    return pwd_context.hash(password)


def verify_password(password: str, password_hash: str) -> bool:
    return pwd_context.verify(password, password_hash)


def utcnow() -> datetime:
    return datetime.now(timezone.utc)


def create_token(
    *,
    subject: str,
    token_type: Literal["access", "refresh"],
    expires_in: timedelta,
    extra: dict[str, Any] | None = None,
) -> str:
    settings = get_settings()
    now = utcnow()
    payload: dict[str, Any] = {
        "sub": subject,
        "type": token_type,
        "iat": int(now.timestamp()),
        "exp": int((now + expires_in).timestamp()),
    }
    if extra:
        payload.update(extra)
    return jwt.encode(
        payload, settings.jwt_secret_key, algorithm=settings.jwt_algorithm
    )


def decode_token(token: str) -> dict[str, Any]:
    settings = get_settings()
    return jwt.decode(
        token, settings.jwt_secret_key, algorithms=[settings.jwt_algorithm]
    )
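A quick round-trip sketch of these helpers (illustrative values only; the TTL mirrors the settings default):

# Illustrative round-trip of the helpers above; values are assumptions.
import uuid
from datetime import timedelta

hashed = hash_password("s3cret")
assert verify_password("s3cret", hashed)

token = create_token(
    subject=str(uuid.uuid4()),  # get_current_user expects a UUID string in "sub"
    token_type="access",
    expires_in=timedelta(minutes=60),  # mirrors the JWT_ACCESS_TTL_MINUTES default
)
payload = decode_token(token)  # raises ExpiredSignatureError once the TTL elapses
assert payload["type"] == "access"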
+89
@@ -0,0 +1,89 @@
from __future__ import annotations

from functools import lru_cache
from pathlib import Path

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    # App
    debug: bool = Field(default=True, alias="DEBUG")
    cors_allowed_origins: list[str] = Field(
        default_factory=lambda: ["http://localhost:3000", "http://localhost:8000"],
        alias="CORS_ALLOWED_ORIGINS",
    )

    # JWT
    jwt_secret_key: str = Field(default="dev-secret", alias="JWT_SECRET_KEY")
    jwt_algorithm: str = Field(default="HS256", alias="JWT_ALGORITHM")
    jwt_access_ttl_minutes: int = Field(default=60, alias="JWT_ACCESS_TTL_MINUTES")
    jwt_refresh_ttl_days: int = Field(default=30, alias="JWT_REFRESH_TTL_DAYS")

    # DB
    postgres_user: str = Field(default="postgres", alias="POSTGRES_USER")
    postgres_password: str = Field(default="postgres", alias="POSTGRES_PASSWORD")
    postgres_host: str = Field(default="localhost", alias="POSTGRES_HOST")
    postgres_port: int = Field(default=5332, alias="POSTGRES_PORT")
    postgres_database: str = Field(
        default="coffee_project_db", alias="POSTGRES_DATABASE"
    )
    database_url: str | None = Field(default=None, alias="DATABASE_URL")

    # Storage
    storage_backend: str = Field(default="S3", alias="STORAGE_BACKEND")
    s3_access_key: str | None = Field(default=None, alias="S3_ACCESS_KEY")
    s3_secret_key: str | None = Field(default=None, alias="S3_SECRET_KEY")
    s3_bucket_name: str = Field(default="coffee-bucket", alias="S3_BUCKET_NAME")
    # Internal endpoint is used by the API container to talk to MinIO/S3.
    s3_endpoint_url_internal: str | None = Field(
        default=None, alias="S3_ENDPOINT_URL_INTERNAL"
    )
    # Public endpoint is only used to generate browser-accessible URLs.
    s3_endpoint_url_public: str | None = Field(
        default=None, alias="S3_ENDPOINT_URL_PUBLIC"
    )
    s3_presign_expires_seconds: int = Field(default=3600, alias="S3_PRESIGN_EXPIRES")
    local_storage_dir: Path = Field(
        default=Path("./.local_storage"), alias="LOCAL_STORAGE_DIR"
    )

    # External services
    remotion_service_url: str = Field(
        default="http://localhost:8001", alias="REMOTION_SERVICE_URL"
    )
    transcription_models_dir: Path = Field(
        default=Path("./.artifacts/Models/transcription"),
        alias="TRANSCRIPTION_MODELS_DIR",
    )
    google_service_key_path: Path = Field(
        default=Path("./.s_data/keyapispeech.json"),
        alias="GOOGLE_APPLICATION_CREDENTIALS",
    )

    def get_database_url(self) -> str:
        if self.database_url:
            return self.database_url
        return (
            f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}"
            f"@{self.postgres_host}:{self.postgres_port}/{self.postgres_database}"
        )


@lru_cache
def get_settings() -> Settings:
    return Settings()
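With no .env overrides in place, the defaults compose like this (a sketch, not part of the commit):

# Illustrative sketch: DSN composed from the defaults above, assuming no .env overrides.
settings = get_settings()  # lru_cache makes this a process-wide singleton
assert settings.get_database_url() == (
    "postgresql+asyncpg://postgres:postgres@localhost:5332/coffee_project_db"
)
# A DATABASE_URL env var (or .env entry) takes precedence over the composed DSN.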
+17
@@ -0,0 +1,17 @@
"""
Storage infrastructure - file storage backends (local, S3).
"""
from cpv3.infrastructure.storage.base import StorageBackend
from cpv3.infrastructure.storage.local import LocalConfig, LocalStorageBackend
from cpv3.infrastructure.storage.s3 import S3Config, S3StorageBackend
from cpv3.infrastructure.storage.types import FileInfo
__all__ = [
"StorageBackend",
"LocalConfig",
"LocalStorageBackend",
"S3Config",
"S3StorageBackend",
"FileInfo",
]
+124
@@ -0,0 +1,124 @@
from __future__ import annotations

from dataclasses import dataclass
from os import path
from tempfile import NamedTemporaryFile
from typing import BinaryIO, Callable, Protocol
from uuid import uuid4

import anyio
import anyio.to_thread

from cpv3.infrastructure.storage.types import FileInfo


@dataclass(frozen=True)
class TempFile:
    path: str
    cleanup: Callable[[], None]


class StorageBackend(Protocol):
    """Protocol defining the interface for storage backends."""

    def upload_fileobj(
        self, key: str, fileobj: BinaryIO, *, content_type: str | None
    ) -> None: ...

    def download_fileobj(self, key: str, fileobj: BinaryIO) -> None: ...

    def exists(self, key: str) -> bool: ...

    def size(self, key: str) -> int: ...

    def delete(self, key: str) -> None: ...

    def read(self, key: str) -> bytes: ...

    def generate_url(self, key: str) -> str: ...


class StorageService:
    """High-level async storage service wrapping a backend."""

    def __init__(self, backend: StorageBackend) -> None:
        self._backend = backend

    def _make_key(self, file_name: str, folder: str, gen_name: bool) -> str:
        if gen_name:
            # splitext returns "" when there is no extension, so it stays optional.
            _, ext = path.splitext(file_name)
            file_name = f"{uuid4().hex}{ext}"
        return path.join(folder, file_name) if folder else file_name

    async def upload_fileobj(
        self,
        *,
        fileobj: BinaryIO,
        file_name: str,
        folder: str = "",
        gen_name: bool = True,
        content_type: str | None = None,
    ) -> str:
        key = self._make_key(file_name, folder, gen_name)

        def _upload() -> None:
            fileobj.seek(0)
            self._backend.upload_fileobj(key, fileobj, content_type=content_type)

        await anyio.to_thread.run_sync(_upload)
        return key

    async def exists(self, key: str) -> bool:
        return await anyio.to_thread.run_sync(lambda: self._backend.exists(key))

    async def delete(self, key: str) -> None:
        await anyio.to_thread.run_sync(lambda: self._backend.delete(key))

    async def size(self, key: str) -> int:
        return await anyio.to_thread.run_sync(lambda: self._backend.size(key))

    async def read(self, key: str) -> bytes:
        return await anyio.to_thread.run_sync(lambda: self._backend.read(key))

    async def url(self, key: str) -> str:
        return await anyio.to_thread.run_sync(lambda: self._backend.generate_url(key))

    async def get_file_info(self, key: str) -> FileInfo:
        if not await self.exists(key):
            raise FileNotFoundError(f"File '{key}' does not exist")
        file_url = await self.url(key)
        file_size = await self.size(key)
        return FileInfo(
            file_path=key,
            file_url=file_url,
            file_size=file_size,
            filename=path.basename(key),
        )

    async def download_to_temp(self, key: str) -> TempFile:
        if not await self.exists(key):
            raise FileNotFoundError(f"File '{key}' does not exist")
        _, ext = path.splitext(key)
        suffix = ext if ext else ".bin"

        # delete=False so the file outlives the context manager; the caller
        # removes it via the returned cleanup callback.
        with NamedTemporaryFile(suffix=suffix, delete=False) as tmp:
            out_path = tmp.name

        def _download() -> None:
            with open(out_path, "wb") as out:
                self._backend.download_fileobj(key, out)

        await anyio.to_thread.run_sync(_download)

        def _cleanup() -> None:
            import os

            if os.path.exists(out_path):
                os.remove(out_path)

        return TempFile(path=out_path, cleanup=_cleanup)
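A hedged end-to-end sketch of the service (backend choice, directory, and file contents are illustrative):

# Illustrative sketch: exercising StorageService against the local backend.
import asyncio
from io import BytesIO
from pathlib import Path

from cpv3.infrastructure.storage.local import LocalConfig, LocalStorageBackend


async def main() -> None:
    backend = LocalStorageBackend(LocalConfig(root_dir=Path("./.local_storage")))
    service = StorageService(backend)

    # gen_name=True (the default) replaces the name with a UUID but keeps the extension.
    key = await service.upload_fileobj(
        fileobj=BytesIO(b"hello"),
        file_name="notes.txt",
        folder="demo",
    )
    info = await service.get_file_info(key)
    print(info.file_url, info.file_size)  # -> /api/files/local/demo/<uuid>.txt 5

    tmp = await service.download_to_temp(key)
    try:
        print(Path(tmp.path).read_bytes())  # b"hello"
    finally:
        tmp.cleanup()  # the caller is responsible for removing the temp file


asyncio.run(main())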
+62
@@ -0,0 +1,62 @@
from __future__ import annotations

from dataclasses import dataclass
from pathlib import Path
from typing import BinaryIO


@dataclass(frozen=True)
class LocalConfig:
    root_dir: Path


class LocalStorageBackend:
    def __init__(self, cfg: LocalConfig) -> None:
        self._cfg = cfg
        self._cfg.root_dir.mkdir(parents=True, exist_ok=True)

    def _full_path(self, key: str) -> Path:
        full_path = (self._cfg.root_dir / key).resolve()
        # Reject keys like "../secret" that would resolve outside the storage root.
        if not full_path.is_relative_to(self._cfg.root_dir.resolve()):
            raise ValueError(f"Invalid storage key: {key!r}")
        return full_path

    def upload_fileobj(
        self, key: str, fileobj: BinaryIO, *, content_type: str | None
    ) -> None:
        # content_type is unused for the filesystem backend.
        _ = content_type
        full_path = self._full_path(key)
        full_path.parent.mkdir(parents=True, exist_ok=True)
        with open(full_path, "wb") as out:
            while True:
                chunk = fileobj.read(1024 * 1024)
                if not chunk:
                    break
                out.write(chunk)

    def download_fileobj(self, key: str, fileobj: BinaryIO) -> None:
        full_path = self._full_path(key)
        with open(full_path, "rb") as src:
            while True:
                chunk = src.read(1024 * 1024)
                if not chunk:
                    break
                fileobj.write(chunk)

    def exists(self, key: str) -> bool:
        return self._full_path(key).exists()

    def size(self, key: str) -> int:
        return self._full_path(key).stat().st_size

    def delete(self, key: str) -> None:
        path = self._full_path(key)
        if path.exists():
            path.unlink()

    def read(self, key: str) -> bytes:
        return self._full_path(key).read_bytes()

    def generate_url(self, key: str) -> str:
        # Served by cpv3 via /api/files/local/{path}
        return f"/api/files/local/{key}"
+107
@@ -0,0 +1,107 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import BinaryIO

import boto3  # type: ignore[import-untyped]
import boto3.session  # type: ignore[import-untyped]
from botocore.config import Config  # type: ignore[import-untyped]
from botocore.exceptions import ClientError  # type: ignore[import-untyped]


@dataclass(frozen=True)
class S3Config:
    access_key: str
    secret_key: str
    bucket_name: str
    endpoint_url_internal: str | None
    endpoint_url_public: str | None
    presign_expires_seconds: int = 3600


class S3StorageBackend:
    def __init__(self, cfg: S3Config) -> None:
        self._cfg = cfg
        self._bucket_ready = False
        session = boto3.session.Session()
        common = {
            "aws_access_key_id": cfg.access_key,
            "aws_secret_access_key": cfg.secret_key,
            "region_name": "us-east-1",
            "config": Config(signature_version="s3v4", s3={"addressing_style": "path"}),
        }
        self._client = session.client(
            "s3", endpoint_url=cfg.endpoint_url_internal, **common
        )
        # Presigned URLs must be signed against the endpoint the browser will hit.
        presign_endpoint = cfg.endpoint_url_public or cfg.endpoint_url_internal
        self._presign_client = session.client(
            "s3", endpoint_url=presign_endpoint, **common
        )

    def ensure_bucket(self) -> None:
        if self._bucket_ready:
            return
        try:
            self._client.head_bucket(Bucket=self._cfg.bucket_name)
        except ClientError as e:
            code = str(e.response.get("Error", {}).get("Code", ""))
            if code in {"404", "NoSuchBucket"}:
                self._client.create_bucket(Bucket=self._cfg.bucket_name)
            else:
                raise
        self._bucket_ready = True

    def upload_fileobj(
        self, key: str, fileobj: BinaryIO, *, content_type: str | None
    ) -> None:
        self.ensure_bucket()
        extra_args = {"ContentType": content_type} if content_type else None
        self._client.upload_fileobj(
            Fileobj=fileobj,
            Bucket=self._cfg.bucket_name,
            Key=key,
            ExtraArgs=extra_args,
        )

    def download_fileobj(self, key: str, fileobj: BinaryIO) -> None:
        self.ensure_bucket()
        self._client.download_fileobj(self._cfg.bucket_name, key, fileobj)

    def exists(self, key: str) -> bool:
        self.ensure_bucket()
        try:
            self._client.head_object(Bucket=self._cfg.bucket_name, Key=key)
            return True
        except ClientError as e:
            code = str(e.response.get("Error", {}).get("Code", ""))
            if code in {"404", "NoSuchKey"}:
                return False
            raise

    def size(self, key: str) -> int:
        self.ensure_bucket()
        resp = self._client.head_object(Bucket=self._cfg.bucket_name, Key=key)
        return int(resp.get("ContentLength", 0))

    def delete(self, key: str) -> None:
        self.ensure_bucket()
        self._client.delete_object(Bucket=self._cfg.bucket_name, Key=key)

    def read(self, key: str) -> bytes:
        self.ensure_bucket()
        resp = self._client.get_object(Bucket=self._cfg.bucket_name, Key=key)
        return resp["Body"].read()

    def generate_url(self, key: str) -> str:
        self.ensure_bucket()
        return self._presign_client.generate_presigned_url(
            ClientMethod="get_object",
            Params={"Bucket": self._cfg.bucket_name, "Key": key},
            ExpiresIn=self._cfg.presign_expires_seconds,
        )
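A hedged wiring sketch against a local MinIO; the endpoints and credentials are assumptions, and the dual-endpoint split matters when the API runs in Docker but the browser does not:

# Illustrative sketch: endpoints and credentials assume a local MinIO setup.
from io import BytesIO

backend = S3StorageBackend(
    S3Config(
        access_key="minioadmin",
        secret_key="minioadmin",
        bucket_name="coffee-bucket",
        endpoint_url_internal="http://minio:9000",  # reachable from the API container
        endpoint_url_public="http://localhost:9000",  # reachable from the browser
        presign_expires_seconds=3600,
    )
)

backend.upload_fileobj("demo/hello.txt", BytesIO(b"hello"), content_type="text/plain")
print(backend.generate_url("demo/hello.txt"))  # presigned URL on the public endpoint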
+11
@@ -0,0 +1,11 @@
from __future__ import annotations

from dataclasses import dataclass


@dataclass(frozen=True)
class FileInfo:
    file_path: str
    file_url: str
    file_size: int | None = None
    filename: str | None = None