init: new structure + fix lint errors

This commit is contained in:
Daniil
2026-02-03 02:15:07 +03:00
commit 67e0f22b4f
89 changed files with 7654 additions and 0 deletions
+17
View File
@@ -0,0 +1,17 @@
"""
Storage infrastructure - file storage backends (local, S3).
"""
from cpv3.infrastructure.storage.base import StorageBackend
from cpv3.infrastructure.storage.local import LocalConfig, LocalStorageBackend
from cpv3.infrastructure.storage.s3 import S3Config, S3StorageBackend
from cpv3.infrastructure.storage.types import FileInfo
__all__ = [
"StorageBackend",
"LocalConfig",
"LocalStorageBackend",
"S3Config",
"S3StorageBackend",
"FileInfo",
]
+124
View File
@@ -0,0 +1,124 @@
from __future__ import annotations
from abc import ABC, abstractmethod
from dataclasses import dataclass
from os import path
from tempfile import NamedTemporaryFile
from typing import BinaryIO, Callable, Protocol
from uuid import uuid4
import anyio
import anyio.to_thread
from cpv3.infrastructure.storage.types import FileInfo
@dataclass(frozen=True)
class TempFile:
    """A file downloaded to local disk plus a callback that removes it."""

    # Absolute path of the temporary file on the local filesystem.
    path: str
    # Zero-argument callable that deletes the file from disk.
    cleanup: Callable[[], None]
class StorageBackend(Protocol):
    """Protocol defining the interface for storage backends.

    Implementations provide synchronous, blocking operations;
    ``StorageService`` wraps them to run in worker threads.
    """

    def upload_fileobj(
        self, key: str, fileobj: BinaryIO, *, content_type: str | None
    ) -> None:
        """Store the contents of *fileobj* under *key*."""
        ...

    def download_fileobj(self, key: str, fileobj: BinaryIO) -> None:
        """Write the stored contents of *key* into *fileobj*."""
        ...

    def exists(self, key: str) -> bool:
        """Return True if an object is stored under *key*."""
        ...

    def size(self, key: str) -> int:
        """Return the size in bytes of the object stored under *key*."""
        ...

    def delete(self, key: str) -> None:
        """Remove the object stored under *key*."""
        ...

    def read(self, key: str) -> bytes:
        """Return the full contents of the object stored under *key*."""
        ...

    def generate_url(self, key: str) -> str:
        """Return a URL from which the object under *key* can be fetched."""
        ...
class StorageService:
    """High-level async storage service wrapping a synchronous backend.

    Blocking backend calls are dispatched to a worker thread via
    ``anyio.to_thread.run_sync`` so the event loop is never blocked.
    """

    def __init__(self, backend: StorageBackend) -> None:
        self._backend = backend

    def _make_key(self, file_name: str, folder: str, gen_name: bool) -> str:
        """Build the storage key, optionally replacing the name with a UUID.

        The original extension is preserved when *gen_name* is True so the
        stored object keeps a recognizable file type.
        """
        if gen_name:
            # path.splitext returns "" when there is no extension, which is
            # exactly what should be appended in that case.
            _, ext = path.splitext(file_name)
            file_name = f"{uuid4().hex}{ext}"
        return path.join(folder, file_name) if folder else file_name

    async def upload_fileobj(
        self,
        *,
        fileobj: BinaryIO,
        file_name: str,
        folder: str = "",
        gen_name: bool = True,
        content_type: str | None = None,
    ) -> str:
        """Upload *fileobj* and return the key it was stored under."""
        key = self._make_key(file_name, folder, gen_name)

        def _upload() -> None:
            # Rewind in case the caller already read from the file object.
            fileobj.seek(0)
            self._backend.upload_fileobj(key, fileobj, content_type=content_type)

        await anyio.to_thread.run_sync(_upload)
        return key

    async def exists(self, key: str) -> bool:
        """Return True if an object is stored under *key*."""
        return await anyio.to_thread.run_sync(lambda: self._backend.exists(key))

    async def delete(self, key: str) -> None:
        """Remove the object stored under *key*."""
        await anyio.to_thread.run_sync(lambda: self._backend.delete(key))

    async def size(self, key: str) -> int:
        """Return the size in bytes of the object stored under *key*."""
        return await anyio.to_thread.run_sync(lambda: self._backend.size(key))

    async def read(self, key: str) -> bytes:
        """Return the full contents of the object stored under *key*."""
        return await anyio.to_thread.run_sync(lambda: self._backend.read(key))

    async def url(self, key: str) -> str:
        """Return a URL for the object stored under *key*."""
        return await anyio.to_thread.run_sync(lambda: self._backend.generate_url(key))

    async def get_file_info(self, key: str) -> FileInfo:
        """Return metadata (key, URL, size, filename) for *key*.

        Raises:
            FileNotFoundError: if no object is stored under *key*.
        """
        if not await self.exists(key):
            raise FileNotFoundError(f"File '{key}' does not exist")
        file_url = await self.url(key)
        file_size = await self.size(key)
        return FileInfo(
            file_path=key,
            file_url=file_url,
            file_size=file_size,
            filename=path.basename(key),
        )

    async def download_to_temp(self, key: str) -> TempFile:
        """Download *key* to a local temp file and return it with a cleanup hook.

        The caller owns the returned file and must invoke ``cleanup()`` when
        done with it.

        Raises:
            FileNotFoundError: if no object is stored under *key*.
        """
        if not await self.exists(key):
            raise FileNotFoundError(f"File '{key}' does not exist")
        _, ext = path.splitext(key)
        suffix = ext if ext else ".bin"
        with NamedTemporaryFile(suffix=suffix, delete=False) as tmp:
            out_path = tmp.name

        def _download() -> None:
            with open(out_path, "wb") as out:
                self._backend.download_fileobj(key, out)

        def _cleanup() -> None:
            import os

            if os.path.exists(out_path):
                os.remove(out_path)

        try:
            await anyio.to_thread.run_sync(_download)
        except BaseException:
            # The temp file was created with delete=False; without this the
            # file would leak on disk whenever the download fails.
            _cleanup()
            raise
        return TempFile(path=out_path, cleanup=_cleanup)
+62
View File
@@ -0,0 +1,62 @@
from __future__ import annotations
from dataclasses import dataclass
from pathlib import Path
from typing import BinaryIO
@dataclass(frozen=True)
class LocalConfig:
    """Configuration for ``LocalStorageBackend``."""

    # Directory under which all stored files are placed.
    root_dir: Path
class LocalStorageBackend:
    """Storage backend that keeps objects as files under a root directory."""

    # Read/write files in 1 MiB chunks to bound memory usage.
    _CHUNK_SIZE = 1024 * 1024

    def __init__(self, cfg: LocalConfig) -> None:
        self._cfg = cfg
        self._cfg.root_dir.mkdir(parents=True, exist_ok=True)

    def _full_path(self, key: str) -> Path:
        """Resolve *key* to an absolute path confined to ``root_dir``.

        Raises:
            ValueError: if *key* (e.g. via ``..`` components or an absolute
                path) resolves outside the configured root directory.
        """
        root = self._cfg.root_dir.resolve()
        full = (root / key).resolve()
        # Security: without this check a key like "../../etc/passwd" would
        # read/write/delete files outside root_dir.
        if not full.is_relative_to(root):
            raise ValueError(f"Invalid storage key: {key!r}")
        return full

    def upload_fileobj(
        self, key: str, fileobj: BinaryIO, *, content_type: str | None
    ) -> None:
        """Store *fileobj* under *key*, creating parent directories as needed."""
        # content_type is unused for filesystem backend.
        _ = content_type
        full_path = self._full_path(key)
        full_path.parent.mkdir(parents=True, exist_ok=True)
        with open(full_path, "wb") as out:
            while chunk := fileobj.read(self._CHUNK_SIZE):
                out.write(chunk)

    def download_fileobj(self, key: str, fileobj: BinaryIO) -> None:
        """Copy the stored contents of *key* into *fileobj*."""
        with open(self._full_path(key), "rb") as src:
            while chunk := src.read(self._CHUNK_SIZE):
                fileobj.write(chunk)

    def exists(self, key: str) -> bool:
        """Return True if a file is stored under *key*."""
        return self._full_path(key).exists()

    def size(self, key: str) -> int:
        """Return the file size in bytes; raises FileNotFoundError if absent."""
        return self._full_path(key).stat().st_size

    def delete(self, key: str) -> None:
        """Remove the file stored under *key*; a missing file is a no-op."""
        # missing_ok avoids the TOCTOU race between exists() and unlink().
        self._full_path(key).unlink(missing_ok=True)

    def read(self, key: str) -> bytes:
        """Return the full contents of the file stored under *key*."""
        return self._full_path(key).read_bytes()

    def generate_url(self, key: str) -> str:
        """Return the app-relative URL from which the file is served."""
        # Served by cpv3 via /api/files/local/{path}
        return f"/api/files/local/{key}"
+107
View File
@@ -0,0 +1,107 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import BinaryIO
import boto3 # type: ignore[import-untyped]
import boto3.session # type: ignore[import-untyped]
from botocore.config import Config # type: ignore[import-untyped]
from botocore.exceptions import ClientError # type: ignore[import-untyped]
@dataclass(frozen=True)
class S3Config:
    """Configuration for ``S3StorageBackend``.

    ``endpoint_url_internal`` is used for API calls; ``endpoint_url_public``
    (when set) is used instead when presigning URLs handed out to clients.
    """

    access_key: str
    secret_key: str
    bucket_name: str
    endpoint_url_internal: str | None
    endpoint_url_public: str | None
    # Lifetime of presigned GET URLs, in seconds.
    presign_expires_seconds: int = 3600
class S3StorageBackend:
    """Storage backend for S3-compatible object stores.

    Two clients are created from the same credentials and signing config:
    one pointed at the internal endpoint for all API calls, and one pointed
    at the public endpoint (when configured) used solely for presigning.
    """

    def __init__(self, cfg: S3Config) -> None:
        self._cfg = cfg
        self._bucket_ready = False
        session = boto3.session.Session()
        signing = Config(signature_version="s3v4", s3={"addressing_style": "path"})

        def _make_client(endpoint: str | None):
            return session.client(
                "s3",
                endpoint_url=endpoint,
                aws_access_key_id=cfg.access_key,
                aws_secret_access_key=cfg.secret_key,
                region_name="us-east-1",
                config=signing,
            )

        self._client = _make_client(cfg.endpoint_url_internal)
        self._presign_client = _make_client(
            cfg.endpoint_url_public or cfg.endpoint_url_internal
        )

    def ensure_bucket(self) -> None:
        """Lazily create the configured bucket on first use if missing."""
        if self._bucket_ready:
            return
        try:
            self._client.head_bucket(Bucket=self._cfg.bucket_name)
        except ClientError as exc:
            error_code = str(exc.response.get("Error", {}).get("Code", ""))
            if error_code not in {"404", "NoSuchBucket"}:
                raise
            self._client.create_bucket(Bucket=self._cfg.bucket_name)
        self._bucket_ready = True

    def upload_fileobj(
        self, key: str, fileobj: BinaryIO, *, content_type: str | None
    ) -> None:
        """Stream *fileobj* to the bucket under *key*."""
        self.ensure_bucket()
        extra = None
        if content_type:
            extra = {"ContentType": content_type}
        self._client.upload_fileobj(
            Fileobj=fileobj,
            Bucket=self._cfg.bucket_name,
            Key=key,
            ExtraArgs=extra,
        )

    def download_fileobj(self, key: str, fileobj: BinaryIO) -> None:
        """Stream the object stored under *key* into *fileobj*."""
        self.ensure_bucket()
        self._client.download_fileobj(self._cfg.bucket_name, key, fileobj)

    def exists(self, key: str) -> bool:
        """Return True if an object is stored under *key*."""
        self.ensure_bucket()
        try:
            self._client.head_object(Bucket=self._cfg.bucket_name, Key=key)
        except ClientError as exc:
            error_code = str(exc.response.get("Error", {}).get("Code", ""))
            if error_code in {"404", "NoSuchKey"}:
                return False
            raise
        return True

    def size(self, key: str) -> int:
        """Return the size in bytes of the object stored under *key*."""
        self.ensure_bucket()
        head = self._client.head_object(Bucket=self._cfg.bucket_name, Key=key)
        return int(head.get("ContentLength", 0))

    def delete(self, key: str) -> None:
        """Remove the object stored under *key*."""
        self.ensure_bucket()
        self._client.delete_object(Bucket=self._cfg.bucket_name, Key=key)

    def read(self, key: str) -> bytes:
        """Return the full contents of the object stored under *key*."""
        self.ensure_bucket()
        obj = self._client.get_object(Bucket=self._cfg.bucket_name, Key=key)
        return obj["Body"].read()

    def generate_url(self, key: str) -> str:
        """Return a presigned GET URL for *key* via the public endpoint."""
        self.ensure_bucket()
        return self._presign_client.generate_presigned_url(
            ClientMethod="get_object",
            Params={"Bucket": self._cfg.bucket_name, "Key": key},
            ExpiresIn=self._cfg.presign_expires_seconds,
        )
+11
View File
@@ -0,0 +1,11 @@
from __future__ import annotations
from dataclasses import dataclass
@dataclass(frozen=True)
class FileInfo:
    """Metadata describing a stored file."""

    # Storage key (path within the backend) of the file.
    file_path: str
    # URL from which the file can be fetched.
    file_url: str
    # Size in bytes, when known.
    file_size: int | None = None
    # Base file name, when known.
    filename: str | None = None