diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9acc380 --- /dev/null +++ b/.gitignore @@ -0,0 +1,20 @@ +__pycache__/ +*.py[cod] +*.pyo +*.pyd + +build/ +dist/ +*.egg-info/ + +.pytest_cache/ + +.venv/ +venv/ +env/ +ENV/ + +.mypy_cache/ +.ruff_cache/ + +.DS_Store diff --git a/insforge/storage/client.py b/insforge/storage/client.py index 83ee0af..b72de14 100644 --- a/insforge/storage/client.py +++ b/insforge/storage/client.py @@ -1,21 +1,33 @@ from __future__ import annotations from collections.abc import Mapping +from typing import Any +from typing import Iterable import httpx from .._base_client import BaseClient from ..exceptions import InsforgeHTTPError from .._utils import quote_path_segment +from .models import DownloadStrategy from .models import StorageBucketListResponse +from .models import StorageBucketResponse +from .models import StorageBucketUpdateResponse +from .models import StorageDeleteBucketResponse from .models import StorageDeleteObjectResponse +from .models import StorageDownloadResult from .models import StorageObjectResponse +from .models import StoredFileList +from .models import UploadStrategy class StorageClient: def __init__(self, client: BaseClient) -> None: self._client = client + def _bucket_path(self, bucket_name: str) -> str: + return f"/api/storage/buckets/{quote_path_segment(bucket_name)}" + def _object_url_path(self, bucket_name: str, object_key: str) -> str: return ( "/api/storage/buckets/" @@ -32,6 +44,81 @@ async def list_buckets(self, *, access_token: str | None = None) -> StorageBucke ) return StorageBucketListResponse.model_validate(payload) + async def create_bucket( + self, + *, + bucket_name: str, + is_public: bool | None = None, + access_token: str | None = None, + ) -> StorageBucketResponse: + payload: dict[str, Any] = {"bucketName": bucket_name} + if is_public is not None: + payload["isPublic"] = is_public + + response = await self._client._request_json( + "POST", + "/api/storage/buckets", + 
json=payload, + access_token=access_token, + ) + return StorageBucketResponse.model_validate(response) + + async def update_bucket( + self, + bucket_name: str, + *, + is_public: bool, + access_token: str | None = None, + ) -> StorageBucketUpdateResponse: + response = await self._client._request_json( + "PATCH", + self._bucket_path(bucket_name), + json={"isPublic": is_public}, + access_token=access_token, + ) + return StorageBucketUpdateResponse.model_validate(response) + + async def delete_bucket( + self, + bucket_name: str, + *, + access_token: str | None = None, + ) -> StorageDeleteBucketResponse: + response = await self._client._request_json( + "DELETE", + self._bucket_path(bucket_name), + access_token=access_token, + ) + return StorageDeleteBucketResponse.model_validate(response) + + async def list_objects( + self, + bucket_name: str, + *, + prefix: str | None = None, + limit: int | None = None, + offset: int | None = None, + search: str | None = None, + access_token: str | None = None, + ) -> StoredFileList: + params: dict[str, str] = {} + if prefix is not None: + params["prefix"] = prefix + if limit is not None: + params["limit"] = str(limit) + if offset is not None: + params["offset"] = str(offset) + if search is not None: + params["search"] = search + + payload = await self._client._request_json( + "GET", + f"/api/storage/buckets/{quote_path_segment(bucket_name)}/objects", + params=params or None, + access_token=access_token, + ) + return StoredFileList.model_validate(payload) + async def upload_object( self, bucket_name: str, @@ -69,7 +156,7 @@ async def download_object( *, access_token: str | None = None, extra_headers: Mapping[str, str] | None = None, - ) -> bytes: + ) -> StorageDownloadResult: path = self._object_url_path(bucket_name, object_key) response = await self._client.http_client.request( "GET", @@ -87,7 +174,18 @@ async def download_object( response, ) - return response.content + headers = response.headers + content_type = 
(headers.get("Content-Type") or headers.get("content-type")) + content_length = ( + headers.get("Content-Length") + or headers.get("content-length") + or str(len(response.content)) + ) + return StorageDownloadResult( + content=response.content, + content_type=content_type, + content_length=content_length, + ) async def delete_object( self, @@ -105,3 +203,92 @@ async def delete_object( extra_headers=extra_headers, ) return StorageDeleteObjectResponse.model_validate(payload) + + async def upload_object_auto( + self, + bucket_name: str, + *, + data: bytes, + filename: str, + content_type: str | None = None, + access_token: str | None = None, + ) -> StorageObjectResponse: + path = f"/api/storage/buckets/{quote_path_segment(bucket_name)}/objects" + response = await self._client.http_client.request( + "POST", + self._client._build_url(path), + files={"file": (filename, data, content_type or "application/octet-stream")}, + headers=self._client._build_headers(access_token=access_token), + ) + + if response.is_error: + raise InsforgeHTTPError.from_response("POST", path, response) + + return StorageObjectResponse.model_validate(response.json()) + + async def confirm_upload( + self, + bucket_name: str, + object_key: str, + *, + size: int, + content_type: str | None = None, + etag: str | None = None, + access_token: str | None = None, + ) -> StorageObjectResponse: + payload: dict[str, Any] = {"size": size} + if content_type is not None: + payload["contentType"] = content_type + if etag is not None: + payload["etag"] = etag + + response = await self._client._request_json( + "POST", + f"{self._object_url_path(bucket_name, object_key)}/confirm-upload", + json=payload, + access_token=access_token, + ) + return StorageObjectResponse.model_validate(response) + + async def get_upload_strategy( + self, + bucket_name: str, + *, + filename: str, + content_type: str | None = None, + size: int | None = None, + access_token: str | None = None, + ) -> UploadStrategy: + payload: dict[str, 
Any] = {"filename": filename} + if content_type is not None: + payload["contentType"] = content_type + if size is not None: + payload["size"] = size + + response = await self._client._request_json( + "POST", + f"/api/storage/buckets/{quote_path_segment(bucket_name)}/upload-strategy", + json=payload, + access_token=access_token, + ) + return UploadStrategy.model_validate(response) + + async def get_download_strategy( + self, + bucket_name: str, + object_key: str, + *, + expires_in: int | None = None, + access_token: str | None = None, + ) -> DownloadStrategy: + payload: dict[str, Any] = {} + if expires_in is not None: + payload["expiresIn"] = expires_in + + response = await self._client._request_json( + "POST", + f"{self._object_url_path(bucket_name, object_key)}/download-strategy", + json=payload or None, + access_token=access_token, + ) + return DownloadStrategy.model_validate(response) diff --git a/insforge/storage/models.py b/insforge/storage/models.py index bde3660..6546caf 100644 --- a/insforge/storage/models.py +++ b/insforge/storage/models.py @@ -1,6 +1,8 @@ from __future__ import annotations from datetime import datetime +from dataclasses import dataclass +from typing import Any from pydantic import BaseModel, ConfigDict, Field @@ -11,6 +13,21 @@ class StorageBucketListResponse(BaseModel): buckets: list[str] = Field(default_factory=list) +class StorageBucketResponse(BaseModel): + model_config = ConfigDict(extra="ignore") + + message: str + bucket_name: str = Field(alias="bucketName") + + +class StorageBucketUpdateResponse(BaseModel): + model_config = ConfigDict(extra="ignore") + + message: str + bucket: str + is_public: bool = Field(alias="isPublic") + + class StorageObjectResponse(BaseModel): model_config = ConfigDict(extra="ignore", populate_by_name=True) @@ -26,3 +43,54 @@ class StorageDeleteObjectResponse(BaseModel): model_config = ConfigDict(extra="ignore") message: str + + +class StoragePagination(BaseModel): + model_config = 
ConfigDict(extra="ignore") + + limit: int | None = None + offset: int | None = None + total: int | None = None + + +class StoredFileList(BaseModel): + model_config = ConfigDict(extra="ignore", populate_by_name=True) + + data: list[StorageObjectResponse] = Field(default_factory=list) + pagination: StoragePagination | None = None + next_actions: str | None = Field(default=None, alias="nextActions") + + +class UploadStrategy(BaseModel): + model_config = ConfigDict(extra="ignore", populate_by_name=True) + + method: str + upload_url: str = Field(alias="uploadUrl") + key: str + confirm_required: bool = Field(alias="confirmRequired") + confirm_url: str | None = Field(default=None, alias="confirmUrl") + expires_at: datetime | None = Field(default=None, alias="expiresAt") + fields: dict[str, Any] | None = None + + +class DownloadStrategy(BaseModel): + model_config = ConfigDict(extra="ignore", populate_by_name=True) + + method: str + url: str + expires_at: datetime | None = Field(default=None, alias="expiresAt") + headers: dict[str, str] | None = None + + +class StorageDeleteBucketResponse(BaseModel): + model_config = ConfigDict(extra="ignore") + + message: str + next_actions: str | None = Field(default=None, alias="nextActions") + + +@dataclass +class StorageDownloadResult: + content: bytes + content_type: str | None = None + content_length: str | None = None diff --git a/tests/storage/test_storage_client.py b/tests/storage/test_storage_client.py index 112b2da..96f27d5 100644 --- a/tests/storage/test_storage_client.py +++ b/tests/storage/test_storage_client.py @@ -7,6 +7,7 @@ from insforge import InsforgeClient from insforge.exceptions import InsforgeHTTPError +from insforge.storage.models import StorageDownloadResult def test_pyproject_includes_storage_package_metadata() -> None: @@ -86,6 +87,45 @@ async def fake_request(method: str, url: httpx.URL, **kwargs: object) -> httpx.R assert result.key == "me.png" +def test_storage_methods_merge_extra_headers() -> None: + async 
def scenario() -> list[dict[str, object]]: + captured: list[dict[str, object]] = [] + + async def fake_request(method: str, url: httpx.URL, **kwargs: object) -> httpx.Response: + captured.append({"method": method, "headers": kwargs.get("headers"), "files": kwargs.get("files")}) + if method == "PUT": + return httpx.Response( + 201, + json={ + "bucket": "avatars", + "key": "me.png", + "size": 9, + "mimeType": "image/png", + "uploadedAt": "2024-01-21T10:30:00Z", + "url": "/api/storage/buckets/avatars/objects/me.png", + }, + ) + if method == "GET": + return httpx.Response( + 200, + content=b"bytes", + headers={"Content-Type": "image/png"}, + ) + return httpx.Response(200, json={"message": "Object deleted"}) + + async with InsforgeClient(base_url="https://example.com", api_key="ins_test") as client: + client.http_client.request = fake_request # type: ignore[method-assign] + await client.storage.upload_object("avatars", "me.png", b"data", extra_headers={"X-Custom": "value"}) + await client.storage.download_object("avatars", "me.png", extra_headers={"X-Custom": "value"}) + await client.storage.delete_object("avatars", "me.png", extra_headers={"X-Custom": "value"}) + return captured + + captures = asyncio.run(scenario()) + assert captures[0]["headers"]["X-Custom"] == "value" + assert captures[1]["headers"]["X-Custom"] == "value" + assert captures[2]["headers"]["X-Custom"] == "value" + + def test_upload_object_encodes_reserved_characters_in_object_key() -> None: async def scenario() -> tuple[object, dict[str, object]]: captured: dict[str, object] = {} @@ -121,15 +161,19 @@ async def fake_request(method: str, url: httpx.URL, **kwargs: object) -> httpx.R assert result.key == "a#b.txt" -def test_download_object_returns_raw_bytes() -> None: - async def scenario() -> tuple[object, dict[str, object]]: +def test_download_object_returns_bytes_and_headers() -> None: + async def scenario() -> tuple[StorageDownloadResult, dict[str, object]]: captured: dict[str, object] = {} async def 
fake_request(method: str, url: httpx.URL, **kwargs: object) -> httpx.Response: captured["method"] = method captured["url"] = str(url) captured["kwargs"] = kwargs - return httpx.Response(200, content=b"png-bytes") + return httpx.Response( + 200, + content=b"png-bytes", + headers={"Content-Type": "image/png", "Content-Length": "9"}, + ) async with InsforgeClient( base_url="https://example.com", @@ -145,7 +189,8 @@ async def fake_request(method: str, url: httpx.URL, **kwargs: object) -> httpx.R assert captured["method"] == "GET" assert captured["url"] == "https://example.com/api/storage/buckets/avatars/objects/me.png" assert captured["kwargs"]["headers"]["X-API-Key"] == "ins_test" - assert result == b"png-bytes" + assert result.content == b"png-bytes" + assert result.content_length is not None def test_download_object_encodes_reserved_characters_in_object_key() -> None: @@ -170,7 +215,7 @@ async def fake_request(method: str, url: httpx.URL, **kwargs: object) -> httpx.R result, captured = asyncio.run(scenario()) assert captured["url"] == "https://example.com/api/storage/buckets/avatars/objects/a%3Fb.txt" - assert result == b"plain-bytes" + assert result.content == b"plain-bytes" def test_delete_object_uses_delete_and_returns_success_payload() -> None: @@ -222,3 +267,194 @@ async def fake_request(method: str, url: httpx.URL, **kwargs: object) -> httpx.R asyncio.run(scenario()) assert exc_info.value.status_code == 500 + + +def test_bucket_admin_endpoints_and_object_listing() -> None: + async def scenario() -> tuple[list[dict[str, object]], object]: + calls: list[dict[str, object]] = [] + + async def fake_request(method: str, url: httpx.URL, **kwargs: object) -> httpx.Response: + calls.append({"method": method, "url": str(url), "kwargs": kwargs}) + if method == "POST" and url.path.endswith("/api/storage/buckets"): + return httpx.Response(201, json={"message": "Bucket created successfully", "bucketName": "avatars"}) + if method == "PATCH": + return httpx.Response(200, 
json={"message": "Bucket visibility updated", "bucket": "avatars", "isPublic": True}) + if method == "DELETE": + return httpx.Response(200, json={"message": "Bucket deleted successfully", "nextActions": "foo"}) + return httpx.Response( + 200, + json={ + "data": [ + { + "bucket": "avatars", + "key": "file.png", + "size": 10, + "mimeType": "image/png", + "uploadedAt": "2024-01-01T00:00:00Z", + "url": "/api/storage/buckets/avatars/objects/file.png", + } + ], + "pagination": {"offset": 0, "limit": 1, "total": 1}, + }, + ) + + async with InsforgeClient(base_url="https://example.com", api_key="ins_test") as client: + client.http_client.request = fake_request # type: ignore[method-assign] + bucket = await client.storage.create_bucket(bucket_name="avatars", is_public=True) + updated = await client.storage.update_bucket("avatars", is_public=True) + deleted = await client.storage.delete_bucket("avatars") + objects = await client.storage.list_objects("avatars", prefix="file", limit=1, offset=0, search="file") + return calls, (bucket, updated, deleted, objects) + + calls, results = asyncio.run(scenario()) + bucket, updated, deleted, objects = results + + assert calls[0]["method"] == "POST" + assert calls[0]["kwargs"]["json"] == {"bucketName": "avatars", "isPublic": True} + assert bucket.bucket_name == "avatars" + + assert calls[1]["method"] == "PATCH" + assert calls[1]["kwargs"]["json"] == {"isPublic": True} + assert updated.bucket == "avatars" + + assert calls[2]["method"] == "DELETE" + assert deleted.message == "Bucket deleted successfully" + assert deleted.next_actions == "foo" + + assert calls[3]["method"] == "GET" + assert calls[3]["kwargs"]["params"] == {"prefix": "file", "limit": "1", "offset": "0", "search": "file"} + assert objects.data[0].key == "file.png" + + +def test_upload_auto_and_strategy_endpoints() -> None: + async def scenario() -> tuple[list[dict[str, object]], object, object, object, object]: + calls: list[dict[str, object]] = [] + + async def fake_request(method: 
str, url: httpx.URL, **kwargs: object) -> httpx.Response: + calls.append({"method": method, "url": str(url), "kwargs": kwargs}) + if url.path.endswith("/objects"): + return httpx.Response( + 201, + json={ + "bucket": "avatars", + "key": "auto.jpg", + "size": 10, + "mimeType": "image/jpeg", + "uploadedAt": "2024-01-01T00:00:00Z", + "url": "/api/storage/buckets/avatars/objects/auto.jpg", + }, + ) + if url.path.endswith("/confirm-upload"): + return httpx.Response( + 201, + json={ + "bucket": "avatars", + "key": "auto.jpg", + "size": 10, + "mimeType": "image/jpeg", + "uploadedAt": "2024-01-01T00:00:00Z", + "url": "/api/storage/buckets/avatars/objects/auto.jpg", + }, + ) + if url.path.endswith("/upload-strategy"): + return httpx.Response( + 200, + json={ + "method": "presigned", + "uploadUrl": "https://example.com/upload", + "key": "auto.jpg", + "confirmRequired": True, + "confirmUrl": "/confirm", + "expiresAt": "2025-01-01T00:00:00Z", + }, + ) + return httpx.Response( + 200, + json={"method": "direct", "url": "/download", "expiresAt": "2025-01-01T00:00:00Z"}, + ) + + async with InsforgeClient(base_url="https://example.com", api_key="ins_test") as client: + client.http_client.request = fake_request # type: ignore[method-assign] + auto = await client.storage.upload_object_auto("avatars", data=b"bytes", filename="auto.jpg") + confirm = await client.storage.confirm_upload("avatars", "auto.jpg", size=10, etag="etag123") + upload_strategy = await client.storage.get_upload_strategy("avatars", filename="auto.jpg", content_type="image/jpeg", size=10) + download_strategy = await client.storage.get_download_strategy("avatars", "auto.jpg", expires_in=3600) + return calls, auto, confirm, upload_strategy, download_strategy + + calls, auto, confirm, upload_strategy, download_strategy = asyncio.run(scenario()) + + assert calls[0]["method"] == "POST" and calls[0]["url"].endswith("/objects") + assert auto.key == "auto.jpg" + + assert calls[1]["method"] == "POST" and 
calls[1]["url"].endswith("/confirm-upload") + assert calls[1]["kwargs"]["json"] == {"size": 10, "etag": "etag123"} + assert confirm.key == "auto.jpg" + + assert calls[2]["method"] == "POST" and calls[2]["url"].endswith("/upload-strategy") + assert calls[2]["kwargs"]["json"] == { + "filename": "auto.jpg", + "contentType": "image/jpeg", + "size": 10, + } + assert upload_strategy.method == "presigned" + + assert calls[3]["method"] == "POST" and calls[3]["url"].endswith("/download-strategy") + assert calls[3]["kwargs"]["json"] == {"expiresIn": 3600} + assert download_strategy.method == "direct" + + +def test_storage_encoding_for_new_admin_paths() -> None: + async def scenario() -> list[dict[str, object]]: + calls: list[dict[str, object]] = [] + + async def fake_request(method: str, url: httpx.URL, **kwargs: object) -> httpx.Response: + calls.append({"method": method, "url": str(url), "kwargs": kwargs}) + if method == "PATCH": + return httpx.Response(200, json={"message": "Bucket visibility updated", "bucket": "av atars", "isPublic": True}) + if method == "DELETE": + return httpx.Response(200, json={"message": "Bucket deleted successfully"}) + if url.path.endswith("/confirm-upload"): + return httpx.Response( + 201, + json={ + "bucket": "my bucket", + "key": "dir/file.txt", + "size": 1, + "mimeType": "application/octet-stream", + "uploadedAt": "2024-01-01T00:00:00Z", + "url": "/api/storage/buckets/my%20bucket/objects/dir%2Ffile.txt", + }, + ) + if url.path.endswith("/upload-strategy"): + return httpx.Response( + 200, + json={ + "method": "presigned", + "uploadUrl": "https://example.com/upload", + "key": "dir/file.txt", + "confirmRequired": True, + "confirmUrl": "/confirm", + }, + ) + if url.path.endswith("/download-strategy"): + return httpx.Response( + 200, + json={"method": "direct", "url": "/download"}, + ) + return httpx.Response(200, json={"message": "ok"}) + + async with InsforgeClient(base_url="https://example.com", api_key="ins_test") as client: + 
client.http_client.request = fake_request # type: ignore[method-assign] + await client.storage.update_bucket("av atars", is_public=True, access_token="admin") + await client.storage.delete_bucket("av atars", access_token="admin") + await client.storage.confirm_upload("my bucket", "dir/file.txt", size=1, access_token="admin") + await client.storage.get_upload_strategy("my bucket", filename="file.txt", access_token="admin") + await client.storage.get_download_strategy("my bucket", "dir/file.txt", expires_in=10, access_token="admin") + return calls + + calls = asyncio.run(scenario()) + assert "av%20atars" in calls[0]["url"] + assert "av%20atars" in calls[1]["url"] + assert "/confirm-upload" in calls[2]["url"] + assert "/upload-strategy" in calls[3]["url"] + assert "/download-strategy" in calls[4]["url"]