Allow storage extra & headers attributes #18

Merged 6 commits on Nov 28, 2022
53 changes: 52 additions & 1 deletion docs/tutorial/using-files-in-models.md
@@ -132,8 +132,59 @@ the default attributes used by [File][sqlalchemy_file.file.File] object internal
[File][sqlalchemy_file.file.File] also provides attribute-style access.
You can access your keys as attributes.
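For instance (a minimal sketch, assuming an `attachment` saved as in the examples further down this page):

```python
# Dict-style and attribute-style access return the same values
assert attachment.content["filename"] == attachment.content.filename
assert attachment.content["content_type"] == attachment.content.content_type
```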

### Extra & Headers

`Apache-libcloud` allows you to store each object with `extra` attributes or additional `headers`.

There are two ways to add `extra` and `headers` with *sqlalchemy-file*:

- on field declaration (shared by all associated files; a usage sketch follows the second example below)

```python

class Attachment(Base):
    __tablename__ = "attachment"

    id = Column(Integer, autoincrement=True, primary_key=True)
    name = Column(String(50), unique=True)
    content = Column(
        FileField(
            extra={
                "acl": "private",
                "dummy_key": "dummy_value",
                "meta_data": {"key1": "value1", "key2": "value2"},
            },
            headers={
                "Access-Control-Allow-Origin": "http://test.com",
                "Custom-Key": "xxxxxxx",
            },
        )
    )

```

- in your [File][sqlalchemy_file.file.File] object

!!! important
    When the field declares a default `extra` attribute, it is overridden by the
    [File][sqlalchemy_file.file.File] object's `extra` attribute.
```python hl_lines="4"
with Session(engine) as session:
    attachment = Attachment(
        name="Public document",
        content=File(DummyFile(), extra={"acl": "public-read"}),
    )
    session.add(attachment)
    session.commit()
    session.refresh(attachment)

    assert attachment.content.file.object.extra["acl"] == "public-read"
```
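
Conversely, when the [File][sqlalchemy_file.file.File] object does not set its own `extra`, the field-level defaults declared above apply. A rough sketch under the same setup as the previous example (whether `object.extra` echoes the `acl` back depends on the storage driver):

```python
with Session(engine) as session:
    attachment = Attachment(
        name="Private document",
        content=File(DummyFile()),  # no extra here, so the FileField defaults are used
    )
    session.add(attachment)
    session.commit()
    session.refresh(attachment)

    assert attachment.content.file.object.extra["acl"] == "private"
```
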
### Metadata

!!! warning
    This attribute is now deprecated; migrate to [extra](#extra-headers) instead.

*SQLAlchemy-file* stores the uploaded file with some metadata. Only `filename` and `content_type` are sent by default.
You can complete them with the `metadata` key inside your [File][sqlalchemy_file.file.File] object.

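As an illustrative sketch (reusing the `Attachment` model and the `DummyFile` helper from the examples above; the deprecated form is shown only to make the migration path concrete):

```python
with Session(engine) as session:
    # Deprecated: the `metadata` key now emits a DeprecationWarning on save
    old_style = File(DummyFile(), metadata={"key1": "val1", "key2": "val2"})

    # Preferred: nest the same mapping under extra["meta_data"]
    new_style = File(DummyFile(), extra={"meta_data": {"key1": "val1", "key2": "val2"}})

    session.add(Attachment(name="Old metadata style", content=old_style))
    session.add(Attachment(name="New metadata style", content=new_style))
    session.commit()
```
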
@@ -153,7 +204,7 @@ the default attributes used by [File][sqlalchemy_file.file.File] object internal

## Uploading on a Specific Storage

By default all the files are uploaded on the default storage which is the first added storage. This can be changed
By default, all the files are uploaded on the default storage which is the first added storage. This can be changed
by passing an `upload_storage` argument explicitly on field declaration:

```Python
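# A minimal sketch rather than the full example from the docs: "documents" is a
# hypothetical storage name, assumed to have been registered beforehand with
# StorageManager.add_storage("documents", container).
class Document(Base):
    __tablename__ = "document"

    id = Column(Integer, autoincrement=True, primary_key=True)
    name = Column(String(50), unique=True)
    content = Column(FileField(upload_storage="documents"))
```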
25 changes: 25 additions & 0 deletions docs_src/tutorial/using-files-in-models/009_extra_and_headers.py
@@ -0,0 +1,25 @@
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base
from sqlalchemy_file.types import FileField

Base = declarative_base()


class Attachment(Base):
    __tablename__ = "attachment"

    id = Column(Integer, autoincrement=True, primary_key=True)
    name = Column(String(50), unique=True)
    content = Column(
        FileField(
            extra={
                "acl": "private",
                "dummy_key": "dummy_value",
                "meta_data": {"key1": "value1", "key2": "value2"},
            },
            headers={
                "Access-Control-Allow-Origin": "http://test.com",
                "Custom-Key": "xxxxxxx",
            },
        )
    )
2 changes: 1 addition & 1 deletion scripts/format.sh
@@ -2,5 +2,5 @@

set -e
set -x
ruff sqlalchemy_file tests --fix
ruff sqlalchemy_file tests docs_src --fix
black .
34 changes: 30 additions & 4 deletions sqlalchemy_file/file.py
@@ -1,4 +1,5 @@
import uuid
import warnings
from datetime import datetime
from typing import Any, Dict, List, Optional

@@ -84,12 +85,28 @@ def apply_processors(

def save_to_storage(self, upload_storage: Optional[str] = None) -> None:
"""Save current file into provided `upload_storage`"""
metadata = self.get("metadata", {})
metadata.update({"filename": self.filename, "content_type": self.content_type})
extra = self.get("extra", {})
extra.update({"content_type": self.content_type})

metadata = self.get("metadata", None)
if metadata is not None:
warnings.warn(
'metadata attribute is deprecated. Use extra={"meta_data": ...} instead',
DeprecationWarning,
)
extra.update({"meta_data": metadata})

if extra.get("meta_data", None) is None:
extra["meta_data"] = {}

extra["meta_data"].update(
{"filename": self.filename, "content_type": self.content_type}
)
stored_file = self.store_content(
self.original_content,
upload_storage,
metadata=metadata,
extra=extra,
headers=self.get("headers", None),
)
self["file_id"] = stored_file.name
self["upload_storage"] = upload_storage
@@ -104,13 +121,22 @@ def store_content(
upload_storage: Optional[str] = None,
name: Optional[str] = None,
metadata: Optional[Dict[str, Any]] = None,
extra: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, str]] = None,
) -> StoredFile:
"""Store content into provided `upload_storage`
with additional `metadata`. Can be used by processors
to store additional files.
"""
name = name or str(uuid.uuid4())
stored_file = StorageManager.save_file(name, content, upload_storage, metadata)
stored_file = StorageManager.save_file(
name=name,
content=content,
upload_storage=upload_storage,
metadata=metadata,
extra=extra,
headers=headers,
)
self["files"].append("%s/%s" % (upload_storage, name))
return stored_file

1 change: 0 additions & 1 deletion sqlalchemy_file/mutable_list.py
@@ -24,7 +24,6 @@ class MutableList(Mutable, typing.List[T]):
def __init__(self, *args, **kwargs) -> None: # type: ignore
super(MutableList, self).__init__(*args, **kwargs)
self._removed: List[T] = []
# logging.warning(('init', self._removed, args, kwargs))

@classmethod
def coerce(cls, key: Any, value: Any) -> Any:
8 changes: 4 additions & 4 deletions sqlalchemy_file/processors.py
@@ -114,7 +114,8 @@ def process(self, file: "File", upload_storage: Optional[str] = None) -> None:
f"image/{self.thumbnail_format}".lower(),
)
ext = mimetypes.guess_extension(content_type)
metadata = file.get("metadata", {})
extra = file.get("extra", {})
metadata = extra.get("meta_data", {})
metadata.update(
{
"filename": file["filename"] + f".thumbnail{width}x{height}{ext}",
@@ -123,10 +124,9 @@ def process(self, file: "File", upload_storage: Optional[str] = None) -> None:
"height": height,
}
)
extra.update({"meta_data": metadata})
stored_file = file.store_content(
output,
upload_storage,
metadata=metadata,
output, upload_storage, extra=extra, headers=file.get("headers", None)
)
file.update(
{
56 changes: 32 additions & 24 deletions sqlalchemy_file/storage.py
@@ -1,3 +1,4 @@
import warnings
from typing import Any, Dict, Iterator, Optional

from libcloud.storage.base import Container
@@ -70,38 +71,45 @@ def save_file(
content: Iterator[bytes],
upload_storage: Optional[str] = None,
metadata: Optional[Dict[str, Any]] = None,
extra: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, str]] = None,
) -> StoredFile:
if metadata is not None:
warnings.warn(
'metadata attribute is deprecated. Use extra={"meta_data": ...} instead',
DeprecationWarning,
)
extra = {
"meta_data": metadata,
"content_type": metadata.get(
"content_type", "application/octet-stream"
),
}
"""Save file into provided `upload_storage`"""
container = cls.get(upload_storage)
if container.driver.name == LOCAL_STORAGE_DRIVER_NAME:
obj = container.upload_object_via_stream(iterator=content, object_name=name)
if metadata is not None:
"""
Libcloud local storage driver doesn't support metadata, so the metadata
is saved in the same container with the combination of the original name
and `.metadata.json` as name
"""
container.upload_object_via_stream(
iterator=get_metadata_file_obj(metadata),
object_name=f"{name}.metadata.json",
)
return StoredFile(obj)
else:
extra = {}
if metadata is not None:
if "content_type" in metadata:
extra["content_type"] = metadata["content_type"]
extra["meta_data"] = metadata
return StoredFile(
container.upload_object_via_stream(
iterator=content, object_name=name, extra=extra, headers=headers
)
if (
container.driver.name == LOCAL_STORAGE_DRIVER_NAME
and extra is not None
and extra.get("meta_data", None) is not None
):
"""
Libcloud local storage driver doesn't support metadata, so the metadata
is saved in the same container with the combination of the original name
and `.metadata.json` as name
"""
container.upload_object_via_stream(
iterator=get_metadata_file_obj(extra["meta_data"]),
object_name=f"{name}.metadata.json",
)
return StoredFile(
container.upload_object_via_stream(
iterator=content, object_name=name, extra=extra, headers=headers
)
)

@classmethod
def get_file(cls, path: str) -> StoredFile:
"""Retrieve the file with `provided` path
"""Retrieve the file with `provided` path,
path is expected to be `storage_name/file_id`
"""
upload_storage, file_id = path.split("/")
24 changes: 22 additions & 2 deletions sqlalchemy_file/types.py
@@ -44,6 +44,8 @@ def __init__(
processors: Optional[List[Processor]] = None,
upload_type: Type[File] = File,
multiple: Optional[bool] = False,
extra: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, str]] = None,
**kwargs: Dict[str, Any],
) -> None:
"""
@@ -52,8 +54,12 @@ def __init__(
validators: List of validators to apply
processors: List of processors to apply
upload_type: File class to use, could be
use to set custom File class
used to set custom File class
multiple: Use this to save multiple files
extra: Extra attributes (driver specific)
headers: Additional request headers,
such as CORS headers. For example:
headers = {'Access-Control-Allow-Origin': 'http://mozilla.com'}
"""
super().__init__(*args, **kwargs)
if processors is None:
@@ -63,6 +69,8 @@ def __init__(
self.upload_storage = upload_storage
self.upload_type = upload_type
self.multiple = multiple
self.extra = extra
self.headers = headers
self.validators = validators
self.processors = processors

@@ -115,6 +123,8 @@ def __init__(
processors: Optional[List[Processor]] = None,
upload_type: Type[File] = File,
multiple: Optional[bool] = False,
extra: Optional[Dict[str, str]] = None,
headers: Optional[Dict[str, str]] = None,
**kwargs: Dict[str, Any],
) -> None:
"""
@@ -128,8 +138,9 @@ def __init__(
validators: List of additional validators to apply
processors: List of processors to apply
upload_type: File class to use, could be
use to set custom File class
used to set custom File class
multiple: Use this to save multiple files
extra: Extra attributes (driver specific)
"""
if validators is None:
validators = []
@@ -147,6 +158,8 @@ def __init__(
processors=processors,
upload_type=upload_type,
multiple=multiple,
extra=extra,
headers=headers,
**kwargs,
)

@@ -323,6 +336,13 @@ def prepare_file_attr(
upload_storage = column_type.upload_storage or StorageManager.get_default()
for value in prepared_values:
if not getattr(value, "saved", False):
if column_type.extra is not None and value.get("extra", None) is None:
value["extra"] = column_type.extra
if (
column_type.headers is not None
and value.get("headers", None) is None
):
value["headers"] = column_type.headers
value.save_to_storage(upload_storage)
value.apply_processors(column_type.processors, upload_storage)
return changed, (
11 changes: 11 additions & 0 deletions tests/__init__.py
@@ -0,0 +1,11 @@
from libcloud.storage.drivers.dummy import DummyFileObject as BaseDummyFileObject


class DummyFile(BaseDummyFileObject):
"""Add size just for test purpose"""

def __init__(self, yield_count=5, chunk_len=10):
super().__init__(yield_count, chunk_len)
self.size = len(self)
self.filename = "dummy-file"
self.content_type = "application/octet-stream"