first commit
This commit is contained in:
45
backend/venv/lib/python3.9/site-packages/minio/__init__.py
Normal file
45
backend/venv/lib/python3.9/site-packages/minio/__init__.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2015, 2016, 2017 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
minio - MinIO Python SDK for Amazon S3 Compatible Cloud Storage
|
||||
|
||||
>>> from minio import Minio
|
||||
>>> client = Minio(
|
||||
... "play.min.io",
|
||||
... access_key="Q3AM3UQ867SPQQA43P2F",
|
||||
... secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG",
|
||||
... )
|
||||
>>> buckets = client.list_buckets()
|
||||
>>> for bucket in buckets:
|
||||
... print(bucket.name, bucket.creation_date)
|
||||
|
||||
:copyright: (C) 2015-2020 MinIO, Inc.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
# Package metadata consumed by packaging tooling and user-agent strings.
__title__ = "minio-py"
__author__ = "MinIO, Inc."
__version__ = "7.2.20"
__license__ = "Apache 2.0"
__copyright__ = "Copyright 2015, 2016, 2017, 2018, 2019, 2020 MinIO, Inc."

# Re-export the public API at package level; the "X as X" aliases mark the
# names as deliberate re-exports for type checkers.
# pylint: disable=unused-import,useless-import-alias
from .api import Minio as Minio
from .error import InvalidResponseError as InvalidResponseError
from .error import S3Error as S3Error
from .error import ServerError as ServerError
from .minioadmin import MinioAdmin as MinioAdmin
|
||||
3443
backend/venv/lib/python3.9/site-packages/minio/api.py
Normal file
3443
backend/venv/lib/python3.9/site-packages/minio/api.py
Normal file
File diff suppressed because it is too large
Load Diff
457
backend/venv/lib/python3.9/site-packages/minio/commonconfig.py
Normal file
457
backend/venv/lib/python3.9/site-packages/minio/commonconfig.py
Normal file
@@ -0,0 +1,457 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Common request/response configuration of S3 APIs."""
|
||||
# pylint: disable=invalid-name
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import IO, Optional, Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .error import MinioException
|
||||
from .helpers import quote
|
||||
from .sse import SseCustomerKey
|
||||
from .time import to_http_header
|
||||
from .xml import SubElement, find, findall, findtext
|
||||
|
||||
COPY = "COPY"
|
||||
REPLACE = "REPLACE"
|
||||
DISABLED = "Disabled"
|
||||
ENABLED = "Enabled"
|
||||
GOVERNANCE = "GOVERNANCE"
|
||||
COMPLIANCE = "COMPLIANCE"
|
||||
_MAX_KEY_LENGTH = 128
|
||||
_MAX_VALUE_LENGTH = 256
|
||||
_MAX_OBJECT_TAG_COUNT = 10
|
||||
_MAX_TAG_COUNT = 50
|
||||
|
||||
A = TypeVar("A", bound="Tags")
|
||||
|
||||
|
||||
class Tags(dict):
    """dict extended for bucket/object tags with S3 validation rules.

    Keys are limited to 128 characters and values to 256; neither may
    contain '&'. Object tags are capped at 10 entries and bucket tags
    at 50 (AWS S3 tagging limits).
    """

    def __init__(self, for_object: bool = False):
        # True selects the object-tag count limit (10) instead of the
        # bucket-tag limit (50).
        self._for_object = for_object
        super().__init__()

    def __setitem__(self, key: str, value: str):
        """Validate and set a tag, enforcing S3 tag constraints."""
        limit = _MAX_OBJECT_TAG_COUNT if self._for_object else _MAX_TAG_COUNT
        # BUG FIX: replacing an existing key does not grow the dict, so it
        # must be allowed even when the tag count is already at the limit;
        # the original check raised for any assignment at the limit.
        if len(self) == limit and key not in self:
            tag_type = "object" if self._for_object else "bucket"
            raise ValueError(f"only {limit} {tag_type} tags are allowed")
        if not key or len(key) > _MAX_KEY_LENGTH or "&" in key:
            raise ValueError(f"invalid tag key '{key}'")
        if value is None or len(value) > _MAX_VALUE_LENGTH or "&" in value:
            raise ValueError(f"invalid tag value '{value}'")
        super().__setitem__(key, value)

    @classmethod
    def new_bucket_tags(cls: Type[A]) -> A:
        """Create new bucket tags."""
        return cls()

    @classmethod
    def new_object_tags(cls: Type[A]) -> A:
        """Create new object tags."""
        return cls(True)

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from <Tag> children of *element*."""
        elements = findall(element, "Tag")
        obj = cls()
        for tag in elements:
            key = cast(str, findtext(tag, "Key", True))
            value = cast(str, findtext(tag, "Value", True))
            obj[key] = value
        return obj

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Append one <Tag> child per entry to *element* and return it."""
        if element is None:
            raise ValueError("element must be provided")
        for key, value in self.items():
            tag = SubElement(element, "Tag")
            SubElement(tag, "Key", key)
            SubElement(tag, "Value", value)
        return element
|
||||
|
||||
|
||||
B = TypeVar("B", bound="Tag")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Tag:
    """A single immutable key/value tag."""

    key: str
    value: str

    def __post_init__(self):
        # An empty key is rejected; the value may be empty but must be
        # present (not None).
        if not self.key:
            raise ValueError("key must be provided")
        if self.value is None:
            raise ValueError("value must be provided")

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Build a Tag from the <Tag> child of *element*."""
        tag_element = cast(ET.Element, find(element, "Tag", True))
        return cls(
            cast(str, findtext(tag_element, "Key", True)),
            cast(str, findtext(tag_element, "Value", True)),
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Append a <Tag> child carrying this tag and return it."""
        if element is None:
            raise ValueError("element must be provided")
        tag_element = SubElement(element, "Tag")
        SubElement(tag_element, "Key", self.key)
        SubElement(tag_element, "Value", self.value)
        return tag_element
|
||||
|
||||
|
||||
C = TypeVar("C", bound="AndOperator")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class AndOperator:
    """Logical AND of a prefix and a set of tags inside a rule filter."""

    prefix: Optional[str] = None
    tags: Optional[Tags] = None

    def __post_init__(self):
        # At least one of the two components must be present.
        if self.prefix is None and not self.tags:
            raise ValueError("at least prefix or tags must be provided")

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Build an AndOperator from the <And> child of *element*."""
        and_element = cast(ET.Element, find(element, "And", True))
        prefix = findtext(and_element, "Prefix")
        if find(and_element, "Tag") is None:
            tags = None
        else:
            tags = Tags.fromxml(and_element)
        return cls(prefix, tags)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Append an <And> child carrying this operator and return it."""
        if element is None:
            raise ValueError("element must be provided")
        and_element = SubElement(element, "And")
        if self.prefix is not None:
            SubElement(and_element, "Prefix", self.prefix)
        if self.tags is not None:
            self.tags.toxml(and_element)
        return and_element
|
||||
|
||||
|
||||
D = TypeVar("D", bound="Filter")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Filter:
    """Lifecycle rule filter selecting objects by and/prefix/tag."""

    and_operator: Optional[AndOperator] = None
    prefix: Optional[str] = None
    tag: Optional[Tag] = None

    def __post_init__(self):
        # XOR chain over the three presence flags, matching the original
        # validation (an odd number of provided components passes).
        provided = (
            (self.and_operator is not None) ^
            (self.prefix is not None) ^
            (self.tag is not None)
        )
        if not provided:
            raise ValueError("only one of and, prefix or tag must be provided")

    @classmethod
    def fromxml(cls: Type[D], element: ET.Element) -> D:
        """Build a Filter from the <Filter> child of *element*."""
        filter_element = cast(ET.Element, find(element, "Filter", True))
        if find(filter_element, "And") is None:
            and_operator = None
        else:
            and_operator = AndOperator.fromxml(filter_element)
        prefix = findtext(filter_element, "Prefix")
        if find(filter_element, "Tag") is None:
            tag = None
        else:
            tag = Tag.fromxml(filter_element)
        return cls(and_operator, prefix, tag)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Append a <Filter> child carrying this filter and return it."""
        if element is None:
            raise ValueError("element must be provided")
        filter_element = SubElement(element, "Filter")
        if self.and_operator:
            self.and_operator.toxml(filter_element)
        if self.prefix is not None:
            SubElement(filter_element, "Prefix", self.prefix)
        if self.tag is not None:
            self.tag.toxml(filter_element)
        return filter_element
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class BaseRule(ABC):
    """Base rule class for Replication and Lifecycle configurations.

    Validates the status and, when given, normalizes (strips) and
    validates the rule ID: non-empty after stripping, at most 255
    characters.
    """

    status: str  # "Enabled" or "Disabled"
    rule_filter: Optional[Filter] = None
    rule_id: Optional[str] = None

    def __post_init__(self):
        check_status(self.status)
        if self.rule_id is not None:
            rule_id = self.rule_id.strip()
            if not rule_id:
                raise ValueError("rule ID must be non-empty string")
            if len(rule_id) > 255:
                raise ValueError("rule ID must not exceed 255 characters")
            # BUG FIX: direct attribute assignment on a frozen dataclass
            # raises dataclasses.FrozenInstanceError; store the stripped
            # ID via object.__setattr__ instead.
            object.__setattr__(self, "rule_id", rule_id)

    @abstractmethod
    def _require_subclass_implementation(self) -> None:
        """Dummy abstract method to enforce abstract class behavior."""

    @staticmethod
    def parsexml(
            element: ET.Element,
    ) -> tuple[str, Optional[Filter], Optional[str]]:
        """Parse XML and return status, filter and ID."""
        return (
            cast(str, findtext(element, "Status", True)),
            (
                None if find(element, "Filter") is None
                else Filter.fromxml(element)
            ),
            findtext(element, "ID"),
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Serialize status, filter and ID into *element* and return it."""
        if element is None:
            raise ValueError("element must be provided")
        SubElement(element, "Status", self.status)
        if self.rule_filter:
            self.rule_filter.toxml(element)
        if self.rule_id is not None:
            SubElement(element, "ID", self.rule_id)
        return element
|
||||
|
||||
|
||||
def check_status(status: str):
    """Raise ValueError unless *status* is 'Enabled' or 'Disabled'."""
    if status in (ENABLED, DISABLED):
        return
    raise ValueError("status must be 'Enabled' or 'Disabled'")
|
||||
|
||||
|
||||
@dataclass
class ObjectConditionalReadArgs(ABC):
    """Base argument class holding conditional-read properties of an object."""

    bucket_name: str
    object_name: str
    region: Optional[str] = None
    version_id: Optional[str] = None
    ssec: Optional[SseCustomerKey] = None
    offset: Optional[int] = None
    length: Optional[int] = None
    match_etag: Optional[str] = None
    not_match_etag: Optional[str] = None
    modified_since: Optional[datetime] = None
    unmodified_since: Optional[datetime] = None

    def __post_init__(self):
        # Each optional condition is validated only when supplied.
        if self.ssec is not None:
            if not isinstance(self.ssec, SseCustomerKey):
                raise ValueError("ssec must be SseCustomerKey type")
        if self.offset is not None and self.offset < 0:
            raise ValueError("offset should be zero or greater")
        if self.length is not None and self.length <= 0:
            raise ValueError("length should be greater than zero")
        if self.match_etag == "":
            raise ValueError("match_etag must not be empty")
        if self.not_match_etag == "":
            raise ValueError("not_match_etag must not be empty")
        if self.modified_since is not None:
            if not isinstance(self.modified_since, datetime):
                raise ValueError("modified_since must be datetime type")
        if self.unmodified_since is not None:
            if not isinstance(self.unmodified_since, datetime):
                raise ValueError("unmodified_since must be datetime type")

    @abstractmethod
    def _require_subclass_implementation(self) -> None:
        """Dummy abstract method to enforce abstract class behavior."""

    def gen_copy_headers(self) -> dict[str, str]:
        """Generate x-amz-copy-source* headers for this source object."""
        source = quote("/" + self.bucket_name + "/" + self.object_name)
        if self.version_id:
            source += "?versionId=" + quote(self.version_id)

        headers = {"x-amz-copy-source": source}
        if self.ssec:
            headers.update(self.ssec.copy_headers())
        if self.match_etag:
            headers["x-amz-copy-source-if-match"] = self.match_etag
        if self.not_match_etag:
            headers["x-amz-copy-source-if-none-match"] = self.not_match_etag
        if self.modified_since:
            headers["x-amz-copy-source-if-modified-since"] = (
                to_http_header(self.modified_since)
            )
        if self.unmodified_since:
            headers["x-amz-copy-source-if-unmodified-since"] = (
                to_http_header(self.unmodified_since)
            )
        return headers
|
||||
|
||||
|
||||
E = TypeVar("E", bound="CopySource")
|
||||
|
||||
|
||||
@dataclass
class CopySource(ObjectConditionalReadArgs):
    """A source object definition for copy_object method."""

    def _require_subclass_implementation(self) -> None:
        """Concrete no-op; CopySource is instantiable."""

    @classmethod
    def of(cls: Type[E], src: ObjectConditionalReadArgs) -> E:
        """Create a CopySource copying every condition from *src*."""
        return cls(
            bucket_name=src.bucket_name,
            object_name=src.object_name,
            region=src.region,
            version_id=src.version_id,
            ssec=src.ssec,
            offset=src.offset,
            length=src.length,
            match_etag=src.match_etag,
            not_match_etag=src.not_match_etag,
            modified_since=src.modified_since,
            unmodified_since=src.unmodified_since,
        )
|
||||
|
||||
|
||||
F = TypeVar("F", bound="ComposeSource")
|
||||
|
||||
|
||||
@dataclass
class ComposeSource(ObjectConditionalReadArgs):
    """A source object definition for compose_object method."""

    # Populated by build_headers(); deliberately excluded from __init__.
    _object_size: Optional[int] = field(default=None, init=False)
    _headers: Optional[dict[str, str]] = field(default=None, init=False)

    def _require_subclass_implementation(self) -> None:
        """Concrete no-op; ComposeSource is instantiable."""

    def _validate_size(self, object_size: int):
        """Check that offset and length fit within *object_size*."""
        def out_of_range(name, value):
            ver = ("?versionId="+self.version_id) if self.version_id else ""
            return ValueError(
                f"Source {self.bucket_name}/{self.object_name}{ver}: "
                f"{name} {value} is beyond object size {object_size}"
            )

        if self.offset is not None and self.offset >= object_size:
            raise out_of_range("offset", self.offset)
        if self.length is None:
            return
        if self.length > object_size:
            raise out_of_range("length", self.length)
        start = self.offset or 0
        if start + self.length > object_size:
            raise out_of_range("compose size", start + self.length)

    def build_headers(self, object_size: int, etag: str):
        """Validate size and build copy headers; must precede property use."""
        self._validate_size(object_size)
        self._object_size = object_size
        headers = self.gen_copy_headers()
        # Pin the source to the observed etag unless one was supplied.
        headers["x-amz-copy-source-if-match"] = self.match_etag or etag
        self._headers = headers

    @property
    def object_size(self) -> Optional[int]:
        """Get the object size recorded by build_headers()."""
        if self._object_size is None:
            raise MinioException(
                "build_headers() must be called prior to "
                "this method invocation",
            )
        return self._object_size

    @property
    def headers(self) -> dict[str, str]:
        """Get a copy of the headers built by build_headers()."""
        if self._headers is None:
            raise MinioException(
                "build_headers() must be called prior to "
                "this method invocation",
            )
        return self._headers.copy()

    @classmethod
    def of(cls: Type[F], src: ObjectConditionalReadArgs) -> F:
        """Create a ComposeSource copying every condition from *src*."""
        return cls(
            bucket_name=src.bucket_name,
            object_name=src.object_name,
            region=src.region,
            version_id=src.version_id,
            ssec=src.ssec,
            offset=src.offset,
            length=src.length,
            match_etag=src.match_etag,
            not_match_etag=src.not_match_etag,
            modified_since=src.modified_since,
            unmodified_since=src.unmodified_since,
        )
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SnowballObject:
    """A source object definition for upload_snowball_objects method."""

    object_name: str
    filename: Optional[str] = None
    data: Optional[IO[bytes]] = None
    length: Optional[int] = None
    mod_time: Optional[datetime] = None

    def __post_init__(self):
        # Exactly one content source is required: a filename or a stream.
        has_filename = self.filename is not None
        has_data = self.data is not None
        if has_filename == has_data:
            raise ValueError("only one of filename or data must be provided")
        if has_data and self.length is None:
            raise ValueError("length must be provided for data")
        if self.mod_time is not None:
            if not isinstance(self.mod_time, datetime):
                raise ValueError("mod_time must be datetime type")
||||
@@ -0,0 +1,26 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Credential module."""
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from .credentials import Credentials
|
||||
from .providers import (AssumeRoleProvider, AWSConfigProvider,
|
||||
CertificateIdentityProvider, ChainedProvider,
|
||||
ClientGrantsProvider, EnvAWSProvider, EnvMinioProvider,
|
||||
IamAwsProvider, LdapIdentityProvider,
|
||||
MinioClientConfigProvider, Provider, StaticProvider,
|
||||
WebIdentityProvider)
|
||||
@@ -0,0 +1,54 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Credential definitions to access S3 service."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Credentials:
    """
    Represents credentials access key, secret key and session token.

    A timezone-aware *expiration* is normalized to naive UTC at
    construction so the comparison in :meth:`is_expired` is consistent.
    """

    access_key: str
    secret_key: str
    session_token: Optional[str] = None
    expiration: Optional[datetime] = None

    def __post_init__(self):
        if not self.access_key:
            raise ValueError("Access key must not be empty")

        if not self.secret_key:
            raise ValueError("Secret key must not be empty")

        if self.expiration and self.expiration.tzinfo:
            # Frozen dataclass: normalization must go through
            # object.__setattr__.
            object.__setattr__(
                self, "expiration",
                self.expiration.astimezone(timezone.utc).replace(tzinfo=None),
            )

    def is_expired(self) -> bool:
        """Check whether these credentials have (nearly) expired.

        A 10-second buffer treats soon-to-expire credentials as expired.
        """
        if not self.expiration:
            return False
        # FIX: datetime.utcnow() is deprecated (Python 3.12+); derive the
        # equivalent naive-UTC timestamp from an aware "now" instead.
        now = datetime.now(timezone.utc).replace(tzinfo=None)
        return self.expiration < now + timedelta(seconds=10)
|
||||
@@ -0,0 +1,803 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-branches
|
||||
|
||||
"""Credential providers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import configparser
|
||||
import ipaddress
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
from typing import Callable, Optional, cast
|
||||
from urllib.parse import urlencode, urlsplit, urlunsplit
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
import certifi
|
||||
from urllib3.poolmanager import PoolManager
|
||||
|
||||
try:
|
||||
from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from urllib3.response import HTTPResponse as BaseHTTPResponse
|
||||
|
||||
from urllib3.util import Retry, parse_url
|
||||
|
||||
from minio.helpers import sha256_hash, url_replace
|
||||
from minio.signer import sign_v4_sts
|
||||
from minio.time import from_iso8601utc, to_amz_date, utcnow
|
||||
from minio.xml import find, findtext
|
||||
|
||||
from .credentials import Credentials
|
||||
|
||||
_MIN_DURATION_SECONDS = int(timedelta(minutes=15).total_seconds())
|
||||
_MAX_DURATION_SECONDS = int(timedelta(days=7).total_seconds())
|
||||
_DEFAULT_DURATION_SECONDS = int(timedelta(hours=1).total_seconds())
|
||||
|
||||
|
||||
def _parse_credentials(data: str, name: str) -> Credentials:
    """Parse an STS XML response body into Credentials."""
    root = ET.fromstring(data)
    result = cast(ET.Element, find(root, name, True))
    creds = cast(ET.Element, find(result, "Credentials", True))
    return Credentials(
        access_key=cast(str, findtext(creds, "AccessKeyId", True)),
        secret_key=cast(str, findtext(creds, "SecretAccessKey", True)),
        session_token=findtext(creds, "SessionToken", True),
        expiration=from_iso8601utc(findtext(creds, "Expiration", True)),
    )
|
||||
|
||||
|
||||
def _urlopen(
        http_client: PoolManager,
        method: str,
        url: str,
        body: Optional[str | bytes] = None,
        headers: Optional[dict[str, str | list[str] | tuple[str]]] = None,
) -> BaseHTTPResponse:
    """Wrapper of urlopen() raising ValueError on non-success status."""
    response = http_client.urlopen(method, url, body=body, headers=headers)
    if response.status in (200, 204, 206):
        return response
    # Redact any LDAP password in the query string before surfacing the
    # URL in the error message.
    safe_url = re.sub(
        r"LDAPPassword=([^&]+)", "LDAPPassword=*REDACTED*", url,
    )
    raise ValueError(
        f"{safe_url} failed with HTTP status code {response.status}",
    )
|
||||
|
||||
|
||||
def _user_home_dir() -> str:
|
||||
"""Return current user home folder."""
|
||||
return (
|
||||
os.environ.get("HOME") or
|
||||
os.environ.get("UserProfile") or
|
||||
str(Path.home())
|
||||
)
|
||||
|
||||
|
||||
class Provider(ABC):  # pylint: disable=too-few-public-methods
    """Abstract credential retriever."""

    @abstractmethod
    def retrieve(self) -> Credentials:
        """Retrieve credentials and their expiry if available."""
|
||||
|
||||
|
||||
class AssumeRoleProvider(Provider):
    """Assume-role credential provider.

    Obtains temporary credentials from an STS endpoint via the
    AssumeRole API, signing each request with the supplied static
    access/secret key pair and caching the result until it expires.
    """

    def __init__(
            self,
            *,
            sts_endpoint: str,
            access_key: str,
            secret_key: str,
            duration_seconds: int = 0,
            policy: Optional[str] = None,
            region: Optional[str] = None,
            role_arn: Optional[str] = None,
            role_session_name: Optional[str] = None,
            external_id: Optional[str] = None,
            http_client: Optional[PoolManager] = None,
    ):
        self._sts_endpoint = sts_endpoint
        self._access_key = access_key
        self._secret_key = secret_key
        self._region = region or ""
        # Default client retries transient 5xx failures with backoff.
        self._http_client = http_client or PoolManager(
            retries=Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )

        # The request body is fixed at construction time; only the
        # signing headers vary per retrieve() call.
        query_params = {
            "Action": "AssumeRole",
            "Version": "2011-06-15",
            # Durations at or below the default are clamped up to the
            # default (1 hour).
            "DurationSeconds": str(
                duration_seconds
                if duration_seconds > _DEFAULT_DURATION_SECONDS
                else _DEFAULT_DURATION_SECONDS
            ),
        }

        if role_arn:
            query_params["RoleArn"] = role_arn
        if role_session_name:
            query_params["RoleSessionName"] = role_session_name
        if policy:
            query_params["Policy"] = policy
        if external_id:
            query_params["ExternalId"] = external_id

        self._body = urlencode(query_params)
        # Body hash is also fixed, so compute it once.
        self._content_sha256 = sha256_hash(self._body)
        url = urlsplit(sts_endpoint)
        self._url = url
        self._host = url.netloc
        # Drop an explicit default port from the Host header; the V4
        # signature expects the canonical host form.
        if (
                (url.scheme == "http" and url.port == 80) or
                (url.scheme == "https" and url.port == 443)
        ):
            self._host = cast(str, url.hostname)
        # Cached credentials from the last successful retrieve().
        self._credentials: Optional[Credentials] = None

    def retrieve(self) -> Credentials:
        """Retrieve credentials, reusing cached ones until they expire."""
        if self._credentials and not self._credentials.is_expired():
            return self._credentials

        utctime = utcnow()
        headers = sign_v4_sts(
            method="POST",
            url=self._url,
            region=self._region,
            headers={
                "Content-Type": "application/x-www-form-urlencoded",
                "Host": self._host,
                "X-Amz-Date": to_amz_date(utctime),
            },
            credentials=Credentials(
                access_key=self._access_key,
                secret_key=self._secret_key,
            ),
            content_sha256=self._content_sha256,
            date=utctime,
        )

        res = _urlopen(
            self._http_client,
            "POST",
            self._sts_endpoint,
            body=self._body,
            headers=headers,
        )

        self._credentials = _parse_credentials(
            res.data.decode(), "AssumeRoleResult",
        )

        return self._credentials
|
||||
|
||||
|
||||
class ChainedProvider(Provider):
    """Chained credential provider.

    Tries each provider in order and sticks with the first one that
    succeeds, re-consulting the chain only when it later fails.
    """

    def __init__(self, providers: list[Provider]):
        self._providers = providers
        self._provider: Optional[Provider] = None
        self._credentials: Optional[Credentials] = None

    def retrieve(self) -> Credentials:
        """Retrieve credentials from one of the available providers."""
        if self._credentials and not self._credentials.is_expired():
            return self._credentials

        # Prefer the provider that worked last time, then fall back to
        # scanning the full list.
        candidates: list[Provider] = []
        if self._provider:
            candidates.append(self._provider)
        candidates.extend(self._providers)

        for provider in candidates:
            try:
                self._credentials = provider.retrieve()
                self._provider = provider
                return self._credentials
            except ValueError:
                continue  # try the next provider

        raise ValueError("All providers fail to fetch credentials")
|
||||
|
||||
|
||||
class EnvAWSProvider(Provider):
    """Credential provider from AWS environment variables."""

    def retrieve(self) -> Credentials:
        """Retrieve credentials from AWS_* environment variables."""
        access_key = (
            os.environ.get("AWS_ACCESS_KEY_ID") or
            os.environ.get("AWS_ACCESS_KEY")
        )
        secret_key = (
            os.environ.get("AWS_SECRET_ACCESS_KEY") or
            os.environ.get("AWS_SECRET_KEY")
        )
        return Credentials(
            access_key=cast(str, access_key),
            secret_key=cast(str, secret_key),
            session_token=os.environ.get("AWS_SESSION_TOKEN"),
        )
|
||||
|
||||
|
||||
class EnvMinioProvider(Provider):
    """Credential provider from MinIO environment variables."""

    def retrieve(self) -> Credentials:
        """Retrieve credentials from MINIO_* environment variables."""
        env = os.environ
        return Credentials(
            access_key=env.get("MINIO_ACCESS_KEY") or "",
            secret_key=env.get("MINIO_SECRET_KEY") or "",
        )
|
||||
|
||||
|
||||
class AWSConfigProvider(Provider):
    """Credential provider from AWS credential file."""

    def __init__(
            self,
            filename: Optional[str] = None,
            profile: Optional[str] = None,
    ):
        # Resolution order: explicit argument, environment override,
        # then the conventional ~/.aws/credentials location.
        self._filename = (
            filename or
            os.environ.get("AWS_SHARED_CREDENTIALS_FILE") or
            os.path.join(_user_home_dir(), ".aws", "credentials")
        )
        self._profile = profile or os.environ.get("AWS_PROFILE") or "default"

    def retrieve(self) -> Credentials:
        """Retrieve credentials from AWS configuration file."""
        parser = configparser.ConfigParser()
        parser.read(self._filename)

        def read_option(option):
            # Missing section/option yields None rather than raising.
            return parser.get(self._profile, option, fallback=None)

        access_key = read_option("aws_access_key_id")
        secret_key = read_option("aws_secret_access_key")
        session_token = read_option("aws_session_token")

        if not access_key:
            raise ValueError(
                f"access key does not exist in profile "
                f"{self._profile} in AWS credential file {self._filename}"
            )
        if not secret_key:
            raise ValueError(
                f"secret key does not exist in profile "
                f"{self._profile} in AWS credential file {self._filename}"
            )

        return Credentials(
            access_key,
            secret_key,
            session_token=session_token,
        )
|
||||
|
||||
|
||||
class MinioClientConfigProvider(Provider):
    """Credential provider from MinIO Client (mc) configuration file."""

    def __init__(
            self,
            filename: Optional[str] = None,
            alias: Optional[str] = None,
    ):
        # Default to the standard mc config location:
        # %USERPROFILE%\mc\config.json on Windows, ~/.mc/config.json elsewhere.
        self._filename = (
            filename or
            os.path.join(
                _user_home_dir(),
                "mc" if sys.platform == "win32" else ".mc",
                "config.json",
            )
        )
        self._alias = alias or os.environ.get("MINIO_ALIAS") or "s3"

    def retrieve(self) -> Credentials:
        """Retrieve credential value from MinIO client configuration file.

        Raises:
            ValueError: if the file cannot be read, contains no aliases,
                or the configured alias is absent.
        """
        try:
            with open(self._filename, encoding="utf-8") as conf_file:
                config = json.load(conf_file)
            # Older mc versions store entries under "hosts"; newer ones
            # under "aliases".
            aliases = config.get("hosts") or config.get("aliases")
            if not aliases:
                raise ValueError(
                    f"invalid configuration in file {self._filename}",
                )
            creds = aliases.get(self._alias)
            if not creds:
                # Fix: the adjacent f-strings previously rendered as
                # "...MinIO clientconfiguration file..."; add the
                # missing space between the fragments.
                raise ValueError(
                    f"alias {self._alias} not found in MinIO client "
                    f"configuration file {self._filename}"
                )
            return Credentials(creds.get("accessKey"), creds.get("secretKey"))
        except (IOError, OSError) as exc:
            raise ValueError(
                f"error in reading file {self._filename}",
            ) from exc
|
||||
|
||||
|
||||
def _check_loopback_host(url: str):
    """Raise ValueError unless every resolved address of url's host is a
    loopback address."""
    host = cast(str, parse_url(url).host)
    try:
        resolved = {entry[4][0] for entry in socket.getaddrinfo(host, None)}
    except socket.gaierror as exc:
        raise ValueError("Host " + host + " is not loopback address") from exc
    if not all(ipaddress.ip_address(ip).is_loopback for ip in resolved):
        raise ValueError(host + " is not loopback only host")
|
||||
|
||||
|
||||
def _get_jwt_token(token_file: str) -> dict[str, str]:
|
||||
"""Read and return content of token file. """
|
||||
try:
|
||||
with open(token_file, encoding="utf-8") as file:
|
||||
return {"access_token": file.read(), "expires_in": "0"}
|
||||
except (IOError, OSError) as exc:
|
||||
raise ValueError(f"error in reading file {token_file}") from exc
|
||||
|
||||
|
||||
class IamAwsProvider(Provider):
    """Credential provider using IAM roles for Amazon EC2/ECS.

    Resolution order in retrieve():
      1. Web-identity token file (AssumeRoleWithWebIdentity via STS).
      2. ECS container credentials (relative or full URI).
      3. EC2 instance metadata service (IMDSv2).
    Environment variables take precedence over the matching constructor
    arguments.
    """

    def __init__(
            self,
            *,
            custom_endpoint: Optional[str] = None,
            http_client: Optional[PoolManager] = None,
            auth_token: Optional[str] = None,
            relative_uri: Optional[str] = None,
            full_uri: Optional[str] = None,
            token_file: Optional[str] = None,
            role_arn: Optional[str] = None,
            role_session_name: Optional[str] = None,
            region: Optional[str] = None,
    ):
        self._custom_endpoint = custom_endpoint
        # Retry transient 5xx responses from metadata/STS endpoints.
        self._http_client = http_client or PoolManager(
            retries=Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )
        self._token = (
            os.environ.get("AWS_CONTAINER_AUTHORIZATION_TOKEN") or
            auth_token
        )
        # NOTE(review): the non-env fallback here is auth_token, not a
        # dedicated token-file argument — looks suspicious; confirm
        # against upstream intent.
        self._token_file = (
            os.environ.get("AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE") or
            auth_token
        )
        # Web-identity JWT file (e.g. EKS IRSA projected token).
        self._identity_file = (
            os.environ.get("AWS_WEB_IDENTITY_TOKEN_FILE") or token_file
        )
        self._aws_region = os.environ.get("AWS_REGION") or region
        self._role_arn = os.environ.get("AWS_ROLE_ARN") or role_arn
        self._role_session_name = (
            os.environ.get("AWS_ROLE_SESSION_NAME") or role_session_name
        )
        self._relative_uri = (
            os.environ.get("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") or
            relative_uri
        )
        # Normalize so it can be appended directly to the ECS base URL.
        if self._relative_uri and not self._relative_uri.startswith("/"):
            self._relative_uri = "/" + self._relative_uri
        self._full_uri = (
            os.environ.get("AWS_CONTAINER_CREDENTIALS_FULL_URI") or
            full_uri
        )
        # Cached credentials, refreshed on expiry by retrieve().
        self._credentials: Optional[Credentials] = None

    def fetch(
            self,
            url: str,
            headers: Optional[dict[str, str | list[str] | tuple[str]]] = None,
    ) -> Credentials:
        """Fetch credentials from EC2/ECS.

        Expects the AWS metadata credentials JSON document; raises
        ValueError when the response carries a non-"Success" Code field.
        """
        res = _urlopen(self._http_client, "GET", url, headers=headers)
        data = json.loads(res.data)
        if data.get("Code", "Success") != "Success":
            raise ValueError(
                f"{url} failed with code {data['Code']} "
                f"message {data.get('Message')}"
            )
        # Convert the ISO8601 expiry string for later expiry checks.
        data["Expiration"] = from_iso8601utc(data["Expiration"])

        return Credentials(
            data["AccessKeyId"],
            data["SecretAccessKey"],
            data["Token"],
            data["Expiration"],
        )

    def retrieve(self) -> Credentials:
        """Retrieve credentials from WebIdentity/EC2/ECS."""

        # Serve from cache while still valid.
        if self._credentials and not self._credentials.is_expired():
            return self._credentials

        url = self._custom_endpoint
        if self._identity_file:
            # Web-identity flow: exchange the JWT at STS.
            if not url:
                url = "https://sts.amazonaws.com"
                if self._aws_region:
                    url = f"https://sts.{self._aws_region}.amazonaws.com"
                    # China regions use the .amazonaws.com.cn suffix.
                    if self._aws_region.startswith("cn-"):
                        url += ".cn"

            provider = WebIdentityProvider(
                jwt_provider_func=lambda: _get_jwt_token(
                    cast(str, self._identity_file),
                ),
                sts_endpoint=url,
                role_arn=self._role_arn,
                role_session_name=self._role_session_name,
                http_client=self._http_client,
            )
            self._credentials = provider.retrieve()
            return cast(Credentials, self._credentials)

        headers: Optional[dict[str, str | list[str] | tuple[str]]] = None
        if self._relative_uri:
            # ECS task credentials at the fixed link-local address.
            if not url:
                url = "http://169.254.170.2" + self._relative_uri
            headers = {"Authorization": self._token} if self._token else None
        elif self._full_uri:
            token = self._token
            if self._token_file:
                url = self._full_uri
                # Token file contents override any static token.
                with open(self._token_file, encoding="utf-8") as file:
                    token = file.read()
            else:
                if not url:
                    url = self._full_uri
                # Without a token file, the full URI must resolve to
                # loopback only.
                _check_loopback_host(url)
            headers = {"Authorization": token} if token else None
        else:
            # EC2 instance metadata service (IMDSv2).
            if not url:
                url = "http://169.254.169.254"

            # Get IMDS Token
            res = _urlopen(
                self._http_client,
                "PUT",
                url+"/latest/api/token",
                headers={"X-aws-ec2-metadata-token-ttl-seconds": "21600"},
            )
            token = res.data.decode("utf-8")
            headers = {"X-aws-ec2-metadata-token": token} if token else None

            # Get role name
            url = urlunsplit(
                url_replace(
                    url=urlsplit(url),
                    path="/latest/meta-data/iam/security-credentials/",
                ),
            )
            res = _urlopen(self._http_client, "GET", url, headers=headers)
            role_names = res.data.decode("utf-8").split("\n")
            if not role_names:
                raise ValueError(f"no IAM roles attached to EC2 service {url}")
            # Use the first attached role; strip a possible trailing CR.
            url += role_names[0].strip("\r")
        if not url:
            raise ValueError("url is empty; this should not happen")
        self._credentials = self.fetch(url, headers=headers)
        return self._credentials
|
||||
|
||||
|
||||
class LdapIdentityProvider(Provider):
    """Credential provider using AssumeRoleWithLDAPIdentity API."""

    def __init__(  # pylint: disable=too-many-positional-arguments
            self,
            sts_endpoint: str,
            ldap_username: str,
            ldap_password: str,
            duration_seconds: Optional[int] = None,
            policy: Optional[str] = None,
            token_revoke_type: Optional[str] = None,
            http_client: Optional[PoolManager] = None,
    ):
        # Mandatory STS query parameters, in canonical order.
        params = {
            "Action": "AssumeRoleWithLDAPIdentity",
            "Version": "2011-06-15",
            "LDAPUsername": ldap_username,
            "LDAPPassword": ldap_password,
        }
        # Optional parameters are appended only when truthy.
        optional = (
            ("DurationSeconds",
             str(duration_seconds) if duration_seconds else None),
            ("Policy", policy),
            ("TokenRevokeType", token_revoke_type),
        )
        params.update({name: value for name, value in optional if value})

        self._sts_endpoint = sts_endpoint + "?" + urlencode(params)
        self._http_client = http_client or PoolManager(
            retries=Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )
        self._credentials: Optional[Credentials] = None

    def retrieve(self) -> Credentials:
        """Return cached credentials, or request fresh ones from STS."""
        if self._credentials and not self._credentials.is_expired():
            return self._credentials

        res = _urlopen(
            self._http_client,
            "POST",
            self._sts_endpoint,
        )
        self._credentials = _parse_credentials(
            res.data.decode(), "AssumeRoleWithLDAPIdentityResult",
        )
        return self._credentials
|
||||
|
||||
|
||||
class StaticProvider(Provider):
    """Provider that always hands back the same fixed credentials."""

    def __init__(
            self,
            access_key: str,
            secret_key: str,
            session_token: Optional[str] = None,
    ):
        # Built once; never refreshed or expired.
        self._credentials = Credentials(access_key, secret_key, session_token)

    def retrieve(self) -> Credentials:
        """Return the credentials supplied at construction time."""
        return self._credentials
|
||||
|
||||
|
||||
class WebIdentityClientGrantsProvider(Provider, ABC):
    """Base class for WebIdentity and ClientGrants credentials provider."""

    def __init__(
            self,
            *,
            jwt_provider_func: Callable[[], dict[str, str]],
            sts_endpoint: str,
            duration_seconds: int = 0,
            policy: Optional[str] = None,
            role_arn: Optional[str] = None,
            role_session_name: Optional[str] = None,
            http_client: Optional[PoolManager] = None,
    ):
        # Callable returning a token dict with "access_token" (or
        # "id_token") and optionally "expires_in" (seconds as a string).
        self._jwt_provider_func = jwt_provider_func
        self._sts_endpoint = sts_endpoint
        # 0 means "derive the duration from the JWT's expires_in".
        self._duration_seconds = duration_seconds
        self._policy = policy
        self._role_arn = role_arn
        self._role_session_name = role_session_name
        self._http_client = http_client or PoolManager(
            retries=Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )
        # Cached credentials, refreshed on expiry by retrieve().
        self._credentials: Optional[Credentials] = None

    @abstractmethod
    def _is_web_identity(self) -> bool:
        """Check if derived class deal with WebIdentity."""

    def _get_duration_seconds(self, expiry: int) -> int:
        """Get DurationSeconds optimal value.

        An explicit duration overrides *expiry*; positive values are
        clamped into [_MIN_DURATION_SECONDS, _MAX_DURATION_SECONDS];
        non-positive values pass through (meaning "omit the parameter").
        """

        if self._duration_seconds:
            expiry = self._duration_seconds

        if expiry > _MAX_DURATION_SECONDS:
            return _MAX_DURATION_SECONDS

        if expiry <= 0:
            return expiry

        return (
            _MIN_DURATION_SECONDS if expiry < _MIN_DURATION_SECONDS else expiry
        )

    def retrieve(self) -> Credentials:
        """Retrieve credentials."""

        # Serve from cache while still valid.
        if self._credentials and not self._credentials.is_expired():
            return self._credentials

        jwt = self._jwt_provider_func()

        query_params = {"Version": "2011-06-15"}
        duration_seconds = self._get_duration_seconds(
            int(jwt.get("expires_in", "0")),
        )
        if duration_seconds:
            query_params["DurationSeconds"] = str(duration_seconds)
        if self._policy:
            query_params["Policy"] = self._policy

        access_token = jwt.get("access_token") or jwt.get("id_token", "")
        if self._is_web_identity():
            query_params["Action"] = "AssumeRoleWithWebIdentity"
            query_params["WebIdentityToken"] = access_token
            if self._role_arn:
                query_params["RoleArn"] = self._role_arn
                # A session name must accompany RoleArn; default to a
                # timestamp-derived unique value.
                query_params["RoleSessionName"] = (
                    self._role_session_name
                    if self._role_session_name
                    else str(time.time()).replace(".", "")
                )
        else:
            query_params["Action"] = "AssumeRoleWithClientGrants"
            query_params["Token"] = access_token

        url = self._sts_endpoint + "?" + urlencode(query_params)
        res = _urlopen(self._http_client, "POST", url)

        # Result element name depends on which STS action was used.
        self._credentials = _parse_credentials(
            res.data.decode(),
            (
                "AssumeRoleWithWebIdentityResult"
                if self._is_web_identity()
                else "AssumeRoleWithClientGrantsResult"
            ),
        )

        return self._credentials
|
||||
|
||||
|
||||
class ClientGrantsProvider(WebIdentityClientGrantsProvider):
    """Credential provider using AssumeRoleWithClientGrants API."""

    def __init__(
            self,
            *,
            jwt_provider_func: Callable[[], dict[str, str]],
            sts_endpoint: str,
            duration_seconds: int = 0,
            policy: Optional[str] = None,
            http_client: Optional[PoolManager] = None,
    ):
        # Delegate to the shared base; the client-grants flow has no
        # role ARN or session name.
        super().__init__(
            jwt_provider_func=jwt_provider_func,
            sts_endpoint=sts_endpoint,
            duration_seconds=duration_seconds,
            policy=policy,
            http_client=http_client,
        )

    def _is_web_identity(self) -> bool:
        """This provider uses the client-grants flow, not web-identity."""
        return False
|
||||
|
||||
|
||||
class WebIdentityProvider(WebIdentityClientGrantsProvider):
    """Credential provider using AssumeRoleWithWebIdentity API."""

    def _is_web_identity(self) -> bool:
        # Selects the AssumeRoleWithWebIdentity action in the base class.
        return True
|
||||
|
||||
|
||||
class CertificateIdentityProvider(Provider):
    """Credential provider using AssumeRoleWithCertificate API."""

    def __init__(
            self,
            *,
            sts_endpoint: str,
            cert_file: Optional[str] = None,
            key_file: Optional[str] = None,
            key_password: Optional[str] = None,
            ca_certs: Optional[str] = None,
            duration_seconds: int = 0,
            http_client: Optional[PoolManager] = None,
    ):
        # Client-certificate authentication only makes sense over TLS.
        if urlsplit(sts_endpoint).scheme != "https":
            raise ValueError("STS endpoint scheme must be HTTPS")

        # NOTE(review): compares bool(http_client) against the str/None
        # result of (cert_file and key_file); when neither is supplied,
        # False != None is True and no error is raised — verify this
        # check actually enforces what the message claims.
        if not bool(http_client) != (cert_file and key_file):
            raise ValueError(
                "either cert/key file or custom http_client must be provided",
            )

        # DurationSeconds is floored at the default duration.
        self._sts_endpoint = sts_endpoint + "?" + urlencode(
            {
                "Action": "AssumeRoleWithCertificate",
                "Version": "2011-06-15",
                "DurationSeconds": str(
                    duration_seconds
                    if duration_seconds > _DEFAULT_DURATION_SECONDS
                    else _DEFAULT_DURATION_SECONDS
                ),
            },
        )
        # Pool configured for mutual TLS unless a client was injected.
        self._http_client = http_client or PoolManager(
            maxsize=10,
            cert_file=cert_file,
            cert_reqs='CERT_REQUIRED',
            key_file=key_file,
            key_password=key_password,
            ca_certs=ca_certs or certifi.where(),
            retries=Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )
        # Cached credentials, refreshed on expiry by retrieve().
        self._credentials: Optional[Credentials] = None

    def retrieve(self) -> Credentials:
        """Retrieve credentials."""

        # Serve from cache while still valid.
        if self._credentials and not self._credentials.is_expired():
            return self._credentials

        res = _urlopen(
            self._http_client,
            "POST",
            self._sts_endpoint,
        )

        self._credentials = _parse_credentials(
            res.data.decode(), "AssumeRoleWithCertificateResult",
        )

        return self._credentials
|
||||
251
backend/venv/lib/python3.9/site-packages/minio/crypto.py
Normal file
251
backend/venv/lib/python3.9/site-packages/minio/crypto.py
Normal file
@@ -0,0 +1,251 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2015, 2016, 2017 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Cryptography to read and write encrypted MinIO Admin payload"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import os
|
||||
|
||||
from argon2.low_level import Type, hash_secret_raw
|
||||
from Crypto.Cipher import AES, ChaCha20_Poly1305
|
||||
from Crypto.Cipher._mode_gcm import GcmMode
|
||||
from Crypto.Cipher.ChaCha20_Poly1305 import ChaCha20Poly1305Cipher
|
||||
|
||||
try:
|
||||
from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from urllib3.response import HTTPResponse as BaseHTTPResponse
|
||||
|
||||
#
|
||||
# Encrypted Message Format:
|
||||
#
|
||||
# | 41 bytes HEADER |
|
||||
# |-------------------------|
|
||||
# | 16 KiB encrypted chunk |
|
||||
# | + 16 bytes TAG |
|
||||
# |-------------------------|
|
||||
# | .... |
|
||||
# |-------------------------|
|
||||
# | ~16 KiB encrypted chunk |
|
||||
# | + 16 bytes TAG |
|
||||
# |-------------------------|
|
||||
#
|
||||
# HEADER:
|
||||
#
|
||||
# | 32 bytes salt |
|
||||
# |----------------|
|
||||
# | 1 byte AEAD ID |
|
||||
# |----------------|
|
||||
# | 8 bytes NONCE |
|
||||
# |----------------|
|
||||
#
|
||||
|
||||
|
||||
_TAG_LEN = 16
|
||||
_CHUNK_SIZE = 16 * 1024
|
||||
_MAX_CHUNK_SIZE = _TAG_LEN + _CHUNK_SIZE
|
||||
_SALT_LEN = 32
|
||||
_NONCE_LEN = 8
|
||||
|
||||
|
||||
def _get_cipher(
    aead_id: int,
    key: bytes,
    nonce: bytes,
) -> GcmMode | ChaCha20Poly1305Cipher:
    """Construct the AEAD cipher for *aead_id*: 0 = AES-256-GCM,
    1 = ChaCha20-Poly1305."""
    constructors = {
        0: lambda: AES.new(key, AES.MODE_GCM, nonce),
        1: lambda: ChaCha20_Poly1305.new(key=key, nonce=nonce),
    }
    maker = constructors.get(aead_id)
    if maker is None:
        raise ValueError(f"Unknown AEAD ID {aead_id}")
    return maker()
|
||||
|
||||
|
||||
def _generate_key(secret: bytes, salt: bytes) -> bytes:
    """Generate 256-bit Argon2ID key.

    Cost parameters (t=1, m=64 MiB, p=4) are fixed; presumably they must
    match the MinIO server's madmin settings — confirm before changing.
    """
    return hash_secret_raw(
        secret=secret,
        salt=salt,
        time_cost=1,
        memory_cost=65536,
        parallelism=4,
        hash_len=32,
        type=Type.ID,
        version=19,
    )
|
||||
|
||||
|
||||
def _generate_additional_data(
    aead_id: int, key: bytes, padded_nonce: bytes
) -> bytes:
    """Build the AAD: a not-last marker byte (0x00) followed by the tag
    of an empty message under the chunk cipher."""
    empty_message_tag = _get_cipher(aead_id, key, padded_nonce).digest()
    return b"\x00" + empty_message_tag
|
||||
|
||||
|
||||
def _mark_as_last(additional_data: bytes) -> bytes:
|
||||
"""Mark additional data as the last in the sequence"""
|
||||
return b'\x80' + additional_data[1:]
|
||||
|
||||
|
||||
def _update_nonce_id(nonce: bytes, idx: int) -> bytes:
|
||||
"""Set nonce id (4 last bytes)"""
|
||||
return nonce + idx.to_bytes(4, byteorder="little")
|
||||
|
||||
|
||||
def encrypt(payload: bytes, password: str) -> bytes:
    """Encrypt given payload.

    Output layout: 32-byte salt | 1-byte AEAD id | 8-byte nonce header,
    followed by up-to-16-KiB ciphertext chunks each ending in a 16-byte
    tag (see the format diagram above).
    """
    nonce = os.urandom(_NONCE_LEN)
    salt = os.urandom(_SALT_LEN)
    # Derive the AEAD key from the password via Argon2id.
    key = _generate_key(password.encode(), salt)
    aead_id = b"\x00"  # AES-GCM
    padded_nonce = nonce + b"\x00\x00\x00\x00"
    additional_data = _generate_additional_data(aead_id[0], key, padded_nonce)

    indices = range(0, len(payload), _CHUNK_SIZE)
    nonce_id = 0
    result = salt + aead_id + nonce
    for i in indices:
        # Each chunk uses a fresh nonce: base nonce + 1-based counter.
        nonce_id += 1
        if i == indices[-1]:
            # Final chunk is authenticated with the "last" AAD marker.
            additional_data = _mark_as_last(additional_data)
        padded_nonce = _update_nonce_id(nonce, nonce_id)
        cipher = _get_cipher(aead_id[0], key, padded_nonce)
        cipher.update(additional_data)
        encrypted_data, hmac_tag = cipher.encrypt_and_digest(
            payload[i:i+_CHUNK_SIZE],
        )

        result += encrypted_data
        result += hmac_tag

    return result
|
||||
|
||||
|
||||
class DecryptReader:
    """
    BufferedIOBase compatible reader represents decrypted data of MinioAdmin
    APIs.
    """

    def __init__(self, response: BaseHTTPResponse, secret: bytes):
        self._response = response
        self._secret = secret
        # NOTE(review): _payload appears unused by the methods below.
        self._payload = None

        # Parse the fixed 41-byte header: 32-byte salt | 1-byte AEAD id |
        # 8-byte nonce.
        header = self._response.read(41)
        if len(header) != 41:
            raise IOError("insufficient data")
        self._salt = header[:32]
        self._aead_id = header[32]
        self._nonce = header[33:]
        self._key = _generate_key(self._secret, self._salt)
        padded_nonce = self._nonce + b"\x00\x00\x00\x00"
        self._additional_data = _generate_additional_data(
            self._aead_id, self._key, padded_nonce
        )
        # Buffered, still-encrypted bytes read from the response.
        self._chunk = b""
        # Chunk counter; feeds the per-chunk nonce (1-based after first use).
        self._count = 0
        self._is_closed = False

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        return self.close()

    def readable(self):  # pylint: disable=no-self-use
        """Return this is readable."""
        return True

    def writeable(self):  # pylint: disable=no-self-use
        """Return this is not writeable."""
        return False

    def close(self):
        """Close response and release network resources."""
        self._response.close()
        self._response.release_conn()

    def _decrypt(self, payload: bytes, last_chunk: bool = False) -> bytes:
        """Decrypt given payload.

        *payload* is one ciphertext chunk including its trailing 16-byte
        tag; verification failure raises from the cipher.
        """
        self._count += 1
        if last_chunk:
            # The sender authenticates the final chunk with a marked AAD.
            self._additional_data = _mark_as_last(self._additional_data)

        padded_nonce = _update_nonce_id(self._nonce, self._count)
        cipher = _get_cipher(self._aead_id, self._key, padded_nonce)
        cipher.update(self._additional_data)

        hmac_tag = payload[-_TAG_LEN:]
        encrypted_data = payload[:-_TAG_LEN]
        decrypted_data = cipher.decrypt_and_verify(encrypted_data, hmac_tag)
        return decrypted_data

    def _read_chunk(self) -> bool:
        """Read a chunk at least one byte more than chunk size.

        The one extra byte distinguishes a full intermediate chunk from
        the final chunk. Returns True when the response is exhausted.
        """
        if self._is_closed:
            return True

        while len(self._chunk) != (1 + _MAX_CHUNK_SIZE):
            chunk = self._response.read(1 + _MAX_CHUNK_SIZE - len(self._chunk))
            self._chunk += chunk
            if len(chunk) == 0:
                # EOF: whatever is buffered is the tail of the stream.
                self._is_closed = True
                return True

        return False

    def _read(self) -> bytes:
        """Read and decrypt response."""
        stop = self._read_chunk()

        if len(self._chunk) == 0:
            return self._chunk

        length = _MAX_CHUNK_SIZE
        if len(self._chunk) < length:
            # A short buffer can only be the final chunk.
            length = len(self._chunk)
            stop = True
        payload = self._chunk[:length]
        self._chunk = self._chunk[length:]
        return self._decrypt(payload, stop)

    def stream(self, num_bytes=32*1024):
        """
        Stream extracted payload from response data. Upon completion, caller
        should call self.close() to release network resources.
        """
        while True:
            data = self._read()
            if not data:
                break
            # Re-slice each decrypted chunk into at-most-num_bytes pieces.
            while data:
                result = data
                if num_bytes < len(data):
                    result = data[:num_bytes]
                data = data[len(result):]
                yield result
|
||||
|
||||
|
||||
def decrypt(response: BaseHTTPResponse, secret_key: str) -> bytes:
    """Decrypt the entire payload of *response* using *secret_key*."""
    with DecryptReader(response, secret_key.encode()) as reader:
        pieces = list(reader.stream())
    return b"".join(pieces)
|
||||
799
backend/venv/lib/python3.9/site-packages/minio/datatypes.py
Normal file
799
backend/venv/lib/python3.9/site-packages/minio/datatypes.py
Normal file
@@ -0,0 +1,799 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-lines
|
||||
|
||||
"""
|
||||
Response of ListBuckets, ListObjects, ListObjectsV2 and ListObjectVersions API.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import base64
|
||||
import json
|
||||
from collections import OrderedDict
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Any, List, Optional, Tuple, Type, TypeVar, Union, cast
|
||||
from urllib.parse import unquote_plus
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from urllib3._collections import HTTPHeaderDict
|
||||
|
||||
try:
|
||||
from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from urllib3.response import HTTPResponse as BaseHTTPResponse
|
||||
|
||||
from .commonconfig import Tags
|
||||
from .credentials import Credentials
|
||||
from .helpers import check_bucket_name
|
||||
from .signer import get_credential_string, post_presign_v4
|
||||
from .time import from_iso8601utc, to_amz_date, to_iso8601utc
|
||||
from .xml import find, findall, findtext
|
||||
|
||||
JSONDecodeError: type[ValueError]
|
||||
try:
|
||||
from json.decoder import JSONDecodeError
|
||||
except ImportError:
|
||||
JSONDecodeError = ValueError
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Bucket:
    """Immutable bucket record returned by the ListBuckets API."""
    # Bucket name.
    name: str
    # Creation time; None when the server response omitted it.
    creation_date: Optional[datetime]
|
||||
|
||||
|
||||
A = TypeVar("A", bound="ListAllMyBucketsResult")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ListAllMyBucketsResult:
    """ListBuckets API result."""
    buckets: list[Bucket]

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Parse the <Buckets> container element into a result object."""
        def _to_bucket(node: ET.Element) -> Bucket:
            name = cast(str, findtext(node, "Name", True))
            created = findtext(node, "CreationDate")
            return Bucket(
                name,
                from_iso8601utc(created) if created else None,
            )

        container = cast(ET.Element, find(element, "Buckets", True))
        return cls([_to_bucket(node) for node in findall(container, "Bucket")])
|
||||
|
||||
|
||||
B = TypeVar("B", bound="Object")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Object:
    """Object information."""
    bucket_name: str
    object_name: Optional[str]
    last_modified: Optional[datetime] = None
    etag: Optional[str] = None
    size: Optional[int] = None
    metadata: Optional[Union[dict[str, str], HTTPHeaderDict]] = None
    version_id: Optional[str] = None
    is_latest: Optional[str] = None
    storage_class: Optional[str] = None
    owner_id: Optional[str] = None
    owner_name: Optional[str] = None
    content_type: Optional[str] = None
    is_delete_marker: bool = False
    tags: Optional[Tags] = None
    # Derived in __post_init__: True when object_name ends with "/".
    is_dir: bool = field(default=False, init=False)

    def __post_init__(self):
        # Frozen dataclass: bypass immutability to set the derived field.
        object.__setattr__(
            self,
            "is_dir",
            bool(self.object_name and self.object_name.endswith("/")),
        )

    @classmethod
    def fromxml(
            cls: Type[B],
            element: ET.Element,
            bucket_name: str,
            is_delete_marker: bool = False,
            encoding_type: Optional[str] = None,
    ) -> B:
        """Create new object with values from XML element."""
        tag = findtext(element, "LastModified")
        last_modified = None if tag is None else from_iso8601utc(tag)

        tag = findtext(element, "ETag")
        # Servers quote ETags; store them unquoted.
        etag = None if tag is None else tag.replace('"', "")

        tag = findtext(element, "Size")
        size = None if tag is None else int(tag)

        elem = find(element, "Owner")
        owner_id, owner_name = (
            (None, None) if elem is None
            else (findtext(elem, "ID"), findtext(elem, "DisplayName"))
        )

        # UserMetadata children may be XML-namespaced; strip the prefix.
        elems: ET.Element | list = find(element, "UserMetadata") or []
        metadata: dict[str, str] = {}
        for child in elems:
            key = child.tag.split("}")[1] if "}" in child.tag else child.tag
            metadata[key] = child.text or ""

        object_name = cast(str, findtext(element, "Key", True))
        if encoding_type == "url":
            object_name = unquote_plus(object_name)

        # UserTags arrive URL-query encoded: "k1=v1&k2=v2".
        tags_text = findtext(element, "UserTags")
        tags: Optional[Tags] = None
        if tags_text:
            tags = Tags.new_object_tags()
            tags.update(
                cast(
                    List[Tuple[Any, Any]],
                    [tokens.split("=") for tokens in tags_text.split("&")],
                ),
            )

        return cls(
            bucket_name=bucket_name,
            object_name=object_name,
            last_modified=last_modified,
            etag=etag,
            size=size,
            version_id=findtext(element, "VersionId"),
            is_latest=findtext(element, "IsLatest"),
            storage_class=findtext(element, "StorageClass"),
            owner_id=owner_id,
            owner_name=owner_name,
            metadata=metadata,
            is_delete_marker=is_delete_marker,
            tags=tags
        )
|
||||
|
||||
|
||||
def parse_list_objects(
        response: BaseHTTPResponse,
        bucket_name: Optional[str] = None,
) -> tuple[list[Object], bool, Optional[str], Optional[str]]:
    """Parse ListObjects/ListObjectsV2/ListObjectVersions response.

    Returns (objects, is_truncated, continuation_token,
    version_id_marker); the continuation token is whichever next-page
    marker the particular list API provides.
    """
    element = ET.fromstring(response.data.decode())
    # Server-reported bucket name overrides the argument.
    bucket_name = cast(str, findtext(element, "Name", True))
    encoding_type = findtext(element, "EncodingType")
    # ListObjects(V2) entries.
    elements = findall(element, "Contents")
    objects = [
        Object.fromxml(tag, bucket_name, encoding_type=encoding_type)
        for tag in elements
    ]
    # Fallback marker (V1 style) when no explicit NextMarker is sent.
    marker = objects[-1].object_name if objects else None

    # ListObjectVersions entries.
    elements = findall(element, "Version")
    objects += [
        Object.fromxml(tag, bucket_name, encoding_type=encoding_type)
        for tag in elements
    ]

    # Directory-like prefixes become objects carrying only a name.
    elements = findall(element, "CommonPrefixes")
    objects += [
        Object(
            bucket_name, unquote_plus(findtext(tag, "Prefix", True) or "")
            if encoding_type == "url" else findtext(tag, "Prefix", True)
        ) for tag in elements
    ]

    elements = findall(element, "DeleteMarker")
    objects += [
        Object.fromxml(tag, bucket_name, is_delete_marker=True,
                       encoding_type=encoding_type)
        for tag in elements
    ]

    is_truncated = (findtext(element, "IsTruncated") or "").lower() == "true"
    key_marker = findtext(element, "NextKeyMarker")
    if key_marker and encoding_type == "url":
        key_marker = unquote_plus(key_marker)
    version_id_marker = findtext(element, "NextVersionIdMarker")
    continuation_token = findtext(element, "NextContinuationToken")
    # Marker precedence: key marker (versions), continuation token (V2),
    # NextMarker (V1), finally the last object name when truncated.
    if key_marker is not None:
        continuation_token = key_marker
    if continuation_token is None:
        continuation_token = findtext(element, "NextMarker")
        if continuation_token and encoding_type == "url":
            continuation_token = unquote_plus(continuation_token)
    if continuation_token is None and is_truncated:
        continuation_token = marker
    return objects, is_truncated, continuation_token, version_id_marker
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class CompleteMultipartUploadResult:
    """CompleteMultipartUpload API result."""

    http_headers: HTTPHeaderDict
    bucket_name: Optional[str] = None
    object_name: Optional[str] = None
    location: Optional[str] = None
    etag: Optional[str] = None
    version_id: Optional[str] = None

    def __init__(self, response: BaseHTTPResponse):
        # Custom __init__ on a frozen dataclass: fields are populated via
        # object.__setattr__ to bypass immutability.
        object.__setattr__(self, "http_headers", response.headers)
        element = ET.fromstring(response.data.decode())
        object.__setattr__(self, "bucket_name", findtext(element, "Bucket"))
        object.__setattr__(self, "object_name", findtext(element, "Key"))
        object.__setattr__(self, "location", findtext(element, "Location"))
        etag = findtext(element, "ETag")
        if etag:
            # Strip the quotes the server puts around ETags.
            object.__setattr__(
                self,
                "etag",
                cast(str, etag).replace('"', ""),
            )
        object.__setattr__(
            self,
            "version_id",
            response.headers.get("x-amz-version-id"),
        )
|
||||
|
||||
|
||||
# Type variable for Part.fromxml's alternate-constructor return type.
C = TypeVar("C", bound="Part")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Part:
    """Part information of a multipart upload."""
    part_number: int
    etag: str
    last_modified: Optional[datetime] = None
    size: Optional[int] = None

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Create new object with values from XML element."""
        raw_size = findtext(element, "Size")
        modified_text = findtext(element, "LastModified")
        return cls(
            # PartNumber and ETag are mandatory; findtext(..., True) raises
            # if the element is absent.
            part_number=int(cast(str, findtext(element, "PartNumber", True))),
            etag=cast(str, findtext(element, "ETag", True)).replace('"', ""),
            last_modified=(
                None if modified_text is None
                else from_iso8601utc(modified_text)
            ),
            size=int(raw_size) if raw_size else None,
        )
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ListPartsResult:
    """ListParts API result parsed from an XML response."""

    bucket_name: Optional[str] = None
    object_name: Optional[str] = None
    initiator_id: Optional[str] = None
    initiator_name: Optional[str] = None
    owner_id: Optional[str] = None
    owner_name: Optional[str] = None
    storage_class: Optional[str] = None
    part_number_marker: Optional[str] = None
    next_part_number_marker: Optional[str] = None
    max_parts: Optional[int] = None
    is_truncated: bool = False
    parts: list[Part] = field(default_factory=list)

    def __init__(self, response: BaseHTTPResponse):
        def _set(name, value):
            # Bypass the frozen-dataclass guard.
            object.__setattr__(self, name, value)

        root = ET.fromstring(response.data.decode())
        _set("bucket_name", findtext(root, "Bucket"))
        _set("object_name", findtext(root, "Key"))
        initiator = find(root, "Initiator")
        _set("initiator_id",
             None if initiator is None else findtext(initiator, "ID"))
        _set("initiator_name",
             None if initiator is None else findtext(initiator, "DisplayName"))
        owner = find(root, "Owner")
        _set("owner_id", None if owner is None else findtext(owner, "ID"))
        _set("owner_name",
             None if owner is None else findtext(owner, "DisplayName"))
        _set("storage_class", findtext(root, "StorageClass"))
        _set("part_number_marker", findtext(root, "PartNumberMarker"))
        _set("next_part_number_marker",
             findtext(root, "NextPartNumberMarker"))
        max_parts = findtext(root, "MaxParts")
        _set("max_parts", int(max_parts) if max_parts else None)
        truncated = findtext(root, "IsTruncated")
        _set("is_truncated",
             truncated is not None and truncated.lower() == "true")
        _set("parts", [Part.fromxml(tag) for tag in findall(root, "Part")])
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Upload:
    """Upload information of a multipart upload."""

    object_name: str
    encoding_type: Optional[str] = None
    upload_id: Optional[str] = None
    initiator_id: Optional[str] = None
    initiator_name: Optional[str] = None
    owner_id: Optional[str] = None
    owner_name: Optional[str] = None
    storage_class: Optional[str] = None
    initiated_time: Optional[datetime] = None

    def __init__(
            self, element: ET.Element, encoding_type: Optional[str] = None,
    ):
        def _set(name, value):
            # Bypass the frozen-dataclass guard.
            object.__setattr__(self, name, value)

        key = cast(str, findtext(element, "Key", True))
        # Keys arrive URL-encoded when the listing was requested with
        # encoding-type=url.
        _set("object_name",
             unquote_plus(key) if encoding_type == "url" else key)
        _set("encoding_type", encoding_type)
        _set("upload_id", findtext(element, "UploadId"))
        initiator = find(element, "Initiator")
        _set("initiator_id",
             None if initiator is None else findtext(initiator, "ID"))
        _set("initiator_name",
             None if initiator is None else findtext(initiator, "DisplayName"))
        owner = find(element, "Owner")
        _set("owner_id", None if owner is None else findtext(owner, "ID"))
        _set("owner_name",
             None if owner is None else findtext(owner, "DisplayName"))
        _set("storage_class", findtext(element, "StorageClass"))
        initiated = findtext(element, "Initiated")
        _set("initiated_time",
             from_iso8601utc(initiated) if initiated else None)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ListMultipartUploadsResult:
    """ListMultipartUploads API result parsed from an XML response."""

    encoding_type: Optional[str] = None
    bucket_name: Optional[str] = None
    key_marker: Optional[str] = None
    upload_id_marker: Optional[str] = None
    next_key_marker: Optional[str] = None
    next_upload_id_marker: Optional[str] = None
    max_uploads: Optional[int] = None
    is_truncated: bool = False
    uploads: list[Upload] = field(default_factory=list)

    def __init__(self, response: BaseHTTPResponse):
        element = ET.fromstring(response.data.decode())
        encoding_type = findtext(element, "EncodingType")
        object.__setattr__(self, "encoding_type", encoding_type)
        object.__setattr__(self, "bucket_name", findtext(element, "Bucket"))
        # Markers are URL-decoded when encoding-type=url was requested.
        value = findtext(element, "KeyMarker")
        if value is not None and encoding_type == "url":
            value = unquote_plus(value)
        object.__setattr__(self, "key_marker", value)
        object.__setattr__(
            self,
            "upload_id_marker",
            findtext(element, "UploadIdMarker"),
        )
        value = findtext(element, "NextKeyMarker")
        if value is not None and encoding_type == "url":
            value = unquote_plus(value)
        object.__setattr__(self, "next_key_marker", value)
        # BUG FIX: the attribute name previously passed here was the literal
        # string "self._next_upload_id_marker", so the declared field
        # `next_upload_id_marker` stayed at its default of None and callers
        # paginating multipart uploads never saw the server's marker.
        object.__setattr__(
            self,
            "next_upload_id_marker",
            findtext(element, "NextUploadIdMarker"),
        )
        max_uploads = findtext(element, "MaxUploads")
        object.__setattr__(
            self,
            "max_uploads",
            int(max_uploads) if max_uploads else None,
        )
        is_truncated = findtext(element, "IsTruncated")
        object.__setattr__(
            self,
            "is_truncated",
            is_truncated is not None and is_truncated.lower() == "true",
        )
        object.__setattr__(
            self,
            "uploads",
            [
                Upload(tag, encoding_type)
                for tag in findall(element, "Upload")
            ],
        )
|
||||
|
||||
|
||||
# POST-policy form fields this library always sets itself in form_data();
# user-supplied conditions may not override them.
_RESERVED_ELEMENTS = (
    "bucket",
    "x-amz-algorithm",
    "x-amz-credential",
    "x-amz-date",
    "policy",
    "x-amz-signature",
)
# Condition operator names used in the post-policy JSON document.
_EQ = "eq"
_STARTS_WITH = "starts-with"
# AWS Signature Version 4 algorithm identifier.
_ALGORITHM = "AWS4-HMAC-SHA256"
|
||||
|
||||
|
||||
def _trim_dollar(value: str) -> str:
|
||||
"""Trim dollar character if present."""
|
||||
return value[1:] if value.startswith("$") else value
|
||||
|
||||
|
||||
class PostPolicy:
    """
    Post policy information to be used to generate presigned post policy
    form-data. Condition elements and respective condition for Post policy
    is available at
    https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTConstructPolicy.html#sigv4-PolicyConditions
    """

    def __init__(self, bucket_name: str, expiration: datetime):
        check_bucket_name(bucket_name)
        if not isinstance(expiration, datetime):
            raise ValueError("expiration must be datetime type")
        self._bucket_name = bucket_name
        self._expiration = expiration
        # Conditions are grouped by operator ("eq" / "starts-with"); ordered
        # dicts keep the generated policy JSON deterministic, which matters
        # because the document is signed verbatim in form_data().
        self._conditions: OrderedDict = OrderedDict()
        self._conditions[_EQ] = OrderedDict()
        self._conditions[_STARTS_WITH] = OrderedDict()
        # Optional content-length-range limits; both set together or both None.
        self._lower_limit: Optional[int] = None
        self._upper_limit: Optional[int] = None

    def add_equals_condition(self, element: str, value: str):
        """Add equals condition of an element and value.

        Raises ValueError for empty, unsupported, or reserved elements.
        """
        if not element:
            raise ValueError("condition element cannot be empty")
        element = _trim_dollar(element)
        # These elements are not valid as exact-match conditions.
        if (
                element in [
                    "success_action_redirect",
                    "redirect",
                    "content-length-range",
                ]
        ):
            raise ValueError(element + " is unsupported for equals condition")
        if element in _RESERVED_ELEMENTS:
            raise ValueError(element + " cannot be set")
        self._conditions[_EQ][element] = value

    def remove_equals_condition(self, element: str):
        """Remove previously set equals condition of an element.

        Raises KeyError if the element was never added.
        """
        if not element:
            raise ValueError("condition element cannot be empty")
        self._conditions[_EQ].pop(element)

    def add_starts_with_condition(self, element: str, value: str):
        """
        Add starts-with condition of an element and value. Value set to empty
        string does matching any content condition.
        """
        if not element:
            raise ValueError("condition element cannot be empty")
        element = _trim_dollar(element)
        # x-amz-* headers (except user metadata under x-amz-meta-) must be
        # matched exactly, so prefix matching is rejected for them.
        if (
                element in ["success_action_status", "content-length-range"] or
                (
                    element.startswith("x-amz-") and
                    not element.startswith("x-amz-meta-")
                )
        ):
            raise ValueError(
                f"{element} is unsupported for starts-with condition",
            )
        if element in _RESERVED_ELEMENTS:
            raise ValueError(element + " cannot be set")
        self._conditions[_STARTS_WITH][element] = value

    def remove_starts_with_condition(self, element: str):
        """Remove previously set starts-with condition of an element.

        Raises KeyError if the element was never added.
        """
        if not element:
            raise ValueError("condition element cannot be empty")
        self._conditions[_STARTS_WITH].pop(element)

    def add_content_length_range_condition(  # pylint: disable=invalid-name
            self, lower_limit: int, upper_limit: int):
        """Add content-length-range condition with lower and upper limits."""
        if lower_limit < 0:
            raise ValueError("lower limit cannot be negative number")
        if upper_limit < 0:
            raise ValueError("upper limit cannot be negative number")
        if lower_limit > upper_limit:
            raise ValueError("lower limit cannot be greater than upper limit")
        self._lower_limit = lower_limit
        self._upper_limit = upper_limit

    def remove_content_length_range_condition(  # pylint: disable=invalid-name
            self):
        """Remove previously set content-length-range condition."""
        self._lower_limit = None
        self._upper_limit = None

    def form_data(self, creds: Credentials, region: str):
        """
        Return form-data of this post policy. The returned dict contains
        x-amz-algorithm, x-amz-credential, x-amz-security-token, x-amz-date,
        policy and x-amz-signature.

        Raises ValueError if credentials/region are invalid or no "key"
        condition was set.
        """
        if not isinstance(creds, Credentials):
            raise ValueError("credentials must be Credentials type")
        if not region:
            raise ValueError("region cannot be empty")
        # A "key" condition (either operator) is mandatory for POST uploads.
        if (
                "key" not in self._conditions[_EQ] and
                "key" not in self._conditions[_STARTS_WITH]
        ):
            raise ValueError("key condition must be set")

        policy: OrderedDict = OrderedDict()
        policy["expiration"] = to_iso8601utc(self._expiration)
        policy["conditions"] = [[_EQ, "$bucket", self._bucket_name]]
        for cond_key, conditions in self._conditions.items():
            for key, value in conditions.items():
                policy["conditions"].append([cond_key, "$"+key, value])
        if self._lower_limit is not None and self._upper_limit is not None:
            policy["conditions"].append(
                ["content-length-range", self._lower_limit, self._upper_limit],
            )
        # NOTE(review): datetime.utcnow() returns a naive UTC timestamp and is
        # deprecated in newer Pythons; the signing helpers below appear to
        # expect this naive value — confirm before changing.
        utcnow = datetime.utcnow()
        credential = get_credential_string(creds.access_key, utcnow, region)
        amz_date = to_amz_date(utcnow)
        policy["conditions"].append([_EQ, "$x-amz-algorithm", _ALGORITHM])
        policy["conditions"].append([_EQ, "$x-amz-credential", credential])
        if creds.session_token:
            policy["conditions"].append(
                [_EQ, "$x-amz-security-token", creds.session_token],
            )
        policy["conditions"].append([_EQ, "$x-amz-date", amz_date])

        # The signature is computed over the base64-encoded policy document.
        policy_encoded = base64.b64encode(
            json.dumps(policy).encode(),
        ).decode("utf-8")
        signature = post_presign_v4(
            policy_encoded, creds.secret_key, utcnow, region,
        )
        form_data = {
            "x-amz-algorithm": _ALGORITHM,
            "x-amz-credential": credential,
            "x-amz-date": amz_date,
            "policy": policy_encoded,
            "x-amz-signature": signature,
        }
        if creds.session_token:
            form_data["x-amz-security-token"] = creds.session_token
        return form_data

    @property
    def bucket_name(self) -> str:
        """Get bucket name."""
        return self._bucket_name
|
||||
|
||||
|
||||
def parse_copy_object(
        response: BaseHTTPResponse,
) -> tuple[str, Optional[datetime]]:
    """Parse CopyObject/UploadPartCopy response."""
    root = ET.fromstring(response.data.decode())
    # ETag is mandatory in this response; strip the surrounding quotes.
    etag = cast(str, findtext(root, "ETag", True)).replace('"', "")
    modified = findtext(root, "LastModified")
    if modified:
        return etag, from_iso8601utc(modified)
    return etag, None
|
||||
|
||||
|
||||
class EventIterable:
    """Context manager friendly event iterable.

    Wraps a callable that opens a streaming bucket-notification response and
    yields decoded event records, transparently reconnecting when the stream
    closes.
    """

    def __init__(self, func):
        # func: zero-argument callable returning a streaming HTTP response;
        # re-invoked in __next__ whenever the current response is closed.
        self._func = func
        self._response = None

    def _close_response(self):
        """Close response."""
        if self._response:
            self._response.close()
            # Return the connection to urllib3's pool before dropping it.
            self._response.release_conn()
            self._response = None

    def __iter__(self):
        return self

    def _get_records(self):
        """Get event records from response stream."""
        try:
            line = self._response.readline().strip()
            if not line:
                # Empty line (e.g. server keep-alive); caller retries.
                return None
            if hasattr(line, 'decode'):
                line = line.decode()
            event = json.loads(line)
            # Only return events that actually carry records.
            if event['Records']:
                return event
        except (StopIteration, JSONDecodeError):
            # Stream exhausted or partial JSON: drop the connection so
            # __next__ reconnects.
            self._close_response()
        return None

    def __next__(self):
        # NOTE(review): this loops until a record arrives — it never raises
        # StopIteration, so plain `for` iteration blocks indefinitely on a
        # quiet bucket; use the context-manager form to ensure cleanup.
        records = None
        while not records:
            if not self._response or self._response.closed:
                self._response = self._func()
            records = self._get_records()
        return records

    def __enter__(self):
        return self

    def __exit__(self, exc_type, value, traceback):
        self._close_response()
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PeerSite:
    """Represents a cluster/site to be added to the set of replicated sites."""
    name: str
    endpoint: str
    access_key: str
    secret_key: str

    def to_dict(self) -> dict[str, str]:
        """Convert to dictionary."""
        # NOTE: the wire-format key is plural "endpoints" even though a
        # single endpoint string is stored.
        result = {"name": self.name}
        result["endpoints"] = self.endpoint
        result["accessKey"] = self.access_key
        result["secretKey"] = self.secret_key
        return result
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SiteReplicationStatusOptions:
    """Represents site replication status options."""
    # Closed set of entity kinds accepted by the status API.
    ENTITY_TYPE = Enum(
        "ENTITY_TYPE",
        {
            "BUCKET": "bucket",
            "POLICY": "policy",
            "USER": "user",
            "GROUP": "group",
        },
    )
    buckets: bool = False
    policies: bool = False
    users: bool = False
    groups: bool = False
    metrics: bool = False
    show_deleted: bool = False
    entity: Optional[str] = None
    entity_value: Optional[str] = None

    def to_query_params(self) -> dict[str, str]:
        """Convert this options to query parameters."""
        flags = (
            ("buckets", self.buckets),
            ("policies", self.policies),
            ("users", self.users),
            ("groups", self.groups),
            ("metrics", self.metrics),
            ("showDeleted", self.show_deleted),
        )
        # Booleans are serialized as lowercase "true"/"false" strings.
        params = {name: str(flag).lower() for name, flag in flags}
        if self.entity and self.entity_value:
            params["entity"] = self.entity
            params["entityvalue"] = self.entity_value
        return params
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PeerInfo:
    """Site replication peer information."""
    deployment_id: str
    endpoint: str
    bucket_bandwidth_limit: str
    bucket_bandwidth_set: str
    name: Optional[str] = None
    sync_status: Optional[str] = None
    bucket_bandwidth_updated_at: Optional[datetime] = None

    def to_dict(self):
        """Converts peer information to dictionary."""
        bandwidth = {
            "bandwidthLimitPerBucket": self.bucket_bandwidth_limit,
            "set": self.bucket_bandwidth_set,
        }
        if self.bucket_bandwidth_updated_at:
            bandwidth["updatedAt"] = to_iso8601utc(
                self.bucket_bandwidth_updated_at,
            )
        data = {
            "endpoint": self.endpoint,
            "deploymentID": self.deployment_id,
            "defaultbandwidth": bandwidth,
        }
        if self.name:
            data["name"] = self.name
        if self.sync_status is not None:
            # Any non-empty string enables sync; "" maps to "disable".
            data["sync"] = "enable" if self.sync_status else "disable"
        return data
|
||||
140
backend/venv/lib/python3.9/site-packages/minio/deleteobjects.py
Normal file
140
backend/venv/lib/python3.9/site-packages/minio/deleteobjects.py
Normal file
@@ -0,0 +1,140 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of DeleteObjects API."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .xml import Element, SubElement, findall, findtext
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DeleteObject:
    """Delete object request information."""

    name: str
    version_id: Optional[str] = None

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        # Append an <Object> child carrying key and optional version id.
        obj_element = SubElement(element, "Object")
        SubElement(obj_element, "Key", self.name)
        if self.version_id is not None:
            SubElement(obj_element, "VersionId", self.version_id)
        return obj_element
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DeleteRequest:
    """Delete object request."""

    object_list: list[DeleteObject]
    quiet: bool = False

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        # NOTE: the passed-in element is ignored; a fresh <Delete> root is
        # always created (the parameter exists for interface symmetry).
        root = Element("Delete")
        if self.quiet:
            SubElement(root, "Quiet", "true")
        for entry in self.object_list:
            entry.toxml(root)
        return root
|
||||
|
||||
|
||||
# Type variable for DeletedObject.fromxml's alternate-constructor return type.
A = TypeVar("A", bound="DeletedObject")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DeletedObject:
    """Deleted object information."""

    name: str
    version_id: Optional[str]
    delete_marker: bool
    delete_marker_version_id: Optional[str]

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        marker_text = findtext(element, "DeleteMarker")
        return cls(
            # Key is mandatory; findtext(..., True) raises when absent.
            name=cast(str, findtext(element, "Key", True)),
            version_id=findtext(element, "VersionId"),
            delete_marker=(
                marker_text is not None and marker_text.title() == "True"
            ),
            delete_marker_version_id=findtext(
                element, "DeleteMarkerVersionId"),
        )
|
||||
|
||||
|
||||
# Type variable for DeleteError.fromxml's alternate-constructor return type.
B = TypeVar("B", bound="DeleteError")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DeleteError:
    """Delete error information."""

    code: str
    message: Optional[str]
    name: Optional[str]
    version_id: Optional[str]

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from XML element."""
        return cls(
            # Code is mandatory; findtext(..., True) raises when absent.
            code=cast(str, findtext(element, "Code", True)),
            message=findtext(element, "Message"),
            name=findtext(element, "Key"),
            version_id=findtext(element, "VersionId"),
        )
|
||||
|
||||
|
||||
# Type variable for DeleteResult.fromxml's alternate-constructor return type.
C = TypeVar("C", bound="DeleteResult")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DeleteResult:
    """Delete object result."""

    object_list: list[DeletedObject]
    error_list: list[DeleteError]

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Create new object with values from XML element."""
        return cls(
            object_list=[
                DeletedObject.fromxml(tag)
                for tag in findall(element, "Deleted")
            ],
            error_list=[
                DeleteError.fromxml(tag)
                for tag in findall(element, "Error")
            ],
        )
|
||||
226
backend/venv/lib/python3.9/site-packages/minio/error.py
Normal file
226
backend/venv/lib/python3.9/site-packages/minio/error.py
Normal file
@@ -0,0 +1,226 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2015-2019 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-lines
|
||||
|
||||
"""
|
||||
minio.error
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This module provides custom exception classes for MinIO library
|
||||
and API specific errors.
|
||||
|
||||
:copyright: (c) 2015, 2016, 2017 by MinIO, Inc.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from typing import Optional, Type, TypeVar
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
try:
|
||||
from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from urllib3.response import HTTPResponse as BaseHTTPResponse
|
||||
|
||||
from .xml import findtext
|
||||
|
||||
|
||||
class MinioException(Exception):
    """Base Minio exception.

    Root of this library's exception hierarchy; catch this to handle any
    library-specific failure.
    """
|
||||
|
||||
|
||||
class InvalidResponseError(MinioException):
    """Raised to indicate that non-xml response from server."""

    def __init__(
            self, code: int, content_type: Optional[str], body: Optional[str],
    ):
        # Keep the raw pieces so __reduce__ can rebuild an identical
        # exception on unpickling.
        self._code = code
        self._content_type = content_type
        self._body = body
        message = (
            f"non-XML response from server; Response code: {code}, "
            f"Content-Type: {content_type}, Body: {body}"
        )
        super().__init__(message)

    def __reduce__(self):
        return type(self), (self._code, self._content_type, self._body)
|
||||
|
||||
|
||||
class ServerError(MinioException):
    """Raised to indicate that S3 service returning HTTP server error."""

    def __init__(self, message: str, status_code: int):
        # HTTP status code is stored separately; the message alone becomes
        # the exception text.
        self._status_code = status_code
        super().__init__(message)

    @property
    def status_code(self) -> int:
        """Get HTTP status code."""
        return self._status_code
|
||||
|
||||
|
||||
# Type variable for S3Error.fromxml's alternate-constructor return type.
A = TypeVar("A", bound="S3Error")
|
||||
|
||||
|
||||
class S3Error(MinioException):
    """
    Raised to indicate that error response is received
    when executing S3 operation.

    Instances are frozen after construction: only the interpreter-managed
    exception attributes (traceback/context/cause) remain writable.
    """
    response: BaseHTTPResponse
    code: Optional[str]
    message: Optional[str]
    resource: Optional[str]
    request_id: Optional[str]
    host_id: Optional[str]
    bucket_name: Optional[str]
    object_name: Optional[str]

    # Attributes the Python runtime must still be able to set/delete after
    # the instance is frozen (set during raise/except handling).
    _EXC_MUTABLES = {"__traceback__", "__context__", "__cause__"}

    def __init__(  # pylint: disable=too-many-positional-arguments
            self,
            response: BaseHTTPResponse,
            code: Optional[str],
            message: Optional[str],
            resource: Optional[str],
            request_id: Optional[str],
            host_id: Optional[str],
            bucket_name: Optional[str] = None,
            object_name: Optional[str] = None,
    ):
        # object.__setattr__ bypasses the frozen guard in __setattr__ below.
        object.__setattr__(self, "response", response)
        object.__setattr__(self, "code", code)
        object.__setattr__(self, "message", message)
        object.__setattr__(self, "resource", resource)
        object.__setattr__(self, "request_id", request_id)
        object.__setattr__(self, "host_id", host_id)
        object.__setattr__(self, "bucket_name", bucket_name)
        object.__setattr__(self, "object_name", object_name)

        # Bucket/object fragments only appear in the message when provided.
        bucket_message = f", bucket_name: {bucket_name}" if bucket_name else ""
        object_message = f", object_name: {object_name}" if object_name else ""

        super().__init__(
            f"S3 operation failed; code: {code}, message: {message}, "
            f"resource: {resource}, request_id: {request_id}, "
            f"host_id: {host_id}{bucket_message}{object_message}"
        )

        # freeze after init
        object.__setattr__(self, "_is_frozen", True)

    def __setattr__(self, name, value):
        # Allow interpreter-managed exception attributes through always.
        if name in self._EXC_MUTABLES:
            object.__setattr__(self, name, value)
            return
        if getattr(self, "_is_frozen", False):
            raise AttributeError(
                f"{self.__class__.__name__} is frozen and "
                "does not allow attribute assignment"
            )
        object.__setattr__(self, name, value)

    def __delattr__(self, name):
        if name in self._EXC_MUTABLES:
            object.__delattr__(self, name)
            return
        if getattr(self, "_is_frozen", False):
            raise AttributeError(
                f"{self.__class__.__name__} is frozen and "
                "does not allow attribute deletion"
            )
        object.__delattr__(self, name)

    @classmethod
    def fromxml(cls: Type[A], response: BaseHTTPResponse) -> A:
        """Create new object with values from XML element."""
        element = ET.fromstring(response.data.decode())
        return cls(
            response=response,
            code=findtext(element, "Code"),
            message=findtext(element, "Message"),
            resource=findtext(element, "Resource"),
            request_id=findtext(element, "RequestId"),
            host_id=findtext(element, "HostId"),
            bucket_name=findtext(element, "BucketName"),
            object_name=findtext(element, "Key"),
        )

    def copy(self, code: str, message: str) -> S3Error:
        """Make a copy with replaced code and message."""
        return S3Error(
            response=self.response,
            code=code,
            message=message,
            resource=self.resource,
            request_id=self.request_id,
            host_id=self.host_id,
            bucket_name=self.bucket_name,
            object_name=self.object_name,
        )

    def __repr__(self):
        return (
            f"S3Error(code={self.code!r}, message={self.message!r}, "
            f"resource={self.resource!r}, request_id={self.request_id!r}, "
            f"host_id={self.host_id!r}, bucket_name={self.bucket_name!r}, "
            f"object_name={self.object_name!r})"
        )

    def __eq__(self, other):
        # Equality deliberately ignores `response`; two errors with the same
        # parsed fields compare equal.
        if not isinstance(other, S3Error):
            return NotImplemented
        return (
            self.code == other.code
            and self.message == other.message
            and self.resource == other.resource
            and self.request_id == other.request_id
            and self.host_id == other.host_id
            and self.bucket_name == other.bucket_name
            and self.object_name == other.object_name
        )

    def __hash__(self):
        # Hash over the same fields as __eq__, keeping the eq/hash contract.
        return hash(
            (
                self.code,
                self.message,
                self.resource,
                self.request_id,
                self.host_id,
                self.bucket_name,
                self.object_name,
            )
        )
|
||||
|
||||
|
||||
class MinioAdminException(Exception):
    """Raised to indicate admin API execution error."""

    def __init__(self, code: str, body: str):
        # Keep constructor args so pickling round-trips via __reduce__.
        self._code = code
        self._body = body
        super().__init__(f"admin request failed; Status: {code}, Body: {body}")

    def __reduce__(self):
        return type(self), (self._code, self._body)
|
||||
860
backend/venv/lib/python3.9/site-packages/minio/helpers.py
Normal file
860
backend/venv/lib/python3.9/site-packages/minio/helpers.py
Normal file
@@ -0,0 +1,860 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2015, 2016, 2017 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Helper functions."""
|
||||
|
||||
from __future__ import absolute_import, annotations, division, unicode_literals
|
||||
|
||||
import base64
|
||||
import errno
|
||||
import hashlib
|
||||
import math
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import urllib.parse
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from queue import Queue
|
||||
from threading import BoundedSemaphore, Thread
|
||||
from typing import BinaryIO, Dict, List, Mapping, Optional, Tuple, Union
|
||||
|
||||
from typing_extensions import Protocol
|
||||
from urllib3._collections import HTTPHeaderDict
|
||||
|
||||
from . import __title__, __version__
|
||||
from .sse import Sse, SseCustomerKey
|
||||
from .time import to_iso8601utc
|
||||
|
||||
# Default User-Agent header value sent with requests.
_DEFAULT_USER_AGENT = (
    f"MinIO ({platform.system()}; {platform.machine()}) "
    f"{__title__}/{__version__}"
)

# S3 multipart upload limits.
MAX_MULTIPART_COUNT = 10000  # 10000 parts
MAX_MULTIPART_OBJECT_SIZE = 5 * 1024 * 1024 * 1024 * 1024  # 5TiB
MAX_PART_SIZE = 5 * 1024 * 1024 * 1024  # 5GiB
MIN_PART_SIZE = 5 * 1024 * 1024  # 5MiB

# Host-name prefix patterns of Amazon S3 endpoints (vpce, s3-control,
# plain s3 and its variants); reused by the endpoint regexes below.
_AWS_S3_PREFIX = (r'^(((bucket\.|accesspoint\.)'
                  r'vpce(-(?!_)[a-z_\d]+(?<!-)(?<!_))+\.s3\.)|'
                  r'((?!s3)(?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.)'
                  r's3-control(-(?!_)[a-z_\d]+(?<!-)(?<!_))*\.|'
                  r'(s3(-(?!_)[a-z_\d]+(?<!-)(?<!_))*\.))')

# Current (strict) S3 bucket-name rule.
_BUCKET_NAME_REGEX = re.compile(r'^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$')
# Legacy bucket-name rule: also allows '_', ':' and mixed case.
_OLD_BUCKET_NAME_REGEX = re.compile(r'^[a-z0-9][a-z0-9_\.\-\:]{1,61}[a-z0-9]$',
                                    re.IGNORECASE)
# Dotted-quad IPv4 literal (bucket names must not look like one).
_IPV4_REGEX = re.compile(
    r'^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}'
    r'(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$')
# RFC-style host name: dot-separated labels of at most 63 chars.
_HOSTNAME_REGEX = re.compile(
    r'^((?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.)*'
    r'((?!_)(?!-)[a-z_\d-]{1,63}(?<!-)(?<!_))$',
    re.IGNORECASE)
# Any amazonaws.com / amazonaws.com.cn host.
_AWS_ENDPOINT_REGEX = re.compile(r'.*\.amazonaws\.com(|\.cn)$', re.IGNORECASE)
# Amazon S3 endpoint (S3 prefix followed by optional labels).
_AWS_S3_ENDPOINT_REGEX = re.compile(
    _AWS_S3_PREFIX +
    r'((?!s3)(?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.)*'
    r'amazonaws\.com(|\.cn)$',
    re.IGNORECASE)
# Elastic Load Balancer endpoint: <name>.<region>.elb.amazonaws.com.
_AWS_ELB_ENDPOINT_REGEX = re.compile(
    r'^(?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.'
    r'(?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.'
    r'elb\.amazonaws\.com$',
    re.IGNORECASE)
_AWS_S3_PREFIX_REGEX = re.compile(_AWS_S3_PREFIX, re.IGNORECASE)
# Region name: a single host label.
_REGION_REGEX = re.compile(r'^((?!_)(?!-)[a-z_\d-]{1,63}(?<!-)(?<!_))$',
                           re.IGNORECASE)

# Header/query mapping: value may be a string or a list/tuple of strings.
DictType = Dict[str, Union[str, List[str], Tuple[str]]]
|
||||
|
||||
|
||||
def quote(
        resource: str,
        safe: str = "/",
        encoding: Optional[str] = None,
        errors: Optional[str] = None,
) -> str:
    """
    Wrapper to urllib.parse.quote() replacing back to '~' for older python
    versions.
    """
    encoded = urllib.parse.quote(
        resource,
        safe=safe,
        encoding=encoding,
        errors=errors,
    )
    # Older pythons percent-encode '~'; S3 signing expects it literal.
    return encoded.replace("%7E", "~")
|
||||
|
||||
|
||||
def queryencode(
        query: str,
        safe: str = "",
        encoding: Optional[str] = None,
        errors: Optional[str] = None,
) -> str:
    """Encode query parameter value; no characters are safe by default."""
    return quote(query, safe, encoding, errors)
|
||||
|
||||
|
||||
def headers_to_strings(
        headers: Mapping[str, str | list[str] | tuple[str]],
        titled_key: bool = False,
) -> str:
    """Convert HTTP headers to a multi-line string."""

    def _redact(text: str) -> str:
        # Hide signatures and credentials in authorization-style values.
        text = re.sub(r"Signature=([0-9a-f]+)", "Signature=*REDACTED*", text)
        return re.sub(r"Credential=([^/]+)", "Credential=*REDACTED*", text)

    lines = []
    for name, value in headers.items():
        display_name = name.title() if titled_key else name
        items = value if isinstance(value, (list, tuple)) else [value]
        for item in items:
            # Redaction is applied only in titled (display) mode, as
            # in the original implementation.
            if titled_key:
                item = _redact(item)
            lines.append(f"{display_name}: {item}")
    return "\n".join(lines)
|
||||
|
||||
|
||||
def _validate_sizes(object_size: int, part_size: int):
    """Validate object and part size; raise ValueError when unsupported."""
    if 0 < part_size < MIN_PART_SIZE:
        raise ValueError(
            f"part size {part_size} is not supported; minimum allowed 5MiB"
        )
    if part_size > MAX_PART_SIZE:
        raise ValueError(
            f"part size {part_size} is not supported; maximum allowed 5GiB"
        )

    if object_size >= 0:
        if object_size > MAX_MULTIPART_OBJECT_SIZE:
            raise ValueError(
                f"object size {object_size} is not supported; "
                f"maximum allowed 5TiB"
            )
    elif part_size <= 0:
        # Unknown object size requires an explicit part size to stream.
        raise ValueError(
            "valid part size must be provided when object size is unknown",
        )
|
||||
|
||||
|
||||
def _get_part_info(object_size: int, part_size: int):
    """Compute (part size, part count) for given object and part size."""
    _validate_sizes(object_size, part_size)

    # Unknown object size: stream with the caller's part size.
    if object_size < 0:
        return part_size, -1

    if part_size > 0:
        chosen = min(part_size, object_size)
        return chosen, (math.ceil(object_size / chosen) if chosen else 1)

    # Derive the smallest multiple of MIN_PART_SIZE that keeps the part
    # count within MAX_MULTIPART_COUNT.
    chosen = MIN_PART_SIZE * math.ceil(
        math.ceil(object_size / MAX_MULTIPART_COUNT) / MIN_PART_SIZE,
    )
    return chosen, (math.ceil(object_size / chosen) if chosen else 1)
|
||||
|
||||
|
||||
def get_part_info(object_size: int, part_size: int) -> tuple[int, int]:
    """Compute part information for object and part size."""
    chosen_size, count = _get_part_info(object_size, part_size)
    # Reject combinations exceeding the S3 multipart part-count limit.
    if count > MAX_MULTIPART_COUNT:
        raise ValueError(
            f"object size {object_size} and part size {part_size} "
            f"make more than {MAX_MULTIPART_COUNT} parts for upload"
        )
    return chosen_size, count
|
||||
|
||||
|
||||
class ProgressType(Protocol):
    """Typing stub (structural protocol) for Put/Get object progress."""

    def set_meta(self, object_name: str, total_length: int):
        """Set progress meta information: object name and total length."""

    def update(self, length: int):
        """Advance current progress by given length."""
|
||||
|
||||
|
||||
def read_part_data(
        stream: BinaryIO,
        size: int,
        part_data: bytes = b"",
        progress: Optional[ProgressType] = None,
) -> bytes:
    """Read part data of given size from stream, including any pre-read
    bytes passed in *part_data*; stops early at EOF."""
    remaining = size - len(part_data)
    while remaining:
        chunk = stream.read(remaining)
        if not chunk:
            break  # EOF reached
        if not isinstance(chunk, bytes):
            raise ValueError("read() must return 'bytes' object")
        part_data += chunk
        remaining -= len(chunk)
        if progress:
            progress.update(len(chunk))
    return part_data
|
||||
|
||||
|
||||
def makedirs(path: str):
    """Wrapper of os.makedirs() that ignores errno.EEXIST; an empty path
    is a no-op."""
    if not path:
        return
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        if exc.errno != errno.EEXIST:
            raise
        # Path already exists but as a non-directory: report it clearly.
        if not os.path.isdir(path):
            raise ValueError(f"path {path} is not a directory") from exc
|
||||
|
||||
|
||||
def check_bucket_name(
        bucket_name: str,
        strict: bool = False,
        s3_check: bool = False,
):
    """
    Check whether bucket name is valid, optionally with strict check.

    :param bucket_name: Bucket name to validate.
    :param strict: Apply current S3 naming rules; otherwise allow legacy
        names (underscores, colons, mixed case).
    :param s3_check: Additionally enforce Amazon S3 reserved
        prefix/suffix rules ('xn--', '-s3alias', '--ol-s3').
    :raises ValueError: If the bucket name is invalid.
    """
    if strict:
        if not _BUCKET_NAME_REGEX.match(bucket_name):
            raise ValueError(f'invalid bucket name {bucket_name}')
    else:
        if not _OLD_BUCKET_NAME_REGEX.match(bucket_name):
            raise ValueError(f'invalid bucket name {bucket_name}')

    if _IPV4_REGEX.match(bucket_name):
        raise ValueError(f'bucket name {bucket_name} must not be formatted '
                         'as an IP address')

    unallowed_successive_chars = ['..', '.-', '-.']
    if any(x in bucket_name for x in unallowed_successive_chars):
        raise ValueError(f'bucket name {bucket_name} contains invalid '
                         'successive characters')

    # Fix: these reserved prefix/suffix rules apply only when s3_check is
    # set. Previously 'and' bound tighter than 'or', so the suffix checks
    # raised even when s3_check was False.
    if s3_check and (
            bucket_name.startswith("xn--") or
            bucket_name.endswith("-s3alias") or
            bucket_name.endswith("--ol-s3")
    ):
        # Message fixed to match the actual suffix checked ('-s3alias').
        raise ValueError(f"bucket name {bucket_name} must not start with "
                         "'xn--' and must not end with '-s3alias' or "
                         "'--ol-s3'")
|
||||
|
||||
|
||||
def check_non_empty_string(string: str | bytes):
    """Check whether given string is not empty; TypeError for objects
    without strip()."""
    try:
        stripped = string.strip()
    except AttributeError as exc:
        raise TypeError() from exc
    if not stripped:
        raise ValueError()
|
||||
|
||||
|
||||
def check_object_name(object_name: str):
    """Check whether given object name is valid (non-empty, no '.'/'..'
    path segments)."""
    check_non_empty_string(object_name)
    segments = object_name.split("/")
    if any(segment in (".", "..") for segment in segments):
        raise ValueError(
            "object name with '.' or '..' path segment is not supported",
        )
|
||||
|
||||
|
||||
def is_valid_policy_type(policy: str | bytes):
    """
    Validate that policy is of str or bytes type and non-empty.

    :param policy: S3 style Bucket policy.
    :return: True if policy parameter is of a valid type.
        Raise :exc:`TypeError` otherwise.
    """
    if not isinstance(policy, (str, bytes)):
        raise TypeError("policy must be str or bytes type")
    check_non_empty_string(policy)
    return True
|
||||
|
||||
|
||||
def check_ssec(sse: Optional[SseCustomerKey]):
    """Check sse is SseCustomerKey type or not; None/falsy is accepted."""
    if not sse:
        return
    if not isinstance(sse, SseCustomerKey):
        raise ValueError("SseCustomerKey type is required")
|
||||
|
||||
|
||||
def check_sse(sse: Optional[Sse]):
    """Check sse is Sse type or not; None/falsy is accepted."""
    if not sse:
        return
    if not isinstance(sse, Sse):
        raise ValueError("Sse type is required")
|
||||
|
||||
|
||||
def md5sum_hash(data: Optional[str | bytes]) -> Optional[str]:
    """Compute MD5 of data and return hash as Base64 encoded value;
    None input yields None."""
    if data is None:
        return None

    payload = data.encode() if isinstance(data, str) else data
    # usedforsecurity=False: MD5 is used here as a checksum only.
    # Refer https://bugs.python.org/issue9216 for more information.
    hasher = hashlib.new(  # type: ignore[call-arg]
        "md5",
        payload,
        usedforsecurity=False,
    )
    encoded = base64.b64encode(hasher.digest())
    return encoded.decode() if isinstance(encoded, bytes) else encoded
|
||||
|
||||
|
||||
def sha256_hash(data: Optional[str | bytes]) -> str:
    """Compute SHA-256 of data and return hash as hex encoded value;
    None is treated as empty input."""
    payload = data or b""
    if isinstance(payload, str):
        payload = payload.encode()
    digest = hashlib.sha256(payload).hexdigest()
    # hexdigest() returns str on CPython; keep the defensive decode the
    # original carried.
    return digest.decode() if isinstance(digest, bytes) else digest
|
||||
|
||||
|
||||
def url_replace(
        *,
        url: urllib.parse.SplitResult,
        scheme: Optional[str] = None,
        netloc: Optional[str] = None,
        path: Optional[str] = None,
        query: Optional[str] = None,
        fragment: Optional[str] = None,
) -> urllib.parse.SplitResult:
    """Return new URL with replaced properties in given URL; None keeps
    the existing value (so "" is a valid replacement)."""
    candidates = {
        "scheme": scheme,
        "netloc": netloc,
        "path": path,
        "query": query,
        "fragment": fragment,
    }
    updates = {key: val for key, val in candidates.items() if val is not None}
    return url._replace(**updates)
|
||||
|
||||
|
||||
def _metadata_to_headers(metadata: DictType) -> dict[str, list[str]]:
|
||||
"""Convert user metadata to headers."""
|
||||
def normalize_key(key: str) -> str:
|
||||
if not key.lower().startswith("x-amz-meta-"):
|
||||
key = "X-Amz-Meta-" + key
|
||||
return key
|
||||
|
||||
def to_string(value) -> str:
|
||||
value = str(value)
|
||||
try:
|
||||
value.encode("us-ascii")
|
||||
except UnicodeEncodeError as exc:
|
||||
raise ValueError(
|
||||
f"unsupported metadata value {value}; "
|
||||
f"only US-ASCII encoded characters are supported"
|
||||
) from exc
|
||||
return value
|
||||
|
||||
def normalize_value(values: str | list[str] | tuple[str]) -> list[str]:
|
||||
if not isinstance(values, (list, tuple)):
|
||||
values = [values]
|
||||
return [to_string(value) for value in values]
|
||||
|
||||
return {
|
||||
normalize_key(key): normalize_value(value)
|
||||
for key, value in (metadata or {}).items()
|
||||
}
|
||||
|
||||
|
||||
def normalize_headers(headers: Optional[DictType]) -> DictType:
    """Normalize headers by prefixing 'X-Amz-Meta-' for user metadata."""
    normalized = {str(key): value for key, value in (headers or {}).items()}

    known_headers = (
        "cache-control",
        "content-encoding",
        "content-type",
        "content-disposition",
        "content-language",
    )

    def _is_user_metadata(key: str) -> bool:
        # Anything not x-amz-* and not a well-known standard header is
        # treated as user metadata.
        lowered = key.lower()
        return not (lowered.startswith("x-amz-") or lowered in known_headers)

    user_metadata = {}
    for key in list(normalized):
        if _is_user_metadata(key):
            user_metadata[key] = normalized.pop(key)

    normalized.update(_metadata_to_headers(user_metadata))
    return normalized
|
||||
|
||||
|
||||
def genheaders(
        headers: Optional[DictType],
        sse: Optional[Sse],
        tags: Optional[dict[str, str]],
        retention,
        legal_hold: bool,
) -> DictType:
    """Generate request headers for given parameters (SSE, tagging,
    object-lock retention and legal hold)."""
    result = normalize_headers(headers)
    if sse:
        result.update(sse.headers())

    tagging = "&".join(
        f"{queryencode(key)}={queryencode(value)}"
        for key, value in (tags or {}).items()
    )
    if tagging:
        result["x-amz-tagging"] = tagging

    if retention and retention.mode:
        result["x-amz-object-lock-mode"] = retention.mode
        result["x-amz-object-lock-retain-until-date"] = (
            to_iso8601utc(retention.retain_until_date) or ""
        )
    if legal_hold:
        result["x-amz-object-lock-legal-hold"] = "ON"
    return result
|
||||
|
||||
|
||||
def _get_aws_info(
        host: str,
        https: bool,
        region: Optional[str],
) -> tuple[Optional[dict], Optional[str]]:
    """
    Extract AWS domain information for given host.

    :param host: Endpoint host name.
    :param https: Whether the endpoint uses HTTPS.
    :param region: Caller-supplied region, if any.
    :return: Tuple of (aws_info, region). aws_info is a dict with keys
        's3_prefix', 'domain_suffix', 'region' and 'dualstack' for
        Amazon S3 hosts, else None; the second element is the region for
        ELB endpoints, else None.
    :raises ValueError: For invalid or unsupported Amazon hosts.
    """
    if not _HOSTNAME_REGEX.match(host):
        return (None, None)

    # Elastic Load Balancer endpoint: region is encoded in the host.
    if _AWS_ELB_ENDPOINT_REGEX.match(host):
        region_in_host = host.split(".elb.amazonaws.com", 1)[0].split(".")[-1]
        return (None, region or region_in_host)

    if not _AWS_ENDPOINT_REGEX.match(host):
        return (None, None)

    # EC2 instance host names are not S3 endpoints.
    if host.startswith("ec2-"):
        return (None, None)

    if not _AWS_S3_ENDPOINT_REGEX.match(host):
        raise ValueError(f"invalid Amazon AWS host {host}")

    matcher = _AWS_S3_PREFIX_REGEX.match(host)
    end = matcher.end() if matcher else 0
    aws_s3_prefix = host[:end]

    if "s3-accesspoint" in aws_s3_prefix and not https:
        raise ValueError(f"use HTTPS scheme for host {host}")

    tokens = host[end:].split(".")
    dualstack = tokens[0] == "dualstack"
    if dualstack:
        tokens = tokens[1:]
    region_in_host = ""
    if tokens[0] not in ["vpce", "amazonaws"]:
        region_in_host = tokens[0]
        tokens = tokens[1:]
    aws_domain_suffix = ".".join(tokens)

    # Fix: equality comparison instead of 'in'; the previous substring
    # test also matched any host that happened to be a substring of
    # "s3-external-1.amazonaws.com".
    if host == "s3-external-1.amazonaws.com":
        region_in_host = "us-east-1"

    if host in ["s3-us-gov-west-1.amazonaws.com",
                "s3-fips-us-gov-west-1.amazonaws.com"]:
        region_in_host = "us-gov-west-1"

    # China endpoints require an explicit region except for accelerate.
    if (aws_domain_suffix.endswith(".cn") and
            not aws_s3_prefix.endswith("s3-accelerate.") and
            not region_in_host and
            not region):
        raise ValueError(
            f"region missing in Amazon S3 China endpoint {host}",
        )

    return ({"s3_prefix": aws_s3_prefix,
             "domain_suffix": aws_domain_suffix,
             "region": region or region_in_host,
             "dualstack": dualstack}, None)
|
||||
|
||||
|
||||
def _parse_url(endpoint: str) -> urllib.parse.SplitResult:
    """Parse endpoint string into a validated, normalized SplitResult."""
    url = urllib.parse.urlsplit(endpoint)
    host = url.hostname

    scheme = url.scheme.lower()
    if scheme not in ("http", "https"):
        raise ValueError("scheme in endpoint must be http or https")
    url = url_replace(url=url, scheme=scheme)

    if url.path and url.path != "/":
        raise ValueError("path in endpoint is not allowed")
    url = url_replace(url=url, path="")

    if url.query:
        raise ValueError("query in endpoint is not allowed")
    if url.fragment:
        raise ValueError("fragment in endpoint is not allowed")

    # Accessing .port raises ValueError for a malformed port.
    try:
        url.port
    except ValueError as exc:
        raise ValueError("invalid port") from exc

    if url.username:
        raise ValueError("username in endpoint is not allowed")
    if url.password:
        raise ValueError("password in endpoint is not allowed")

    # Drop explicit default ports (http:80, https:443) from the netloc.
    if (scheme == "http" and url.port == 80) or (
            scheme == "https" and url.port == 443):
        url = url_replace(url=url, netloc=host)

    return url
|
||||
|
||||
|
||||
class BaseURL:
    """Base URL of S3 endpoint."""

    # Amazon-specific endpoint info ('s3_prefix', 'domain_suffix',
    # 'region', 'dualstack'); None for non-AWS endpoints.
    _aws_info: Optional[dict]
    # Whether virtual-host-style requests (bucket name in host) are used.
    _virtual_style_flag: bool
    # Parsed and normalized endpoint URL.
    _url: urllib.parse.SplitResult
    # Effective region, from constructor argument or host name.
    _region: Optional[str]
    # Whether the endpoint is an S3 transfer-acceleration host.
    _accelerate_host_flag: bool

    def __init__(self, endpoint: str, region: Optional[str]):
        """Parse and validate endpoint; detect AWS specifics and region."""
        url = _parse_url(endpoint)

        if region and not _REGION_REGEX.match(region):
            raise ValueError(f"invalid region {region}")

        hostname = url.hostname or ""
        self._aws_info, region_in_host = _get_aws_info(
            hostname, url.scheme == "https", region)
        # AWS and Alibaba Cloud OSS hosts support virtual-host style.
        self._virtual_style_flag = (
            self._aws_info is not None or hostname.endswith("aliyuncs.com")
        )
        self._url = url
        self._region = region or region_in_host
        self._accelerate_host_flag = False
        if self._aws_info:
            # AWS info region already folds in the caller-supplied region.
            self._region = self._aws_info["region"]
            self._accelerate_host_flag = (
                self._aws_info["s3_prefix"].endswith("s3-accelerate.")
            )

    @property
    def region(self) -> Optional[str]:
        """Get region."""
        return self._region

    @property
    def is_https(self) -> bool:
        """Check if scheme is HTTPS."""
        return self._url.scheme == "https"

    @property
    def host(self) -> str:
        """Get hostname (netloc, including any port)."""
        return self._url.netloc

    @property
    def is_aws_host(self) -> bool:
        """Check if URL points to AWS host."""
        return self._aws_info is not None

    @property
    def aws_s3_prefix(self) -> Optional[str]:
        """Get AWS S3 domain prefix; None for non-AWS endpoints."""
        return self._aws_info["s3_prefix"] if self._aws_info else None

    @aws_s3_prefix.setter
    def aws_s3_prefix(self, s3_prefix: str):
        """Set AWS S3 domain prefix; no-op for non-AWS endpoints."""
        if not _AWS_S3_PREFIX_REGEX.match(s3_prefix):
            raise ValueError(f"invalid AWS S3 domain prefix {s3_prefix}")
        if self._aws_info:
            self._aws_info["s3_prefix"] = s3_prefix

    @property
    def accelerate_host_flag(self) -> bool:
        """Get AWS accelerate host flag."""
        return self._accelerate_host_flag

    @accelerate_host_flag.setter
    def accelerate_host_flag(self, flag: bool):
        """Set AWS accelerate host flag."""
        self._accelerate_host_flag = flag

    @property
    def dualstack_host_flag(self) -> bool:
        """Check if URL points to AWS dualstack host."""
        return self._aws_info["dualstack"] if self._aws_info else False

    @dualstack_host_flag.setter
    def dualstack_host_flag(self, flag: bool):
        """Set AWS dualstack host flag; no-op for non-AWS endpoints."""
        if self._aws_info:
            self._aws_info["dualstack"] = flag

    @property
    def virtual_style_flag(self) -> bool:
        """Check to use virtual style or not."""
        return self._virtual_style_flag

    @virtual_style_flag.setter
    def virtual_style_flag(self, flag: bool):
        """Set whether to use virtual style or not."""
        self._virtual_style_flag = flag

    @classmethod
    def _build_aws_url(
            cls,
            *,
            aws_info: dict,
            url: urllib.parse.SplitResult,
            bucket_name: Optional[str],
            enforce_path_style: bool,
            region: str,
    ) -> urllib.parse.SplitResult:
        """Build AWS host (netloc) for given information."""
        s3_prefix = aws_info["s3_prefix"]
        domain_suffix = aws_info["domain_suffix"]

        host = f"{s3_prefix}{domain_suffix}"
        # Legacy/special endpoints are used verbatim.
        if host in ["s3-external-1.amazonaws.com",
                    "s3-us-gov-west-1.amazonaws.com",
                    "s3-fips-us-gov-west-1.amazonaws.com"]:
            return url_replace(url=url, netloc=host)

        netloc = s3_prefix
        if "s3-accelerate" in s3_prefix:
            # Accelerate endpoints disallow '.' in bucket names.
            if "." in (bucket_name or ""):
                raise ValueError(
                    f"bucket name '{bucket_name}' with '.' is not allowed "
                    f"for accelerate endpoint"
                )
            # Path-style requests cannot use the accelerate endpoint.
            if enforce_path_style:
                netloc = netloc.replace("-accelerate", "", 1)

        if aws_info["dualstack"]:
            netloc += "dualstack."
        # Accelerate endpoints are region-less.
        if "s3-accelerate" not in s3_prefix:
            netloc += region + "."
        netloc += domain_suffix

        return url_replace(url=url, netloc=netloc)

    def _build_list_buckets_url(
            self,
            url: urllib.parse.SplitResult,
            region: Optional[str],
    ) -> urllib.parse.SplitResult:
        """Build URL for ListBuckets API."""
        if not self._aws_info:
            return url

        s3_prefix = self._aws_info["s3_prefix"]
        domain_suffix = self._aws_info["domain_suffix"]

        host = f"{s3_prefix}{domain_suffix}"
        if host in ["s3-external-1.amazonaws.com",
                    "s3-us-gov-west-1.amazonaws.com",
                    "s3-fips-us-gov-west-1.amazonaws.com"]:
            return url_replace(url=url, netloc=host)

        # Normalize any s3 variant prefix to the plain regional s3 host.
        if s3_prefix.startswith("s3.") or s3_prefix.startswith("s3-"):
            s3_prefix = "s3."
            cn_suffix = ".cn" if domain_suffix.endswith(".cn") else ""
            domain_suffix = f"amazonaws.com{cn_suffix}"
        return url_replace(
            url=url,
            netloc=f"{s3_prefix}{region}.{domain_suffix}",
        )

    def build(
            self,
            *,
            method: str,
            region: str,
            bucket_name: Optional[str] = None,
            object_name: Optional[str] = None,
            query_params: Optional[DictType] = None,
    ) -> urllib.parse.SplitResult:
        """Build request URL for given method, region, bucket, object and
        query parameters."""
        if not bucket_name and object_name:
            raise ValueError(
                f"empty bucket name for object name {object_name}",
            )

        url = url_replace(url=self._url, path="/")

        # Sorted, percent-encoded query string (keys and values).
        query = []
        for key, values in sorted((query_params or {}).items()):
            values = values if isinstance(values, (list, tuple)) else [values]
            query += [
                f"{queryencode(key)}={queryencode(value)}"
                for value in sorted(values)
            ]
        url = url_replace(url=url, query="&".join(query))

        if not bucket_name:
            return self._build_list_buckets_url(url, region)

        enforce_path_style = (
            # CreateBucket API requires path style in Amazon AWS S3.
            (method == "PUT" and not object_name and not query_params) or

            # GetBucketLocation API requires path style in Amazon AWS S3.
            (query_params and "location" in query_params) or

            # Use path style for bucket name containing '.' which causes
            # SSL certificate validation error.
            ("." in bucket_name and self._url.scheme == "https")
        )

        if self._aws_info:
            url = BaseURL._build_aws_url(
                aws_info=self._aws_info,
                url=url,
                bucket_name=bucket_name,
                enforce_path_style=enforce_path_style,
                region=region,
            )

        netloc = url.netloc
        path = "/"

        # Path style puts the bucket in the path; virtual style puts it
        # in the host.
        if enforce_path_style or not self._virtual_style_flag:
            path = f"/{bucket_name}"
        else:
            netloc = f"{bucket_name}.{netloc}"
        if object_name:
            path += ("" if path.endswith("/") else "/") + quote(object_name)

        return url_replace(url=url, netloc=netloc, path=path)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ObjectWriteResult:
    """Result class of any APIs doing object creation."""
    # Bucket the object was written to.
    bucket_name: str
    # Key of the created object.
    object_name: str
    # Version ID from the response; None when absent.
    version_id: Optional[str]
    # ETag from the response; None when absent.
    etag: Optional[str]
    # Raw HTTP response headers.
    http_headers: HTTPHeaderDict
    # Last-modified timestamp, when provided by the server.
    last_modified: Optional[datetime] = None
    # Location value from the response, when available.
    location: Optional[str] = None
|
||||
|
||||
|
||||
class Worker(Thread):
    """Daemon thread executing tasks from a given tasks queue."""

    def __init__(
            self,
            tasks_queue: Queue,
            results_queue: Queue,
            exceptions_queue: Queue,
    ):
        """Wire up the queues and start consuming immediately."""
        Thread.__init__(self, daemon=True)
        self._tasks_queue = tasks_queue
        self._results_queue = results_queue
        self._exceptions_queue = exceptions_queue
        # Start right away; run() blocks on the tasks queue.
        self.start()

    def run(self):
        """Continuously receive tasks and execute them."""
        while True:
            task = self._tasks_queue.get()
            # A falsy task (None) is the sentinel telling this worker
            # to stop.
            if not task:
                self._tasks_queue.task_done()
                break
            func, args, kargs, cleanup_func = task
            # No exception detected in any thread,
            # continue the execution.
            if self._exceptions_queue.empty():
                try:
                    result = func(*args, **kargs)
                    self._results_queue.put(result)
                except Exception as ex:  # pylint: disable=broad-except
                    self._exceptions_queue.put(ex)

            # call cleanup i.e. Semaphore.release irrespective of task
            # execution to avoid race condition.
            cleanup_func()
            # Mark this task as done, whether an exception happened or not
            self._tasks_queue.task_done()
|
||||
|
||||
|
||||
class ThreadPool:
    """Pool of threads consuming tasks from a queue."""

    # Results of successfully executed tasks.
    _results_queue: Queue
    # Exceptions raised by tasks; the first is re-raised in result().
    _exceptions_queue: Queue
    # Pending tasks; None entries are stop sentinels for workers.
    _tasks_queue: Queue
    # Bounds queued-but-unfinished tasks to num_threads (backpressure).
    _sem: BoundedSemaphore
    _num_threads: int

    def __init__(self, num_threads: int):
        """Create queues and admission semaphore; workers start later via
        start_parallel()."""
        self._results_queue = Queue()
        self._exceptions_queue = Queue()
        self._tasks_queue = Queue()
        self._sem = BoundedSemaphore(num_threads)
        self._num_threads = num_threads

    def add_task(self, func, *args, **kargs):
        """
        Add a task to the queue. Calling this function can block
        until workers have a room for processing new tasks. Blocking
        the caller also prevents the latter from allocating a lot of
        memory while workers are still busy running their assigned tasks.
        """
        self._sem.acquire()  # pylint: disable=consider-using-with
        # The worker releases the semaphore once the task has run.
        cleanup_func = self._sem.release
        self._tasks_queue.put((func, args, kargs, cleanup_func))

    def start_parallel(self):
        """Prepare threads to run tasks."""
        for _ in range(self._num_threads):
            # Worker.__init__ starts the thread immediately.
            Worker(
                self._tasks_queue, self._results_queue, self._exceptions_queue,
            )

    def result(self) -> Queue:
        """Stop threads and return the result of all called tasks."""
        # Send None to all threads to cleanly stop them
        for _ in range(self._num_threads):
            self._tasks_queue.put(None)
        # Wait for completion of all the tasks in the queue
        self._tasks_queue.join()
        # Check if one of the thread raised an exception, if yes
        # raise it here in the function
        if not self._exceptions_queue.empty():
            raise self._exceptions_queue.get()
        return self._results_queue
|
||||
46
backend/venv/lib/python3.9/site-packages/minio/legalhold.py
Normal file
46
backend/venv/lib/python3.9/site-packages/minio/legalhold.py
Normal file
@@ -0,0 +1,46 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutObjectLegalHold and GetObjectLegalHold S3 APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Type, TypeVar
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .xml import Element, SubElement, findtext
|
||||
|
||||
A = TypeVar("A", bound="LegalHold")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class LegalHold:
    """Legal hold configuration."""

    # True when legal hold status is "ON".
    status: bool = False

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        status = findtext(element, "Status")
        return cls(status=status == "ON")

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML.

        NOTE(review): the *element* argument is ignored; a fresh
        "LegalHold" root element is always created — confirm intended.
        """
        element = Element("LegalHold")
        SubElement(element, "Status", "ON" if self.status is True else "OFF")
        return element
|
||||
@@ -0,0 +1,344 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2015, 2016, 2017, 2018, 2019 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Request/response of PutBucketLifecycleConfiguration and
|
||||
GetBucketLifecycleConfiguration APIs.
|
||||
"""
|
||||
# pylint: disable=invalid-name
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABC
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Optional, Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import BaseRule
|
||||
from .time import from_iso8601utc, to_iso8601utc
|
||||
from .xml import Element, SubElement, find, findall, findtext
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DateDays(ABC):
    """Base class holds date and days of Transition and Expiration."""
    # Absolute date at which the rule applies.
    date: Optional[datetime] = None
    # Relative number of days after which the rule applies.
    days: Optional[int] = None

    @staticmethod
    def parsexml(
            element: ET.Element) -> tuple[Optional[datetime], Optional[int]]:
        """Parse XML <Date> and <Days> children to (date, days)."""
        date = from_iso8601utc(findtext(element, "Date"))
        days = findtext(element, "Days")
        return date, int(days) if days else None

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML by appending Date/Days children to *element*."""
        if element is None:
            raise ValueError("element must be provided")
        if self.date is not None:
            SubElement(
                element, "Date", to_iso8601utc(self.date),
            )
        if self.days:
            SubElement(element, "Days", str(self.days))
        return element
|
||||
|
||||
|
||||
A = TypeVar("A", bound="Transition")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Transition(DateDays):
    """Transition action of a lifecycle rule."""

    # Target storage class objects transition into.
    storage_class: Optional[str] = None

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        # Third argument of find() presumably marks the tag as required
        # -- confirm against the .xml helper.
        element = cast(ET.Element, find(element, "Transition", True))
        date, days = cls.parsexml(element)
        return cls(date, days, findtext(element, "StorageClass"))

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Transition")
        super().toxml(element)  # writes <Date>/<Days>
        if self.storage_class:
            SubElement(element, "StorageClass", self.storage_class)
        return element
|
||||
|
||||
|
||||
B = TypeVar("B", bound="NoncurrentVersionTransition")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class NoncurrentVersionTransition:
    """Noncurrent version transition."""

    # Days after an object becomes noncurrent before it transitions.
    noncurrent_days: Optional[int] = None
    # Target storage class for the noncurrent versions.
    storage_class: Optional[str] = None
    # Number of newer noncurrent versions to retain before acting.
    newer_noncurrent_versions: Optional[int] = None

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from XML element."""
        element = cast(
            ET.Element,
            find(element, "NoncurrentVersionTransition", True),
        )
        noncurrent_days = findtext(element, "NoncurrentDays")
        versions = findtext(element, "NewerNoncurrentVersions")
        return cls(
            int(noncurrent_days) if noncurrent_days else None,
            findtext(element, "StorageClass"),
            int(versions) if versions else None,
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "NoncurrentVersionTransition")
        if self.noncurrent_days:
            SubElement(element, "NoncurrentDays", str(self.noncurrent_days))
        if self.storage_class:
            SubElement(element, "StorageClass", self.storage_class)
        if self.newer_noncurrent_versions:
            SubElement(element, "NewerNoncurrentVersions",
                       str(self.newer_noncurrent_versions))
        return element
|
||||
|
||||
|
||||
C = TypeVar("C", bound="NoncurrentVersionExpiration")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class NoncurrentVersionExpiration:
    """Noncurrent version expiration."""

    noncurrent_days: Optional[int] = None
    newer_noncurrent_versions: Optional[int] = None

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Create new object with values from XML element."""
        elem = cast(
            ET.Element,
            find(element, "NoncurrentVersionExpiration", True),
        )
        days_text = findtext(elem, "NoncurrentDays")
        versions_text = findtext(elem, "NewerNoncurrentVersions")
        days = int(days_text) if days_text else None
        versions = int(versions_text) if versions_text else None
        return cls(days, versions)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Serialize as a <NoncurrentVersionExpiration> child of *element*."""
        if element is None:
            raise ValueError("element must be provided")
        child = SubElement(element, "NoncurrentVersionExpiration")
        if self.noncurrent_days:
            SubElement(child, "NoncurrentDays", str(self.noncurrent_days))
        if self.newer_noncurrent_versions:
            SubElement(
                child,
                "NewerNoncurrentVersions",
                str(self.newer_noncurrent_versions),
            )
        return child
|
||||
|
||||
|
||||
D = TypeVar("D", bound="Expiration")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Expiration(DateDays):
    """Expiration action of a lifecycle rule."""

    # When True, S3 removes expired object delete markers.
    expired_object_delete_marker: Optional[bool] = None

    @classmethod
    def fromxml(cls: Type[D], element: ET.Element) -> D:
        """Create new object with values from XML element."""
        element = cast(ET.Element, find(element, "Expiration", True))
        date, days = cls.parsexml(element)
        expired_object_delete_marker = findtext(
            element, "ExpiredObjectDeleteMarker",
        )
        if expired_object_delete_marker is None:
            return cls(date, days, None)

        # .title() normalizes case ("true" -> "True") before validation.
        if expired_object_delete_marker.title() not in ["False", "True"]:
            raise ValueError(
                "value of ExpiredObjectDeleteMarker must be "
                "'True' or 'False'",
            )
        return cls(date, days, expired_object_delete_marker.title() == "True")

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Expiration")
        super().toxml(element)  # writes <Date>/<Days>
        if self.expired_object_delete_marker is not None:
            # Serialized as lowercase "true"/"false".
            SubElement(
                element,
                "ExpiredObjectDeleteMarker",
                str(self.expired_object_delete_marker).lower(),
            )
        return element
|
||||
|
||||
|
||||
E = TypeVar("E", bound="AbortIncompleteMultipartUpload")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class AbortIncompleteMultipartUpload:
    """Abort incomplete multipart upload."""

    days_after_initiation: Optional[int] = None

    @classmethod
    def fromxml(cls: Type[E], element: ET.Element) -> E:
        """Create new object with values from XML element."""
        elem = cast(
            ET.Element,
            find(element, "AbortIncompleteMultipartUpload", True),
        )
        days_text = findtext(elem, "DaysAfterInitiation")
        days = int(days_text) if days_text else None
        return cls(days)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Serialize as an <AbortIncompleteMultipartUpload> child."""
        if element is None:
            raise ValueError("element must be provided")
        child = SubElement(element, "AbortIncompleteMultipartUpload")
        if self.days_after_initiation:
            SubElement(
                child,
                "DaysAfterInitiation",
                str(self.days_after_initiation),
            )
        return child
|
||||
|
||||
|
||||
F = TypeVar("F", bound="Rule")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Rule(BaseRule):
    """Lifecycle rule.

    Combines filter/status/ID handling inherited from BaseRule with at
    least one lifecycle action (enforced in __post_init__).
    """

    # The five possible lifecycle actions; at least one must be set.
    abort_incomplete_multipart_upload: Optional[
        AbortIncompleteMultipartUpload] = None
    expiration: Optional[Expiration] = None
    noncurrent_version_expiration: Optional[NoncurrentVersionExpiration] = None
    noncurrent_version_transition: Optional[NoncurrentVersionTransition] = None
    transition: Optional[Transition] = None

    def __post_init__(self):
        # Reject rules that specify no action at all.
        if (not self.abort_incomplete_multipart_upload and not self.expiration
                and not self.noncurrent_version_expiration
                and not self.noncurrent_version_transition
                and not self.transition):
            raise ValueError(
                "at least one of action (AbortIncompleteMultipartUpload, "
                "Expiration, NoncurrentVersionExpiration, "
                "NoncurrentVersionTransition or Transition) must be specified "
                "in a rule")

    def _require_subclass_implementation(self) -> None:
        """Dummy abstract method to enforce abstract class behavior."""

    @classmethod
    def fromxml(cls: Type[F], element: ET.Element) -> F:
        """Create new object with values from XML element."""
        # BaseRule.parsexml handles <Status>, <Filter> and <ID>.
        status, rule_filter, rule_id = cls.parsexml(element)
        # Each action is parsed only when its tag is present; the action
        # classes re-locate their own tag inside `element`.
        abort_incomplete_multipart_upload = (
            None if find(element, "AbortIncompleteMultipartUpload") is None
            else AbortIncompleteMultipartUpload.fromxml(element)
        )
        expiration = (
            None if find(element, "Expiration") is None
            else Expiration.fromxml(element)
        )
        noncurrent_version_expiration = (
            None if find(element, "NoncurrentVersionExpiration") is None
            else NoncurrentVersionExpiration.fromxml(element)
        )
        noncurrent_version_transition = (
            None if find(element, "NoncurrentVersionTransition") is None
            else NoncurrentVersionTransition.fromxml(element)
        )
        transition = (
            None if find(element, "Transition") is None
            else Transition.fromxml(element)
        )

        return cls(
            status=status,
            rule_filter=rule_filter,
            rule_id=rule_id,
            abort_incomplete_multipart_upload=(
                abort_incomplete_multipart_upload
            ),
            expiration=expiration,
            noncurrent_version_expiration=noncurrent_version_expiration,
            noncurrent_version_transition=noncurrent_version_transition,
            transition=transition,
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Rule")
        super().toxml(element)  # status/filter/ID children
        if self.abort_incomplete_multipart_upload:
            self.abort_incomplete_multipart_upload.toxml(element)
        if self.expiration:
            self.expiration.toxml(element)
        if self.noncurrent_version_expiration:
            self.noncurrent_version_expiration.toxml(element)
        if self.noncurrent_version_transition:
            self.noncurrent_version_transition.toxml(element)
        if self.transition:
            self.transition.toxml(element)
        return element
|
||||
|
||||
|
||||
G = TypeVar("G", bound="LifecycleConfig")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class LifecycleConfig:
    """Lifecycle configuration.

    Payload of the PutBucketLifecycleConfiguration and
    GetBucketLifecycleConfiguration S3 APIs.
    """

    # Lifecycle rules of the configuration.
    rules: list[Rule]

    @classmethod
    def fromxml(cls: Type[G], element: ET.Element) -> G:
        """Create new object with values from XML element."""
        # Comprehension replaces the original manual append loop; order
        # and results are identical.
        return cls([Rule.fromxml(tag) for tag in findall(element, "Rule")])

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML.

        NOTE: unlike other toxml methods in this module, the passed
        *element* is ignored; a fresh <LifecycleConfiguration> root is
        always created and returned.
        """
        element = Element("LifecycleConfiguration")
        for rule in self.rules:
            rule.toxml(element)
        return element
|
||||
1016
backend/venv/lib/python3.9/site-packages/minio/minioadmin.py
Normal file
1016
backend/venv/lib/python3.9/site-packages/minio/minioadmin.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,283 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Request/response of PutBucketNotificationConfiguration and
|
||||
GetBucketNotificationConfiguration APIs.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABC
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .xml import Element, SubElement, find, findall, findtext
|
||||
|
||||
A = TypeVar("A", bound="FilterRule")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class FilterRule(ABC):
    """Filter rule."""

    # Rule key; "prefix" or "suffix" in the provided subclasses.
    name: str
    # Value the object key is matched against.
    value: str

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        name = cast(str, findtext(element, "Name", True))
        value = cast(str, findtext(element, "Value", True))
        return cls(name, value)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "FilterRule")
        SubElement(element, "Name", self.name)
        SubElement(element, "Value", self.value)
        return element
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PrefixFilterRule(FilterRule):
    """Prefix filter rule."""

    def __init__(self, value: str):
        # Fixes the rule name to "prefix"; only the value is
        # caller-supplied.
        super().__init__(name="prefix", value=value)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SuffixFilterRule(FilterRule):
    """Suffix filter rule."""

    def __init__(self, value: str):
        # Fixes the rule name to "suffix"; only the value is
        # caller-supplied.
        super().__init__(name="suffix", value=value)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class CommonConfig(ABC):
    """Common for cloud-function/queue/topic configuration."""

    # Event names to notify on; at least one is required.
    events: list[str]
    # Optional configuration identifier (<Id>).
    config_id: Optional[str] = None
    # Optional object-key prefix/suffix filter rules.
    prefix_filter_rule: Optional[PrefixFilterRule] = None
    suffix_filter_rule: Optional[SuffixFilterRule] = None

    def __post_init__(self):
        # NOTE(review): subclasses define their own __post_init__ without
        # calling super(), so this check does not run for them -- confirm
        # that is intentional.
        if not self.events:
            raise ValueError("events must be provided")

    @staticmethod
    def parsexml(
            element: ET.Element,
    ) -> tuple[
        list[str],
        Optional[str],
        Optional[PrefixFilterRule],
        Optional[SuffixFilterRule],
    ]:
        """Parse XML to events, config id and filter rules."""
        elements = findall(element, "Event")
        events = []
        for tag in elements:
            if tag.text is None:
                raise ValueError("missing value in XML tag 'Event'")
            events.append(tag.text)
        config_id = findtext(element, "Id")
        elem = find(element, "Filter")
        if elem is None:
            # No <Filter> block: no prefix/suffix rules.
            return events, config_id, None, None
        prefix_filter_rule = None
        suffix_filter_rule = None
        elem = cast(ET.Element, find(elem, "S3Key", True))
        elements = findall(elem, "FilterRule")
        for tag in elements:
            filter_rule = FilterRule.fromxml(tag)
            # Any rule name other than "prefix" is treated as a suffix
            # rule.
            if filter_rule.name == "prefix":
                prefix_filter_rule = PrefixFilterRule(filter_rule.value)
            else:
                suffix_filter_rule = SuffixFilterRule(filter_rule.value)
        return events, config_id, prefix_filter_rule, suffix_filter_rule

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Append events, id and filter rules as children of *element*."""
        if element is None:
            raise ValueError("element must be provided")
        for event in self.events:
            SubElement(element, "Event", event)
        if self.config_id is not None:
            SubElement(element, "Id", self.config_id)
        if self.prefix_filter_rule or self.suffix_filter_rule:
            # Filter rules live under <Filter><S3Key>.
            rule = SubElement(element, "Filter")
            rule = SubElement(rule, "S3Key")
            if self.prefix_filter_rule:
                self.prefix_filter_rule.toxml(rule)
            if self.suffix_filter_rule:
                self.suffix_filter_rule.toxml(rule)
        return element
|
||||
|
||||
|
||||
B = TypeVar("B", bound="CloudFuncConfig")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class CloudFuncConfig(CommonConfig):
    """Cloud function configuration."""

    # Cloud function identifier to notify; required.
    cloud_func: Optional[str] = None

    def __post_init__(self):
        # NOTE(review): does not call super().__post_init__(), so the
        # base-class "events must be provided" check is skipped -- confirm.
        if not self.cloud_func:
            raise ValueError("cloud function must be provided")

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from XML element."""
        cloud_func = cast(str, findtext(element, "CloudFunction", True))
        (events, config_id, prefix_filter_rule,
         suffix_filter_rule) = cls.parsexml(element)
        return cls(
            cloud_func=cloud_func,
            events=events,
            config_id=config_id,
            prefix_filter_rule=prefix_filter_rule,
            suffix_filter_rule=suffix_filter_rule,
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "CloudFunctionConfiguration")
        SubElement(element, "CloudFunction", self.cloud_func)
        super().toxml(element)  # events, id and filter rules
        return element
|
||||
|
||||
|
||||
C = TypeVar("C", bound="QueueConfig")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class QueueConfig(CommonConfig):
    """Queue configuration."""

    # Queue identifier to notify; required.
    queue: Optional[str] = None

    def __post_init__(self):
        # NOTE(review): does not call super().__post_init__(), so the
        # base-class "events must be provided" check is skipped -- confirm.
        if not self.queue:
            raise ValueError("queue must be provided")

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Create new object with values from XML element."""
        queue = cast(str, findtext(element, "Queue", True))
        (events, config_id, prefix_filter_rule,
         suffix_filter_rule) = cls.parsexml(element)
        return cls(
            queue=queue,
            events=events,
            config_id=config_id,
            prefix_filter_rule=prefix_filter_rule,
            suffix_filter_rule=suffix_filter_rule,
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "QueueConfiguration")
        SubElement(element, "Queue", self.queue)
        super().toxml(element)  # events, id and filter rules
        return element
|
||||
|
||||
|
||||
D = TypeVar("D", bound="TopicConfig")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class TopicConfig(CommonConfig):
    """Get topic configuration."""

    # Topic identifier to notify; required.
    topic: Optional[str] = None

    def __post_init__(self):
        # NOTE(review): does not call super().__post_init__(), so the
        # base-class "events must be provided" check is skipped -- confirm.
        if not self.topic:
            raise ValueError("topic must be provided")

    @classmethod
    def fromxml(cls: Type[D], element: ET.Element) -> D:
        """Create new object with values from XML element."""
        topic = cast(str, findtext(element, "Topic", True))
        (events, config_id, prefix_filter_rule,
         suffix_filter_rule) = cls.parsexml(element)
        return cls(
            topic=topic,
            events=events,
            config_id=config_id,
            prefix_filter_rule=prefix_filter_rule,
            suffix_filter_rule=suffix_filter_rule,
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "TopicConfiguration")
        SubElement(element, "Topic", self.topic)
        super().toxml(element)  # events, id and filter rules
        return element
|
||||
|
||||
|
||||
E = TypeVar("E", bound="NotificationConfig")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class NotificationConfig:
    """Notification configuration.

    Aggregates cloud-function, queue and topic configurations for the
    Put/GetBucketNotificationConfiguration APIs.
    """

    cloud_func_config_list: list[CloudFuncConfig] = field(default_factory=list)
    queue_config_list: list[QueueConfig] = field(default_factory=list)
    topic_config_list: list[TopicConfig] = field(default_factory=list)

    @classmethod
    def fromxml(cls: Type[E], element: ET.Element) -> E:
        """Create new object with values from XML element."""
        # Comprehensions replace the original three append loops; order
        # and results are identical.
        return cls(
            [
                CloudFuncConfig.fromxml(tag)
                for tag in findall(element, "CloudFunctionConfiguration")
            ],
            [
                QueueConfig.fromxml(tag)
                for tag in findall(element, "QueueConfiguration")
            ],
            [
                TopicConfig.fromxml(tag)
                for tag in findall(element, "TopicConfiguration")
            ],
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML.

        NOTE: the passed *element* is ignored; a fresh
        <NotificationConfiguration> root is created and returned.
        """
        element = Element("NotificationConfiguration")
        for cloud_func_config in self.cloud_func_config_list:
            cloud_func_config.toxml(element)
        for queue_config in self.queue_config_list:
            queue_config.toxml(element)
        for config in self.topic_config_list:
            config.toxml(element)
        return element
|
||||
@@ -0,0 +1,94 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Request/response of PutObjectLockConfiguration and GetObjectLockConfiguration
|
||||
APIs.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import COMPLIANCE, ENABLED, GOVERNANCE
|
||||
from .xml import Element, SubElement, find, findtext
|
||||
|
||||
DAYS = "Days"
|
||||
YEARS = "Years"
|
||||
|
||||
A = TypeVar("A", bound="ObjectLockConfig")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ObjectLockConfig:
    """Object lock configuration."""

    # Retention mode: GOVERNANCE or COMPLIANCE; None when no default
    # retention rule is set.
    mode: Optional[str]
    # Retention duration value; must be provided together with mode.
    duration: Optional[int]
    # Unit of duration: DAYS or YEARS.
    duration_unit: Optional[str]

    def __post_init__(self):
        # mode and duration must be provided together (XOR detects a
        # mismatch; the inner test tells which one is missing).
        if (self.mode is not None) ^ (self.duration is not None):
            if self.mode is None:
                raise ValueError("mode must be provided")
            raise ValueError("duration must be provided")
        if self.mode is not None and self.mode not in [GOVERNANCE, COMPLIANCE]:
            raise ValueError(f"mode must be {GOVERNANCE} or {COMPLIANCE}")
        if (
                self.duration is not None and
                self.duration_unit not in [DAYS, YEARS]
        ):
            raise ValueError(f"duration unit must be {DAYS} or {YEARS}")
        if self.duration_unit:
            # Normalize capitalization; object.__setattr__ is required
            # because the dataclass is frozen.
            object.__setattr__(
                self, "duration_unit", self.duration_unit.title(),
            )

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        elem = find(element, "Rule")
        if elem is None:
            # Object lock enabled but no default retention rule.
            return cls(None, None, None)
        elem = cast(ET.Element, find(elem, "DefaultRetention", True))
        mode = findtext(elem, "Mode")
        # Try <Days> first, then fall back to <Years>.
        duration_unit = DAYS
        duration = findtext(elem, duration_unit)
        if not duration:
            duration_unit = YEARS
            duration = findtext(elem, duration_unit)
        if not duration:
            raise ValueError(f"XML element <{DAYS}> or <{YEARS}> not found")
        return cls(
            mode=mode,
            duration=int(duration),
            duration_unit=duration_unit,
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML.

        NOTE: the passed *element* is ignored; a fresh
        <ObjectLockConfiguration> root is created and returned.
        """
        element = Element("ObjectLockConfiguration")
        SubElement(element, "ObjectLockEnabled", ENABLED)
        if self.mode:
            rule = SubElement(element, "Rule")
            retention = SubElement(rule, "DefaultRetention")
            SubElement(retention, "Mode", self.mode)
            if not self.duration_unit:
                raise ValueError("duration unit must be provided")
            # Duration unit doubles as the XML tag name (<Days>/<Years>).
            SubElement(retention, self.duration_unit, str(self.duration))
        return element
|
||||
@@ -0,0 +1,437 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutBucketReplication and GetBucketReplication APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABC
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import DISABLED, BaseRule, Filter, check_status
|
||||
from .xml import Element, SubElement, find, findall, findtext
|
||||
|
||||
A = TypeVar("A", bound="Status")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Status(ABC):
    """Status.

    Base for elements serialized as <ClassName><Status>value</Status>;
    the XML tag name is taken from the concrete (sub)class name.
    """

    status: str

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        # Tag name follows the concrete subclass name.
        element = cast(ET.Element, find(element, cls.__name__, True))
        status = cast(str, findtext(element, "Status", True))
        return cls(status)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, self.__class__.__name__)
        SubElement(element, "Status", self.status)
        return element
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SseKmsEncryptedObjects(Status):
    """SSE KMS encrypted objects.

    Serialized as <SseKmsEncryptedObjects><Status>...</Status> via the
    Status base class.
    """
|
||||
|
||||
|
||||
B = TypeVar("B", bound="SourceSelectionCriteria")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SourceSelectionCriteria:
    """Source selection criteria."""

    # Optional SSE-KMS-encrypted-objects status.
    sse_kms_encrypted_objects: Optional[SseKmsEncryptedObjects] = None

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from XML element."""
        element = cast(
            ET.Element,
            find(element, "SourceSelectionCriteria", True),
        )
        return cls(
            None if find(element, "SseKmsEncryptedObjects") is None
            else SseKmsEncryptedObjects.fromxml(element)
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "SourceSelectionCriteria")
        if self.sse_kms_encrypted_objects:
            self.sse_kms_encrypted_objects.toxml(element)
        return element
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ExistingObjectReplication(Status):
    """Existing object replication.

    Serialized as <ExistingObjectReplication><Status>...</Status> via
    the Status base class.
    """
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DeleteMarkerReplication(Status):
    """Delete marker replication."""

    def __init__(self, status=DISABLED):
        # Overrides the dataclass-generated __init__ only to default
        # status to DISABLED.
        super().__init__(status)
|
||||
|
||||
|
||||
C = TypeVar("C", bound="ReplicationTimeValue")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ReplicationTimeValue(ABC):
    """Replication time value.

    Base for elements carrying a <Minutes> value; the XML tag name is
    taken from the concrete (sub)class name.
    """

    # Time value in minutes; defaults to 15.
    minutes: Optional[int] = 15

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Create new object with values from XML element."""
        # Tag name follows the concrete subclass name.
        element = cast(ET.Element, find(element, cls.__name__, True))
        minutes = findtext(element, "Minutes")
        return cls(int(minutes) if minutes else None)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, self.__class__.__name__)
        if self.minutes is not None:
            SubElement(element, "Minutes", str(self.minutes))
        return element
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Time(ReplicationTimeValue):
    """Time.

    Serialized as <Time><Minutes>...</Minutes> via ReplicationTimeValue.
    """
|
||||
|
||||
|
||||
D = TypeVar("D", bound="ReplicationTime")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ReplicationTime:
    """Replication time."""

    # Replication time value; required.
    time: Time
    # Status string validated by check_status().
    status: str

    def __post_init__(self,):
        if not self.time:
            raise ValueError("time must be provided")
        check_status(self.status)

    @classmethod
    def fromxml(cls: Type[D], element: ET.Element) -> D:
        """Create new object with values from XML element."""
        element = cast(ET.Element, find(element, "ReplicationTime", True))
        time = Time.fromxml(element)
        status = cast(str, findtext(element, "Status", True))
        return cls(time, status)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "ReplicationTime")
        self.time.toxml(element)
        SubElement(element, "Status", self.status)
        return element
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class EventThreshold(ReplicationTimeValue):
    """Event threshold.

    Serialized as <EventThreshold><Minutes>...</Minutes> via
    ReplicationTimeValue.
    """
|
||||
|
||||
|
||||
E = TypeVar("E", bound="Metrics")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Metrics:
    """Metrics."""

    # Event threshold value; required.
    event_threshold: EventThreshold
    # Status string validated by check_status().
    status: str

    def __post_init__(self):
        if not self.event_threshold:
            raise ValueError("event threshold must be provided")
        check_status(self.status)

    @classmethod
    def fromxml(cls: Type[E], element: ET.Element) -> E:
        """Create new object with values from XML element."""
        element = cast(ET.Element, find(element, "Metrics", True))
        event_threshold = EventThreshold.fromxml(element)
        status = cast(str, findtext(element, "Status", True))
        return cls(event_threshold, status)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Metrics")
        self.event_threshold.toxml(element)
        SubElement(element, "Status", self.status)
        return element
|
||||
|
||||
|
||||
F = TypeVar("F", bound="EncryptionConfig")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class EncryptionConfig:
    """Encryption configuration."""

    # KMS key id used for replica encryption; optional.
    replica_kms_key_id: Optional[str] = None

    @classmethod
    def fromxml(cls: Type[F], element: ET.Element) -> F:
        """Create new object with values from XML element."""
        element = cast(
            ET.Element,
            find(element, "EncryptionConfiguration", True),
        )
        return cls(findtext(element, "ReplicaKmsKeyID"))

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "EncryptionConfiguration")
        # NOTE(review): replica_kms_key_id may be None here; presumably
        # the SubElement helper tolerates a None value -- confirm in .xml.
        SubElement(element, "ReplicaKmsKeyID", self.replica_kms_key_id)
        return element
|
||||
|
||||
|
||||
G = TypeVar("G", bound="AccessControlTranslation")


@dataclass(frozen=True)
class AccessControlTranslation:
    """Access control translation."""

    owner: str = "Destination"

    def __post_init__(self):
        """Reject an empty owner value."""
        if not self.owner:
            raise ValueError("owner must be provided")

    @classmethod
    def fromxml(cls: Type[G], element: ET.Element) -> G:
        """Create new object with values from XML element."""
        translation = cast(
            ET.Element, find(element, "AccessControlTranslation", True),
        )
        return cls(cast(str, findtext(translation, "Owner", True)))

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML; returns the new <AccessControlTranslation> child."""
        if element is None:
            raise ValueError("element must be provided")
        translation = SubElement(element, "AccessControlTranslation")
        SubElement(translation, "Owner", self.owner)
        return translation
|
||||
|
||||
|
||||
H = TypeVar("H", bound="Destination")


@dataclass(frozen=True)
class Destination:
    """Replication destination.

    Describes where replicated objects go (<Destination> element of a
    replication rule). Only the bucket ARN is mandatory; every other
    sub-configuration is optional and omitted from XML when unset.
    """

    # ARN of the destination bucket (required, validated below).
    bucket_arn: str
    access_control_translation: Optional[AccessControlTranslation] = None
    account: Optional[str] = None
    encryption_config: Optional[EncryptionConfig] = None
    metrics: Optional[Metrics] = None
    replication_time: Optional[ReplicationTime] = None
    storage_class: Optional[str] = None

    def __post_init__(self):
        # Frozen dataclass: validation happens once at construction time.
        if not self.bucket_arn:
            raise ValueError("bucket ARN must be provided")

    @classmethod
    def fromxml(cls: Type[H], element: ET.Element) -> H:
        """Create new object with values from XML element."""
        element = cast(ET.Element, find(element, "Destination", True))
        # Each optional child is probed first; fromxml() is only invoked
        # when the corresponding tag exists, since those helpers raise on
        # a missing mandatory element.
        access_control_translation = (
            None if find(element, "AccessControlTranslation") is None
            else AccessControlTranslation.fromxml(element)
        )
        account = findtext(element, "Account")
        bucket_arn = cast(str, findtext(element, "Bucket", True))
        encryption_config = (
            None if find(element, "EncryptionConfiguration") is None
            else EncryptionConfig.fromxml(element)
        )
        metrics = (
            None if find(element, "Metrics") is None
            else Metrics.fromxml(element)
        )
        replication_time = (
            None if find(element, "ReplicationTime") is None
            else ReplicationTime.fromxml(element)
        )
        storage_class = findtext(element, "StorageClass")
        # NOTE: positional arguments — order must match the field order
        # declared above.
        return cls(bucket_arn, access_control_translation, account,
                   encryption_config, metrics, replication_time, storage_class)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML; returns the new <Destination> child element."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Destination")
        if self.access_control_translation:
            self.access_control_translation.toxml(element)
        if self.account is not None:
            SubElement(element, "Account", self.account)
        # Bucket is always emitted; it is the only mandatory child.
        SubElement(element, "Bucket", self.bucket_arn)
        if self.encryption_config:
            self.encryption_config.toxml(element)
        if self.metrics:
            self.metrics.toxml(element)
        if self.replication_time:
            self.replication_time.toxml(element)
        if self.storage_class:
            SubElement(element, "StorageClass", self.storage_class)
        return element
|
||||
|
||||
|
||||
I = TypeVar("I", bound="Rule")


@dataclass(frozen=True)
class Rule(BaseRule):
    """Replication rule.

    One <Rule> element of a bucket replication configuration. Inherits
    status/filter/id handling from BaseRule; a destination is mandatory,
    everything else is optional.
    """

    destination: Optional[Destination] = None
    delete_marker_replication: Optional[DeleteMarkerReplication] = None
    existing_object_replication: Optional[ExistingObjectReplication] = None
    rule_filter: Optional[Filter] = None
    rule_id: Optional[str] = None
    prefix: Optional[str] = None
    priority: Optional[int] = None
    source_selection_criteria: Optional[SourceSelectionCriteria] = None

    def __post_init__(self):
        # Destination is typed Optional only so the dataclass can default
        # it; a real value is required at runtime.
        if not self.destination:
            raise ValueError("destination must be provided")

    def _require_subclass_implementation(self) -> None:
        """Dummy abstract method to enforce abstract class behavior."""

    @classmethod
    def fromxml(cls: Type[I], element: ET.Element) -> I:
        """Create new object with values from XML element."""
        # BaseRule.parsexml handles the shared Status/Filter/ID children.
        status, rule_filter, rule_id = cls.parsexml(element)
        delete_marker_replication = (
            None if find(element, "DeleteMarkerReplication") is None
            else DeleteMarkerReplication.fromxml(element)
        )
        destination = Destination.fromxml(element)
        existing_object_replication = (
            None if find(element, "ExistingObjectReplication") is None
            else ExistingObjectReplication.fromxml(element)
        )
        prefix = findtext(element, "Prefix")
        # Priority arrives as text; converted to int below when present.
        priority = findtext(element, "Priority")
        source_selection_criteria = (
            None if find(element, "SourceSelectionCriteria") is None
            else SourceSelectionCriteria.fromxml(element)
        )

        return cls(
            status=status,
            rule_filter=rule_filter,
            rule_id=rule_id,
            destination=destination,
            delete_marker_replication=delete_marker_replication,
            existing_object_replication=existing_object_replication,
            prefix=prefix,
            priority=int(priority) if priority else None,
            source_selection_criteria=source_selection_criteria,
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML; returns the new <Rule> child element."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Rule")
        # Base class serializes Status/Filter/ID into the same element.
        super().toxml(element)
        if self.delete_marker_replication:
            self.delete_marker_replication.toxml(element)
        if self.destination:
            self.destination.toxml(element)
        if self.existing_object_replication:
            self.existing_object_replication.toxml(element)
        if self.prefix is not None:
            SubElement(element, "Prefix", self.prefix)
        if self.priority is not None:
            SubElement(element, "Priority", str(self.priority))
        if self.source_selection_criteria:
            self.source_selection_criteria.toxml(element)
        return element
|
||||
|
||||
|
||||
J = TypeVar("J", bound="ReplicationConfig")


@dataclass(frozen=True)
class ReplicationConfig:
    """Replication configuration."""

    role: str
    rules: list[Rule]

    def __post_init__(self):
        """Validate the rule list; S3 caps a configuration at 1000 rules."""
        if not self.rules:
            raise ValueError("rules must be provided")
        if len(self.rules) > 1000:
            raise ValueError("more than 1000 rules are not supported")

    @classmethod
    def fromxml(cls: Type[J], element: ET.Element) -> J:
        """Create new object with values from XML element."""
        role = cast(str, findtext(element, "Role", True))
        rules = [Rule.fromxml(tag) for tag in findall(element, "Rule")]
        return cls(role, rules)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML; a fresh root is built and *element* is ignored."""
        root = Element("ReplicationConfiguration")
        SubElement(root, "Role", self.role)
        for rule in self.rules:
            rule.toxml(root)
        return root
|
||||
69
backend/venv/lib/python3.9/site-packages/minio/retention.py
Normal file
69
backend/venv/lib/python3.9/site-packages/minio/retention.py
Normal file
@@ -0,0 +1,69 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutObjectRetention and GetObjectRetention APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Optional, Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import COMPLIANCE, GOVERNANCE
|
||||
from .time import from_iso8601utc, to_iso8601utc
|
||||
from .xml import Element, SubElement, findtext
|
||||
|
||||
A = TypeVar("A", bound="Retention")


@dataclass(frozen=True)
class Retention:
    """Object-lock retention configuration."""

    mode: str
    retain_until_date: datetime

    def __post_init__(self):
        """Reject unknown modes and non-datetime retention dates."""
        if self.mode not in [GOVERNANCE, COMPLIANCE]:
            raise ValueError(f"mode must be {GOVERNANCE} or {COMPLIANCE}")
        if not isinstance(self.retain_until_date, datetime):
            raise ValueError(
                "retain until date must be datetime type",
            )

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        retain_text = cast(str, findtext(element, "RetainUntilDate", True))
        return cls(
            mode=cast(str, findtext(element, "Mode", True)),
            retain_until_date=cast(datetime, from_iso8601utc(retain_text)),
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML; a fresh <Retention> root is built, ignoring *element*."""
        root = Element("Retention")
        SubElement(root, "Mode", self.mode)
        SubElement(
            root,
            "RetainUntilDate",
            to_iso8601utc(self.retain_until_date),
        )
        return root
|
||||
417
backend/venv/lib/python3.9/site-packages/minio/select.py
Normal file
417
backend/venv/lib/python3.9/site-packages/minio/select.py
Normal file
@@ -0,0 +1,417 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of SelectObjectContent API."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from binascii import crc32
|
||||
from dataclasses import dataclass
|
||||
from io import BytesIO
|
||||
from typing import Optional
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .error import MinioException
|
||||
from .xml import Element, SubElement, findtext
|
||||
|
||||
# Values accepted for InputSerialization.compression_type.
COMPRESSION_TYPE_NONE = "NONE"
COMPRESSION_TYPE_GZIP = "GZIP"
COMPRESSION_TYPE_BZIP2 = "BZIP2"

# Values accepted for CSVInputSerialization.file_header_info.
FILE_HEADER_INFO_USE = "USE"
FILE_HEADER_INFO_IGNORE = "IGNORE"
FILE_HEADER_INFO_NONE = "NONE"

# Values accepted for JSONInputSerialization.json_type.
JSON_TYPE_DOCUMENT = "DOCUMENT"
JSON_TYPE_LINES = "LINES"

# Values accepted for CSVOutputSerialization.quote_fields.
QUOTE_FIELDS_ALWAYS = "ALWAYS"
QUOTE_FIELDS_ASNEEDED = "ASNEEDED"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class InputSerialization(ABC):
    """Base input serialization; carries the optional compression type."""

    compression_type: Optional[str] = None

    def __post_init__(self):
        """Reject compression types outside the supported set."""
        valid_types = (
            COMPRESSION_TYPE_NONE,
            COMPRESSION_TYPE_GZIP,
            COMPRESSION_TYPE_BZIP2,
        )
        if self.compression_type is not None \
                and self.compression_type not in valid_types:
            raise ValueError(
                f"compression type must be {COMPRESSION_TYPE_NONE}, "
                f"{COMPRESSION_TYPE_GZIP} or {COMPRESSION_TYPE_BZIP2}"
            )

    def toxml(self, element):
        """Convert to XML; emits <CompressionType> only when set."""
        if self.compression_type is not None:
            SubElement(element, "CompressionType", self.compression_type)
        return element
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class CSVInputSerialization(InputSerialization):
    """CSV input serialization.

    Options for parsing CSV input in SelectObjectContent. All fields are
    optional; unset fields are omitted from the generated XML.
    """

    allow_quoted_record_delimiter: Optional[str] = None
    comments: Optional[str] = None
    field_delimiter: Optional[str] = None
    file_header_info: Optional[str] = None
    quote_character: Optional[str] = None
    quote_escape_character: Optional[str] = None
    record_delimiter: Optional[str] = None

    def __post_init__(self):
        # NOTE(review): this overrides InputSerialization.__post_init__
        # without calling super().__post_init__(), so compression_type is
        # not validated here — confirm this is intentional.
        if (
                self.file_header_info is not None and
                self.file_header_info not in [
                    FILE_HEADER_INFO_USE,
                    FILE_HEADER_INFO_IGNORE,
                    FILE_HEADER_INFO_NONE,
                ]
        ):
            raise ValueError(
                f"file header info must be {FILE_HEADER_INFO_USE}, "
                f"{FILE_HEADER_INFO_IGNORE} or {FILE_HEADER_INFO_NONE}"
            )

    def toxml(self, element):
        """Convert to XML under a new <CSV> child of *element*."""
        # Base class writes <CompressionType> into the parent first.
        super().toxml(element)
        element = SubElement(element, "CSV")
        if self.allow_quoted_record_delimiter is not None:
            SubElement(
                element,
                "AllowQuotedRecordDelimiter",
                self.allow_quoted_record_delimiter,
            )
        if self.comments is not None:
            SubElement(element, "Comments", self.comments)
        if self.field_delimiter is not None:
            SubElement(element, "FieldDelimiter", self.field_delimiter)
        if self.file_header_info is not None:
            SubElement(element, "FileHeaderInfo", self.file_header_info)
        if self.quote_character is not None:
            SubElement(element, "QuoteCharacter", self.quote_character)
        if self.quote_escape_character is not None:
            SubElement(
                element,
                "QuoteEscapeCharacter",
                self.quote_escape_character,
            )
        if self.record_delimiter is not None:
            SubElement(element, "RecordDelimiter", self.record_delimiter)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class JSONInputSerialization(InputSerialization):
    """JSON input serialization."""

    json_type: Optional[str] = None

    def __post_init__(self):
        """Allow only DOCUMENT or LINES (or unset) as the JSON type."""
        valid = (JSON_TYPE_DOCUMENT, JSON_TYPE_LINES)
        if self.json_type is not None and self.json_type not in valid:
            raise ValueError(
                f"json type must be {JSON_TYPE_DOCUMENT} or {JSON_TYPE_LINES}"
            )

    def toxml(self, element):
        """Convert to XML under a new <JSON> child of *element*."""
        super().toxml(element)
        json_element = SubElement(element, "JSON")
        if self.json_type is not None:
            SubElement(json_element, "Type", self.json_type)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ParquetInputSerialization(InputSerialization):
    """Parquet input serialization; no options beyond compression."""

    def toxml(self, element):
        """Convert to XML; returns the new, empty <Parquet> child."""
        super().toxml(element)
        parquet = SubElement(element, "Parquet")
        return parquet
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class OutputSerialization(ABC):
    """Output serialization.

    Abstract base for CSV/JSON output serialization; subclasses emit
    their options under the <OutputSerialization> element.
    """

    @abstractmethod
    def toxml(self, element):
        """Convert to XML."""
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class CSVOutputSerialization(OutputSerialization):
    """CSV output serialization.

    Options controlling how SelectObjectContent formats CSV results.
    All fields are optional; unset fields are omitted from the XML.
    """

    field_delimiter: Optional[str] = None
    quote_character: Optional[str] = None
    quote_escape_character: Optional[str] = None
    quote_fields: Optional[str] = None
    record_delimiter: Optional[str] = None

    def __post_init__(self):
        # Only quote_fields has a constrained value set.
        if (
                self.quote_fields is not None and
                self.quote_fields not in [
                    QUOTE_FIELDS_ALWAYS, QUOTE_FIELDS_ASNEEDED,
                ]
        ):
            raise ValueError(
                f"quote fields must be {QUOTE_FIELDS_ALWAYS} or "
                f"{QUOTE_FIELDS_ASNEEDED}"
            )

    def toxml(self, element):
        """Convert to XML under a new <CSV> child of *element*."""
        element = SubElement(element, "CSV")
        if self.field_delimiter is not None:
            SubElement(element, "FieldDelimiter", self.field_delimiter)
        if self.quote_character is not None:
            SubElement(element, "QuoteCharacter", self.quote_character)
        if self.quote_escape_character is not None:
            SubElement(
                element,
                "QuoteEscapeCharacter",
                self.quote_escape_character,
            )
        if self.quote_fields is not None:
            SubElement(element, "QuoteFields", self.quote_fields)
        if self.record_delimiter is not None:
            SubElement(element, "RecordDelimiter", self.record_delimiter)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class JSONOutputSerialization(OutputSerialization):
    """JSON output serialization."""

    record_delimiter: Optional[str] = None

    def toxml(self, element):
        """Convert to XML under a new <JSON> child of *element*."""
        json_element = SubElement(element, "JSON")
        if self.record_delimiter is not None:
            SubElement(json_element, "RecordDelimiter", self.record_delimiter)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SelectRequest:
    """Select object content request.

    Serializes to the <SelectObjectContentRequest> XML body. Scan range
    bounds are byte offsets; either may be omitted.
    """

    expression: str
    input_serialization: InputSerialization
    output_serialization: OutputSerialization
    request_progress: bool = False
    scan_start_range: Optional[int] = None
    scan_end_range: Optional[int] = None

    def toxml(self, element):
        """Convert to XML; builds a fresh request root, *element* is ignored."""
        element = Element("SelectObjectContentRequest")
        SubElement(element, "Expression", self.expression)
        SubElement(element, "ExpressionType", "SQL")
        self.input_serialization.toxml(
            SubElement(element, "InputSerialization"),
        )
        self.output_serialization.toxml(
            SubElement(element, "OutputSerialization"),
        )
        if self.request_progress:
            SubElement(
                SubElement(element, "RequestProgress"), "Enabled", "true",
            )
        # Fix: compare against None so a valid scan start of 0 is kept,
        # and convert to str — ElementTree cannot serialize int text.
        if self.scan_start_range is not None or self.scan_end_range is not None:
            tag = SubElement(element, "ScanRange")
            if self.scan_start_range is not None:
                SubElement(tag, "Start", str(self.scan_start_range))
            if self.scan_end_range is not None:
                SubElement(tag, "End", str(self.scan_end_range))
        return element
|
||||
|
||||
|
||||
def _read(reader, size):
|
||||
"""Wrapper to RawIOBase.read() to error out on short reads."""
|
||||
data = reader.read(size)
|
||||
if len(data) != size:
|
||||
raise IOError("insufficient data")
|
||||
return data
|
||||
|
||||
|
||||
def _int(data):
|
||||
"""Convert byte data to big-endian int."""
|
||||
return int.from_bytes(data, byteorder="big")
|
||||
|
||||
|
||||
def _crc32(data):
|
||||
"""Wrapper to binascii.crc32()."""
|
||||
return crc32(data) & 0xffffffff
|
||||
|
||||
|
||||
def _decode_header(data):
    """Decode event-stream header data into a name->value dict."""
    headers = {}
    reader = BytesIO(data)
    while True:
        # First byte is the header-name length; EOF ends the loop.
        length = reader.read(1)
        if not length:
            break
        name = _read(reader, _int(length))
        value_type = _int(_read(reader, 1))
        if value_type != 7:
            raise IOError("header value type is not 7")
        # Type 7 values carry a 2-byte length followed by UTF-8 text.
        value = _read(reader, _int(_read(reader, 2)))
        headers[name.decode()] = value.decode()
    return headers
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Stats:
    """Progress/Stats information.

    Parsed from the XML payload of a Progress or Stats event in a
    SelectObjectContent response. Values are kept as strings exactly as
    they appear in the XML.
    """

    bytes_scanned: Optional[str] = None
    bytes_processed: Optional[str] = None
    bytes_returned: Optional[str] = None

    def __init__(self, data):
        # Custom __init__ replaces the dataclass-generated one: fields are
        # parsed from raw XML bytes. object.__setattr__ is required
        # because the dataclass is frozen.
        element = ET.fromstring(data.decode())
        object.__setattr__(
            self,
            "bytes_scanned",
            findtext(element, "BytesScanned"),
        )
        object.__setattr__(
            self,
            "bytes_processed",
            findtext(element, "BytesProcessed"),
        )
        object.__setattr__(
            self,
            "bytes_returned",
            findtext(element, "BytesReturned"),
        )
|
||||
|
||||
|
||||
class SelectObjectReader:
    """
    BufferedIOBase compatible reader represents response data of
    Minio.select_object_content() API.

    Decodes the AWS event-stream framing (prelude + headers + payload,
    each CRC-checked) and exposes the Records payloads via stream().
    """

    def __init__(self, response):
        # Underlying HTTP response; caller owns its lifetime via close().
        self._response = response
        # Latest Progress/Stats event seen, if any.
        self._stats = None
        # Remaining undelivered bytes of the current Records payload.
        self._payload = None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        return self.close()

    def readable(self):  # pylint: disable=no-self-use
        """Return this is readable."""
        return True

    def writeable(self):  # pylint: disable=no-self-use
        """Return this is not writeable."""
        return False

    def close(self):
        """Close response and release network resources."""
        self._response.close()
        self._response.release_conn()

    def stats(self):
        """Get stats information."""
        return self._stats

    def _read(self):
        """Read and decode one event; return length of Records payload.

        Returns 0 at end of stream. Recurses past Cont/Progress/Stats
        events until a Records event (or End) is reached.
        """
        if self._response.isclosed():
            return 0

        # Prelude: 4-byte total length + 4-byte header length, then its CRC.
        prelude = _read(self._response, 8)
        prelude_crc = _read(self._response, 4)
        if _crc32(prelude) != _int(prelude_crc):
            raise IOError(
                f"prelude CRC mismatch; expected: {_crc32(prelude)}, "
                f"got: {_int(prelude_crc)}"
            )

        total_length = _int(prelude[:4])
        # Body excludes prelude (8), prelude CRC (4) and message CRC (4).
        data = _read(self._response, total_length - 8 - 4 - 4)
        message_crc = _int(_read(self._response, 4))
        if _crc32(prelude + prelude_crc + data) != message_crc:
            raise IOError(
                f"message CRC mismatch; "
                f"expected: {_crc32(prelude + prelude_crc + data)}, "
                f"got: {message_crc}"
            )

        header_length = _int(prelude[4:])
        headers = _decode_header(data[:header_length])

        if headers.get(":message-type") == "error":
            raise MinioException(
                f"{headers.get(':error-code')}: "
                f"{headers.get(':error-message')}"
            )

        if headers.get(":event-type") == "End":
            return 0

        # 16 = prelude (8) + prelude CRC (4) + message CRC (4).
        payload_length = total_length - header_length - 16
        if headers.get(":event-type") == "Cont" or payload_length < 1:
            return self._read()

        payload = data[header_length:header_length+payload_length]

        if headers.get(":event-type") in ["Progress", "Stats"]:
            # Remember the latest stats, then continue to the next event.
            self._stats = Stats(payload)
            return self._read()

        if headers.get(":event-type") == "Records":
            self._payload = payload
            return len(payload)

        raise MinioException(
            f"unknown event-type {headers.get(':event-type')}",
        )

    def stream(self, num_bytes=32*1024):
        """
        Stream extracted payload from response data. Upon completion, caller
        should call self.close() to release network resources.
        """
        while self._read() > 0:
            # Hand out the current Records payload in num_bytes slices.
            while self._payload:
                result = self._payload
                if num_bytes < len(self._payload):
                    result = self._payload[:num_bytes]
                self._payload = self._payload[len(result):]
                yield result
|
||||
354
backend/venv/lib/python3.9/site-packages/minio/signer.py
Normal file
354
backend/venv/lib/python3.9/site-packages/minio/signer.py
Normal file
@@ -0,0 +1,354 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2015-2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
"""
|
||||
minio.signer
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
This module implements all helpers for AWS Signature version '4' support.
|
||||
|
||||
:copyright: (c) 2015 by MinIO, Inc.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import hashlib
|
||||
import hmac
|
||||
import re
|
||||
from collections import OrderedDict
|
||||
from datetime import datetime
|
||||
from typing import Mapping, cast
|
||||
from urllib.parse import SplitResult
|
||||
|
||||
from . import time
|
||||
from .credentials import Credentials
|
||||
from .helpers import DictType, queryencode, sha256_hash
|
||||
|
||||
# Signature V4 algorithm identifier used in authorization headers.
SIGN_V4_ALGORITHM = 'AWS4-HMAC-SHA256'
# Collapses runs of spaces when canonicalizing header values.
_MULTI_SPACE_REGEX = re.compile(r"( +)")
|
||||
|
||||
|
||||
def _hmac_hash(
|
||||
key: bytes,
|
||||
data: bytes,
|
||||
hexdigest: bool = False,
|
||||
) -> bytes | str:
|
||||
"""Return HMacSHA256 digest of given key and data."""
|
||||
|
||||
hasher = hmac.new(key, data, hashlib.sha256)
|
||||
return hasher.hexdigest() if hexdigest else hasher.digest()
|
||||
|
||||
|
||||
def _get_scope(date: datetime, region: str, service_name: str) -> str:
    """Get scope string.

    SigV4 credential scope: <YYYYMMDD>/<region>/<service>/aws4_request.
    """
    return f"{time.to_signer_date(date)}/{region}/{service_name}/aws4_request"
|
||||
|
||||
|
||||
def _get_canonical_headers(
        headers: Mapping[str, str | list[str] | tuple[str]],
) -> tuple[str, str]:
    """Get canonical headers.

    Returns (canonical_headers, signed_headers): headers lower-cased,
    values space-collapsed, sorted by name; signed_headers is the
    ';'-joined name list.
    """

    ordered_headers = {}
    for key, values in headers.items():
        key = key.lower()
        # Authorization and User-Agent are excluded from signing.
        if key not in (
                "authorization",
                "user-agent",
        ):
            values = values if isinstance(values, (list, tuple)) else [values]
            # Multi-valued headers join with ','; inner space runs collapse.
            ordered_headers[key] = ",".join([
                _MULTI_SPACE_REGEX.sub(" ", value).strip() for value in values
            ])

    ordered_headers = OrderedDict(sorted(ordered_headers.items()))
    signed_headers = ";".join(ordered_headers.keys())
    canonical_headers = "\n".join(
        [f"{key}:{value}" for key, value in ordered_headers.items()],
    )
    return canonical_headers, signed_headers
|
||||
|
||||
|
||||
def _get_canonical_query_string(query: str) -> str:
|
||||
"""Get canonical query string."""
|
||||
|
||||
query = query or ""
|
||||
return "&".join(
|
||||
[
|
||||
"=".join(pair) for pair in sorted(
|
||||
[params.split("=") for params in query.split("&")],
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def _get_canonical_request_hash(
        method: str,
        url: SplitResult,
        headers: Mapping[str, str | list[str] | tuple[str]],
        content_sha256: str,
) -> tuple[str, str]:
    """Get canonical request hash.

    Returns (sha256 hex digest of the canonical request, signed_headers).
    """
    canonical_headers, signed_headers = _get_canonical_headers(headers)
    canonical_query_string = _get_canonical_query_string(url.query)

    # CanonicalRequest =
    #   HTTPRequestMethod + '\n' +
    #   CanonicalURI + '\n' +
    #   CanonicalQueryString + '\n' +
    #   CanonicalHeaders + '\n\n' +
    #   SignedHeaders + '\n' +
    #   HexEncode(Hash(RequestPayload))
    canonical_request = (
        f"{method}\n"
        f"{url.path or '/'}\n"
        f"{canonical_query_string}\n"
        f"{canonical_headers}\n\n"
        f"{signed_headers}\n"
        f"{content_sha256}"
    )
    return sha256_hash(canonical_request), signed_headers
|
||||
|
||||
|
||||
def _get_string_to_sign(
        date: datetime,
        scope: str,
        canonical_request_hash: str,
) -> str:
    """Get string-to-sign: algorithm, timestamp, scope and request hash."""
    amz_date = time.to_amz_date(date)
    return (
        f"AWS4-HMAC-SHA256\n{amz_date}\n{scope}\n"
        f"{canonical_request_hash}"
    )
|
||||
|
||||
|
||||
def _get_signing_key(
        secret_key: str,
        date: datetime,
        region: str,
        service_name: str,
) -> bytes:
    """Get signing key by chaining HMAC digests over the scope parts."""
    # kSigning = HMAC(HMAC(HMAC(HMAC("AWS4"+secret, date), region),
    #                      service), "aws4_request")
    key: bytes = ("AWS4" + secret_key).encode()
    for component in (
            time.to_signer_date(date).encode(),
            region.encode(),
            service_name.encode(),
            b"aws4_request",
    ):
        key = cast(bytes, _hmac_hash(key, component))
    return key
|
||||
|
||||
|
||||
def _get_signature(signing_key: bytes, string_to_sign: str) -> str:
    """Get hex-encoded signature of string-to-sign under signing key."""
    hex_digest = _hmac_hash(
        signing_key, string_to_sign.encode(), hexdigest=True,
    )
    return cast(str, hex_digest)
|
||||
|
||||
|
||||
def _get_authorization(
|
||||
access_key: str,
|
||||
scope: str,
|
||||
signed_headers: str,
|
||||
signature: str,
|
||||
) -> str:
|
||||
"""Get authorization."""
|
||||
return (
|
||||
f"AWS4-HMAC-SHA256 Credential={access_key}/{scope}, "
|
||||
f"SignedHeaders={signed_headers}, Signature={signature}"
|
||||
)
|
||||
|
||||
|
||||
def _sign_v4(
        *,
        service_name: str,
        method: str,
        url: SplitResult,
        region: str,
        headers: DictType,
        credentials: Credentials,
        content_sha256: str,
        date: datetime,
) -> DictType:
    """Do signature V4 of given request for given service name.

    Mutates *headers* in place by setting the Authorization header and
    returns the same mapping.
    """

    scope = _get_scope(date, region, service_name)
    canonical_request_hash, signed_headers = _get_canonical_request_hash(
        method, url, headers, content_sha256,
    )
    string_to_sign = _get_string_to_sign(date, scope, canonical_request_hash)
    signing_key = _get_signing_key(
        credentials.secret_key, date, region, service_name,
    )
    signature = _get_signature(signing_key, string_to_sign)
    authorization = _get_authorization(
        credentials.access_key, scope, signed_headers, signature,
    )
    headers["Authorization"] = authorization
    return headers
|
||||
|
||||
|
||||
def sign_v4_s3(
        *,
        method: str,
        url: SplitResult,
        region: str,
        headers: DictType,
        credentials: Credentials,
        content_sha256: str,
        date: datetime,
) -> DictType:
    """Do signature V4 of given request for S3 service.

    Thin wrapper around _sign_v4 with service_name fixed to "s3";
    mutates and returns *headers*.
    """
    return _sign_v4(
        service_name="s3",
        method=method,
        url=url,
        region=region,
        headers=headers,
        credentials=credentials,
        content_sha256=content_sha256,
        date=date,
    )
|
||||
|
||||
|
||||
def sign_v4_sts(
        *,
        method: str,
        url: SplitResult,
        region: str,
        headers: DictType,
        credentials: Credentials,
        content_sha256: str,
        date: datetime,
) -> DictType:
    """Do signature V4 of given request for STS service.

    Thin wrapper around _sign_v4 with service_name fixed to "sts";
    mutates and returns *headers*.
    """
    return _sign_v4(
        service_name="sts",
        method=method,
        url=url,
        region=region,
        headers=headers,
        credentials=credentials,
        content_sha256=content_sha256,
        date=date,
    )
|
||||
|
||||
|
||||
def _get_presign_canonical_request_hash(  # pylint: disable=invalid-name
        *,
        method: str,
        url: SplitResult,
        access_key: str,
        scope: str,
        date: datetime,
        expires: int,
) -> tuple[str, SplitResult]:
    """Get canonical request hash for presign request.

    Returns the SHA-256 hex digest of the canonical request together
    with the URL rewritten to carry the X-Amz-* presign query
    parameters (everything except X-Amz-Signature).
    """
    x_amz_credential = queryencode(access_key + "/" + scope)
    # Presigned URLs sign only the Host header.
    canonical_headers, signed_headers = "host:" + url.netloc, "host"

    # Append the AWS presign parameters after any existing query string.
    query = url.query+"&" if url.query else ""
    query += (
        f"X-Amz-Algorithm=AWS4-HMAC-SHA256"
        f"&X-Amz-Credential={x_amz_credential}"
        f"&X-Amz-Date={time.to_amz_date(date)}"
        f"&X-Amz-Expires={expires}"
        f"&X-Amz-SignedHeaders={signed_headers}"
    )
    # SplitResult is immutable; rebuild it with the new query (index 3).
    parts = list(url)
    parts[3] = query
    url = SplitResult(*parts)

    canonical_query_string = _get_canonical_query_string(query)

    # CanonicalRequest =
    #   HTTPRequestMethod + '\n' +
    #   CanonicalURI + '\n' +
    #   CanonicalQueryString + '\n' +
    #   CanonicalHeaders + '\n\n' +
    #   SignedHeaders + '\n' +
    #   HexEncode(Hash(RequestPayload))
    # Presigned requests always use the UNSIGNED-PAYLOAD sentinel
    # because the payload is unknown at signing time.
    canonical_request = (
        f"{method}\n"
        f"{url.path or '/'}\n"
        f"{canonical_query_string}\n"
        f"{canonical_headers}\n\n"
        f"{signed_headers}\n"
        f"UNSIGNED-PAYLOAD"
    )
    return sha256_hash(canonical_request), url
|
||||
|
||||
|
||||
def presign_v4(
        *,
        method: str,
        url: SplitResult,
        region: str,
        credentials: Credentials,
        date: datetime,
        expires: int,
) -> SplitResult:
    """Do signature V4 of given presign request.

    Returns the URL with all X-Amz-* presign query parameters,
    including the computed X-Amz-Signature, appended to its query.
    """
    # Presign always targets the S3 service.
    scope = _get_scope(date, region, "s3")
    # This also rewrites the URL to carry the unsigned presign params.
    canonical_request_hash, url = _get_presign_canonical_request_hash(
        method=method,
        url=url,
        access_key=credentials.access_key,
        scope=scope,
        date=date,
        expires=expires,
    )
    string_to_sign = _get_string_to_sign(date, scope, canonical_request_hash)
    signing_key = _get_signing_key(credentials.secret_key, date, region, "s3")
    signature = _get_signature(signing_key, string_to_sign)

    # SplitResult is immutable; rebuild it with the signature appended
    # to the query string (index 3).
    parts = list(url)
    parts[3] = url.query + "&X-Amz-Signature=" + queryencode(signature)
    url = SplitResult(*parts)
    return url
|
||||
|
||||
|
||||
def get_credential_string(access_key: str, date: datetime, region: str) -> str:
    """Get credential string of given access key, date and region."""
    components = (
        access_key,
        time.to_signer_date(date),
        region,
        "s3",
        "aws4_request",
    )
    return "/".join(components)
|
||||
|
||||
|
||||
def post_presign_v4(
        data: str,
        secret_key: str,
        date: datetime,
        region: str,
) -> str:
    """Do signature V4 of given presign POST form-data (the policy)."""
    # POST policy signing always targets the S3 service.
    signing_key = _get_signing_key(secret_key, date, region, "s3")
    return _get_signature(signing_key, data)
|
||||
111
backend/venv/lib/python3.9/site-packages/minio/sse.py
Normal file
111
backend/venv/lib/python3.9/site-packages/minio/sse.py
Normal file
@@ -0,0 +1,111 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2018 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
minio.sse
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains core API parsers.
|
||||
|
||||
:copyright: (c) 2018 by MinIO, Inc.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import base64
|
||||
import json
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any, cast
|
||||
|
||||
|
||||
class Sse(ABC):
    """Abstract base for all server-side encryption types."""

    @abstractmethod
    def headers(self) -> dict[str, str]:
        """Return the request headers for this encryption type."""

    def tls_required(self) -> bool:  # pylint: disable=no-self-use
        """Tell whether TLS is mandatory for this encryption type."""
        return True

    def copy_headers(self) -> dict[str, str]:  # pylint: disable=no-self-use
        """Return the copy-source request headers; none by default."""
        return {}
|
||||
|
||||
|
||||
class SseCustomerKey(Sse):
    """Server-side encryption - customer key (SSE-C) type."""

    def __init__(self, key: bytes):
        # SSE-C mandates a raw 256-bit AES key.
        if len(key) != 32:
            raise ValueError(
                "SSE-C keys must be exactly 256 bits (32 bytes) long. "
                "Pass raw bytes, not the base64 encoded value.",
            )
        b64key = base64.b64encode(key).decode()
        # Imported lazily — presumably to avoid a circular import with
        # the helpers module; confirm before moving to module level.
        from .helpers import \
            md5sum_hash  # pylint: disable=import-outside-toplevel
        md5key = cast(str, md5sum_hash(key))
        # Headers sent when reading/writing an object with SSE-C.
        self._headers: dict[str, str] = {
            "X-Amz-Server-Side-Encryption-Customer-Algorithm": "AES256",
            "X-Amz-Server-Side-Encryption-Customer-Key": b64key,
            "X-Amz-Server-Side-Encryption-Customer-Key-MD5": md5key,
        }
        # Headers describing the copy source of a server-side copy.
        self._copy_headers: dict[str, str] = {
            "X-Amz-Copy-Source-Server-Side-Encryption-Customer-Algorithm":
                "AES256",
            "X-Amz-Copy-Source-Server-Side-Encryption-Customer-Key": b64key,
            "X-Amz-Copy-Source-Server-Side-Encryption-Customer-Key-MD5":
                md5key,
        }

    def headers(self) -> dict[str, str]:
        """Return a copy of the SSE-C request headers."""
        return self._headers.copy()

    def copy_headers(self) -> dict[str, str]:
        """Return a copy of the SSE-C copy-source request headers."""
        return self._copy_headers.copy()
|
||||
|
||||
|
||||
class SseKMS(Sse):
    """Server-side encryption - KMS type."""

    def __init__(self, key: str, context: dict[str, Any]):
        built = {
            "X-Amz-Server-Side-Encryption-Aws-Kms-Key-Id": key,
            "X-Amz-Server-Side-Encryption": "aws:kms"
        }
        if context:
            # The encryption context travels as base64-encoded JSON.
            raw = bytes(json.dumps(context), "utf-8")
            built["X-Amz-Server-Side-Encryption-Context"] = (
                base64.b64encode(raw).decode()
            )
        self._headers = built

    def headers(self) -> dict[str, str]:
        """Return a copy of the SSE-KMS request headers."""
        return self._headers.copy()
|
||||
|
||||
|
||||
class SseS3(Sse):
    """Server-side encryption - S3-managed keys (SSE-S3) type."""

    def headers(self) -> dict[str, str]:
        """Return the single SSE-S3 request header."""
        return {"X-Amz-Server-Side-Encryption": "AES256"}

    def tls_required(self) -> bool:
        """SSE-S3 sends no key material, so TLS is not mandatory."""
        return False
|
||||
101
backend/venv/lib/python3.9/site-packages/minio/sseconfig.py
Normal file
101
backend/venv/lib/python3.9/site-packages/minio/sseconfig.py
Normal file
@@ -0,0 +1,101 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutBucketEncryption and GetBucketEncryption APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABC
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .xml import Element, SubElement, find, findtext
|
||||
|
||||
AES256 = "AES256"
|
||||
AWS_KMS = "aws:kms"
|
||||
|
||||
A = TypeVar("A", bound="Rule")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Rule(ABC):
    """Server-side encryption rule. """

    # "AES256" (SSE-S3) or "aws:kms" (SSE-KMS).
    sse_algorithm: str
    # Optional KMS key id; only meaningful with the "aws:kms" algorithm.
    kms_master_key_id: Optional[str] = None

    @classmethod
    def new_sse_s3_rule(cls: Type[A]) -> A:
        """Create SSE-S3 rule."""
        return cls(sse_algorithm=AES256)

    @classmethod
    def new_sse_kms_rule(
            cls: Type[A],
            kms_master_key_id: Optional[str] = None,
    ) -> A:
        """Create new SSE-KMS rule."""
        return cls(sse_algorithm=AWS_KMS, kms_master_key_id=kms_master_key_id)

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        # <ApplyServerSideEncryptionByDefault> is mandatory (strict=True
        # raises ValueError when it is missing).
        element = cast(
            ET.Element,
            find(element, "ApplyServerSideEncryptionByDefault", True),
        )
        return cls(
            sse_algorithm=cast(str, findtext(element, "SSEAlgorithm", True)),
            kms_master_key_id=findtext(element, "KMSMasterKeyID"),
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML.

        Appends a <Rule> node to the given parent element and returns
        the new node; raises ValueError if no parent is given.
        """
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Rule")
        tag = SubElement(element, "ApplyServerSideEncryptionByDefault")
        SubElement(tag, "SSEAlgorithm", self.sse_algorithm)
        if self.kms_master_key_id is not None:
            SubElement(tag, "KMSMasterKeyID", self.kms_master_key_id)
        return element
|
||||
|
||||
|
||||
B = TypeVar("B", bound="SSEConfig")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SSEConfig:
    """server-side encryption configuration."""

    # The bucket's single default-encryption rule.
    rule: Rule

    def __post_init__(self):
        if not self.rule:
            raise ValueError("rule must be provided")

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from XML element."""
        # <Rule> is mandatory (strict=True raises when it is missing).
        element = cast(ET.Element, find(element, "Rule", True))
        return cls(Rule.fromxml(element))

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML.

        The passed element is ignored; a fresh
        <ServerSideEncryptionConfiguration> root is always created.
        """
        element = Element("ServerSideEncryptionConfiguration")
        self.rule.toxml(element)
        return element
|
||||
52
backend/venv/lib/python3.9/site-packages/minio/tagging.py
Normal file
52
backend/venv/lib/python3.9/site-packages/minio/tagging.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Tagging for bucket and object."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import Tags
|
||||
from .xml import Element, SubElement, find
|
||||
|
||||
A = TypeVar("A", bound="Tagging")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Tagging:
    """Tagging for buckets and objects."""

    # None represents an empty tag set.
    tags: Optional[Tags]

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        # <TagSet> is mandatory (strict=True), but it may contain no
        # <Tag> children, in which case tags is None.
        element = cast(ET.Element, find(element, "TagSet", True))
        tags = (
            None if find(element, "Tag") is None
            else Tags.fromxml(element)
        )
        return cls(tags=tags)

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML.

        The passed element is ignored; a fresh <Tagging> root is always
        created.
        """
        element = Element("Tagging")
        if self.tags:
            self.tags.toxml(SubElement(element, "TagSet"))
        return element
|
||||
129
backend/venv/lib/python3.9/site-packages/minio/time.py
Normal file
129
backend/venv/lib/python3.9/site-packages/minio/time.py
Normal file
@@ -0,0 +1,129 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Time formatter for S3 APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import time as ctime
|
||||
from datetime import datetime, timezone
|
||||
|
||||
try:
|
||||
from datetime import UTC # type: ignore[attr-defined]
|
||||
_UTC_IMPORTED = True
|
||||
except ImportError:
|
||||
_UTC_IMPORTED = False
|
||||
|
||||
from typing import Optional
|
||||
|
||||
_WEEK_DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
|
||||
_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct",
|
||||
"Nov", "Dec"]
|
||||
|
||||
|
||||
def _to_utc(value: datetime) -> datetime:
|
||||
"""Convert to UTC time if value is not naive."""
|
||||
return (
|
||||
value.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
if value.tzinfo else value
|
||||
)
|
||||
|
||||
|
||||
def from_iso8601utc(value: Optional[str]) -> Optional[datetime]:
    """Parse a UTC ISO-8601 formatted string into an aware datetime."""
    if value is None:
        return None

    # Accept timestamps with or without a fractional-seconds part.
    try:
        parsed = datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ")
    except ValueError:
        parsed = datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
    return parsed.replace(tzinfo=timezone.utc)
|
||||
|
||||
|
||||
def to_iso8601utc(value: Optional[datetime]) -> Optional[str]:
    """Format datetime into a UTC ISO-8601 string with millisecond
    precision."""
    if value is None:
        return None

    # Normalize aware values to naive UTC before formatting.
    if value.tzinfo:
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    millis = value.strftime("%f")[:3]
    return f"{value.strftime('%Y-%m-%dT%H:%M:%S')}.{millis}Z"
|
||||
|
||||
|
||||
def from_http_header(value: str) -> datetime:
    """Parse HTTP header date formatted string to datetime.

    Expects the fixed-width RFC 7231 IMF-fixdate form, e.g.
    "Sun, 06 Nov 1994 08:49:37 GMT"; raises ValueError otherwise.
    """
    # IMF-fixdate is always exactly 29 characters long.
    if len(value) != 29:
        raise ValueError(
            f"time data {value} does not match HTTP header format")

    # Validate and extract the weekday name ("Mon".."Sun").
    if value[0:3] not in _WEEK_DAYS or value[3] != ",":
        raise ValueError(
            f"time data {value} does not match HTTP header format")
    weekday = _WEEK_DAYS.index(value[0:3])

    # Two-digit day surrounded by single spaces.
    if value[4] != " " or value[7] != " ":
        raise ValueError(
            f"time data {value} does not match HTTP header format"
        )
    day = int(value[5:7])

    # Month names are matched manually — presumably because strptime's
    # %b directive is locale-dependent; confirm before simplifying.
    if value[8:11] not in _MONTHS:
        raise ValueError(
            f"time data {value} does not match HTTP header format")
    month = _MONTHS.index(value[8:11])

    # Parse the remaining " YYYY HH:MM:SS GMT" part, then merge in the
    # manually-parsed day and month (index is 0-based, hence month+1).
    time = datetime.strptime(value[11:], " %Y %H:%M:%S GMT")
    time = time.replace(day=day, month=month+1, tzinfo=timezone.utc)

    # Reject values whose weekday name does not match the actual date.
    if weekday != time.weekday():
        raise ValueError(
            f"time data {value} does not match HTTP header format")

    return time
|
||||
|
||||
|
||||
def to_http_header(value: datetime) -> str:
    """Format datetime into an HTTP header (RFC 7231 IMF-fixdate)
    string."""
    value = _to_utc(value)
    # Weekday and month names come from module tables rather than
    # strftime, keeping the output locale-independent.
    return "".join([
        _WEEK_DAYS[value.weekday()],
        ",",
        value.strftime(" %d "),
        _MONTHS[value.month - 1],
        value.strftime(" %Y %H:%M:%S GMT"),
    ])
|
||||
|
||||
|
||||
def to_amz_date(value: datetime) -> str:
    """Format datetime into AMZ date formatted string."""
    # Normalize aware values to naive UTC before formatting.
    if value.tzinfo:
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return value.strftime("%Y%m%dT%H%M%SZ")
|
||||
|
||||
|
||||
def utcnow() -> datetime:
    """Return the current time as a timezone-aware UTC datetime."""
    # datetime.now(tz) exists on every supported Python version and
    # yields the same aware value as both original branches did.
    return datetime.now(timezone.utc)
|
||||
|
||||
|
||||
def to_signer_date(value: datetime) -> str:
    """Format datetime into SignatureV4 date formatted string."""
    # Normalize aware values to naive UTC before formatting.
    if value.tzinfo:
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return value.strftime("%Y%m%d")
|
||||
|
||||
|
||||
def to_float(value: datetime) -> float:
    """Convert datetime into float value (seconds since the epoch).

    Uses mktime, so the value is interpreted in local time.
    """
    seconds = ctime.mktime(value.timetuple())
    return seconds + value.microsecond * 1e-6
|
||||
@@ -0,0 +1,92 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutBucketVersioning and GetBucketVersioning APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional, Type, TypeVar, Union, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import DISABLED, ENABLED
|
||||
from .xml import Element, SubElement, findall, findtext
|
||||
|
||||
OFF = "Off"
|
||||
SUSPENDED = "Suspended"
|
||||
|
||||
A = TypeVar("A", bound="VersioningConfig")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class VersioningConfig:
    """Versioning configuration."""

    # ENABLED, SUSPENDED, or None (never configured; reported as "Off").
    status: Optional[str] = None
    # MFA delete state: ENABLED, DISABLED, or None (omitted from XML).
    mfa_delete: Optional[str] = None
    # Prefixes excluded from versioning (MinIO extension).
    excluded_prefixes: Optional[list[str]] = None
    # Whether folders are excluded from versioning (MinIO extension).
    exclude_folders: bool = False

    def __post_init__(self):
        # The dataclass is frozen, so this is the only place the field
        # combinations can be validated.
        if self.status is not None and self.status not in [ENABLED, SUSPENDED]:
            raise ValueError(f"status must be {ENABLED} or {SUSPENDED}")
        if (
                self.mfa_delete is not None and
                self.mfa_delete not in [ENABLED, DISABLED]
        ):
            raise ValueError(f"MFA delete must be {ENABLED} or {DISABLED}")

    @property
    def status_string(self) -> str:
        """Convert status to status string. """
        return OFF if self.status is None else self.status

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        status = findtext(element, "Status")
        mfa_delete = findtext(element, "MFADelete")
        # NOTE(review): this produces an empty list (not None) when no
        # <ExcludedPrefixes> elements exist — confirm callers treat []
        # and None identically.
        excluded_prefixes = [
            prefix.text
            for prefix in findall(
                element,
                "ExcludedPrefixes/Prefix",
            )
        ]
        exclude_folders = findtext(element, "ExcludeFolders") == "true"
        return cls(
            status=status,
            mfa_delete=mfa_delete,
            excluded_prefixes=cast(Union[List[str], None], excluded_prefixes),
            exclude_folders=exclude_folders,
        )

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert to XML.

        The passed element is ignored; a fresh <VersioningConfiguration>
        root is always created. Unset (falsy) fields are omitted.
        """
        element = Element("VersioningConfiguration")
        if self.status:
            SubElement(element, "Status", self.status)
        if self.mfa_delete:
            SubElement(element, "MFADelete", self.mfa_delete)
        # Each prefix gets its own <ExcludedPrefixes><Prefix> wrapper.
        for prefix in self.excluded_prefixes or []:
            SubElement(
                SubElement(element, "ExcludedPrefixes"),
                "Prefix",
                prefix,
            )
        if self.exclude_folders:
            SubElement(element, "ExcludeFolders", "true")
        return element
|
||||
136
backend/venv/lib/python3.9/site-packages/minio/xml.py
Normal file
136
backend/venv/lib/python3.9/site-packages/minio/xml.py
Normal file
@@ -0,0 +1,136 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""XML utility module."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import io
|
||||
from typing import Optional, Type, TypeVar
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from typing_extensions import Protocol
|
||||
|
||||
_S3_NAMESPACE = "http://s3.amazonaws.com/doc/2006-03-01/"
|
||||
|
||||
|
||||
def Element(  # pylint: disable=invalid-name
        tag: str,
        namespace: str = _S3_NAMESPACE,
) -> ET.Element:
    """Create an ElementTree.Element carrying an xmlns attribute."""
    attrib = {"xmlns": namespace} if namespace else {}
    return ET.Element(tag, attrib)
|
||||
|
||||
|
||||
def SubElement(  # pylint: disable=invalid-name
        parent: ET.Element, tag: str, text: Optional[str] = None
) -> ET.Element:
    """Create a child element under parent, optionally setting its text."""
    child = ET.SubElement(parent, tag)
    if text is not None:
        child.text = text
    return child
|
||||
|
||||
|
||||
def _get_namespace(element: ET.Element) -> str:
|
||||
"""Exact namespace if found."""
|
||||
start = element.tag.find("{")
|
||||
if start < 0:
|
||||
return ""
|
||||
start += 1
|
||||
end = element.tag.find("}")
|
||||
if end < 0:
|
||||
return ""
|
||||
return element.tag[start:end]
|
||||
|
||||
|
||||
def findall(element: ET.Element, name: str) -> list[ET.Element]:
    """Namespace aware ElementTree.Element.findall()."""
    ns = _get_namespace(element)
    if not ns:
        return element.findall(name, {})
    # Prefix every path segment with the bound "ns" prefix.
    prefixed = "/".join("ns:" + part for part in name.split("/"))
    return element.findall(prefixed, {"ns": ns})
|
||||
|
||||
|
||||
def find(
        element: ET.Element,
        name: str,
        strict: bool = False,
) -> Optional[ET.Element]:
    """Namespace aware ElementTree.Element.find().

    With strict=True, raises ValueError when the element is missing.
    """
    namespace = _get_namespace(element)
    if namespace:
        result = element.find("ns:" + name, {"ns": namespace})
    else:
        result = element.find(name, {})
    if strict and result is None:
        raise ValueError(f"XML element <{name}> not found")
    return result
|
||||
|
||||
|
||||
def findtext(
        element: ET.Element,
        name: str,
        strict: bool = False,
) -> Optional[str]:
    """Namespace aware ElementTree.Element.findtext().

    Returns None when the element is missing (unless strict=True, which
    raises ValueError), and "" when it exists but has no text.
    """
    child = find(element, name, strict=strict)
    if child is None:
        return None
    return child.text or ""
|
||||
|
||||
|
||||
A = TypeVar("A")
|
||||
|
||||
|
||||
class FromXmlType(Protocol):
    """Typing stub for any class offering a `fromxml` alternate
    constructor; used by unmarshal()."""

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create python object with values from XML element."""
|
||||
|
||||
|
||||
B = TypeVar("B", bound=FromXmlType)
|
||||
|
||||
|
||||
def unmarshal(cls: Type[B], xmlstring: str) -> B:
    """Unmarshal given XML string to an object of passed class."""
    root = ET.fromstring(xmlstring)
    return cls.fromxml(root)
|
||||
|
||||
|
||||
def getbytes(element: ET.Element) -> bytes:
    """Serialize an ElementTree.Element to bytes (no XML declaration)."""
    buffer = io.BytesIO()
    tree = ET.ElementTree(element)
    tree.write(buffer, encoding=None, xml_declaration=False)
    return buffer.getvalue()
|
||||
|
||||
|
||||
class ToXmlType(Protocol):
    """Typing stub for any class offering a `toxml` serializer; used by
    marshal()."""

    def toxml(self, element: Optional[ET.Element]) -> ET.Element:
        """Convert python object to ElementTree.Element."""
|
||||
|
||||
|
||||
def marshal(obj: ToXmlType) -> bytes:
    """Serialize the given object to XML bytes via its `toxml` method."""
    element = obj.toxml(None)
    return getbytes(element)
|
||||
Reference in New Issue
Block a user