v1 milestone
This commit is contained in:
45
venv/lib/python3.12/site-packages/minio/__init__.py
Normal file
45
venv/lib/python3.12/site-packages/minio/__init__.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2015, 2016, 2017 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
minio - MinIO Python SDK for Amazon S3 Compatible Cloud Storage
|
||||
|
||||
>>> from minio import Minio
|
||||
>>> client = Minio(
|
||||
... "play.min.io",
|
||||
... access_key="Q3AM3UQ867SPQQA43P2F",
|
||||
... secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG",
|
||||
... )
|
||||
>>> buckets = client.list_buckets()
|
||||
>>> for bucket in buckets:
|
||||
... print(bucket.name, bucket.creation_date)
|
||||
|
||||
:copyright: (C) 2015-2020 MinIO, Inc.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
__title__ = "minio-py"
|
||||
__author__ = "MinIO, Inc."
|
||||
__version__ = "7.2.5"
|
||||
__license__ = "Apache 2.0"
|
||||
__copyright__ = "Copyright 2015, 2016, 2017, 2018, 2019, 2020 MinIO, Inc."
|
||||
|
||||
# pylint: disable=unused-import,useless-import-alias
|
||||
from .api import Minio as Minio
|
||||
from .error import InvalidResponseError as InvalidResponseError
|
||||
from .error import S3Error as S3Error
|
||||
from .error import ServerError as ServerError
|
||||
from .minioadmin import MinioAdmin as MinioAdmin
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
3224
venv/lib/python3.12/site-packages/minio/api.py
Normal file
3224
venv/lib/python3.12/site-packages/minio/api.py
Normal file
File diff suppressed because it is too large
Load Diff
574
venv/lib/python3.12/site-packages/minio/commonconfig.py
Normal file
574
venv/lib/python3.12/site-packages/minio/commonconfig.py
Normal file
@@ -0,0 +1,574 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Common request/response configuration of S3 APIs."""
|
||||
# pylint: disable=invalid-name
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABCMeta
|
||||
from datetime import datetime
|
||||
from typing import Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .error import MinioException
|
||||
from .helpers import quote
|
||||
from .sse import SseCustomerKey
|
||||
from .time import to_http_header
|
||||
from .xml import SubElement, find, findall, findtext
|
||||
|
||||
COPY = "COPY"
|
||||
REPLACE = "REPLACE"
|
||||
DISABLED = "Disabled"
|
||||
ENABLED = "Enabled"
|
||||
GOVERNANCE = "GOVERNANCE"
|
||||
COMPLIANCE = "COMPLIANCE"
|
||||
_MAX_KEY_LENGTH = 128
|
||||
_MAX_VALUE_LENGTH = 256
|
||||
_MAX_OBJECT_TAG_COUNT = 10
|
||||
_MAX_TAG_COUNT = 50
|
||||
|
||||
A = TypeVar("A", bound="Tags")
|
||||
|
||||
|
||||
class Tags(dict):
|
||||
"""dict extended to bucket/object tags."""
|
||||
|
||||
def __init__(self, for_object: bool = False):
|
||||
self._for_object = for_object
|
||||
super().__init__()
|
||||
|
||||
def __setitem__(self, key: str, value: str):
|
||||
limit = _MAX_OBJECT_TAG_COUNT if self._for_object else _MAX_TAG_COUNT
|
||||
if len(self) == limit:
|
||||
tag_type = "object" if self._for_object else "bucket"
|
||||
raise ValueError(f"only {limit} {tag_type} tags are allowed")
|
||||
if not key or len(key) > _MAX_KEY_LENGTH or "&" in key:
|
||||
raise ValueError(f"invalid tag key '{key}'")
|
||||
if value is None or len(value) > _MAX_VALUE_LENGTH or "&" in value:
|
||||
raise ValueError(f"invalid tag value '{value}'")
|
||||
super().__setitem__(key, value)
|
||||
|
||||
@classmethod
|
||||
def new_bucket_tags(cls: Type[A]) -> A:
|
||||
"""Create new bucket tags."""
|
||||
return cls()
|
||||
|
||||
@classmethod
|
||||
def new_object_tags(cls: Type[A]) -> A:
|
||||
"""Create new object tags."""
|
||||
return cls(True)
|
||||
|
||||
@classmethod
|
||||
def fromxml(cls: Type[A], element: ET.Element) -> A:
|
||||
"""Create new object with values from XML element."""
|
||||
elements = findall(element, "Tag")
|
||||
obj = cls()
|
||||
for tag in elements:
|
||||
key = cast(str, findtext(tag, "Key", True))
|
||||
value = cast(str, findtext(tag, "Value", True))
|
||||
obj[key] = value
|
||||
return obj
|
||||
|
||||
def toxml(self, element: ET.Element | None) -> ET.Element:
|
||||
"""Convert to XML."""
|
||||
if element is None:
|
||||
raise ValueError("element must be provided")
|
||||
for key, value in self.items():
|
||||
tag = SubElement(element, "Tag")
|
||||
SubElement(tag, "Key", key)
|
||||
SubElement(tag, "Value", value)
|
||||
return element
|
||||
|
||||
|
||||
B = TypeVar("B", bound="Tag")
|
||||
|
||||
|
||||
class Tag:
|
||||
"""Tag."""
|
||||
|
||||
def __init__(self, key: str, value: str):
|
||||
if not key:
|
||||
raise ValueError("key must be provided")
|
||||
if value is None:
|
||||
raise ValueError("value must be provided")
|
||||
self._key = key
|
||||
self._value = value
|
||||
|
||||
@property
|
||||
def key(self) -> str:
|
||||
"""Get key."""
|
||||
return self._key
|
||||
|
||||
@property
|
||||
def value(self) -> str:
|
||||
"""Get value."""
|
||||
return self._value
|
||||
|
||||
@classmethod
|
||||
def fromxml(cls: Type[B], element: ET.Element) -> B:
|
||||
"""Create new object with values from XML element."""
|
||||
element = cast(ET.Element, find(element, "Tag", True))
|
||||
key = cast(str, findtext(element, "Key", True))
|
||||
value = cast(str, findtext(element, "Value", True))
|
||||
return cls(key, value)
|
||||
|
||||
def toxml(self, element: ET.Element | None) -> ET.Element:
|
||||
"""Convert to XML."""
|
||||
if element is None:
|
||||
raise ValueError("element must be provided")
|
||||
element = SubElement(element, "Tag")
|
||||
SubElement(element, "Key", self._key)
|
||||
SubElement(element, "Value", self._value)
|
||||
return element
|
||||
|
||||
|
||||
C = TypeVar("C", bound="AndOperator")
|
||||
|
||||
|
||||
class AndOperator:
|
||||
"""AND operator."""
|
||||
|
||||
def __init__(self, prefix: str | None = None, tags: Tags | None = None):
|
||||
if prefix is None and not tags:
|
||||
raise ValueError("at least prefix or tags must be provided")
|
||||
self._prefix = prefix
|
||||
self._tags = tags
|
||||
|
||||
@property
|
||||
def prefix(self) -> str | None:
|
||||
"""Get prefix."""
|
||||
return self._prefix
|
||||
|
||||
@property
|
||||
def tags(self) -> Tags | None:
|
||||
"""Get tags."""
|
||||
return self._tags
|
||||
|
||||
@classmethod
|
||||
def fromxml(cls: Type[C], element: ET.Element) -> C:
|
||||
"""Create new object with values from XML element."""
|
||||
element = cast(ET.Element, find(element, "And", True))
|
||||
prefix = findtext(element, "Prefix")
|
||||
tags = (
|
||||
None if find(element, "Tag") is None
|
||||
else Tags.fromxml(element)
|
||||
)
|
||||
return cls(prefix, tags)
|
||||
|
||||
def toxml(self, element: ET.Element | None) -> ET.Element:
|
||||
"""Convert to XML."""
|
||||
if element is None:
|
||||
raise ValueError("element must be provided")
|
||||
element = SubElement(element, "And")
|
||||
if self._prefix is not None:
|
||||
SubElement(element, "Prefix", self._prefix)
|
||||
if self._tags is not None:
|
||||
self._tags.toxml(element)
|
||||
return element
|
||||
|
||||
|
||||
D = TypeVar("D", bound="Filter")
|
||||
|
||||
|
||||
class Filter:
|
||||
"""Lifecycle rule filter."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
and_operator: AndOperator | None = None,
|
||||
prefix: str | None = None,
|
||||
tag: Tag | None = None,
|
||||
):
|
||||
valid = (
|
||||
(and_operator is not None) ^
|
||||
(prefix is not None) ^
|
||||
(tag is not None)
|
||||
)
|
||||
if not valid:
|
||||
raise ValueError("only one of and, prefix or tag must be provided")
|
||||
self._and_operator = and_operator
|
||||
self._prefix = prefix
|
||||
self._tag = tag
|
||||
|
||||
@property
|
||||
def and_operator(self) -> AndOperator | None:
|
||||
"""Get AND operator."""
|
||||
return self._and_operator
|
||||
|
||||
@property
|
||||
def prefix(self) -> str | None:
|
||||
"""Get prefix."""
|
||||
return self._prefix
|
||||
|
||||
@property
|
||||
def tag(self) -> Tag | None:
|
||||
"""Get tag."""
|
||||
return self._tag
|
||||
|
||||
@classmethod
|
||||
def fromxml(cls: Type[D], element: ET.Element) -> D:
|
||||
"""Create new object with values from XML element."""
|
||||
element = cast(ET.Element, find(element, "Filter", True))
|
||||
and_operator = (
|
||||
None if find(element, "And") is None
|
||||
else AndOperator.fromxml(element)
|
||||
)
|
||||
prefix = findtext(element, "Prefix")
|
||||
tag = None if find(element, "Tag") is None else Tag.fromxml(element)
|
||||
return cls(and_operator, prefix, tag)
|
||||
|
||||
def toxml(self, element: ET.Element | None) -> ET.Element:
|
||||
"""Convert to XML."""
|
||||
if element is None:
|
||||
raise ValueError("element must be provided")
|
||||
element = SubElement(element, "Filter")
|
||||
if self._and_operator:
|
||||
self._and_operator.toxml(element)
|
||||
if self._prefix is not None:
|
||||
SubElement(element, "Prefix", self._prefix)
|
||||
if self._tag is not None:
|
||||
self._tag.toxml(element)
|
||||
return element
|
||||
|
||||
|
||||
class BaseRule:
|
||||
"""Base rule class for Replication and Lifecycle."""
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
rule_filter: Filter | None = None,
|
||||
rule_id: str | None = None,
|
||||
):
|
||||
if rule_id is not None:
|
||||
rule_id = rule_id.strip()
|
||||
if not rule_id:
|
||||
raise ValueError("rule ID must be non-empty string")
|
||||
if len(rule_id) > 255:
|
||||
raise ValueError("rule ID must not exceed 255 characters")
|
||||
self._rule_filter = rule_filter
|
||||
self._rule_id = rule_id
|
||||
|
||||
@property
|
||||
def rule_filter(self) -> Filter | None:
|
||||
"""Get replication rule filter."""
|
||||
return self._rule_filter
|
||||
|
||||
@property
|
||||
def rule_id(self) -> str | None:
|
||||
"""Get rule ID."""
|
||||
return self._rule_id
|
||||
|
||||
@staticmethod
|
||||
def parsexml(element: ET.Element) -> tuple[Filter | None, str | None]:
|
||||
"""Parse XML and return filter and ID."""
|
||||
return (
|
||||
None if find(element, "Filter") is None
|
||||
else Filter.fromxml(element)
|
||||
), findtext(element, "ID")
|
||||
|
||||
def toxml(self, element: ET.Element | None) -> ET.Element:
|
||||
"""Convert to XML."""
|
||||
if element is None:
|
||||
raise ValueError("element must be provided")
|
||||
if self._rule_filter:
|
||||
self._rule_filter.toxml(element)
|
||||
if self._rule_id is not None:
|
||||
SubElement(element, "ID", self._rule_id)
|
||||
return element
|
||||
|
||||
|
||||
def check_status(status: str):
|
||||
"""Validate status."""
|
||||
if status not in [ENABLED, DISABLED]:
|
||||
raise ValueError("status must be 'Enabled' or 'Disabled'")
|
||||
|
||||
|
||||
class ObjectConditionalReadArgs:
|
||||
"""Base argument class holds condition properties for reading object."""
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
bucket_name: str,
|
||||
object_name: str,
|
||||
region: str | None = None,
|
||||
version_id: str | None = None,
|
||||
ssec: SseCustomerKey | None = None,
|
||||
offset: int | None = None,
|
||||
length: int | None = None,
|
||||
match_etag: str | None = None,
|
||||
not_match_etag: str | None = None,
|
||||
modified_since: str | None = None,
|
||||
unmodified_since: str | None = None,
|
||||
):
|
||||
if ssec is not None and not isinstance(ssec, SseCustomerKey):
|
||||
raise ValueError("ssec must be SseCustomerKey type")
|
||||
if offset is not None and offset < 0:
|
||||
raise ValueError("offset should be zero or greater")
|
||||
if length is not None and length <= 0:
|
||||
raise ValueError("length should be greater than zero")
|
||||
if match_etag is not None and match_etag == "":
|
||||
raise ValueError("match_etag must not be empty")
|
||||
if not_match_etag is not None and not_match_etag == "":
|
||||
raise ValueError("not_match_etag must not be empty")
|
||||
if (
|
||||
modified_since is not None and
|
||||
not isinstance(modified_since, datetime)
|
||||
):
|
||||
raise ValueError("modified_since must be datetime type")
|
||||
if (
|
||||
unmodified_since is not None and
|
||||
not isinstance(unmodified_since, datetime)
|
||||
):
|
||||
raise ValueError("unmodified_since must be datetime type")
|
||||
|
||||
self._bucket_name = bucket_name
|
||||
self._object_name = object_name
|
||||
self._region = region
|
||||
self._version_id = version_id
|
||||
self._ssec = ssec
|
||||
self._offset = offset
|
||||
self._length = length
|
||||
self._match_etag = match_etag
|
||||
self._not_match_etag = not_match_etag
|
||||
self._modified_since = modified_since
|
||||
self._unmodified_since = unmodified_since
|
||||
|
||||
@property
|
||||
def bucket_name(self) -> str:
|
||||
"""Get bucket name."""
|
||||
return self._bucket_name
|
||||
|
||||
@property
|
||||
def object_name(self) -> str:
|
||||
"""Get object name."""
|
||||
return self._object_name
|
||||
|
||||
@property
|
||||
def region(self) -> str | None:
|
||||
"""Get region."""
|
||||
return self._region
|
||||
|
||||
@property
|
||||
def version_id(self) -> str | None:
|
||||
"""Get version ID."""
|
||||
return self._version_id
|
||||
|
||||
@property
|
||||
def ssec(self) -> SseCustomerKey | None:
|
||||
"""Get SSE-C."""
|
||||
return self._ssec
|
||||
|
||||
@property
|
||||
def offset(self) -> int | None:
|
||||
"""Get offset."""
|
||||
return self._offset
|
||||
|
||||
@property
|
||||
def length(self) -> int | None:
|
||||
"""Get length."""
|
||||
return self._length
|
||||
|
||||
@property
|
||||
def match_etag(self) -> str | None:
|
||||
"""Get match ETag condition."""
|
||||
return self._match_etag
|
||||
|
||||
@property
|
||||
def not_match_etag(self) -> str | None:
|
||||
"""Get not-match ETag condition."""
|
||||
return self._not_match_etag
|
||||
|
||||
@property
|
||||
def modified_since(self) -> str | None:
|
||||
"""Get modified since condition."""
|
||||
return self._modified_since
|
||||
|
||||
@property
|
||||
def unmodified_since(self) -> str | None:
|
||||
"""Get unmodified since condition."""
|
||||
return self._unmodified_since
|
||||
|
||||
def gen_copy_headers(self) -> dict[str, str]:
|
||||
"""Generate copy source headers."""
|
||||
copy_source = quote("/" + self._bucket_name + "/" + self._object_name)
|
||||
if self._version_id:
|
||||
copy_source += "?versionId=" + quote(self._version_id)
|
||||
|
||||
headers = {"x-amz-copy-source": copy_source}
|
||||
if self._ssec:
|
||||
headers.update(self._ssec.copy_headers())
|
||||
if self._match_etag:
|
||||
headers["x-amz-copy-source-if-match"] = self._match_etag
|
||||
if self._not_match_etag:
|
||||
headers["x-amz-copy-source-if-none-match"] = self._not_match_etag
|
||||
if self._modified_since:
|
||||
headers["x-amz-copy-source-if-modified-since"] = (
|
||||
to_http_header(self._modified_since)
|
||||
)
|
||||
if self._unmodified_since:
|
||||
headers["x-amz-copy-source-if-unmodified-since"] = (
|
||||
to_http_header(self._unmodified_since)
|
||||
)
|
||||
return headers
|
||||
|
||||
|
||||
E = TypeVar("E", bound="CopySource")
|
||||
|
||||
|
||||
class CopySource(ObjectConditionalReadArgs):
|
||||
"""A source object definition for copy_object method."""
|
||||
@classmethod
|
||||
def of(cls: Type[E], src: ObjectConditionalReadArgs) -> E:
|
||||
"""Create CopySource from another source."""
|
||||
return cls(
|
||||
src.bucket_name, src.object_name, src.region, src.version_id,
|
||||
src.ssec, src.offset, src.length, src.match_etag,
|
||||
src.not_match_etag, src.modified_since, src.unmodified_since,
|
||||
)
|
||||
|
||||
|
||||
F = TypeVar("F", bound="ComposeSource")
|
||||
|
||||
|
||||
class ComposeSource(ObjectConditionalReadArgs):
|
||||
"""A source object definition for compose_object method."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
bucket_name: str,
|
||||
object_name: str,
|
||||
region: str | None = None,
|
||||
version_id: str | None = None,
|
||||
ssec: SseCustomerKey | None = None,
|
||||
offset: int | None = None,
|
||||
length: int | None = None,
|
||||
match_etag: str | None = None,
|
||||
not_match_etag: str | None = None,
|
||||
modified_since: str | None = None,
|
||||
unmodified_since: str | None = None,
|
||||
):
|
||||
super().__init__(
|
||||
bucket_name, object_name, region, version_id, ssec, offset, length,
|
||||
match_etag, not_match_etag, modified_since, unmodified_since,
|
||||
)
|
||||
self._object_size: int | None = None
|
||||
self._headers: dict[str, str] | None = None
|
||||
|
||||
def _validate_size(self, object_size: int):
|
||||
"""Validate object size with offset and length."""
|
||||
def make_error(name, value):
|
||||
ver = ("?versionId="+self._version_id) if self._version_id else ""
|
||||
return ValueError(
|
||||
f"Source {self._bucket_name}/{self._object_name}{ver}: "
|
||||
f"{name} {value} is beyond object size {object_size}"
|
||||
)
|
||||
|
||||
if self._offset is not None and self._offset >= object_size:
|
||||
raise make_error("offset", self._offset)
|
||||
if self._length is not None:
|
||||
if self._length > object_size:
|
||||
raise make_error("length", self._length)
|
||||
offset = self._offset or 0
|
||||
if offset+self._length > object_size:
|
||||
raise make_error("compose size", offset+self._length)
|
||||
|
||||
def build_headers(self, object_size: int, etag: str):
|
||||
"""Build headers."""
|
||||
self._validate_size(object_size)
|
||||
self._object_size = object_size
|
||||
headers = self.gen_copy_headers()
|
||||
headers["x-amz-copy-source-if-match"] = self._match_etag or etag
|
||||
self._headers = headers
|
||||
|
||||
@property
|
||||
def object_size(self) -> int | None:
|
||||
"""Get object size."""
|
||||
if self._object_size is None:
|
||||
raise MinioException(
|
||||
"build_headers() must be called prior to "
|
||||
"this method invocation",
|
||||
)
|
||||
return self._object_size
|
||||
|
||||
@property
|
||||
def headers(self) -> dict[str, str]:
|
||||
"""Get headers."""
|
||||
if self._headers is None:
|
||||
raise MinioException(
|
||||
"build_headers() must be called prior to "
|
||||
"this method invocation",
|
||||
)
|
||||
return self._headers.copy()
|
||||
|
||||
@classmethod
|
||||
def of(cls: Type[F], src: ObjectConditionalReadArgs) -> F:
|
||||
"""Create ComposeSource from another source."""
|
||||
return cls(
|
||||
src.bucket_name, src.object_name, src.region, src.version_id,
|
||||
src.ssec, src.offset, src.length, src.match_etag,
|
||||
src.not_match_etag, src.modified_since, src.unmodified_since,
|
||||
)
|
||||
|
||||
|
||||
class SnowballObject:
|
||||
"""A source object definition for upload_snowball_objects method."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
object_name: str,
|
||||
filename: str | None = None,
|
||||
data: bytes | None = None,
|
||||
length: int | None = None,
|
||||
mod_time: datetime | None = None,
|
||||
):
|
||||
self._object_name = object_name
|
||||
if (filename is not None) ^ (data is not None):
|
||||
self._filename = filename
|
||||
self._data = data
|
||||
self._length = length
|
||||
else:
|
||||
raise ValueError("only one of filename or data must be provided")
|
||||
if mod_time is not None and not isinstance(mod_time, datetime):
|
||||
raise ValueError("mod_time must be datetime type")
|
||||
self._mod_time = mod_time
|
||||
|
||||
@property
|
||||
def object_name(self) -> str:
|
||||
"""Get object name."""
|
||||
return self._object_name
|
||||
|
||||
@property
|
||||
def filename(self) -> str | None:
|
||||
"""Get filename."""
|
||||
return self._filename
|
||||
|
||||
@property
|
||||
def data(self) -> bytes | None:
|
||||
"""Get data."""
|
||||
return self._data
|
||||
|
||||
@property
|
||||
def length(self) -> int | None:
|
||||
"""Get length."""
|
||||
return self._length
|
||||
|
||||
@property
|
||||
def mod_time(self) -> datetime | None:
|
||||
"""Get modification time."""
|
||||
return self._mod_time
|
||||
@@ -0,0 +1,26 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Credential module."""
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from .credentials import Credentials
|
||||
from .providers import (AssumeRoleProvider, AWSConfigProvider,
|
||||
CertificateIdentityProvider, ChainedProvider,
|
||||
ClientGrantsProvider, EnvAWSProvider, EnvMinioProvider,
|
||||
IamAwsProvider, LdapIdentityProvider,
|
||||
MinioClientConfigProvider, Provider, StaticProvider,
|
||||
WebIdentityProvider)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,75 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Credential definitions to access S3 service."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
|
||||
class Credentials:
|
||||
"""
|
||||
Represents credentials access key, secret key and session token.
|
||||
"""
|
||||
|
||||
_access_key: str
|
||||
_secret_key: str
|
||||
_session_token: str | None
|
||||
_expiration: datetime | None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
access_key: str,
|
||||
secret_key: str,
|
||||
session_token: str | None = None,
|
||||
expiration: datetime | None = None,
|
||||
):
|
||||
if not access_key:
|
||||
raise ValueError("Access key must not be empty")
|
||||
|
||||
if not secret_key:
|
||||
raise ValueError("Secret key must not be empty")
|
||||
|
||||
self._access_key = access_key
|
||||
self._secret_key = secret_key
|
||||
self._session_token = session_token
|
||||
if expiration and expiration.tzinfo:
|
||||
expiration = (
|
||||
expiration.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
)
|
||||
self._expiration = expiration
|
||||
|
||||
@property
|
||||
def access_key(self) -> str:
|
||||
"""Get access key."""
|
||||
return self._access_key
|
||||
|
||||
@property
|
||||
def secret_key(self) -> str:
|
||||
"""Get secret key."""
|
||||
return self._secret_key
|
||||
|
||||
@property
|
||||
def session_token(self) -> str | None:
|
||||
"""Get session token."""
|
||||
return self._session_token
|
||||
|
||||
def is_expired(self) -> bool:
|
||||
"""Check whether this credentials expired or not."""
|
||||
return (
|
||||
self._expiration < (datetime.utcnow() + timedelta(seconds=10))
|
||||
if self._expiration else False
|
||||
)
|
||||
723
venv/lib/python3.12/site-packages/minio/credentials/providers.py
Normal file
723
venv/lib/python3.12/site-packages/minio/credentials/providers.py
Normal file
@@ -0,0 +1,723 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Credential providers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import configparser
|
||||
import ipaddress
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
from typing import Callable, cast
|
||||
from urllib.parse import urlencode, urlsplit
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
import certifi
|
||||
from urllib3.poolmanager import PoolManager
|
||||
|
||||
try:
|
||||
from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from urllib3.response import HTTPResponse as BaseHTTPResponse
|
||||
|
||||
from urllib3.util import Retry, parse_url
|
||||
|
||||
from minio.helpers import sha256_hash
|
||||
from minio.signer import sign_v4_sts
|
||||
from minio.time import from_iso8601utc, to_amz_date, utcnow
|
||||
from minio.xml import find, findtext
|
||||
|
||||
from .credentials import Credentials
|
||||
|
||||
_MIN_DURATION_SECONDS = int(timedelta(minutes=15).total_seconds())
|
||||
_MAX_DURATION_SECONDS = int(timedelta(days=7).total_seconds())
|
||||
_DEFAULT_DURATION_SECONDS = int(timedelta(hours=1).total_seconds())
|
||||
|
||||
|
||||
def _parse_credentials(data: str, name: str) -> Credentials:
|
||||
"""Parse data containing credentials XML."""
|
||||
element = ET.fromstring(data)
|
||||
element = cast(ET.Element, find(element, name, True))
|
||||
element = cast(ET.Element, find(element, "Credentials", True))
|
||||
expiration = from_iso8601utc(findtext(element, "Expiration", True))
|
||||
return Credentials(
|
||||
cast(str, findtext(element, "AccessKeyId", True)),
|
||||
cast(str, findtext(element, "SecretAccessKey", True)),
|
||||
findtext(element, "SessionToken", True),
|
||||
expiration,
|
||||
)
|
||||
|
||||
|
||||
def _urlopen(
|
||||
http_client: PoolManager,
|
||||
method: str,
|
||||
url: str,
|
||||
body: str | bytes | None = None,
|
||||
headers: dict[str, str | list[str] | tuple[str]] | None = None,
|
||||
) -> BaseHTTPResponse:
|
||||
"""Wrapper of urlopen() handles HTTP status code."""
|
||||
res = http_client.urlopen(method, url, body=body, headers=headers)
|
||||
if res.status not in [200, 204, 206]:
|
||||
raise ValueError(f"{url} failed with HTTP status code {res.status}")
|
||||
return res
|
||||
|
||||
|
||||
def _user_home_dir() -> str:
|
||||
"""Return current user home folder."""
|
||||
return (
|
||||
os.environ.get("HOME") or
|
||||
os.environ.get("UserProfile") or
|
||||
str(Path.home())
|
||||
)
|
||||
|
||||
|
||||
class Provider: # pylint: disable=too-few-public-methods
|
||||
"""Credential retriever."""
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
@abstractmethod
|
||||
def retrieve(self) -> Credentials:
|
||||
"""Retrieve credentials and its expiry if available."""
|
||||
|
||||
|
||||
class AssumeRoleProvider(Provider):
|
||||
"""Assume-role credential provider."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
sts_endpoint: str,
|
||||
access_key: str,
|
||||
secret_key: str,
|
||||
duration_seconds: int = 0,
|
||||
policy: str | None = None,
|
||||
region: str | None = None,
|
||||
role_arn: str | None = None,
|
||||
role_session_name: str | None = None,
|
||||
external_id: str | None = None,
|
||||
http_client: PoolManager | None = None,
|
||||
):
|
||||
self._sts_endpoint = sts_endpoint
|
||||
self._access_key = access_key
|
||||
self._secret_key = secret_key
|
||||
self._region = region or ""
|
||||
self._http_client = http_client or PoolManager(
|
||||
retries=Retry(
|
||||
total=5,
|
||||
backoff_factor=0.2,
|
||||
status_forcelist=[500, 502, 503, 504],
|
||||
),
|
||||
)
|
||||
|
||||
query_params = {
|
||||
"Action": "AssumeRole",
|
||||
"Version": "2011-06-15",
|
||||
"DurationSeconds": str(
|
||||
duration_seconds
|
||||
if duration_seconds > _DEFAULT_DURATION_SECONDS
|
||||
else _DEFAULT_DURATION_SECONDS
|
||||
),
|
||||
}
|
||||
|
||||
if role_arn:
|
||||
query_params["RoleArn"] = role_arn
|
||||
if role_session_name:
|
||||
query_params["RoleSessionName"] = role_session_name
|
||||
if policy:
|
||||
query_params["Policy"] = policy
|
||||
if external_id:
|
||||
query_params["ExternalId"] = external_id
|
||||
|
||||
self._body = urlencode(query_params)
|
||||
self._content_sha256 = sha256_hash(self._body)
|
||||
url = urlsplit(sts_endpoint)
|
||||
self._url = url
|
||||
self._host = url.netloc
|
||||
if (
|
||||
(url.scheme == "http" and url.port == 80) or
|
||||
(url.scheme == "https" and url.port == 443)
|
||||
):
|
||||
self._host = cast(str, url.hostname)
|
||||
self._credentials: Credentials | None = None
|
||||
|
||||
def retrieve(self) -> Credentials:
|
||||
"""Retrieve credentials."""
|
||||
if self._credentials and not self._credentials.is_expired():
|
||||
return self._credentials
|
||||
|
||||
utctime = utcnow()
|
||||
headers = sign_v4_sts(
|
||||
"POST",
|
||||
self._url,
|
||||
self._region,
|
||||
{
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"Host": self._host,
|
||||
"X-Amz-Date": to_amz_date(utctime),
|
||||
},
|
||||
Credentials(self._access_key, self._secret_key),
|
||||
self._content_sha256,
|
||||
utctime,
|
||||
)
|
||||
|
||||
res = _urlopen(
|
||||
self._http_client,
|
||||
"POST",
|
||||
self._sts_endpoint,
|
||||
body=self._body,
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
self._credentials = _parse_credentials(
|
||||
res.data.decode(), "AssumeRoleResult",
|
||||
)
|
||||
|
||||
return self._credentials
|
||||
|
||||
|
||||
class ChainedProvider(Provider):
    """Chained credential provider."""

    def __init__(self, providers: list[Provider]):
        self._providers = providers
        self._provider: Provider | None = None
        self._credentials: Credentials | None = None

    def retrieve(self) -> Credentials:
        """Retrieve credentials from one of available provider."""
        creds = self._credentials
        if creds and not creds.is_expired():
            return creds

        # Prefer the provider that succeeded last time.
        if self._provider is not None:
            try:
                self._credentials = self._provider.retrieve()
            except ValueError:
                # Ignore this error and iterate other providers.
                pass
            else:
                return self._credentials

        for candidate in self._providers:
            try:
                creds = candidate.retrieve()
            except ValueError:
                # Ignore this error and iterate other providers.
                continue
            self._credentials = creds
            self._provider = candidate
            return creds

        raise ValueError("All providers fail to fetch credentials")
|
||||
|
||||
|
||||
class EnvAWSProvider(Provider):
    """Credential provider from AWS environment variables."""

    def retrieve(self) -> Credentials:
        """Retrieve credentials."""
        env = os.environ.get
        # Legacy variable names are honored as fallbacks.
        access_key = env("AWS_ACCESS_KEY_ID") or env("AWS_ACCESS_KEY")
        secret_key = env("AWS_SECRET_ACCESS_KEY") or env("AWS_SECRET_KEY")
        return Credentials(
            access_key=cast(str, access_key),
            secret_key=cast(str, secret_key),
            session_token=env("AWS_SESSION_TOKEN"),
        )
|
||||
|
||||
|
||||
class EnvMinioProvider(Provider):
    """Credential provider from MinIO environment variables."""

    def retrieve(self) -> Credentials:
        """Retrieve credentials."""
        access_key = os.environ.get("MINIO_ACCESS_KEY") or ""
        secret_key = os.environ.get("MINIO_SECRET_KEY") or ""
        return Credentials(access_key=access_key, secret_key=secret_key)
|
||||
|
||||
|
||||
class AWSConfigProvider(Provider):
    """Credential provider from AWS credential file."""

    def __init__(
            self,
            filename: str | None = None,
            profile: str | None = None,
    ):
        default_file = os.path.join(_user_home_dir(), ".aws", "credentials")
        self._filename = (
            filename
            or os.environ.get("AWS_SHARED_CREDENTIALS_FILE")
            or default_file
        )
        self._profile = profile or os.environ.get("AWS_PROFILE") or "default"

    def retrieve(self) -> Credentials:
        """Retrieve credentials from AWS configuration file."""
        parser = configparser.ConfigParser()
        parser.read(self._filename)

        def _option(name: str) -> str | None:
            # Missing section or option yields None rather than raising.
            return parser.get(self._profile, name, fallback=None)

        access_key = _option("aws_access_key_id")
        secret_key = _option("aws_secret_access_key")
        session_token = _option("aws_session_token")

        if not access_key:
            raise ValueError(
                f"access key does not exist in profile "
                f"{self._profile} in AWS credential file {self._filename}"
            )
        if not secret_key:
            raise ValueError(
                f"secret key does not exist in profile "
                f"{self._profile} in AWS credential file {self._filename}"
            )

        return Credentials(
            access_key,
            secret_key,
            session_token=session_token,
        )
|
||||
|
||||
|
||||
class MinioClientConfigProvider(Provider):
    """Credential provider from MinIO Client configuration file."""

    def __init__(self, filename: str | None = None, alias: str | None = None):
        # Default location: ~/mc/config.json on Windows, ~/.mc/config.json
        # elsewhere.
        self._filename = (
            filename or
            os.path.join(
                _user_home_dir(),
                "mc" if sys.platform == "win32" else ".mc",
                "config.json",
            )
        )
        self._alias = alias or os.environ.get("MINIO_ALIAS") or "s3"

    def retrieve(self) -> Credentials:
        """Retrieve credential value from MinIO client configuration file.

        Raises:
            ValueError: when the file cannot be read, has no aliases, or
                does not contain the configured alias.
        """
        try:
            with open(self._filename, encoding="utf-8") as conf_file:
                config = json.load(conf_file)
            # Older mc versions store aliases under the "hosts" key.
            aliases = config.get("hosts") or config.get("aliases")
            if not aliases:
                raise ValueError(
                    f"invalid configuration in file {self._filename}",
                )
            creds = aliases.get(self._alias)
            if not creds:
                # Fix: a separating space was missing between the two
                # f-string fragments ("clientconfiguration").
                raise ValueError(
                    f"alias {self._alias} not found in MinIO client "
                    f"configuration file {self._filename}"
                )
            return Credentials(creds.get("accessKey"), creds.get("secretKey"))
        except (IOError, OSError) as exc:
            raise ValueError(
                f"error in reading file {self._filename}",
            ) from exc
|
||||
|
||||
|
||||
def _check_loopback_host(url: str):
    """Check whether host in url points only to localhost."""
    host = cast(str, parse_url(url).host)
    try:
        infos = socket.getaddrinfo(host, None)
    except socket.gaierror as exc:
        raise ValueError("Host " + host + " is not loopback address") from exc
    # Every resolved address must be a loopback address.
    for addr in {info[4][0] for info in infos}:
        if not ipaddress.ip_address(addr).is_loopback:
            raise ValueError(host + " is not loopback only host")
|
||||
|
||||
|
||||
def _get_jwt_token(token_file: str) -> dict[str, str]:
|
||||
"""Read and return content of token file. """
|
||||
try:
|
||||
with open(token_file, encoding="utf-8") as file:
|
||||
return {"access_token": file.read(), "expires_in": "0"}
|
||||
except (IOError, OSError) as exc:
|
||||
raise ValueError(f"error in reading file {token_file}") from exc
|
||||
|
||||
|
||||
class IamAwsProvider(Provider):
    """Credential provider using IAM roles for Amazon EC2/ECS.

    Resolution order in retrieve(): web-identity token file, then ECS
    relative URI, then ECS full URI, then the EC2 instance metadata
    service.
    """

    def __init__(
            self,
            custom_endpoint: str | None = None,
            http_client: PoolManager | None = None,
    ):
        self._custom_endpoint = custom_endpoint
        # Retry transient 5xx responses from the metadata/STS endpoints.
        self._http_client = http_client or PoolManager(
            retries=Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )
        # Standard AWS environment variables controlling the lookup.
        self._token_file = os.environ.get("AWS_WEB_IDENTITY_TOKEN_FILE")
        self._aws_region = os.environ.get("AWS_REGION")
        self._role_arn = os.environ.get("AWS_ROLE_ARN")
        self._role_session_name = os.environ.get("AWS_ROLE_SESSION_NAME")
        self._relative_uri = os.environ.get(
            "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI",
        )
        # Normalize so it can be appended directly to the ECS base URL.
        if self._relative_uri and not self._relative_uri.startswith("/"):
            self._relative_uri = "/" + self._relative_uri
        self._full_uri = os.environ.get("AWS_CONTAINER_CREDENTIALS_FULL_URI")
        self._credentials: Credentials | None = None

    def fetch(self, url: str) -> Credentials:
        """Fetch credentials from EC2/ECS.

        Raises ValueError when the JSON response reports a non-Success
        "Code" field.
        """
        res = _urlopen(self._http_client, "GET", url)
        data = json.loads(res.data)
        if data.get("Code", "Success") != "Success":
            raise ValueError(
                f"{url} failed with code {data['Code']} "
                f"message {data.get('Message')}"
            )
        # Expiration arrives as an ISO-8601 UTC string.
        data["Expiration"] = from_iso8601utc(data["Expiration"])
        return Credentials(
            data["AccessKeyId"],
            data["SecretAccessKey"],
            data["Token"],
            data["Expiration"],
        )

    def retrieve(self) -> Credentials:
        """Retrieve credentials from WebIdentity/EC2/ECS."""
        # Serve from cache until the temporary credentials expire.
        if self._credentials and not self._credentials.is_expired():
            return self._credentials

        url = self._custom_endpoint
        if self._token_file:
            # Web-identity path: delegate to WebIdentityProvider against
            # STS (regional endpoint when AWS_REGION is set).
            if not url:
                url = "https://sts.amazonaws.com"
                if self._aws_region:
                    url = f"https://sts.{self._aws_region}.amazonaws.com"
            provider = WebIdentityProvider(
                lambda: _get_jwt_token(cast(str, self._token_file)),
                url,
                role_arn=self._role_arn,
                role_session_name=self._role_session_name,
                http_client=self._http_client,
            )
            self._credentials = provider.retrieve()
            return cast(Credentials, self._credentials)

        if self._relative_uri:
            # ECS task credentials via the fixed link-local address.
            if not url:
                url = "http://169.254.170.2" + self._relative_uri
        elif self._full_uri:
            if not url:
                url = self._full_uri
                # Only loopback hosts are allowed for the full URI.
                _check_loopback_host(url)
        else:
            # EC2 instance metadata: list attached roles, then fetch the
            # credentials of the first one.
            if not url:
                url = (
                    "http://169.254.169.254" +
                    "/latest/meta-data/iam/security-credentials/"
                )
            res = _urlopen(self._http_client, "GET", url)
            role_names = res.data.decode("utf-8").split("\n")
            if not role_names:
                raise ValueError(f"no IAM roles attached to EC2 service {url}")
            url += "/" + role_names[0].strip("\r")

        if not url:
            raise ValueError("url is empty; this should not happen")

        self._credentials = self.fetch(url)
        return self._credentials
|
||||
|
||||
|
||||
class LdapIdentityProvider(Provider):
    """Credential provider using AssumeRoleWithLDAPIdentity API."""

    def __init__(
            self,
            sts_endpoint: str,
            ldap_username: str,
            ldap_password: str,
            http_client: PoolManager | None = None,
    ):
        # The full request, credentials included, is carried in the query
        # string of the STS endpoint URL.
        query = {
            "Action": "AssumeRoleWithLDAPIdentity",
            "Version": "2011-06-15",
            "LDAPUsername": ldap_username,
            "LDAPPassword": ldap_password,
        }
        self._sts_endpoint = sts_endpoint + "?" + urlencode(query)
        self._http_client = http_client or PoolManager(
            retries=Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )
        self._credentials: Credentials | None = None

    def retrieve(self) -> Credentials:
        """Retrieve credentials."""
        cached = self._credentials
        if cached and not cached.is_expired():
            return cached

        res = _urlopen(
            self._http_client,
            "POST",
            self._sts_endpoint,
        )
        self._credentials = _parse_credentials(
            res.data.decode(), "AssumeRoleWithLDAPIdentityResult",
        )
        return self._credentials
|
||||
|
||||
|
||||
class StaticProvider(Provider):
    """Fixed credential provider."""

    def __init__(
            self,
            access_key: str,
            secret_key: str,
            session_token: str | None = None,
    ):
        # Credentials never change, so build them once up front.
        self._credentials = Credentials(
            access_key, secret_key, session_token,
        )

    def retrieve(self) -> Credentials:
        """Return passed credentials."""
        return self._credentials
|
||||
|
||||
|
||||
class WebIdentityClientGrantsProvider(Provider):
    """Base class for WebIdentity and ClientGrants credentials provider."""
    __metaclass__ = ABCMeta

    def __init__(
            self,
            jwt_provider_func: Callable[[], dict[str, str]],
            sts_endpoint: str,
            duration_seconds: int = 0,
            policy: str | None = None,
            role_arn: str | None = None,
            role_session_name: str | None = None,
            http_client: PoolManager | None = None,
    ):
        # Callable returning a dict with "access_token" and optionally
        # "expires_in" keys (see retrieve()).
        self._jwt_provider_func = jwt_provider_func
        self._sts_endpoint = sts_endpoint
        self._duration_seconds = duration_seconds
        self._policy = policy
        self._role_arn = role_arn
        self._role_session_name = role_session_name
        # Retry transient 5xx responses from the STS endpoint.
        self._http_client = http_client or PoolManager(
            retries=Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )
        self._credentials: Credentials | None = None

    @abstractmethod
    def _is_web_identity(self) -> bool:
        """Check if derived class deal with WebIdentity."""

    def _get_duration_seconds(self, expiry: int) -> int:
        """Get DurationSeconds optimal value.

        An explicitly configured duration overrides *expiry*; positive
        values are clamped into [_MIN_DURATION_SECONDS,
        _MAX_DURATION_SECONDS]; zero or negative values pass through
        unchanged (meaning "omit DurationSeconds").
        """
        if self._duration_seconds:
            expiry = self._duration_seconds
        if expiry > _MAX_DURATION_SECONDS:
            return _MAX_DURATION_SECONDS
        if expiry <= 0:
            return expiry
        return (
            _MIN_DURATION_SECONDS if expiry < _MIN_DURATION_SECONDS else expiry
        )

    def retrieve(self) -> Credentials:
        """Retrieve credentials."""
        # Serve from cache until the temporary credentials expire.
        if self._credentials and not self._credentials.is_expired():
            return self._credentials

        jwt = self._jwt_provider_func()

        query_params = {"Version": "2011-06-15"}
        duration_seconds = self._get_duration_seconds(
            int(jwt.get("expires_in", "0")),
        )
        if duration_seconds:
            query_params["DurationSeconds"] = str(duration_seconds)
        if self._policy:
            query_params["Policy"] = self._policy

        if self._is_web_identity():
            query_params["Action"] = "AssumeRoleWithWebIdentity"
            query_params["WebIdentityToken"] = jwt.get("access_token", "")
            if self._role_arn:
                query_params["RoleArn"] = self._role_arn
                # A session name is required with RoleArn; fall back to a
                # timestamp-derived unique name.
                query_params["RoleSessionName"] = (
                    self._role_session_name
                    if self._role_session_name
                    else str(time.time()).replace(".", "")
                )
        else:
            query_params["Action"] = "AssumeRoleWithClientGrants"
            query_params["Token"] = jwt.get("access_token", "")

        url = self._sts_endpoint + "?" + urlencode(query_params)
        res = _urlopen(self._http_client, "POST", url)

        # The result element name differs between the two STS actions.
        self._credentials = _parse_credentials(
            res.data.decode(),
            (
                "AssumeRoleWithWebIdentityResult"
                if self._is_web_identity()
                else "AssumeRoleWithClientGrantsResult"
            ),
        )

        return self._credentials
|
||||
|
||||
|
||||
class ClientGrantsProvider(WebIdentityClientGrantsProvider):
    """Credential provider using AssumeRoleWithClientGrants API."""

    def __init__(
            self,
            jwt_provider_func: Callable[[], dict[str, str]],
            sts_endpoint: str,
            duration_seconds: int = 0,
            policy: str | None = None,
            http_client: PoolManager | None = None,
    ):
        # Client-grants flow has no role ARN / session name parameters.
        super().__init__(
            jwt_provider_func,
            sts_endpoint,
            duration_seconds,
            policy,
            http_client=http_client,
        )

    def _is_web_identity(self) -> bool:
        """This provider uses the client-grants flow, not web identity."""
        return False
|
||||
|
||||
|
||||
class WebIdentityProvider(WebIdentityClientGrantsProvider):
    """Credential provider using AssumeRoleWithWebIdentity API."""

    def _is_web_identity(self) -> bool:
        """This provider uses the web-identity flow."""
        return True
|
||||
|
||||
|
||||
class CertificateIdentityProvider(Provider):
    """Credential provider using AssumeRoleWithCertificate API."""

    def __init__(
            self,
            sts_endpoint: str,
            cert_file: str | None = None,
            key_file: str | None = None,
            key_password: str | None = None,
            ca_certs: str | None = None,
            duration_seconds: int = 0,
            http_client: PoolManager | None = None,
    ):
        if urlsplit(sts_endpoint).scheme != "https":
            raise ValueError("STS endpoint scheme must be HTTPS")

        # Exactly one of a cert/key pair or a caller-supplied client must
        # be given. Fix: the original compared bool(http_client) against
        # the raw (cert_file and key_file) value (a str or None), so the
        # "neither provided" case slipped through and "both provided" was
        # rejected inconsistently. Coerce both sides to bool.
        if bool(http_client) == bool(cert_file and key_file):
            raise ValueError(
                "either cert/key file or custom http_client must be provided",
            )

        self._sts_endpoint = sts_endpoint + "?" + urlencode(
            {
                "Action": "AssumeRoleWithCertificate",
                "Version": "2011-06-15",
                # Enforce at least the default duration.
                "DurationSeconds": str(
                    duration_seconds
                    if duration_seconds > _DEFAULT_DURATION_SECONDS
                    else _DEFAULT_DURATION_SECONDS
                ),
            },
        )
        # TLS client authentication carries the identity; the server must
        # request and verify the client certificate.
        self._http_client = http_client or PoolManager(
            maxsize=10,
            cert_file=cert_file,
            cert_reqs='CERT_REQUIRED',
            key_file=key_file,
            key_password=key_password,
            ca_certs=ca_certs or certifi.where(),
            retries=Retry(
                total=5,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )
        self._credentials: Credentials | None = None

    def retrieve(self) -> Credentials:
        """Retrieve credentials."""
        # Serve from cache until the temporary credentials expire.
        if self._credentials and not self._credentials.is_expired():
            return self._credentials

        res = _urlopen(
            self._http_client,
            "POST",
            self._sts_endpoint,
        )
        self._credentials = _parse_credentials(
            res.data.decode(), "AssumeRoleWithCertificateResult",
        )
        return self._credentials
|
||||
251
venv/lib/python3.12/site-packages/minio/crypto.py
Normal file
251
venv/lib/python3.12/site-packages/minio/crypto.py
Normal file
@@ -0,0 +1,251 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2015, 2016, 2017 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Cryptography to read and write encrypted MinIO Admin payload"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import os
|
||||
|
||||
from argon2.low_level import Type, hash_secret_raw
|
||||
from Crypto.Cipher import AES, ChaCha20_Poly1305
|
||||
from Crypto.Cipher._mode_gcm import GcmMode
|
||||
from Crypto.Cipher.ChaCha20_Poly1305 import ChaCha20Poly1305Cipher
|
||||
|
||||
try:
|
||||
from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from urllib3.response import HTTPResponse as BaseHTTPResponse
|
||||
|
||||
#
|
||||
# Encrypted Message Format:
|
||||
#
|
||||
# | 41 bytes HEADER |
|
||||
# |-------------------------|
|
||||
# | 16 KiB encrypted chunk |
|
||||
# | + 16 bytes TAG |
|
||||
# |-------------------------|
|
||||
# | .... |
|
||||
# |-------------------------|
|
||||
# | ~16 KiB encrypted chunk |
|
||||
# | + 16 bytes TAG |
|
||||
# |-------------------------|
|
||||
#
|
||||
# HEADER:
|
||||
#
|
||||
# | 32 bytes salt |
|
||||
# |----------------|
|
||||
# | 1 byte AEAD ID |
|
||||
# |----------------|
|
||||
# | 8 bytes NONCE |
|
||||
# |----------------|
|
||||
#
|
||||
|
||||
|
||||
# Sizes (in bytes) of the pieces of the encrypted message format
# described in the diagram above.
_TAG_LEN = 16            # AEAD authentication tag appended to each chunk
_CHUNK_SIZE = 16 * 1024  # plaintext handled per chunk
_MAX_CHUNK_SIZE = _TAG_LEN + _CHUNK_SIZE  # encrypted chunk plus its tag
_SALT_LEN = 32           # KDF salt stored at the start of the header
_NONCE_LEN = 8           # base nonce stored in the header
|
||||
|
||||
|
||||
def _get_cipher(
|
||||
aead_id: int,
|
||||
key: bytes,
|
||||
nonce: bytes,
|
||||
) -> GcmMode | ChaCha20Poly1305Cipher:
|
||||
"""Get cipher for AEAD ID."""
|
||||
if aead_id == 0:
|
||||
return AES.new(key, AES.MODE_GCM, nonce)
|
||||
if aead_id == 1:
|
||||
return ChaCha20_Poly1305.new(key=key, nonce=nonce)
|
||||
raise ValueError("Unknown AEAD ID {aead_id}")
|
||||
|
||||
|
||||
def _generate_key(secret: bytes, salt: bytes) -> bytes:
    """Generate 256-bit Argon2ID key"""
    # NOTE(review): these KDF parameters presumably mirror the MinIO
    # server side so both ends derive the same key -- confirm before
    # changing any of them.
    return hash_secret_raw(
        secret=secret,
        salt=salt,
        time_cost=1,
        memory_cost=65536,  # in KiB, i.e. 64 MiB
        parallelism=4,
        hash_len=32,  # 32 bytes = 256-bit key
        type=Type.ID,
        version=19,
    )
|
||||
|
||||
|
||||
def _generate_additional_data(
    aead_id: int, key: bytes, padded_nonce: bytes
) -> bytes:
    """Generate additional data"""
    # Leading 0x00 marker byte followed by the digest of an empty message
    # under the chosen cipher.
    tag = _get_cipher(aead_id, key, padded_nonce).digest()
    return b"\x00" + tag
|
||||
|
||||
|
||||
def _mark_as_last(additional_data: bytes) -> bytes:
|
||||
"""Mark additional data as the last in the sequence"""
|
||||
return b'\x80' + additional_data[1:]
|
||||
|
||||
|
||||
def _update_nonce_id(nonce: bytes, idx: int) -> bytes:
|
||||
"""Set nonce id (4 last bytes)"""
|
||||
return nonce + idx.to_bytes(4, byteorder="little")
|
||||
|
||||
|
||||
def encrypt(payload: bytes, password: str) -> bytes:
    """Encrypt given payload.

    Output layout: 32-byte salt + 1-byte AEAD ID + 8-byte base nonce,
    followed by one (ciphertext + 16-byte tag) record per 16 KiB chunk
    of payload.
    """
    nonce = os.urandom(_NONCE_LEN)
    salt = os.urandom(_SALT_LEN)
    key = _generate_key(password.encode(), salt)
    aead_id = b"\x00"  # 0 selects AES-GCM in _get_cipher
    padded_nonce = nonce + b"\x00\x00\x00\x00"
    additional_data = _generate_additional_data(aead_id[0], key, padded_nonce)

    indices = range(0, len(payload), _CHUNK_SIZE)
    nonce_id = 0
    result = salt + aead_id + nonce
    for i in indices:
        # Each chunk gets a unique nonce: base nonce plus a 4-byte
        # little-endian counter starting at 1.
        nonce_id += 1
        if i == indices[-1]:
            # The final chunk is authenticated with additional data whose
            # marker byte is flipped by _mark_as_last.
            additional_data = _mark_as_last(additional_data)
        padded_nonce = _update_nonce_id(nonce, nonce_id)
        cipher = _get_cipher(aead_id[0], key, padded_nonce)
        cipher.update(additional_data)
        encrypted_data, hmac_tag = cipher.encrypt_and_digest(
            payload[i:i+_CHUNK_SIZE],
        )

        result += encrypted_data
        result += hmac_tag

    return result
|
||||
|
||||
|
||||
class DecryptReader:
    """
    BufferedIOBase compatible reader represents decrypted data of MinioAdmin
    APIs.
    """

    def __init__(self, response: BaseHTTPResponse, secret: bytes):
        self._response = response
        self._secret = secret
        self._payload = None

        # Fixed 41-byte header: 32-byte salt + 1-byte AEAD ID + 8-byte
        # base nonce (see the format diagram in this module).
        header = self._response.read(41)
        if len(header) != 41:
            raise IOError("insufficient data")
        self._salt = header[:32]
        self._aead_id = header[32]
        self._nonce = header[33:]
        self._key = _generate_key(self._secret, self._salt)
        padded_nonce = self._nonce + b"\x00\x00\x00\x00"
        self._additional_data = _generate_additional_data(
            self._aead_id, self._key, padded_nonce
        )
        self._chunk = b""  # buffered, still-encrypted bytes
        self._count = 0  # chunks decrypted so far; doubles as nonce id
        self._is_closed = False  # set once the response is exhausted

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        return self.close()

    def readable(self):  # pylint: disable=no-self-use
        """Return this is readable."""
        return True

    def writeable(self):  # pylint: disable=no-self-use
        """Return this is not writeable."""
        return False

    def close(self):
        """Close response and release network resources."""
        self._response.close()
        self._response.release_conn()

    def _decrypt(self, payload: bytes, last_chunk: bool = False) -> bytes:
        """Decrypt given payload.

        The payload must be an encrypted chunk with its 16-byte
        authentication tag at the end.
        """
        self._count += 1
        if last_chunk:
            # The last chunk was authenticated with marked additional
            # data; mirror that on the decrypt side.
            self._additional_data = _mark_as_last(self._additional_data)

        # Nonce = base nonce + 1-based chunk counter, same as encrypt().
        padded_nonce = _update_nonce_id(self._nonce, self._count)
        cipher = _get_cipher(self._aead_id, self._key, padded_nonce)
        cipher.update(self._additional_data)

        hmac_tag = payload[-_TAG_LEN:]
        encrypted_data = payload[:-_TAG_LEN]
        decrypted_data = cipher.decrypt_and_verify(encrypted_data, hmac_tag)
        return decrypted_data

    def _read_chunk(self) -> bool:
        """Read a chunk at least one byte more than chunk size.

        Returns True when the response is exhausted (no more chunks will
        follow); reading one extra byte is how the last chunk is
        detected.
        """
        if self._is_closed:
            return True

        while len(self._chunk) != (1 + _MAX_CHUNK_SIZE):
            chunk = self._response.read(1 + _MAX_CHUNK_SIZE - len(self._chunk))
            self._chunk += chunk
            if len(chunk) == 0:
                self._is_closed = True
                return True

        return False

    def _read(self) -> bytes:
        """Read and decrypt response."""
        stop = self._read_chunk()

        if len(self._chunk) == 0:
            return self._chunk

        # Decrypt exactly one chunk; anything shorter than a full chunk
        # must be the final one.
        length = _MAX_CHUNK_SIZE
        if len(self._chunk) < length:
            length = len(self._chunk)
            stop = True
        payload = self._chunk[:length]
        self._chunk = self._chunk[length:]
        return self._decrypt(payload, stop)

    def stream(self, num_bytes=32*1024):
        """
        Stream extracted payload from response data. Upon completion, caller
        should call self.close() to release network resources.
        """
        while True:
            data = self._read()
            if not data:
                break
            # Yield the decrypted chunk in pieces of at most num_bytes.
            while data:
                result = data
                if num_bytes < len(data):
                    result = data[:num_bytes]
                data = data[len(result):]
                yield result
|
||||
|
||||
|
||||
def decrypt(response: BaseHTTPResponse, secret_key: str) -> bytes:
    """Decrypt response data."""
    with DecryptReader(response, secret_key.encode()) as reader:
        pieces = list(reader.stream())
    return b"".join(pieces)
|
||||
1165
venv/lib/python3.12/site-packages/minio/datatypes.py
Normal file
1165
venv/lib/python3.12/site-packages/minio/datatypes.py
Normal file
File diff suppressed because it is too large
Load Diff
198
venv/lib/python3.12/site-packages/minio/deleteobjects.py
Normal file
198
venv/lib/python3.12/site-packages/minio/deleteobjects.py
Normal file
@@ -0,0 +1,198 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of DeleteObjects API."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from typing import Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .xml import Element, SubElement, findall, findtext
|
||||
|
||||
|
||||
class DeleteObject:
    """Delete object request information."""

    def __init__(self, name: str, version_id: str | None = None):
        self._name = name
        self._version_id = version_id

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Serialize this entry as an <Object> child of the given element."""
        if element is None:
            raise ValueError("element must be provided")
        obj_element = SubElement(element, "Object")
        SubElement(obj_element, "Key", self._name)
        if self._version_id is not None:
            SubElement(obj_element, "VersionId", self._version_id)
        return obj_element
|
||||
|
||||
|
||||
class DeleteRequest:
    """Delete object request."""

    def __init__(self, object_list: list[DeleteObject], quiet: bool = False):
        self._object_list = object_list
        self._quiet = quiet

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Build the <Delete> document; the passed element is ignored."""
        root = Element("Delete")
        if self._quiet:
            SubElement(root, "Quiet", "true")
        for obj in self._object_list:
            obj.toxml(root)
        return root
|
||||
|
||||
|
||||
A = TypeVar("A", bound="DeletedObject")
|
||||
|
||||
|
||||
class DeletedObject:
    """Deleted object information."""

    def __init__(
            self,
            name: str,
            version_id: str | None,
            delete_marker: bool,
            delete_marker_version_id: str | None,
    ):
        self._name = name
        self._version_id = version_id
        self._delete_marker = delete_marker
        self._delete_marker_version_id = delete_marker_version_id

    @property
    def name(self) -> str:
        """Object key."""
        return self._name

    @property
    def version_id(self) -> str | None:
        """Version ID of the deleted object, if any."""
        return self._version_id

    @property
    def delete_marker(self) -> bool:
        """Whether the deletion created a delete marker."""
        return self._delete_marker

    @property
    def delete_marker_version_id(self) -> str | None:
        """Version ID of the created delete marker, if any."""
        return self._delete_marker_version_id

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        marker_text = findtext(element, "DeleteMarker")
        is_marker = marker_text is not None and marker_text.title() == "True"
        return cls(
            cast(str, findtext(element, "Key", True)),
            findtext(element, "VersionId"),
            is_marker,
            findtext(element, "DeleteMarkerVersionId"),
        )
|
||||
|
||||
|
||||
B = TypeVar("B", bound="DeleteError")
|
||||
|
||||
|
||||
class DeleteError:
    """Delete error information."""

    def __init__(
            self,
            code: str,
            message: str | None,
            name: str | None,
            version_id: str | None,
    ):
        self._code = code
        self._message = message
        self._name = name
        self._version_id = version_id

    @property
    def code(self) -> str:
        """Error code reported by the server."""
        return self._code

    @property
    def message(self) -> str | None:
        """Human-readable error message, if provided."""
        return self._message

    @property
    def name(self) -> str | None:
        """Object key the error refers to, if provided."""
        return self._name

    @property
    def version_id(self) -> str | None:
        """Version ID the error refers to, if provided."""
        return self._version_id

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from XML element."""
        return cls(
            cast(str, findtext(element, "Code", True)),
            findtext(element, "Message"),
            findtext(element, "Key"),
            findtext(element, "VersionId"),
        )
|
||||
|
||||
|
||||
C = TypeVar("C", bound="DeleteResult")
|
||||
|
||||
|
||||
class DeleteResult:
    """Delete object result."""

    def __init__(
            self,
            object_list: list[DeletedObject],
            error_list: list[DeleteError],
    ):
        self._object_list = object_list
        self._error_list = error_list

    @property
    def object_list(self) -> list[DeletedObject]:
        """Get object list."""
        return self._object_list

    @property
    def error_list(self) -> list[DeleteError]:
        """Get error list."""
        return self._error_list

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Create new object with values from XML element."""
        # Comprehensions replace the manual append loops (same order,
        # same elements).
        object_list = [
            DeletedObject.fromxml(tag) for tag in findall(element, "Deleted")
        ]
        error_list = [
            DeleteError.fromxml(tag) for tag in findall(element, "Error")
        ]
        return cls(object_list, error_list)
|
||||
180
venv/lib/python3.12/site-packages/minio/error.py
Normal file
180
venv/lib/python3.12/site-packages/minio/error.py
Normal file
@@ -0,0 +1,180 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2015-2019 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-lines
|
||||
|
||||
"""
|
||||
minio.error
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This module provides custom exception classes for MinIO library
|
||||
and API specific errors.
|
||||
|
||||
:copyright: (c) 2015, 2016, 2017 by MinIO, Inc.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from typing import Type, TypeVar
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
try:
|
||||
from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from urllib3.response import HTTPResponse as BaseHTTPResponse
|
||||
|
||||
from .xml import findtext
|
||||
|
||||
|
||||
class MinioException(Exception):
    """Root of the MinIO client exception hierarchy; catch this to
    handle any error raised by this library."""
|
||||
|
||||
|
||||
class InvalidResponseError(MinioException):
    """Raised when the server replies with a non-XML body where XML
    was expected."""

    def __init__(self, code: int, content_type: str | None, body: str | None):
        self._code = code
        self._content_type = content_type
        self._body = body
        detail = (
            f"non-XML response from server; Response code: {code}, "
            f"Content-Type: {content_type}, Body: {body}"
        )
        super().__init__(detail)

    def __reduce__(self):
        # Support pickling by re-invoking the constructor with saved state.
        return type(self), (self._code, self._content_type, self._body)
|
||||
|
||||
|
||||
class ServerError(MinioException):
    """Raised when the S3 service returns an HTTP server error."""

    def __init__(self, message: str, status_code: int):
        # Keep the status code for callers; the message becomes str(exc).
        self._status_code = status_code
        super().__init__(message)

    @property
    def status_code(self) -> int:
        """HTTP status code returned by the server."""
        return self._status_code
|
||||
|
||||
|
||||
A = TypeVar("A", bound="S3Error")


class S3Error(MinioException):
    """
    Raised when an S3 operation returns an error response from the
    server.
    """

    def __init__(
        self,
        code: str | None,
        message: str | None,
        resource: str | None,
        request_id: str | None,
        host_id: str | None,
        response: BaseHTTPResponse,
        bucket_name: str | None = None,
        object_name: str | None = None,
    ):
        self._code = code
        self._message = message
        self._resource = resource
        self._request_id = request_id
        self._host_id = host_id
        self._response = response
        self._bucket_name = bucket_name
        self._object_name = object_name

        # Bucket/object names are appended only when present.
        extra = ""
        if self._bucket_name:
            extra += ", bucket_name: " + self._bucket_name
        if self._object_name:
            extra += ", object_name: " + self._object_name
        super().__init__(
            f"S3 operation failed; code: {code}, message: {message}, "
            f"resource: {resource}, request_id: {request_id}, "
            f"host_id: {host_id}{extra}"
        )

    def __reduce__(self):
        # Support pickling by re-invoking the constructor with saved state.
        return type(self), (
            self._code, self._message, self._resource, self._request_id,
            self._host_id, self._response, self._bucket_name,
            self._object_name,
        )

    @property
    def code(self) -> str | None:
        """S3 error code (e.g. ``NoSuchKey``)."""
        return self._code

    @property
    def message(self) -> str | None:
        """Human-readable S3 error message."""
        return self._message

    @property
    def response(self) -> BaseHTTPResponse:
        """Underlying HTTP response carrying this error."""
        return self._response

    @classmethod
    def fromxml(cls: Type[A], response: BaseHTTPResponse) -> A:
        """Decode the XML error document of *response* into an S3Error."""
        element = ET.fromstring(response.data.decode())
        return cls(
            findtext(element, "Code"),
            findtext(element, "Message"),
            findtext(element, "Resource"),
            findtext(element, "RequestId"),
            findtext(element, "HostId"),
            bucket_name=findtext(element, "BucketName"),
            object_name=findtext(element, "Key"),
            response=response,
        )

    def copy(self, code: str, message: str) -> S3Error:
        """Return a clone of this error with code and message replaced."""
        return S3Error(
            code=code,
            message=message,
            resource=self._resource,
            request_id=self._request_id,
            host_id=self._host_id,
            response=self._response,
            bucket_name=self._bucket_name,
            object_name=self._object_name,
        )
|
||||
|
||||
|
||||
class MinioAdminException(Exception):
    """Raised when a MinIO admin API request fails."""

    def __init__(self, code: str, body: str):
        self._code = code
        self._body = body
        detail = f"admin request failed; Status: {code}, Body: {body}"
        super().__init__(detail)

    def __reduce__(self):
        # Support pickling by re-invoking the constructor with saved state.
        return type(self), (self._code, self._body)
|
||||
882
venv/lib/python3.12/site-packages/minio/helpers.py
Normal file
882
venv/lib/python3.12/site-packages/minio/helpers.py
Normal file
@@ -0,0 +1,882 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2015, 2016, 2017 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Helper functions."""
|
||||
|
||||
from __future__ import absolute_import, annotations, division, unicode_literals
|
||||
|
||||
import base64
|
||||
import errno
|
||||
import hashlib
|
||||
import math
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from queue import Queue
|
||||
from threading import BoundedSemaphore, Thread
|
||||
from typing import BinaryIO, Dict, List, Mapping, Tuple, Union
|
||||
|
||||
from typing_extensions import Protocol
|
||||
from urllib3._collections import HTTPHeaderDict
|
||||
|
||||
from . import __title__, __version__
|
||||
from .sse import Sse, SseCustomerKey
|
||||
from .time import to_iso8601utc
|
||||
|
||||
# Default User-Agent sent with every request,
# e.g. "MinIO (Linux; x86_64) minio-py/7.2.5".
_DEFAULT_USER_AGENT = (
    f"MinIO ({platform.system()}; {platform.machine()}) "
    f"{__title__}/{__version__}"
)

# S3 multipart-upload hard limits.
MAX_MULTIPART_COUNT = 10000  # 10000 parts
MAX_MULTIPART_OBJECT_SIZE = 5 * 1024 * 1024 * 1024 * 1024  # 5TiB
MAX_PART_SIZE = 5 * 1024 * 1024 * 1024  # 5GiB
MIN_PART_SIZE = 5 * 1024 * 1024  # 5MiB

# Prefix of AWS S3 hosts: VPC endpoints, S3 Control endpoints, and plain
# "s3." / "s3-<suffix>." forms.
_AWS_S3_PREFIX = (r'^(((bucket\.|accesspoint\.)'
                  r'vpce(-(?!_)[a-z_\d]+(?<!-)(?<!_))+\.s3\.)|'
                  r'((?!s3)(?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.)'
                  r's3-control(-(?!_)[a-z_\d]+(?<!-)(?<!_))*\.|'
                  r'(s3(-(?!_)[a-z_\d]+(?<!-)(?<!_))*\.))')

# Current (strict) S3 bucket-name rules and the legacy, looser rules.
_BUCKET_NAME_REGEX = re.compile(r'^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$')
_OLD_BUCKET_NAME_REGEX = re.compile(r'^[a-z0-9][a-z0-9_\.\-\:]{1,61}[a-z0-9]$',
                                    re.IGNORECASE)
# Dotted-quad IPv4 literal (bucket names must not look like one).
_IPV4_REGEX = re.compile(
    r'^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}'
    r'(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$')
# RFC-style hostname: dot-separated labels, no leading/trailing '-' or '_'.
_HOSTNAME_REGEX = re.compile(
    r'^((?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.)*'
    r'((?!_)(?!-)[a-z_\d-]{1,63}(?<!-)(?<!_))$',
    re.IGNORECASE)
_AWS_ENDPOINT_REGEX = re.compile(r'.*\.amazonaws\.com(|\.cn)$', re.IGNORECASE)
_AWS_S3_ENDPOINT_REGEX = re.compile(
    _AWS_S3_PREFIX +
    r'((?!s3)(?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.)*'
    r'amazonaws\.com(|\.cn)$',
    re.IGNORECASE)
# Elastic Load Balancer endpoints: <name>.<region>.elb.amazonaws.com.
_AWS_ELB_ENDPOINT_REGEX = re.compile(
    r'^(?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.'
    r'(?!-)(?!_)[a-z_\d-]{1,63}(?<!-)(?<!_)\.'
    r'elb\.amazonaws\.com$',
    re.IGNORECASE)
_AWS_S3_PREFIX_REGEX = re.compile(_AWS_S3_PREFIX, re.IGNORECASE)
_REGION_REGEX = re.compile(r'^((?!_)(?!-)[a-z_\d-]{1,63}(?<!-)(?<!_))$',
                           re.IGNORECASE)

# Header/query-parameter mapping: a value may be a single string or several.
DictType = Dict[str, Union[str, List[str], Tuple[str]]]
|
||||
|
||||
|
||||
def quote(
    resource: str,
    safe: str = "/",
    encoding: str | None = None,
    errors: str | None = None,
) -> str:
    """
    Percent-encode *resource* like urllib.parse.quote(), additionally
    mapping "%7E" back to "~" for older Python versions that escape it.
    """
    encoded = urllib.parse.quote(
        resource, safe=safe, encoding=encoding, errors=errors)
    return encoded.replace("%7E", "~")
|
||||
|
||||
|
||||
def queryencode(
    query: str,
    safe: str = "",
    encoding: str | None = None,
    errors: str | None = None,
) -> str:
    """Percent-encode a query parameter value (nothing is safe by default)."""
    return quote(query, safe, encoding, errors)
|
||||
|
||||
|
||||
def headers_to_strings(
    headers: Mapping[str, str | list[str] | tuple[str]],
    titled_key: bool = False,
) -> str:
    """Render HTTP headers one per line as "Key: value" strings.

    With titled_key=True, keys are title-cased and AWS signature /
    credential values are masked before rendering (for safe logging).
    """
    lines = []
    for key, value in headers.items():
        if titled_key:
            key = key.title()
        items = value if isinstance(value, (list, tuple)) else [value]
        for item in items:
            if titled_key:
                # Redact secrets; signature first, then credential.
                item = re.sub(r"Signature=([0-9a-f]+)",
                              "Signature=*REDACTED*", item)
                item = re.sub(r"Credential=([^/]+)",
                              "Credential=*REDACTED*", item)
            lines.append(f"{key}: {item}")
    return "\n".join(lines)
|
||||
|
||||
|
||||
def _validate_sizes(object_size: int, part_size: int):
    """Validate object and part sizes against S3 multipart limits.

    :raises ValueError: if either size is outside the supported range,
        or no usable part size is given while object size is unknown.
    """
    if part_size > 0:
        if part_size < MIN_PART_SIZE:
            raise ValueError(
                f"part size {part_size} is not supported; minimum allowed 5MiB"
            )
        if part_size > MAX_PART_SIZE:
            raise ValueError(
                f"part size {part_size} is not supported; maximum allowed 5GiB"
            )

    if object_size < 0:
        # Unknown object size: a valid part size is mandatory.
        if part_size <= 0:
            raise ValueError(
                "valid part size must be provided when object size is unknown",
            )
        return

    if object_size > MAX_MULTIPART_OBJECT_SIZE:
        raise ValueError(
            f"object size {object_size} is not supported; "
            f"maximum allowed 5TiB"
        )
|
||||
|
||||
|
||||
def _get_part_info(object_size: int, part_size: int):
    """Pick (part_size, part_count) for an upload; count is -1 when the
    object size is unknown."""
    _validate_sizes(object_size, part_size)

    if object_size < 0:
        # Unknown stream length: caller-provided part size, open-ended count.
        return part_size, -1

    if part_size > 0:
        chosen = min(part_size, object_size)
        return chosen, math.ceil(object_size / chosen) if chosen else 1

    # Derive the smallest MIN_PART_SIZE-aligned part size that keeps the
    # upload within MAX_MULTIPART_COUNT parts.
    chosen = math.ceil(
        math.ceil(object_size / MAX_MULTIPART_COUNT) / MIN_PART_SIZE,
    ) * MIN_PART_SIZE
    return chosen, math.ceil(object_size / chosen) if chosen else 1
|
||||
|
||||
|
||||
def get_part_info(object_size: int, part_size: int) -> tuple[int, int]:
    """Compute validated (part_size, part_count) for a multipart upload.

    :raises ValueError: if the combination would exceed the maximum
        number of multipart-upload parts.
    """
    psize, count = _get_part_info(object_size, part_size)
    if count > MAX_MULTIPART_COUNT:
        raise ValueError(
            f"object size {object_size} and part size {part_size} "
            f"make more than {MAX_MULTIPART_COUNT} parts for upload"
        )
    return psize, count
|
||||
|
||||
|
||||
class ProgressType(Protocol):
    """Structural (duck-typed) interface for put/get object progress
    reporters."""

    def set_meta(self, object_name: str, total_length: int):
        """Announce the object name and the expected total byte count."""

    def update(self, length: int):
        """Report *length* additional bytes transferred."""
|
||||
|
||||
|
||||
def read_part_data(
    stream: BinaryIO,
    size: int,
    part_data: bytes = b"",
    progress: ProgressType | None = None,
) -> bytes:
    """Read up to *size* bytes (counting any pre-read *part_data*) from
    *stream*; stops early at EOF and reports progress if given."""
    remaining = size - len(part_data)
    while remaining:
        chunk = stream.read(remaining)
        if not chunk:
            break  # EOF reached
        if not isinstance(chunk, bytes):
            raise ValueError("read() must return 'bytes' object")
        part_data += chunk
        remaining -= len(chunk)
        if progress:
            progress.update(len(chunk))
    return part_data
|
||||
|
||||
|
||||
def makedirs(path: str):
    """Create *path* recursively like os.makedirs(), tolerating an
    already-existing directory; raises ValueError if *path* exists but
    is not a directory."""
    if not path:
        return
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        if exc.errno != errno.EEXIST:
            raise
        if not os.path.isdir(path):
            raise ValueError(f"path {path} is not a directory") from exc
|
||||
|
||||
|
||||
def check_bucket_name(
    bucket_name: str,
    strict: bool = False,
    s3_check: bool = False,
):
    """Check whether bucket name is valid, optionally with strict check.

    :param bucket_name: name to validate.
    :param strict: apply the current (strict) S3 naming rules instead of
        the legacy, looser rules.
    :param s3_check: additionally reject names reserved by AWS S3
        ('xn--' prefix, '-s3alias' / '--ol-s3' suffixes).
    :raises ValueError: if the bucket name is invalid.
    """
    if strict:
        if not _BUCKET_NAME_REGEX.match(bucket_name):
            raise ValueError(f'invalid bucket name {bucket_name}')
    else:
        if not _OLD_BUCKET_NAME_REGEX.match(bucket_name):
            raise ValueError(f'invalid bucket name {bucket_name}')

    if _IPV4_REGEX.match(bucket_name):
        raise ValueError(f'bucket name {bucket_name} must not be formatted '
                         'as an IP address')

    unallowed_successive_chars = ['..', '.-', '-.']
    if any(x in bucket_name for x in unallowed_successive_chars):
        raise ValueError(f'bucket name {bucket_name} contains invalid '
                         'successive characters')

    # BUG FIX: 'and' binds tighter than 'or', so the original expression
    # `s3_check and A or B or C` applied the reserved-suffix checks even
    # when s3_check was False. Parenthesize so all three reserved-name
    # checks apply only when s3_check is requested.
    if s3_check and (
            bucket_name.startswith("xn--") or
            bucket_name.endswith("-s3alias") or
            bucket_name.endswith("--ol-s3")
    ):
        raise ValueError(f"bucket name {bucket_name} must not start with "
                         "'xn--' and must not end with '--s3alias' or "
                         "'--ol-s3'")
|
||||
|
||||
|
||||
def check_non_empty_string(string: str | bytes):
    """Ensure *string* is a non-empty str/bytes.

    :raises ValueError: if the value is empty or whitespace-only.
    :raises TypeError: if the value has no strip() method.
    """
    try:
        stripped = string.strip()
    except AttributeError as exc:
        raise TypeError() from exc
    if not stripped:
        raise ValueError()
|
||||
|
||||
|
||||
def is_valid_policy_type(policy: str | bytes):
    """
    Validate that *policy* is a non-empty str or bytes.

    :param policy: S3 style Bucket policy.
    :return: True if policy parameter is of a valid type.
    :raises TypeError: if policy is neither str nor bytes.
    :raises ValueError: if policy is empty or whitespace-only
        (via check_non_empty_string).
    """
    if not isinstance(policy, (str, bytes)):
        raise TypeError("policy must be str or bytes type")

    check_non_empty_string(policy)

    return True
|
||||
|
||||
|
||||
def check_ssec(sse: SseCustomerKey | None):
    """Ensure *sse*, when given, is an SseCustomerKey instance."""
    if not sse:
        return
    if not isinstance(sse, SseCustomerKey):
        raise ValueError("SseCustomerKey type is required")
|
||||
|
||||
|
||||
def check_sse(sse: Sse | None):
    """Ensure *sse*, when given, is an Sse instance."""
    if not sse:
        return
    if not isinstance(sse, Sse):
        raise ValueError("Sse type is required")
|
||||
|
||||
|
||||
def md5sum_hash(data: str | bytes | None) -> str | None:
    """Return the Base64-encoded MD5 digest of *data*, or None for None."""
    if data is None:
        return None
    if isinstance(data, str):
        data = data.encode()

    # MD5 is used here as a content checksum, not in a security context.
    # Refer https://bugs.python.org/issue9216 for more information.
    hasher = hashlib.new(  # type: ignore[call-arg]
        "md5",
        usedforsecurity=False,
    )
    hasher.update(data)
    encoded = base64.b64encode(hasher.digest())
    return encoded.decode() if isinstance(encoded, bytes) else encoded
|
||||
|
||||
|
||||
def sha256_hash(data: str | bytes | None) -> str:
    """Return the hex-encoded SHA-256 digest of *data*; None hashes as b""."""
    payload = data or b""
    if isinstance(payload, str):
        payload = payload.encode()
    return hashlib.sha256(payload).hexdigest()
|
||||
|
||||
|
||||
def url_replace(
    url: urllib.parse.SplitResult,
    scheme: str | None = None,
    netloc: str | None = None,
    path: str | None = None,
    query: str | None = None,
    fragment: str | None = None,
) -> urllib.parse.SplitResult:
    """Return a copy of *url* with every non-None component replaced."""
    def pick(new, old):
        # None means "keep the existing component"; "" is a valid override.
        return old if new is None else new

    return urllib.parse.SplitResult(
        pick(scheme, url.scheme),
        pick(netloc, url.netloc),
        pick(path, url.path),
        pick(query, url.query),
        pick(fragment, url.fragment),
    )
|
||||
|
||||
|
||||
def _metadata_to_headers(metadata: DictType) -> dict[str, list[str]]:
|
||||
"""Convert user metadata to headers."""
|
||||
def normalize_key(key: str) -> str:
|
||||
if not key.lower().startswith("x-amz-meta-"):
|
||||
key = "X-Amz-Meta-" + key
|
||||
return key
|
||||
|
||||
def to_string(value) -> str:
|
||||
value = str(value)
|
||||
try:
|
||||
value.encode("us-ascii")
|
||||
except UnicodeEncodeError as exc:
|
||||
raise ValueError(
|
||||
f"unsupported metadata value {value}; "
|
||||
f"only US-ASCII encoded characters are supported"
|
||||
) from exc
|
||||
return value
|
||||
|
||||
def normalize_value(values: str | list[str] | tuple[str]) -> list[str]:
|
||||
if not isinstance(values, (list, tuple)):
|
||||
values = [values]
|
||||
return [to_string(value) for value in values]
|
||||
|
||||
return {
|
||||
normalize_key(key): normalize_value(value)
|
||||
for key, value in (metadata or {}).items()
|
||||
}
|
||||
|
||||
|
||||
def normalize_headers(headers: DictType | None) -> DictType:
    """Prefix 'X-Amz-Meta-' onto header keys that look like user metadata."""
    headers = {str(key): value for key, value in (headers or {}).items()}

    # Well-known standard headers that are never user metadata.
    known = {
        "cache-control",
        "content-encoding",
        "content-type",
        "content-disposition",
        "content-language",
    }

    def is_user_metadata(key: str) -> bool:
        lowered = key.lower()
        return not lowered.startswith("x-amz-") and lowered not in known

    # Pull out guessed user metadata and re-insert it with the AMZ prefix.
    user_metadata = {}
    for key in list(headers):
        if is_user_metadata(key):
            user_metadata[key] = headers.pop(key)

    headers.update(_metadata_to_headers(user_metadata))
    return headers
|
||||
|
||||
|
||||
def genheaders(
    headers: DictType | None,
    sse: Sse | None,
    tags: dict[str, str] | None,
    retention,
    legal_hold: bool,
) -> DictType:
    """Assemble request headers from user metadata, SSE, tags, retention
    and legal-hold settings."""
    headers = normalize_headers(headers)
    if sse:
        headers.update(sse.headers())

    tagging = "&".join(
        queryencode(key) + "=" + queryencode(value)
        for key, value in (tags or {}).items()
    )
    if tagging:
        headers["x-amz-tagging"] = tagging

    if retention and retention.mode:
        headers["x-amz-object-lock-mode"] = retention.mode
        headers["x-amz-object-lock-retain-until-date"] = (
            to_iso8601utc(retention.retain_until_date) or ""
        )
    if legal_hold:
        headers["x-amz-object-lock-legal-hold"] = "ON"
    return headers
|
||||
|
||||
|
||||
def _get_aws_info(
    host: str,
    https: bool,
    region: str | None,
) -> tuple[dict | None, str | None]:
    """Extract AWS domain information from *host*.

    :return: ``(aws_info, region)`` — for AWS S3 hosts, *aws_info* is a
        dict with keys ``s3_prefix``, ``domain_suffix``, ``region`` and
        ``dualstack``; for ELB endpoints only the region is returned;
        ``(None, None)`` for non-AWS hosts.
    :raises ValueError: for malformed or unsupported AWS hosts.
    """
    if not _HOSTNAME_REGEX.match(host):
        return (None, None)

    if _AWS_ELB_ENDPOINT_REGEX.match(host):
        # <name>.<region>.elb.amazonaws.com — region is the last label
        # before the ELB suffix.
        region_in_host = host.split(".elb.amazonaws.com", 1)[0].split(".")[-1]
        return (None, region or region_in_host)

    if not _AWS_ENDPOINT_REGEX.match(host):
        return (None, None)

    if host.startswith("ec2-"):
        return (None, None)

    if not _AWS_S3_ENDPOINT_REGEX.match(host):
        raise ValueError(f"invalid Amazon AWS host {host}")

    matcher = _AWS_S3_PREFIX_REGEX.match(host)
    end = matcher.end() if matcher else 0
    aws_s3_prefix = host[:end]

    if "s3-accesspoint" in aws_s3_prefix and not https:
        raise ValueError(f"use HTTPS scheme for host {host}")

    tokens = host[end:].split(".")
    dualstack = tokens[0] == "dualstack"
    if dualstack:
        tokens = tokens[1:]
    region_in_host = ""
    if tokens[0] not in ["vpce", "amazonaws"]:
        region_in_host = tokens[0]
        tokens = tokens[1:]
    aws_domain_suffix = ".".join(tokens)

    # BUG FIX: was `host in "s3-external-1.amazonaws.com"`, which is a
    # substring-membership test on a string; equality is what is meant.
    if host == "s3-external-1.amazonaws.com":
        region_in_host = "us-east-1"

    if host in ["s3-us-gov-west-1.amazonaws.com",
                "s3-fips-us-gov-west-1.amazonaws.com"]:
        region_in_host = "us-gov-west-1"

    if (aws_domain_suffix.endswith(".cn") and
            not aws_s3_prefix.endswith("s3-accelerate.") and
            not region_in_host and
            not region):
        raise ValueError(
            f"region missing in Amazon S3 China endpoint {host}",
        )

    return ({"s3_prefix": aws_s3_prefix,
             "domain_suffix": aws_domain_suffix,
             "region": region or region_in_host,
             "dualstack": dualstack}, None)
|
||||
|
||||
|
||||
def _parse_url(endpoint: str) -> urllib.parse.SplitResult:
    """Parse and sanity-check an endpoint URL; only scheme://host[:port]
    forms are accepted."""
    url = urllib.parse.urlsplit(endpoint)
    host = url.hostname

    scheme = url.scheme.lower()
    if scheme not in ["http", "https"]:
        raise ValueError("scheme in endpoint must be http or https")
    url = url_replace(url, scheme=scheme)

    if url.path and url.path != "/":
        raise ValueError("path in endpoint is not allowed")
    url = url_replace(url, path="")

    if url.query:
        raise ValueError("query in endpoint is not allowed")
    if url.fragment:
        raise ValueError("fragment in endpoint is not allowed")

    # Accessing .port parses it lazily; a non-numeric port raises here.
    try:
        url.port
    except ValueError as exc:
        raise ValueError("invalid port") from exc

    if url.username:
        raise ValueError("username in endpoint is not allowed")
    if url.password:
        raise ValueError("password in endpoint is not allowed")

    # Drop the default port so the Host header carries the bare hostname.
    if (
        (url.scheme == "http" and url.port == 80) or
        (url.scheme == "https" and url.port == 443)
    ):
        url = url_replace(url, netloc=host)

    return url
|
||||
|
||||
|
||||
class BaseURL:
    """Base URL of S3 endpoint.

    Encapsulates endpoint parsing plus the AWS-specific host rewriting
    (virtual-host style, accelerate, dualstack) used when building
    per-request URLs.
    """
    # AWS-specific host info from _get_aws_info(), or None for non-AWS.
    _aws_info: dict | None
    # Whether to put the bucket in the hostname rather than the path.
    _virtual_style_flag: bool
    _url: urllib.parse.SplitResult
    _region: str | None
    _accelerate_host_flag: bool

    def __init__(self, endpoint: str, region: str | None):
        url = _parse_url(endpoint)

        if region and not _REGION_REGEX.match(region):
            raise ValueError(f"invalid region {region}")

        hostname = url.hostname or ""
        self._aws_info, region_in_host = _get_aws_info(
            hostname, url.scheme == "https", region)
        # Virtual-host style is used for AWS and Alibaba Cloud OSS hosts.
        self._virtual_style_flag = (
            self._aws_info is not None or hostname.endswith("aliyuncs.com")
        )
        self._url = url
        self._region = region or region_in_host
        self._accelerate_host_flag = False
        if self._aws_info:
            # AWS-derived values take precedence over the caller's region.
            self._region = self._aws_info["region"]
            self._accelerate_host_flag = (
                self._aws_info["s3_prefix"].endswith("s3-accelerate.")
            )

    @property
    def region(self) -> str | None:
        """Get region."""
        return self._region

    @property
    def is_https(self) -> bool:
        """Check if scheme is HTTPS."""
        return self._url.scheme == "https"

    @property
    def host(self) -> str:
        """Get hostname (including any non-default port)."""
        return self._url.netloc

    @property
    def is_aws_host(self) -> bool:
        """Check if URL points to AWS host."""
        return self._aws_info is not None

    @property
    def aws_s3_prefix(self) -> str | None:
        """Get AWS S3 domain prefix."""
        return self._aws_info["s3_prefix"] if self._aws_info else None

    @aws_s3_prefix.setter
    def aws_s3_prefix(self, s3_prefix: str):
        """Set AWS s3 domain prefix."""
        if not _AWS_S3_PREFIX_REGEX.match(s3_prefix):
            raise ValueError(f"invalid AWS S3 domain prefix {s3_prefix}")
        # Silently ignored for non-AWS hosts.
        if self._aws_info:
            self._aws_info["s3_prefix"] = s3_prefix

    @property
    def accelerate_host_flag(self) -> bool:
        """Get AWS accelerate host flag."""
        return self._accelerate_host_flag

    @accelerate_host_flag.setter
    def accelerate_host_flag(self, flag: bool):
        """Set AWS accelerate host flag."""
        self._accelerate_host_flag = flag

    @property
    def dualstack_host_flag(self) -> bool:
        """Check if URL points to AWS dualstack host."""
        return self._aws_info["dualstack"] if self._aws_info else False

    @dualstack_host_flag.setter
    def dualstack_host_flag(self, flag: bool):
        """Set AWS dualstack host; ignored for non-AWS hosts."""
        if self._aws_info:
            self._aws_info["dualstack"] = flag

    @property
    def virtual_style_flag(self) -> bool:
        """Check to use virtual style or not."""
        return self._virtual_style_flag

    @virtual_style_flag.setter
    def virtual_style_flag(self, flag: bool):
        """Set whether to use virtual style or not."""
        self._virtual_style_flag = flag

    @classmethod
    def _build_aws_url(
        cls,
        aws_info: dict,
        url: urllib.parse.SplitResult,
        bucket_name: str | None,
        enforce_path_style: bool,
        region: str,
    ) -> urllib.parse.SplitResult:
        """Build URL for given information (AWS host rewriting)."""
        s3_prefix = aws_info["s3_prefix"]
        domain_suffix = aws_info["domain_suffix"]

        host = f"{s3_prefix}{domain_suffix}"
        # Legacy endpoints are used verbatim, with no region insertion.
        if host in ["s3-external-1.amazonaws.com",
                    "s3-us-gov-west-1.amazonaws.com",
                    "s3-fips-us-gov-west-1.amazonaws.com"]:
            return url_replace(url, netloc=host)

        netloc = s3_prefix
        if "s3-accelerate" in s3_prefix:
            # Accelerate endpoints cannot serve buckets with '.' in the
            # name (certificate validation would fail).
            if "." in (bucket_name or ""):
                raise ValueError(
                    f"bucket name '{bucket_name}' with '.' is not allowed "
                    f"for accelerate endpoint"
                )
            if enforce_path_style:
                netloc = netloc.replace("-accelerate", "", 1)

        if aws_info["dualstack"]:
            netloc += "dualstack."
        if "s3-accelerate" not in s3_prefix:
            netloc += region + "."
        netloc += domain_suffix

        return url_replace(url, netloc=netloc)

    def _build_list_buckets_url(
        self,
        url: urllib.parse.SplitResult,
        region: str | None,
    ) -> urllib.parse.SplitResult:
        """Build URL for ListBuckets API."""
        if not self._aws_info:
            return url

        s3_prefix = self._aws_info["s3_prefix"]
        domain_suffix = self._aws_info["domain_suffix"]

        host = f"{s3_prefix}{domain_suffix}"
        if host in ["s3-external-1.amazonaws.com",
                    "s3-us-gov-west-1.amazonaws.com",
                    "s3-fips-us-gov-west-1.amazonaws.com"]:
            return url_replace(url, netloc=host)

        # ListBuckets always goes to the plain regional "s3." endpoint.
        if s3_prefix.startswith("s3.") or s3_prefix.startswith("s3-"):
            s3_prefix = "s3."
            cn_suffix = ".cn" if domain_suffix.endswith(".cn") else ""
            domain_suffix = f"amazonaws.com{cn_suffix}"
        return url_replace(url, netloc=f"{s3_prefix}{region}.{domain_suffix}")

    def build(
        self,
        method: str,
        region: str,
        bucket_name: str | None = None,
        object_name: str | None = None,
        query_params: DictType | None = None,
    ) -> urllib.parse.SplitResult:
        """Build a full request URL for the given method/bucket/object."""
        if not bucket_name and object_name:
            raise ValueError(
                f"empty bucket name for object name {object_name}",
            )

        url = url_replace(self._url, path="/")

        # Canonical, sorted query string (required for signing).
        query = []
        for key, values in sorted((query_params or {}).items()):
            values = values if isinstance(values, (list, tuple)) else [values]
            query += [
                f"{queryencode(key)}={queryencode(value)}"
                for value in sorted(values)
            ]
        url = url_replace(url, query="&".join(query))

        if not bucket_name:
            return self._build_list_buckets_url(url, region)

        enforce_path_style = (
            # CreateBucket API requires path style in Amazon AWS S3.
            (method == "PUT" and not object_name and not query_params) or

            # GetBucketLocation API requires path style in Amazon AWS S3.
            (query_params and "location" in query_params) or

            # Use path style for bucket name containing '.' which causes
            # SSL certificate validation error.
            ("." in bucket_name and self._url.scheme == "https")
        )

        if self._aws_info:
            url = BaseURL._build_aws_url(
                self._aws_info, url, bucket_name, enforce_path_style, region)

        netloc = url.netloc
        path = "/"

        if enforce_path_style or not self._virtual_style_flag:
            path = f"/{bucket_name}"
        else:
            netloc = f"{bucket_name}.{netloc}"
        if object_name:
            path += ("" if path.endswith("/") else "/") + quote(object_name)

        return url_replace(url, netloc=netloc, path=path)
|
||||
|
||||
|
||||
class ObjectWriteResult:
    """Result returned by APIs that create or copy an object."""

    def __init__(
        self,
        bucket_name: str,
        object_name: str,
        version_id: str | None,
        etag: str | None,
        http_headers: HTTPHeaderDict,
        last_modified: datetime | None = None,
        location: str | None = None,
    ):
        self._bucket_name = bucket_name
        self._object_name = object_name
        self._version_id = version_id
        self._etag = etag
        self._http_headers = http_headers
        self._last_modified = last_modified
        self._location = location

    @property
    def bucket_name(self) -> str:
        """Bucket the object was written to."""
        return self._bucket_name

    @property
    def object_name(self) -> str:
        """Key of the written object."""
        return self._object_name

    @property
    def etag(self) -> str | None:
        """ETag reported by the server, if any."""
        return self._etag

    @property
    def version_id(self) -> str | None:
        """Version ID of the written object, when versioning is enabled."""
        return self._version_id

    @property
    def http_headers(self) -> HTTPHeaderDict:
        """Raw HTTP response headers."""
        return self._http_headers

    @property
    def last_modified(self) -> datetime | None:
        """Last-modified timestamp, if provided by the server."""
        return self._last_modified

    @property
    def location(self) -> str | None:
        """Location URL of the written object, if provided."""
        return self._location
|
||||
|
||||
|
||||
class Worker(Thread):
    """Daemon thread that consumes and runs tasks from a shared queue."""

    def __init__(
            self,
            tasks_queue: Queue,
            results_queue: Queue,
            exceptions_queue: Queue,
    ):
        super().__init__(daemon=True)
        self._tasks_queue = tasks_queue
        self._results_queue = results_queue
        self._exceptions_queue = exceptions_queue
        # The worker begins consuming immediately upon construction.
        self.start()

    def run(self):
        """Pull tasks until a falsy sentinel arrives, executing each one."""
        while True:
            task = self._tasks_queue.get()
            if not task:
                # Sentinel received: acknowledge it and stop this worker.
                self._tasks_queue.task_done()
                return
            # Once any sibling thread has recorded a failure, stop executing
            # new tasks but keep draining and acknowledging the queue.
            if self._exceptions_queue.empty():
                target, pos_args, kw_args, release = task
                try:
                    self._results_queue.put(target(*pos_args, **kw_args))
                except Exception as error:  # pylint: disable=broad-except
                    self._exceptions_queue.put(error)
                finally:
                    # Always run cleanup (releases the pool's semaphore slot).
                    release()
            # Mark this task as done, whether an exception happened or not.
            self._tasks_queue.task_done()
|
||||
|
||||
|
||||
class ThreadPool:
    """Fixed-size pool of worker threads fed by a shared task queue."""
    _results_queue: Queue
    _exceptions_queue: Queue
    _tasks_queue: Queue
    _sem: BoundedSemaphore
    _num_threads: int

    def __init__(self, num_threads: int):
        self._results_queue = Queue()
        self._exceptions_queue = Queue()
        self._tasks_queue = Queue()
        # Bounds the number of queued-but-unfinished tasks so producers
        # cannot race arbitrarily far ahead of the workers.
        self._sem = BoundedSemaphore(num_threads)
        self._num_threads = num_threads

    def add_task(self, func, *args, **kargs):
        """
        Queue *func* for execution, blocking while every worker is busy.
        Blocking the producer keeps memory bounded while tasks are pending.
        """
        self._sem.acquire()  # pylint: disable=consider-using-with
        self._tasks_queue.put((func, args, kargs, self._sem.release))

    def start_parallel(self):
        """Spawn the worker threads that will service the task queue."""
        for _ in range(self._num_threads):
            Worker(
                self._tasks_queue, self._results_queue, self._exceptions_queue,
            )

    def result(self) -> Queue:
        """Shut the pool down and return the queue of task results."""
        # One sentinel per worker makes every thread exit its loop.
        for _ in range(self._num_threads):
            self._tasks_queue.put(None)
        # Block until every queued task has been acknowledged.
        self._tasks_queue.join()
        # Surface the first recorded worker failure, if any, to the caller.
        if not self._exceptions_queue.empty():
            raise self._exceptions_queue.get()
        return self._results_queue
|
||||
50
venv/lib/python3.12/site-packages/minio/legalhold.py
Normal file
50
venv/lib/python3.12/site-packages/minio/legalhold.py
Normal file
@@ -0,0 +1,50 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutObjectLegalHold and GetObjectLegalHold S3 APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from typing import Type, TypeVar
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .xml import Element, SubElement, findtext
|
||||
|
||||
A = TypeVar("A", bound="LegalHold")
|
||||
|
||||
|
||||
class LegalHold:
    """Legal hold configuration."""

    def __init__(self, status: bool = False):
        self._status = status

    @property
    def status(self) -> bool:
        """Get status."""
        return self._status

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        text = findtext(element, "Status")
        # Anything other than the literal "ON" is treated as hold-off.
        return cls(text == "ON")

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML; a fresh root element is always built."""
        root = Element("LegalHold")
        SubElement(root, "Status", "ON" if self._status is True else "OFF")
        return root
|
||||
456
venv/lib/python3.12/site-packages/minio/lifecycleconfig.py
Normal file
456
venv/lib/python3.12/site-packages/minio/lifecycleconfig.py
Normal file
@@ -0,0 +1,456 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2015, 2016, 2017, 2018, 2019 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Request/response of PutBucketLifecycleConfiguration and
|
||||
GetBucketLifecycleConfiguration APIs.
|
||||
"""
|
||||
# pylint: disable=invalid-name
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABCMeta
|
||||
from datetime import datetime
|
||||
from typing import Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import BaseRule, Filter, check_status
|
||||
from .time import from_iso8601utc, to_iso8601utc
|
||||
from .xml import Element, SubElement, find, findall, findtext
|
||||
|
||||
|
||||
class DateDays:
    """Base class holding the date/days pair of Transition and Expiration."""
    # NOTE: __metaclass__ is the Python-2 spelling and has no effect on
    # Python 3; kept so the class attribute remains present.
    __metaclass__ = ABCMeta

    def __init__(self, date: datetime | None = None, days: int | None = None):
        self._date = date
        self._days = days

    @property
    def date(self) -> datetime | None:
        """Get date."""
        return self._date

    @property
    def days(self) -> int | None:
        """Get days."""
        return self._days

    @staticmethod
    def parsexml(element: ET.Element) -> tuple[datetime | None, int | None]:
        """Extract the (date, days) pair from an XML element."""
        parsed_date = from_iso8601utc(findtext(element, "Date"))
        days_text = findtext(element, "Days")
        return parsed_date, (int(days_text) if days_text else None)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Serialize date and days as children of *element*."""
        if element is None:
            raise ValueError("element must be provided")
        if self._date is not None:
            SubElement(element, "Date", to_iso8601utc(self._date))
        if self._days:
            SubElement(element, "Days", str(self._days))
        return element
|
||||
|
||||
|
||||
A = TypeVar("A", bound="Transition")
|
||||
|
||||
|
||||
class Transition(DateDays):
    """Transition action of a lifecycle rule."""

    def __init__(
            self,
            date: datetime | None = None,
            days: int | None = None,
            storage_class: str | None = None,
    ):
        super().__init__(date, days)
        self._storage_class = storage_class

    @property
    def storage_class(self) -> str | None:
        """Get storage class."""
        return self._storage_class

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        node = cast(ET.Element, find(element, "Transition", True))
        date, days = cls.parsexml(node)
        return cls(date, days, findtext(node, "StorageClass"))

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        node = SubElement(element, "Transition")
        # Date/Days children come from the base class serializer.
        super().toxml(node)
        if self._storage_class:
            SubElement(node, "StorageClass", self._storage_class)
        return node
|
||||
|
||||
|
||||
B = TypeVar("B", bound="NoncurrentVersionTransition")
|
||||
|
||||
|
||||
class NoncurrentVersionTransition:
    """Noncurrent version transition action of a lifecycle rule."""

    def __init__(
            self,
            noncurrent_days: int | None = None,
            storage_class: str | None = None,
    ):
        self._noncurrent_days = noncurrent_days
        self._storage_class = storage_class

    @property
    def noncurrent_days(self) -> int | None:
        """Get Noncurrent days."""
        return self._noncurrent_days

    @property
    def storage_class(self) -> str | None:
        """Get storage class."""
        return self._storage_class

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from XML element."""
        node = cast(
            ET.Element,
            find(element, "NoncurrentVersionTransition", True),
        )
        days_text = findtext(node, "NoncurrentDays")
        return cls(
            int(days_text) if days_text else None,
            findtext(node, "StorageClass"),
        )

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        node = SubElement(element, "NoncurrentVersionTransition")
        if self._noncurrent_days:
            SubElement(node, "NoncurrentDays", str(self._noncurrent_days))
        if self._storage_class:
            SubElement(node, "StorageClass", self._storage_class)
        return node
|
||||
|
||||
|
||||
C = TypeVar("C", bound="NoncurrentVersionExpiration")
|
||||
|
||||
|
||||
class NoncurrentVersionExpiration:
    """Noncurrent version expiration action of a lifecycle rule."""

    def __init__(self, noncurrent_days: int | None = None):
        self._noncurrent_days = noncurrent_days

    @property
    def noncurrent_days(self) -> int | None:
        """Get Noncurrent days."""
        return self._noncurrent_days

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Create new object with values from XML element."""
        node = cast(
            ET.Element,
            find(element, "NoncurrentVersionExpiration", True),
        )
        days_text = findtext(node, "NoncurrentDays")
        return cls(int(days_text) if days_text else None)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        node = SubElement(element, "NoncurrentVersionExpiration")
        if self._noncurrent_days:
            SubElement(node, "NoncurrentDays", str(self._noncurrent_days))
        return node
|
||||
|
||||
|
||||
D = TypeVar("D", bound="Expiration")
|
||||
|
||||
|
||||
class Expiration(DateDays):
    """Expiration action of a lifecycle rule."""

    def __init__(
            self,
            date: datetime | None = None,
            days: int | None = None,
            expired_object_delete_marker: bool | None = None,
    ):
        super().__init__(date, days)
        self._expired_object_delete_marker = expired_object_delete_marker

    @property
    def expired_object_delete_marker(self) -> bool | None:
        """Get expired object delete marker."""
        return self._expired_object_delete_marker

    @classmethod
    def fromxml(cls: Type[D], element: ET.Element) -> D:
        """Create new object with values from XML element."""
        node = cast(ET.Element, find(element, "Expiration", True))
        date, days = cls.parsexml(node)
        marker_text = findtext(node, "ExpiredObjectDeleteMarker")
        if marker_text is None:
            return cls(date, days, None)
        # Accept case-insensitive "true"/"false" only; reject anything else.
        if marker_text.title() not in ["False", "True"]:
            raise ValueError(
                "value of ExpiredObjectDeleteMarker must be "
                "'True' or 'False'",
            )
        return cls(date, days, marker_text.title() == "True")

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        node = SubElement(element, "Expiration")
        # Date/Days children come from the base class serializer.
        super().toxml(node)
        if self._expired_object_delete_marker is not None:
            SubElement(
                node,
                "ExpiredObjectDeleteMarker",
                str(self._expired_object_delete_marker).lower(),
            )
        return node
|
||||
|
||||
|
||||
E = TypeVar("E", bound="AbortIncompleteMultipartUpload")
|
||||
|
||||
|
||||
class AbortIncompleteMultipartUpload:
    """Abort-incomplete-multipart-upload action of a lifecycle rule."""

    def __init__(self, days_after_initiation: int | None = None):
        self._days_after_initiation = days_after_initiation

    @property
    def days_after_initiation(self) -> int | None:
        """Get days after initiation."""
        return self._days_after_initiation

    @classmethod
    def fromxml(cls: Type[E], element: ET.Element) -> E:
        """Create new object with values from XML element."""
        node = cast(
            ET.Element,
            find(element, "AbortIncompleteMultipartUpload", True),
        )
        days_text = findtext(node, "DaysAfterInitiation")
        return cls(int(days_text) if days_text else None)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML."""
        if element is None:
            raise ValueError("element must be provided")
        node = SubElement(element, "AbortIncompleteMultipartUpload")
        if self._days_after_initiation:
            SubElement(
                node,
                "DaysAfterInitiation",
                str(self._days_after_initiation),
            )
        return node
|
||||
|
||||
|
||||
F = TypeVar("F", bound="Rule")
|
||||
|
||||
|
||||
class Rule(BaseRule):
    """
    Lifecycle rule.

    Couples a mandatory filter with an enabled/disabled status and at least
    one lifecycle action (abort-incomplete-multipart-upload, expiration,
    noncurrent-version expiration/transition, or transition).
    """

    def __init__(
            self,
            status: str,
            abort_incomplete_multipart_upload:
            AbortIncompleteMultipartUpload | None = None,
            expiration: Expiration | None = None,
            rule_filter: Filter | None = None,
            rule_id: str | None = None,
            noncurrent_version_expiration:
            NoncurrentVersionExpiration | None = None,
            noncurrent_version_transition:
            NoncurrentVersionTransition | None = None,
            transition: Transition | None = None,
    ):
        # Status validation is delegated to commonconfig.check_status.
        check_status(status)
        # A rule without any action is meaningless; reject it up front.
        if (not abort_incomplete_multipart_upload and not expiration
                and not noncurrent_version_expiration
                and not noncurrent_version_transition
                and not transition):
            raise ValueError(
                "at least one of action (AbortIncompleteMultipartUpload, "
                "Expiration, NoncurrentVersionExpiration, "
                "NoncurrentVersionTransition or Transition) must be specified "
                "in a rule")
        if not rule_filter:
            raise ValueError("Rule filter must be provided")

        # Filter and rule-ID handling are delegated to BaseRule.
        super().__init__(rule_filter, rule_id)

        self._status = status
        self._abort_incomplete_multipart_upload = (
            abort_incomplete_multipart_upload
        )
        self._expiration = expiration
        self._noncurrent_version_expiration = noncurrent_version_expiration
        self._noncurrent_version_transition = noncurrent_version_transition
        self._transition = transition

    @property
    def status(self) -> str | None:
        """Get status."""
        return self._status

    @property
    def abort_incomplete_multipart_upload(
            self,
    ) -> AbortIncompleteMultipartUpload | None:
        """Get abort incomplete multipart upload."""
        return self._abort_incomplete_multipart_upload

    @property
    def expiration(self) -> Expiration | None:
        """Get expiration."""
        return self._expiration

    @property
    def noncurrent_version_expiration(
            self,
    ) -> NoncurrentVersionExpiration | None:
        """Get noncurrent version expiration."""
        return self._noncurrent_version_expiration

    @property
    def noncurrent_version_transition(
            self,
    ) -> NoncurrentVersionTransition | None:
        """Get noncurrent version transition."""
        return self._noncurrent_version_transition

    @property
    def transition(self) -> Transition | None:
        """Get transition."""
        return self._transition

    @classmethod
    def fromxml(cls: Type[F], element: ET.Element) -> F:
        """Create new object with values from XML element."""
        status = cast(str, findtext(element, "Status", True))
        # Each action is parsed only when its tag is present in the rule.
        abort_incomplete_multipart_upload = (
            None if find(element, "AbortIncompleteMultipartUpload") is None
            else AbortIncompleteMultipartUpload.fromxml(element)
        )
        expiration = (
            None if find(element, "Expiration") is None
            else Expiration.fromxml(element)
        )
        rule_filter, rule_id = cls.parsexml(element)
        noncurrent_version_expiration = (
            None if find(element, "NoncurrentVersionExpiration") is None
            else NoncurrentVersionExpiration.fromxml(element)
        )
        noncurrent_version_transition = (
            None if find(element, "NoncurrentVersionTransition") is None
            else NoncurrentVersionTransition.fromxml(element)
        )
        transition = (
            None if find(element, "Transition") is None
            else Transition.fromxml(element)
        )

        return cls(
            status,
            abort_incomplete_multipart_upload=(
                abort_incomplete_multipart_upload
            ),
            expiration=expiration,
            rule_filter=rule_filter,
            rule_id=rule_id,
            noncurrent_version_expiration=noncurrent_version_expiration,
            noncurrent_version_transition=noncurrent_version_transition,
            transition=transition,
        )

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML, appending a <Rule> child to *element*."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Rule")
        SubElement(element, "Status", self._status)
        # Actions and the BaseRule filter/ID are serialized in this order.
        if self._abort_incomplete_multipart_upload:
            self._abort_incomplete_multipart_upload.toxml(element)
        if self._expiration:
            self._expiration.toxml(element)
        super().toxml(element)
        if self._noncurrent_version_expiration:
            self._noncurrent_version_expiration.toxml(element)
        if self._noncurrent_version_transition:
            self._noncurrent_version_transition.toxml(element)
        if self._transition:
            self._transition.toxml(element)
        return element
|
||||
|
||||
|
||||
G = TypeVar("G", bound="LifecycleConfig")
|
||||
|
||||
|
||||
class LifecycleConfig:
    """Lifecycle configuration."""

    def __init__(self, rules: list[Rule]):
        # An empty rule list is not a valid lifecycle configuration.
        if not rules:
            raise ValueError("rules must be provided")
        self._rules = rules

    @property
    def rules(self) -> list[Rule]:
        """Get rules."""
        return self._rules

    @classmethod
    def fromxml(cls: Type[G], element: ET.Element) -> G:
        """Create new object with values from XML element."""
        return cls([Rule.fromxml(tag) for tag in findall(element, "Rule")])

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML; a fresh root element is always built."""
        root = Element("LifecycleConfiguration")
        for rule in self._rules:
            rule.toxml(root)
        return root
|
||||
836
venv/lib/python3.12/site-packages/minio/minioadmin.py
Normal file
836
venv/lib/python3.12/site-packages/minio/minioadmin.py
Normal file
@@ -0,0 +1,836 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2021 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-public-methods
|
||||
|
||||
"""MinIO Admin Client to perform MinIO administration operations."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
from datetime import timedelta
|
||||
from enum import Enum
|
||||
from typing import TextIO, Tuple, cast
|
||||
from urllib.parse import urlunsplit
|
||||
|
||||
import certifi
|
||||
from typing_extensions import Protocol
|
||||
from urllib3 import Retry
|
||||
from urllib3._collections import HTTPHeaderDict
|
||||
from urllib3.poolmanager import PoolManager
|
||||
|
||||
try:
|
||||
from urllib3.response import BaseHTTPResponse # type: ignore[attr-defined]
|
||||
except ImportError:
|
||||
from urllib3.response import HTTPResponse as BaseHTTPResponse
|
||||
|
||||
from urllib3.util import Timeout
|
||||
|
||||
from . import time
|
||||
from .credentials import Provider
|
||||
from .crypto import decrypt, encrypt
|
||||
from .datatypes import PeerInfo, PeerSite, SiteReplicationStatusOptions
|
||||
from .error import MinioAdminException
|
||||
from .helpers import (_DEFAULT_USER_AGENT, _REGION_REGEX, DictType, _parse_url,
|
||||
headers_to_strings, queryencode, sha256_hash,
|
||||
url_replace)
|
||||
from .signer import sign_v4_s3
|
||||
|
||||
# Admin API commands; each value is the REST path suffix appended to
# "/minio/admin/v3/" by MinioAdmin._url_open.
_COMMAND = Enum(
    "Command",
    {
        "ADD_USER": "add-user",
        "USER_INFO": "user-info",
        "LIST_USERS": "list-users",
        "REMOVE_USER": "remove-user",
        "SET_USER_STATUS": "set-user-status",
        "ADD_CANNED_POLICY": "add-canned-policy",
        "SET_USER_OR_GROUP_POLICY": "set-user-or-group-policy",
        "LIST_CANNED_POLICIES": "list-canned-policies",
        "REMOVE_CANNED_POLICY": "remove-canned-policy",
        "UNSET_USER_OR_GROUP_POLICY": "idp/builtin/policy/detach",
        "CANNED_POLICY_INFO": "info-canned-policy",
        "SET_BUCKET_QUOTA": "set-bucket-quota",
        "GET_BUCKET_QUOTA": "get-bucket-quota",
        "DATA_USAGE_INFO": "datausageinfo",
        "ADD_UPDATE_REMOVE_GROUP": "update-group-members",
        "SET_GROUP_STATUS": "set-group-status",
        "GROUP_INFO": "group",
        "LIST_GROUPS": "groups",
        "INFO": "info",
        "SERVICE": "service",
        "UPDATE": "update",
        "TOP_LOCKS": "top/locks",
        "HELP_CONFIG": "help-config-kv",
        "GET_CONFIG": "get-config-kv",
        "SET_CONFIG": "set-config-kv",
        "DELETE_CONFIG": "del-config-kv",
        "LIST_CONFIG_HISTORY": "list-config-history-kv",
        # NOTE(review): "RESOTRE" is a typo ("RESTORE"), kept unchanged for
        # backward compatibility with existing callers of this member.
        "RESOTRE_CONFIG_HISTORY": "restore-config-history-kv",
        "START_PROFILE": "profile",
        "CREATE_KMS_KEY": "kms/key/create",
        "GET_KMS_KEY_STATUS": "kms/key/status",
        "SITE_REPLICATION_ADD": "site-replication/add",
        "SITE_REPLICATION_INFO": "site-replication/info",
        "SITE_REPLICATION_STATUS": "site-replication/status",
        "SITE_REPLICATION_EDIT": "site-replication/edit",
        "SITE_REPLICATION_REMOVE": "site-replication/remove",
        "SERVICE_ACCOUNT_INFO": "info-service-account",
        "SERVICE_ACCOUNT_LIST": "list-service-accounts",
        "SERVICE_ACCOUNT_ADD": "add-service-account",
        "SERVICE_ACCOUNT_UPDATE": "update-service-account",
        "SERVICE_ACCOUNT_DELETE": "delete-service-account",
    },
)
|
||||
|
||||
|
||||
class CommandType(Protocol):
    """typing stub for enum.Command class"""

    @property
    def value(self) -> str:
        """Get value of the command."""
        # Protocol member: intentionally no implementation body.
|
||||
|
||||
|
||||
class MinioAdmin:
|
||||
"""Client to perform MinIO administration operations."""
|
||||
|
||||
    def __init__(
            self,
            endpoint: str,
            credentials: Provider,
            region: str = "",
            secure: bool = True,
            cert_check: bool = True,
            http_client: PoolManager | None = None,
    ):
        """
        :param endpoint: Host[:port] of the MinIO server (no scheme).
        :param credentials: Credential provider; must be a ``Provider``.
        :param region: Optional region name; validated against _REGION_REGEX.
        :param secure: Use HTTPS when True, HTTP otherwise.
        :param cert_check: Verify the server TLS certificate (HTTPS only).
        :param http_client: Optional pre-configured urllib3 PoolManager.
        :raises ValueError: On invalid credentials, region or http_client.
        """
        # The scheme is chosen here; `endpoint` itself must not carry one.
        url = _parse_url(("https://" if secure else "http://") + endpoint)
        if not isinstance(credentials, Provider):
            raise ValueError("valid credentials must be provided")
        if region and not _REGION_REGEX.match(region):
            raise ValueError(f"invalid region {region}")
        if http_client:
            if not isinstance(http_client, PoolManager):
                raise ValueError(
                    "HTTP client should be instance of "
                    "`urllib3.poolmanager.PoolManager`"
                )
        else:
            # Default client: 5-minute connect/read timeouts, retries with
            # backoff on transient 5xx responses, optional TLS verification.
            timeout = timedelta(minutes=5).seconds
            http_client = PoolManager(
                timeout=Timeout(connect=timeout, read=timeout),
                maxsize=10,
                cert_reqs='CERT_REQUIRED' if cert_check else 'CERT_NONE',
                ca_certs=os.environ.get('SSL_CERT_FILE') or certifi.where(),
                retries=Retry(
                    total=5,
                    backoff_factor=0.2,
                    status_forcelist=[500, 502, 503, 504]
                )
            )

        self._url = url
        self._provider = credentials
        self._region = region
        self._secure = secure
        self._cert_check = cert_check
        self._http = http_client
        self._user_agent = _DEFAULT_USER_AGENT
        # Set via trace_on()/trace_off(); when non-None, requests are traced.
        self._trace_stream: TextIO | None = None
|
||||
|
||||
    def __del__(self):
        # Release pooled HTTP connections when the client is garbage-collected.
        self._http.clear()
|
||||
|
||||
    def _url_open(
            self,
            method: str,
            command: CommandType,
            query_params: DictType | None = None,
            body: bytes | None = None,
            preload_content: bool = True,
    ) -> BaseHTTPResponse:
        """
        Execute HTTP request.

        Builds the admin URL for *command*, signs the request with AWS
        SigV4 (S3 service), optionally traces it, and sends it.

        :param method: HTTP method (e.g. "GET", "PUT").
        :param command: Admin command providing the URL path suffix.
        :param query_params: Optional query parameters; values may be
            scalars or lists/tuples.
        :param body: Optional request body bytes.
        :param preload_content: Whether urllib3 should read the body eagerly.
        :raises MinioAdminException: When the response status is not
            200, 204 or 206.
        """
        creds = self._provider.retrieve()

        url = url_replace(self._url, path="/minio/admin/v3/"+command.value)
        # Sort keys and values so the query string is deterministic,
        # which matters because it participates in the signature.
        query = []
        for key, values in sorted((query_params or {}).items()):
            values = values if isinstance(values, (list, tuple)) else [values]
            query += [
                f"{queryencode(key)}={queryencode(value)}"
                for value in sorted(values)
            ]
        url = url_replace(url, query="&".join(query))

        # sha256 of the body is both a header and a signing input.
        content_sha256 = sha256_hash(body)
        date = time.utcnow()
        headers: DictType = {
            "Host": url.netloc,
            "User-Agent": self._user_agent,
            "x-amz-date": time.to_amz_date(date),
            "x-amz-content-sha256": content_sha256,
            "Content-Type": "application/octet-stream"
        }
        if creds.session_token:
            headers["X-Amz-Security-Token"] = creds.session_token
        if body:
            headers["Content-Length"] = str(len(body))

        # Signing must happen after all headers above are in place.
        headers = sign_v4_s3(
            method,
            url,
            self._region,
            headers,
            creds,
            content_sha256,
            date,
        )

        if self._trace_stream:
            self._trace_stream.write("---------START-HTTP---------\n")
            query_string = ("?" + url.query) if url.query else ""
            self._trace_stream.write(
                f"{method} {url.path}{query_string} HTTP/1.1\n",
            )
            self._trace_stream.write(
                headers_to_strings(headers, titled_key=True),
            )
            self._trace_stream.write("\n")
            if body is not None:
                self._trace_stream.write("\n")
                self._trace_stream.write(
                    body.decode() if isinstance(body, bytes) else str(body),
                )
                self._trace_stream.write("\n")
            self._trace_stream.write("\n")

        # urllib3 expects an HTTPHeaderDict; flatten multi-valued headers.
        http_headers = HTTPHeaderDict()
        for key, value in headers.items():
            if isinstance(value, (list, tuple)):
                for val in value:
                    http_headers.add(key, val)
            else:
                http_headers.add(key, value)

        response = self._http.urlopen(
            method,
            urlunsplit(url),
            body=body,
            headers=http_headers,
            preload_content=preload_content,
        )

        if self._trace_stream:
            self._trace_stream.write(f"HTTP/1.1 {response.status}\n")
            self._trace_stream.write(
                headers_to_strings(response.headers),
            )
            self._trace_stream.write("\n")
            self._trace_stream.write("\n")
            self._trace_stream.write(response.data.decode())
            self._trace_stream.write("\n")
            self._trace_stream.write("----------END-HTTP----------\n")

        if response.status in [200, 204, 206]:
            return response

        raise MinioAdminException(str(response.status), response.data.decode())
|
||||
|
||||
def set_app_info(self, app_name: str, app_version: str):
|
||||
"""
|
||||
Set your application name and version to user agent header.
|
||||
|
||||
:param app_name: Application name.
|
||||
:param app_version: Application version.
|
||||
|
||||
Example::
|
||||
client.set_app_info('my_app', '1.0.2')
|
||||
"""
|
||||
if not (app_name and app_version):
|
||||
raise ValueError("Application name/version cannot be empty.")
|
||||
self._user_agent = f"{_DEFAULT_USER_AGENT} {app_name}/{app_version}"
|
||||
|
||||
def trace_on(self, stream: TextIO):
|
||||
"""
|
||||
Enable http trace.
|
||||
|
||||
:param stream: Stream for writing HTTP call tracing.
|
||||
"""
|
||||
if not stream:
|
||||
raise ValueError('Input stream for trace output is invalid.')
|
||||
# Save new output stream.
|
||||
self._trace_stream = stream
|
||||
|
||||
def trace_off(self):
|
||||
"""
|
||||
Disable HTTP trace.
|
||||
"""
|
||||
self._trace_stream = None
|
||||
|
||||
def service_restart(self) -> str:
|
||||
"""Restart MinIO service."""
|
||||
response = self._url_open(
|
||||
"POST",
|
||||
_COMMAND.SERVICE,
|
||||
query_params={"action": "restart"}
|
||||
)
|
||||
return response.data.decode()
|
||||
|
||||
def service_stop(self) -> str:
|
||||
"""Stop MinIO service."""
|
||||
response = self._url_open(
|
||||
"POST",
|
||||
_COMMAND.SERVICE,
|
||||
query_params={"action": "stop"}
|
||||
)
|
||||
return response.data.decode()
|
||||
|
||||
def update(self) -> str:
|
||||
"""Update MinIO."""
|
||||
response = self._url_open(
|
||||
"POST",
|
||||
_COMMAND.UPDATE,
|
||||
query_params={"updateURL": ""}
|
||||
)
|
||||
return response.data.decode()
|
||||
|
||||
def info(self) -> str:
|
||||
"""Get MinIO server information."""
|
||||
response = self._url_open(
|
||||
"GET",
|
||||
_COMMAND.INFO,
|
||||
)
|
||||
return response.data.decode()
|
||||
|
||||
def user_add(self, access_key: str, secret_key: str) -> str:
    """Create a user with the given access and secret keys.

    The secret key travels in an encrypted JSON body; the access key
    goes as a query parameter.

    :param access_key: Access key of the new user.
    :param secret_key: Secret key of the new user.
    :return: Server response body as a string.
    """
    payload = json.dumps(
        {"status": "enabled", "secretKey": secret_key},
    ).encode()
    res = self._url_open(
        "PUT",
        _COMMAND.ADD_USER,
        query_params={"accessKey": access_key},
        body=encrypt(payload, self._provider.retrieve().secret_key),
    )
    return res.data.decode()
|
||||
|
||||
def user_disable(self, access_key: str) -> str:
    """Disable a user.

    :param access_key: Access key of the user to disable.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "PUT",
        _COMMAND.SET_USER_STATUS,
        query_params={"accessKey": access_key, "status": "disabled"},
    )
    return res.data.decode()
|
||||
|
||||
def user_enable(self, access_key: str) -> str:
    """Enable a user.

    :param access_key: Access key of the user to enable.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "PUT",
        _COMMAND.SET_USER_STATUS,
        query_params={"accessKey": access_key, "status": "enabled"},
    )
    return res.data.decode()
|
||||
|
||||
def user_remove(self, access_key: str) -> str:
    """Delete a user.

    :param access_key: Access key of the user to delete.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "DELETE",
        _COMMAND.REMOVE_USER,
        query_params={"accessKey": access_key},
    )
    return res.data.decode()
|
||||
|
||||
def user_info(self, access_key: str) -> str:
    """Get information about a user.

    :param access_key: Access key of the user to look up.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "GET",
        _COMMAND.USER_INFO,
        query_params={"accessKey": access_key},
    )
    return res.data.decode()
|
||||
|
||||
def user_list(self) -> str:
    """List all users.

    The server encrypts the response with the admin secret key, so the
    raw stream is fetched (preload_content=False) and decrypted here.

    :return: Decrypted response as a string.
    """
    res = self._url_open(
        "GET", _COMMAND.LIST_USERS, preload_content=False,
    )
    return decrypt(res, self._provider.retrieve().secret_key).decode()
|
||||
|
||||
def group_add(self, group_name: str, members: str) -> str:
    """Add users to a new or existing group.

    :param group_name: Name of the group.
    :param members: Members to add.
    :return: Server response body as a string.
    """
    payload = json.dumps({
        "group": group_name,
        "members": members,
        "isRemove": False
    }).encode()
    res = self._url_open(
        "PUT", _COMMAND.ADD_UPDATE_REMOVE_GROUP, body=payload,
    )
    return res.data.decode()
|
||||
|
||||
def group_disable(self, group_name: str) -> str:
    """Disable a group.

    :param group_name: Name of the group to disable.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "PUT",
        _COMMAND.SET_GROUP_STATUS,
        query_params={"group": group_name, "status": "disabled"},
    )
    return res.data.decode()
|
||||
|
||||
def group_enable(self, group_name: str) -> str:
    """Enable a group.

    :param group_name: Name of the group to enable.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "PUT",
        _COMMAND.SET_GROUP_STATUS,
        query_params={"group": group_name, "status": "enabled"},
    )
    return res.data.decode()
|
||||
|
||||
def group_remove(self, group_name: str, members: str | None = None) -> str:
    """Remove a group, or remove members from a group.

    When *members* is omitted the whole group is removed.

    :param group_name: Name of the group.
    :param members: Optional members to remove from the group.
    :return: Server response body as a string.
    """
    payload: dict = {"group": group_name, "isRemove": True}
    if members is not None:
        payload["members"] = members
    res = self._url_open(
        "PUT",
        _COMMAND.ADD_UPDATE_REMOVE_GROUP,
        body=json.dumps(payload).encode(),
    )
    return res.data.decode()
|
||||
|
||||
def group_info(self, group_name: str) -> str:
    """Get group information.

    :param group_name: Name of the group to look up.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "GET",
        _COMMAND.GROUP_INFO,
        query_params={"group": group_name},
    )
    return res.data.decode()
|
||||
|
||||
def group_list(self) -> str:
    """List groups.

    :return: Server response body as a string.
    """
    return self._url_open("GET", _COMMAND.LIST_GROUPS).data.decode()
|
||||
|
||||
def policy_add(self, policy_name: str, policy_file: str) -> str:
    """Add a new canned policy from a policy file.

    :param policy_name: Name to register the policy under.
    :param policy_file: Path of the file containing the policy JSON.
    :return: Server response body as a string.
    """
    with open(policy_file, encoding='utf-8') as fobj:
        policy_bytes = fobj.read().encode()
    res = self._url_open(
        "PUT",
        _COMMAND.ADD_CANNED_POLICY,
        query_params={"name": policy_name},
        body=policy_bytes,
    )
    return res.data.decode()
|
||||
|
||||
def policy_remove(self, policy_name: str) -> str:
    """Remove a canned policy.

    :param policy_name: Name of the policy to remove.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "DELETE",
        _COMMAND.REMOVE_CANNED_POLICY,
        query_params={"name": policy_name},
    )
    return res.data.decode()
|
||||
|
||||
def policy_info(self, policy_name: str) -> str:
    """Get canned policy information.

    :param policy_name: Name of the policy to look up.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "GET",
        _COMMAND.CANNED_POLICY_INFO,
        query_params={"name": policy_name},
    )
    return res.data.decode()
|
||||
|
||||
def policy_list(self) -> str:
    """List canned policies.

    :return: Server response body as a string.
    """
    return self._url_open("GET", _COMMAND.LIST_CANNED_POLICIES).data.decode()
|
||||
|
||||
def policy_set(
        self,
        policy_name: str | list[str],
        user: str | None = None,
        group: str | None = None,
) -> str:
    """Set an IAM policy on a user or a group.

    Exactly one of *user* and *group* must be given.

    :param policy_name: Policy name (or list of names) to attach.
    :param user: Access key of the target user.
    :param group: Name of the target group.
    :return: Server response body as a string.
    """
    # Guard clause: reject both-set and both-unset.
    if (user is None) == (group is None):
        raise ValueError("either user or group must be set")
    res = self._url_open(
        "PUT",
        _COMMAND.SET_USER_OR_GROUP_POLICY,
        query_params={
            "userOrGroup": cast(str, user or group),
            "isGroup": "true" if group else "false",
            "policyName": policy_name,
        },
    )
    return res.data.decode()
|
||||
|
||||
def policy_unset(
        self,
        policy_name: str | list[str],
        user: str | None = None,
        group: str | None = None,
) -> str:
    """Unset an IAM policy for a user or a group.

    Exactly one of *user* and *group* must be given.

    :param policy_name: Policy name (or list of names) to detach.
    :param user: Access key of the target user.
    :param group: Name of the target group.
    :return: Decrypted server response as a string.
    """
    # Guard clause: reject both-set and both-unset.
    if (user is None) == (group is None):
        raise ValueError("either user or group must be set")
    names = policy_name if isinstance(policy_name, list) else [policy_name]
    payload: dict[str, str | list[str]] = {"policies": names}
    if user:
        payload["user"] = user
    if group:
        payload["group"] = group
    res = self._url_open(
        "POST",
        _COMMAND.UNSET_USER_OR_GROUP_POLICY,
        body=encrypt(
            json.dumps(payload).encode(),
            self._provider.retrieve().secret_key,
        ),
        preload_content=False,
    )
    return decrypt(res, self._provider.retrieve().secret_key).decode()
|
||||
|
||||
def config_get(self, key: str | None = None) -> str:
    """Get configuration parameters.

    :param key: Optional configuration key; when omitted, the full
        configuration dump is returned.
    :return: Configuration as a string.
    """
    # Fix: bind `response` before the try block. Previously, if
    # _url_open() raised, the finally block referenced an unbound
    # name and raised NameError, masking the original error.
    response = None
    try:
        response = self._url_open(
            "GET",
            _COMMAND.GET_CONFIG,
            query_params={"key": key or "", "subSys": ""},
            preload_content=False,
        )
        if key is None:
            # Full dump comes back as plain text.
            return response.read().decode()
        # Single-key responses are encrypted with the admin secret key.
        return decrypt(
            response, self._provider.retrieve().secret_key,
        ).decode()
    finally:
        if response:
            response.close()
            response.release_conn()
|
||||
|
||||
def config_set(
        self,
        key: str,
        config: dict[str, str] | None = None,
) -> str:
    """Set configuration parameters.

    :param key: Configuration sub-system key.
    :param config: Optional name/value pairs appended as "name=value".
    :return: Server response body as a string.
    """
    parts = [key]
    if config:
        parts.extend(f"{name}={value}" for name, value in config.items())
    payload = " ".join(parts).encode()
    res = self._url_open(
        "PUT",
        _COMMAND.SET_CONFIG,
        body=encrypt(payload, self._provider.retrieve().secret_key),
    )
    return res.data.decode()
|
||||
|
||||
def config_reset(self, key: str, name: str | None = None) -> str:
    """Reset configuration parameters.

    :param key: Configuration sub-system key.
    :param name: Optional target name, appended as "key:name".
    :return: Server response body as a string.
    """
    target = f"{key}:{name}" if name else key
    res = self._url_open(
        "DELETE",
        _COMMAND.DELETE_CONFIG,
        body=encrypt(target.encode(), self._provider.retrieve().secret_key),
    )
    return res.data.decode()
|
||||
|
||||
def config_history(self) -> str:
    """Get historic configuration changes (most recent 10 entries).

    :return: Decrypted history as a string.
    """
    # Fix: bind `response` before the try block. Previously, if
    # _url_open() raised, the finally block referenced an unbound
    # name and raised NameError, masking the original error.
    response = None
    try:
        response = self._url_open(
            "GET",
            _COMMAND.LIST_CONFIG_HISTORY,
            query_params={"count": "10"},
            preload_content=False,
        )
        # Response is encrypted with the admin secret key.
        return decrypt(
            response, self._provider.retrieve().secret_key,
        ).decode()
    finally:
        if response:
            response.close()
            response.release_conn()
|
||||
|
||||
def config_restore(self, restore_id: str) -> str:
    """Restore the server to a specific configuration history entry.

    :param restore_id: Identifier of the history entry to restore.
    :return: Server response body as a string.
    """
    # NOTE: "RESOTRE" is the attribute's spelling as defined on the
    # _COMMAND enum; renaming it here would break the reference.
    res = self._url_open(
        "PUT",
        _COMMAND.RESOTRE_CONFIG_HISTORY,
        query_params={"restoreId": restore_id},
    )
    return res.data.decode()
|
||||
|
||||
def profile_start(
        self,
        profilers: tuple[str] = cast(Tuple[str], ()),
) -> str:
    """Run a system profile.

    :param profilers: Profiler types to run, joined comma-separated.
    :return: Server response body as a string.
    """
    response = self._url_open(
        "POST",
        _COMMAND.START_PROFILE,
        # Fix: the query-parameter key previously carried a stray
        # trailing semicolon ("profilerType;"), unlike every other
        # query parameter built in this class.
        query_params={"profilerType": ",".join(profilers)},
    )
    return response.data.decode()
|
||||
|
||||
def top_locks(self) -> str:
    """Get a list of the 10 oldest locks on a MinIO cluster.

    :return: Server response body as a string.
    """
    return self._url_open("GET", _COMMAND.TOP_LOCKS).data.decode()
|
||||
|
||||
def kms_key_create(self, key: str | None = None) -> str:
    """Create a new KMS master key.

    :param key: Optional key ID; empty means the server default.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "POST",
        _COMMAND.CREATE_KMS_KEY,
        query_params={"key-id": key or ""},
    )
    return res.data.decode()
|
||||
|
||||
def kms_key_status(self, key: str | None = None) -> str:
    """Get status information of a KMS master key.

    :param key: Optional key ID; empty means the server default.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "GET",
        _COMMAND.GET_KMS_KEY_STATUS,
        query_params={"key-id": key or ""},
    )
    return res.data.decode()
|
||||
|
||||
def add_site_replication(self, peer_sites: list[PeerSite]) -> str:
    """Add peer sites to site replication.

    :param peer_sites: Peer sites to add.
    :return: Server response body as a string.
    """
    payload = json.dumps(
        [site.to_dict() for site in peer_sites],
    ).encode()
    res = self._url_open(
        "PUT",
        _COMMAND.SITE_REPLICATION_ADD,
        query_params={"api-version": "1"},
        body=encrypt(payload, self._provider.retrieve().secret_key),
    )
    return res.data.decode()
|
||||
|
||||
def get_site_replication_info(self) -> str:
    """Get site replication information.

    :return: Server response body as a string.
    """
    return self._url_open("GET", _COMMAND.SITE_REPLICATION_INFO).data.decode()
|
||||
|
||||
def get_site_replication_status(
        self,
        options: SiteReplicationStatusOptions,
) -> str:
    """Get site replication status.

    :param options: Status options converted to query parameters.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "GET",
        _COMMAND.SITE_REPLICATION_STATUS,
        query_params=cast(DictType, options.to_query_params()),
    )
    return res.data.decode()
|
||||
|
||||
def edit_site_replication(self, peer_info: PeerInfo) -> str:
    """Edit site replication with the given peer information.

    :param peer_info: Peer information to apply.
    :return: Server response body as a string.
    """
    payload = json.dumps(peer_info.to_dict()).encode()
    res = self._url_open(
        "PUT",
        _COMMAND.SITE_REPLICATION_EDIT,
        query_params={"api-version": "1"},
        body=encrypt(payload, self._provider.retrieve().secret_key),
    )
    return res.data.decode()
|
||||
|
||||
def remove_site_replication(
        self,
        sites: str | None = None,
        all_sites: bool = False,
) -> str:
    """Remove the given sites, or all sites, from site replication.

    :param sites: Sites to remove.
    :param all_sites: Remove every site when True.
    :return: Server response body as a string.
    """
    if all_sites:
        payload = {"all": "True"}
    elif sites:
        payload = {"sites": sites}
    else:
        raise ValueError("either sites or all flag must be given")
    res = self._url_open(
        "PUT",
        _COMMAND.SITE_REPLICATION_REMOVE,
        query_params={"api-version": "1"},
        body=encrypt(
            json.dumps(payload).encode(),
            self._provider.retrieve().secret_key,
        ),
    )
    return res.data.decode()
|
||||
|
||||
def bucket_quota_set(self, bucket: str, size: int) -> str:
    """Set a hard bucket quota.

    :param bucket: Bucket name.
    :param size: Quota size in bytes.
    :return: Server response body as a string.
    """
    payload = json.dumps({"quota": size, "quotatype": "hard"}).encode()
    res = self._url_open(
        "PUT",
        _COMMAND.SET_BUCKET_QUOTA,
        query_params={"bucket": bucket},
        body=payload,
    )
    return res.data.decode()
|
||||
|
||||
def bucket_quota_clear(self, bucket: str) -> str:
    """Clear bucket quota configuration (a quota of 0 disables it).

    :param bucket: Bucket name.
    :return: Server response body as a string.
    """
    return self.bucket_quota_set(bucket, 0)
|
||||
|
||||
def bucket_quota_get(self, bucket: str) -> str:
    """Get bucket quota configuration.

    :param bucket: Bucket name.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "GET",
        _COMMAND.GET_BUCKET_QUOTA,
        query_params={"bucket": bucket},
    )
    return res.data.decode()
|
||||
|
||||
def get_service_account(self, access_key: str) -> str:
    """Get information about a service account.

    The response is encrypted with the admin secret key and decrypted
    here before returning.

    :param access_key: Access key of the service account.
    :return: Decrypted response as a string.
    """
    res = self._url_open(
        "GET",
        _COMMAND.SERVICE_ACCOUNT_INFO,
        query_params={"accessKey": access_key},
        preload_content=False,
    )
    return decrypt(res, self._provider.retrieve().secret_key).decode()
|
||||
|
||||
def list_service_account(self, user: str) -> str:
    """List service accounts of a user.

    The response is encrypted with the admin secret key and decrypted
    here before returning.

    :param user: User whose service accounts are listed.
    :return: Decrypted response as a string.
    """
    res = self._url_open(
        "GET",
        _COMMAND.SERVICE_ACCOUNT_LIST,
        query_params={"user": user},
        preload_content=False,
    )
    return decrypt(res, self._provider.retrieve().secret_key).decode()
|
||||
|
||||
def add_service_account(self,
                        access_key: str | None = None,
                        secret_key: str | None = None,
                        name: str | None = None,
                        description: str | None = None,
                        policy_file: str | None = None,
                        expiration: str | None = None,
                        status: str | None = None) -> str:
    """Add a new service account with the given access key and secret key.

    Both keys must be supplied together (or both omitted) and must be
    non-empty when given.

    :param access_key: Access key of the service account.
    :param secret_key: Secret key of the service account.
    :param name: Optional display name.
    :param description: Optional description.
    :param policy_file: Optional path to a JSON policy file.
    :param expiration: Optional expiration timestamp.
    :param status: Optional status; overrides the default "enabled".
    :return: Decrypted server response as a string.
    """
    if (access_key is None) ^ (secret_key is None):
        raise ValueError("both access key and secret key must be provided")
    if access_key == "" or secret_key == "":
        raise ValueError("access key or secret key must not be empty")
    payload: dict = {
        "status": "enabled",
        "accessKey": access_key,
        "secretKey": secret_key,
    }
    if name:
        payload["name"] = name
    if description:
        payload["description"] = description
    if policy_file:
        with open(policy_file, encoding="utf-8") as fobj:
            payload["policy"] = json.load(fobj)
    if expiration:
        payload["expiration"] = expiration
    if status:
        payload["status"] = status

    res = self._url_open(
        "PUT",
        _COMMAND.SERVICE_ACCOUNT_ADD,
        body=encrypt(
            json.dumps(payload).encode(),
            self._provider.retrieve().secret_key,
        ),
        preload_content=False,
    )
    return decrypt(res, self._provider.retrieve().secret_key).decode()
|
||||
|
||||
def update_service_account(self,
                           access_key: str,
                           secret_key: str | None = None,
                           name: str | None = None,
                           description: str | None = None,
                           policy_file: str | None = None,
                           expiration: str | None = None,
                           status: str | None = None) -> str:
    """Update an existing service account.

    At least one optional field must be supplied.

    :param access_key: Access key of the service account to update.
    :param secret_key: Optional new secret key.
    :param name: Optional new name.
    :param description: Optional new description.
    :param policy_file: Optional path to a new JSON policy file.
    :param expiration: Optional new expiration timestamp.
    :param status: Optional new status.
    :return: Server response body as a string.
    """
    if not any((secret_key, name, description, policy_file,
                expiration, status)):
        raise ValueError("at least one of secret_key, name, description, "
                         "policy_file, expiration or status must be "
                         "specified")
    payload: dict = {}
    if secret_key:
        payload["newSecretKey"] = secret_key
    if name:
        payload["newName"] = name
    if description:
        payload["newDescription"] = description
    if policy_file:
        with open(policy_file, encoding="utf-8") as fobj:
            payload["newPolicy"] = json.load(fobj)
    if expiration:
        payload["newExpiration"] = expiration
    if status:
        payload["newStatus"] = status

    res = self._url_open(
        "POST",
        _COMMAND.SERVICE_ACCOUNT_UPDATE,
        query_params={"accessKey": access_key},
        body=encrypt(
            json.dumps(payload).encode(),
            self._provider.retrieve().secret_key,
        ),
    )
    return res.data.decode()
|
||||
|
||||
def delete_service_account(self, access_key: str) -> str:
    """Delete a service account.

    :param access_key: Access key of the service account to delete.
    :return: Server response body as a string.
    """
    res = self._url_open(
        "DELETE",
        _COMMAND.SERVICE_ACCOUNT_DELETE,
        query_params={"accessKey": access_key},
    )
    return res.data.decode()
|
||||
378
venv/lib/python3.12/site-packages/minio/notificationconfig.py
Normal file
378
venv/lib/python3.12/site-packages/minio/notificationconfig.py
Normal file
@@ -0,0 +1,378 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Request/response of PutBucketNotificationConfiguration and
|
||||
GetBucketNotificationConfiguration APIs.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABCMeta
|
||||
from typing import Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .xml import Element, SubElement, find, findall, findtext
|
||||
|
||||
A = TypeVar("A", bound="FilterRule")
|
||||
|
||||
|
||||
class FilterRule:
    """A single notification filter rule: a name ("prefix"/"suffix")
    paired with its value."""

    __metaclass__ = ABCMeta

    def __init__(self, name: str, value: str):
        self._name = name
        self._value = value

    @property
    def name(self) -> str:
        """Get name."""
        return self._name

    @property
    def value(self) -> str:
        """Get value."""
        return self._value

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from an XML element."""
        return cls(
            cast(str, findtext(element, "Name", True)),
            cast(str, findtext(element, "Value", True)),
        )

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Serialize as a <FilterRule> child of *element*."""
        if element is None:
            raise ValueError("element must be provided")
        rule = SubElement(element, "FilterRule")
        SubElement(rule, "Name", self._name)
        SubElement(rule, "Value", self._value)
        return rule
|
||||
|
||||
|
||||
class PrefixFilterRule(FilterRule):
    """Filter rule matching object-key prefixes."""

    def __init__(self, value: str):
        super().__init__("prefix", value)
|
||||
|
||||
|
||||
class SuffixFilterRule(FilterRule):
    """Filter rule matching object-key suffixes."""

    def __init__(self, value: str):
        super().__init__("suffix", value)
|
||||
|
||||
|
||||
class CommonConfig:
    """Fields shared by cloud-function/queue/topic configurations:
    event list, optional config ID, and optional prefix/suffix rules."""

    __metaclass__ = ABCMeta

    def __init__(
            self,
            events: list[str],
            config_id: str | None,
            prefix_filter_rule: PrefixFilterRule | None,
            suffix_filter_rule: SuffixFilterRule | None,
    ):
        if not events:
            raise ValueError("events must be provided")
        self._events = events
        self._config_id = config_id
        self._prefix_filter_rule = prefix_filter_rule
        self._suffix_filter_rule = suffix_filter_rule

    @property
    def events(self) -> list[str]:
        """Get events."""
        return self._events

    @property
    def config_id(self) -> str | None:
        """Get configuration ID."""
        return self._config_id

    @property
    def prefix_filter_rule(self) -> PrefixFilterRule | None:
        """Get prefix filter rule."""
        return self._prefix_filter_rule

    @property
    def suffix_filter_rule(self) -> SuffixFilterRule | None:
        """Get suffix filter rule."""
        return self._suffix_filter_rule

    @staticmethod
    def parsexml(
            element: ET.Element,
    ) -> tuple[
        list[str], str | None, PrefixFilterRule | None, SuffixFilterRule | None
    ]:
        """Extract the common fields from an XML element."""
        events = []
        for event_tag in findall(element, "Event"):
            if event_tag.text is None:
                raise ValueError("missing value in XML tag 'Event'")
            events.append(event_tag.text)
        config_id = findtext(element, "Id")
        filter_elem = find(element, "Filter")
        if filter_elem is None:
            return events, config_id, None, None
        prefix_rule = None
        suffix_rule = None
        s3key = cast(ET.Element, find(filter_elem, "S3Key", True))
        for rule_tag in findall(s3key, "FilterRule"):
            parsed = FilterRule.fromxml(rule_tag)
            # Any name other than "prefix" is treated as a suffix rule.
            if parsed.name == "prefix":
                prefix_rule = PrefixFilterRule(parsed.value)
            else:
                suffix_rule = SuffixFilterRule(parsed.value)
        return events, config_id, prefix_rule, suffix_rule

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Serialize the common fields into *element*."""
        if element is None:
            raise ValueError("element must be provided")
        for event in self._events:
            SubElement(element, "Event", event)
        if self._config_id is not None:
            SubElement(element, "Id", self._config_id)
        if self._prefix_filter_rule or self._suffix_filter_rule:
            s3key = SubElement(SubElement(element, "Filter"), "S3Key")
            if self._prefix_filter_rule:
                self._prefix_filter_rule.toxml(s3key)
            if self._suffix_filter_rule:
                self._suffix_filter_rule.toxml(s3key)
        return element
|
||||
|
||||
|
||||
B = TypeVar("B", bound="CloudFuncConfig")
|
||||
|
||||
|
||||
class CloudFuncConfig(CommonConfig):
    """Cloud function notification configuration."""

    def __init__(
            self,
            cloud_func: str,
            events: list[str],
            config_id: str | None = None,
            prefix_filter_rule: PrefixFilterRule | None = None,
            suffix_filter_rule: SuffixFilterRule | None = None,
    ):
        if not cloud_func:
            raise ValueError("cloud function must be provided")
        self._cloud_func = cloud_func
        super().__init__(
            events, config_id, prefix_filter_rule, suffix_filter_rule,
        )

    @property
    def cloud_func(self) -> str:
        """Get cloud function ARN."""
        return self._cloud_func

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from an XML element."""
        arn = cast(str, findtext(element, "CloudFunction", True))
        # parsexml yields (events, config_id, prefix_rule, suffix_rule)
        # in constructor order.
        return cls(arn, *cls.parsexml(element))

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Serialize as a <CloudFunctionConfiguration> child."""
        if element is None:
            raise ValueError("element must be provided")
        config = SubElement(element, "CloudFunctionConfiguration")
        SubElement(config, "CloudFunction", self._cloud_func)
        super().toxml(config)
        return config
|
||||
|
||||
|
||||
C = TypeVar("C", bound="QueueConfig")
|
||||
|
||||
|
||||
class QueueConfig(CommonConfig):
    """Queue notification configuration."""

    def __init__(
            self,
            queue: str,
            events: list[str],
            config_id: str | None = None,
            prefix_filter_rule: PrefixFilterRule | None = None,
            suffix_filter_rule: SuffixFilterRule | None = None,
    ):
        if not queue:
            raise ValueError("queue must be provided")
        self._queue = queue
        super().__init__(
            events, config_id, prefix_filter_rule, suffix_filter_rule,
        )

    @property
    def queue(self) -> str:
        """Get queue ARN."""
        return self._queue

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Create new object with values from an XML element."""
        arn = cast(str, findtext(element, "Queue", True))
        # parsexml yields (events, config_id, prefix_rule, suffix_rule)
        # in constructor order.
        return cls(arn, *cls.parsexml(element))

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Serialize as a <QueueConfiguration> child."""
        if element is None:
            raise ValueError("element must be provided")
        config = SubElement(element, "QueueConfiguration")
        SubElement(config, "Queue", self._queue)
        super().toxml(config)
        return config
|
||||
|
||||
|
||||
D = TypeVar("D", bound="TopicConfig")
|
||||
|
||||
|
||||
class TopicConfig(CommonConfig):
    """Topic notification configuration."""

    def __init__(
            self,
            topic: str,
            events: list[str],
            config_id: str | None = None,
            prefix_filter_rule: PrefixFilterRule | None = None,
            suffix_filter_rule: SuffixFilterRule | None = None,
    ):
        if not topic:
            raise ValueError("topic must be provided")
        self._topic = topic
        super().__init__(
            events, config_id, prefix_filter_rule, suffix_filter_rule,
        )

    @property
    def topic(self) -> str:
        """Get topic ARN."""
        return self._topic

    @classmethod
    def fromxml(cls: Type[D], element: ET.Element) -> D:
        """Create new object with values from an XML element."""
        arn = cast(str, findtext(element, "Topic", True))
        # parsexml yields (events, config_id, prefix_rule, suffix_rule)
        # in constructor order.
        return cls(arn, *cls.parsexml(element))

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Serialize as a <TopicConfiguration> child."""
        if element is None:
            raise ValueError("element must be provided")
        config = SubElement(element, "TopicConfiguration")
        SubElement(config, "Topic", self._topic)
        super().toxml(config)
        return config
|
||||
|
||||
|
||||
E = TypeVar("E", bound="NotificationConfig")
|
||||
|
||||
|
||||
class NotificationConfig:
    """Bucket notification configuration: cloud-function, queue and
    topic configuration lists."""

    def __init__(
            self,
            cloud_func_config_list: list[CloudFuncConfig] | None = None,
            queue_config_list: list[QueueConfig] | None = None,
            topic_config_list: list[TopicConfig] | None = None,
    ):
        self._cloud_func_config_list = cloud_func_config_list or []
        self._queue_config_list = queue_config_list or []
        self._topic_config_list = topic_config_list or []

    @property
    def cloud_func_config_list(self) -> list[CloudFuncConfig] | None:
        """Get cloud function configuration list."""
        return self._cloud_func_config_list

    @property
    def queue_config_list(self) -> list[QueueConfig] | None:
        """Get queue configuration list."""
        return self._queue_config_list

    @property
    def topic_config_list(self) -> list[TopicConfig] | None:
        """Get topic configuration list."""
        return self._topic_config_list

    @classmethod
    def fromxml(cls: Type[E], element: ET.Element) -> E:
        """Create new object with values from an XML element."""
        return cls(
            [CloudFuncConfig.fromxml(tag)
             for tag in findall(element, "CloudFunctionConfiguration")],
            [QueueConfig.fromxml(tag)
             for tag in findall(element, "QueueConfiguration")],
            [TopicConfig.fromxml(tag)
             for tag in findall(element, "TopicConfiguration")],
        )

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML.

        NOTE(review): a fresh root element is always created here; the
        *element* argument is ignored (signature kept for interface
        compatibility with the other toxml methods).
        """
        element = Element("NotificationConfiguration")
        for config in self._cloud_func_config_list:
            config.toxml(element)
        for config in self._queue_config_list:
            config.toxml(element)
        for config in self._topic_config_list:
            config.toxml(element)
        return element
|
||||
97
venv/lib/python3.12/site-packages/minio/objectlockconfig.py
Normal file
97
venv/lib/python3.12/site-packages/minio/objectlockconfig.py
Normal file
@@ -0,0 +1,97 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Request/response of PutObjectLockConfiguration and GetObjectLockConfiguration
|
||||
APIs.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from typing import Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import COMPLIANCE, ENABLED, GOVERNANCE
|
||||
from .xml import Element, SubElement, find, findtext
|
||||
|
||||
DAYS = "Days"
|
||||
YEARS = "Years"
|
||||
|
||||
A = TypeVar("A", bound="ObjectLockConfig")
|
||||
|
||||
|
||||
class ObjectLockConfig:
    """Object lock configuration.

    Holds an optional default-retention rule: a retention mode
    (GOVERNANCE or COMPLIANCE) together with a duration expressed in
    days or years.
    """

    def __init__(
            self,
            mode: str | None,
            duration: int | None,
            duration_unit: str | None,
    ):
        # mode and duration must be given together (both or neither).
        if (mode is not None) ^ (duration is not None):
            if mode is None:
                raise ValueError("mode must be provided")
            raise ValueError("duration must be provided")
        if mode is not None and mode not in [GOVERNANCE, COMPLIANCE]:
            raise ValueError(f"mode must be {GOVERNANCE} or {COMPLIANCE}")
        if duration_unit:
            # Normalize casing (e.g. "days" -> "Days") to match the
            # DAYS/YEARS constants used as XML tag names.
            duration_unit = duration_unit.title()
        if duration is not None and duration_unit not in [DAYS, YEARS]:
            raise ValueError(f"duration unit must be {DAYS} or {YEARS}")
        self._mode = mode
        self._duration = duration
        self._duration_unit = duration_unit

    @property
    def mode(self) -> str | None:
        """Get mode."""
        return self._mode

    @property
    def duration(self) -> tuple[int | None, str | None]:
        """Get duration and it's unit."""
        return self._duration, self._duration_unit

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        elem = find(element, "Rule")
        if elem is None:
            # No <Rule> means object lock is enabled without a
            # default retention.
            return cls(None, None, None)
        elem = cast(ET.Element, find(elem, "DefaultRetention", True))
        mode = findtext(elem, "Mode")
        # Try <Days> first, then fall back to <Years>.
        duration_unit = DAYS
        duration = findtext(elem, duration_unit)
        if not duration:
            duration_unit = YEARS
            duration = findtext(elem, duration_unit)
        if not duration:
            raise ValueError(f"XML element <{DAYS}> or <{YEARS}> not found")
        return cls(mode, int(duration), duration_unit)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML.

        NOTE(review): a fresh root element is always created; the
        *element* argument is ignored (signature kept for interface
        compatibility).
        """
        element = Element("ObjectLockConfiguration")
        SubElement(element, "ObjectLockEnabled", ENABLED)
        if self._mode:
            rule = SubElement(element, "Rule")
            retention = SubElement(rule, "DefaultRetention")
            SubElement(retention, "Mode", self._mode)
            if not self._duration_unit:
                raise ValueError("duration unit must be provided")
            # The duration unit ("Days"/"Years") doubles as the tag name.
            SubElement(retention, self._duration_unit, str(self._duration))
        return element
|
||||
0
venv/lib/python3.12/site-packages/minio/py.typed
Normal file
0
venv/lib/python3.12/site-packages/minio/py.typed
Normal file
575
venv/lib/python3.12/site-packages/minio/replicationconfig.py
Normal file
575
venv/lib/python3.12/site-packages/minio/replicationconfig.py
Normal file
@@ -0,0 +1,575 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutBucketReplication and GetBucketReplication APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABCMeta
|
||||
from typing import Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import DISABLED, BaseRule, Filter, check_status
|
||||
from .xml import Element, SubElement, find, findall, findtext
|
||||
|
||||
A = TypeVar("A", bound="Status")


class Status(metaclass=ABCMeta):
    """Abstract base for XML fragments of the form
    <SubclassName><Status>...</Status></SubclassName>.

    NOTE: the original code set ``__metaclass__ = ABCMeta``, which is
    Python 2 syntax and has no effect in Python 3; the metaclass is now
    declared with the Python 3 ``metaclass=`` keyword.
    """

    def __init__(self, status: str):
        # check_status() validates the Enabled/Disabled value.
        check_status(status)
        self._status = status

    @property
    def status(self) -> str:
        """Get status."""
        return self._status

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element.

        The XML tag name searched for matches the concrete subclass name.
        """
        element = cast(ET.Element, find(element, cls.__name__, True))
        status = cast(str, findtext(element, "Status", True))
        return cls(status)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML, appending <SubclassName><Status>...</Status></...>
        to the given parent element."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, self.__class__.__name__)
        SubElement(element, "Status", self._status)
        return element
|
||||
|
||||
|
||||
class SseKmsEncryptedObjects(Status):
    """Status flag controlling replication of SSE-KMS encrypted objects."""
|
||||
|
||||
|
||||
# Type variable so SourceSelectionCriteria.fromxml() returns the class type.
B = TypeVar("B", bound="SourceSelectionCriteria")


class SourceSelectionCriteria:
    """Source selection criteria of a replication rule."""

    def __init__(
            self,
            sse_kms_encrypted_objects: SseKmsEncryptedObjects | None = None,
    ):
        # Optional flag controlling replication of SSE-KMS encrypted objects.
        self._sse_kms_encrypted_objects = sse_kms_encrypted_objects

    @property
    def sse_kms_encrypted_objects(self) -> SseKmsEncryptedObjects | None:
        """Get SSE KMS encrypted objects."""
        return self._sse_kms_encrypted_objects

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from XML element."""
        element = cast(
            ET.Element,
            find(element, "SourceSelectionCriteria", True),
        )
        # <SseKmsEncryptedObjects> is optional; absent means unset.
        return cls(
            None if find(element, "SseKmsEncryptedObjects") is None
            else SseKmsEncryptedObjects.fromxml(element)
        )

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML under the given parent element."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "SourceSelectionCriteria")
        if self._sse_kms_encrypted_objects:
            self._sse_kms_encrypted_objects.toxml(element)
        return element
|
||||
|
||||
|
||||
class ExistingObjectReplication(Status):
    """Status flag controlling replication of pre-existing objects."""
|
||||
|
||||
|
||||
class DeleteMarkerReplication(Status):
    """Status flag controlling replication of delete markers.

    Defaults to disabled when no status is supplied.
    """

    def __init__(self, status=DISABLED):
        super().__init__(status)
|
||||
|
||||
|
||||
C = TypeVar("C", bound="ReplicationTimeValue")


class ReplicationTimeValue(metaclass=ABCMeta):
    """Abstract base for values carrying an optional <Minutes> count.

    NOTE: the original code set ``__metaclass__ = ABCMeta``, which is
    Python 2 syntax and has no effect in Python 3; the metaclass is now
    declared with the Python 3 ``metaclass=`` keyword.
    """

    def __init__(self, minutes: int | None = 15):
        # Default of 15 minutes matches the only value S3 accepts today.
        self._minutes = minutes

    @property
    def minutes(self) -> int | None:
        """Get minutes."""
        return self._minutes

    @classmethod
    def fromxml(cls: Type[C], element: ET.Element) -> C:
        """Create new object with values from XML element.

        The XML tag name searched for matches the concrete subclass name.
        """
        element = cast(ET.Element, find(element, cls.__name__, True))
        minutes = findtext(element, "Minutes")
        return cls(int(minutes) if minutes else None)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML under the given parent element."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, self.__class__.__name__)
        if self._minutes is not None:
            SubElement(element, "Minutes", str(self._minutes))
        return element
|
||||
|
||||
|
||||
class Time(ReplicationTimeValue):
    """Replication time limit expressed as a <Time> element."""
|
||||
|
||||
|
||||
# Type variable so ReplicationTime.fromxml() returns the class type.
D = TypeVar("D", bound="ReplicationTime")


class ReplicationTime:
    """Replication time configuration (S3 Replication Time Control)."""

    def __init__(self, time: Time, status: str):
        if not time:
            raise ValueError("time must be provided")
        # check_status() validates the Enabled/Disabled value.
        check_status(status)
        self._time = time
        self._status = status

    @property
    def time(self) -> Time:
        """Get time value."""
        return self._time

    @property
    def status(self) -> str:
        """Get status."""
        return self._status

    @classmethod
    def fromxml(cls: Type[D], element: ET.Element) -> D:
        """Create new object with values from XML element."""
        element = cast(ET.Element, find(element, "ReplicationTime", True))
        time = Time.fromxml(element)
        status = cast(str, findtext(element, "Status", True))
        return cls(time, status)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML under the given parent element."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "ReplicationTime")
        # Child order: <Time> first, then <Status>.
        self._time.toxml(element)
        SubElement(element, "Status", self._status)
        return element
|
||||
|
||||
|
||||
class EventThreshold(ReplicationTimeValue):
    """Replication metrics event threshold, serialized as <EventThreshold>."""
|
||||
|
||||
|
||||
# Type variable so Metrics.fromxml() returns the class type.
E = TypeVar("E", bound="Metrics")


class Metrics:
    """Replication metrics configuration."""

    def __init__(self, event_threshold: EventThreshold, status: str):
        if not event_threshold:
            raise ValueError("event threshold must be provided")
        # check_status() validates the Enabled/Disabled value.
        check_status(status)
        self._event_threshold = event_threshold
        self._status = status

    @property
    def event_threshold(self) -> EventThreshold:
        """Get event threshold."""
        return self._event_threshold

    @property
    def status(self) -> str:
        """Get status."""
        return self._status

    @classmethod
    def fromxml(cls: Type[E], element: ET.Element) -> E:
        """Create new object with values from XML element."""
        element = cast(ET.Element, find(element, "Metrics", True))
        event_threshold = EventThreshold.fromxml(element)
        status = cast(str, findtext(element, "Status", True))
        return cls(event_threshold, status)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML under the given parent element."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Metrics")
        # Child order: <EventThreshold> first, then <Status>.
        self._event_threshold.toxml(element)
        SubElement(element, "Status", self._status)
        return element
|
||||
|
||||
|
||||
F = TypeVar("F", bound="EncryptionConfig")


class EncryptionConfig:
    """Destination-side encryption configuration holding an optional
    replica KMS key ID."""

    def __init__(self, replica_kms_key_id: str | None = None):
        self._replica_kms_key_id = replica_kms_key_id

    @property
    def replica_kms_key_id(self) -> str | None:
        """Get replica KMS key ID."""
        return self._replica_kms_key_id

    @classmethod
    def fromxml(cls: Type[F], element: ET.Element) -> F:
        """Create new object with values from XML element."""
        config = cast(
            ET.Element,
            find(element, "EncryptionConfiguration", True),
        )
        key_id = findtext(config, "ReplicaKmsKeyID")
        return cls(key_id)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML under the given parent element."""
        if element is None:
            raise ValueError("element must be provided")
        config = SubElement(element, "EncryptionConfiguration")
        SubElement(config, "ReplicaKmsKeyID", self._replica_kms_key_id)
        return config
|
||||
|
||||
|
||||
G = TypeVar("G", bound="AccessControlTranslation")


class AccessControlTranslation:
    """Access control translation of replicated objects; the owner
    defaults to "Destination"."""

    def __init__(self, owner: str = "Destination"):
        if not owner:
            raise ValueError("owner must be provided")
        self._owner = owner

    @property
    def owner(self) -> str:
        """Get owner."""
        return self._owner

    @classmethod
    def fromxml(cls: Type[G], element: ET.Element) -> G:
        """Create new object with values from XML element."""
        translation = cast(
            ET.Element, find(element, "AccessControlTranslation", True),
        )
        owner = cast(str, findtext(translation, "Owner", True))
        return cls(owner)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML under the given parent element."""
        if element is None:
            raise ValueError("element must be provided")
        translation = SubElement(element, "AccessControlTranslation")
        SubElement(translation, "Owner", self._owner)
        return translation
|
||||
|
||||
|
||||
# Type variable so Destination.fromxml() returns the class type.
H = TypeVar("H", bound="Destination")


class Destination:
    """Replication destination."""

    def __init__(
            self,
            bucket_arn: str,
            access_control_translation: AccessControlTranslation | None = None,
            account: str | None = None,
            encryption_config: EncryptionConfig | None = None,
            metrics: Metrics | None = None,
            replication_time: ReplicationTime | None = None,
            storage_class: str | None = None,
    ):
        # Only the destination bucket ARN is mandatory.
        if not bucket_arn:
            raise ValueError("bucket ARN must be provided")
        self._bucket_arn = bucket_arn
        self._access_control_translation = access_control_translation
        self._account = account
        self._encryption_config = encryption_config
        self._metrics = metrics
        self._replication_time = replication_time
        self._storage_class = storage_class

    @property
    def bucket_arn(self) -> str:
        """Get bucket ARN."""
        return self._bucket_arn

    @property
    def access_control_translation(self) -> AccessControlTranslation | None:
        """Get access control translation."""
        return self._access_control_translation

    @property
    def account(self) -> str | None:
        """Get account."""
        return self._account

    @property
    def encryption_config(self) -> EncryptionConfig | None:
        """Get encryption configuration."""
        return self._encryption_config

    @property
    def metrics(self) -> Metrics | None:
        """Get metrics."""
        return self._metrics

    @property
    def replication_time(self) -> ReplicationTime | None:
        """Get replication time."""
        return self._replication_time

    @property
    def storage_class(self) -> str | None:
        """Get storage class."""
        return self._storage_class

    @classmethod
    def fromxml(cls: Type[H], element: ET.Element) -> H:
        """Create new object with values from XML element."""
        element = cast(ET.Element, find(element, "Destination", True))
        # Optional child elements become None when absent.
        access_control_translation = (
            None if find(element, "AccessControlTranslation") is None
            else AccessControlTranslation.fromxml(element)
        )
        account = findtext(element, "Account")
        # <Bucket> is the only mandatory child.
        bucket_arn = cast(str, findtext(element, "Bucket", True))
        encryption_config = (
            None if find(element, "EncryptionConfiguration") is None
            else EncryptionConfig.fromxml(element)
        )
        metrics = (
            None if find(element, "Metrics") is None
            else Metrics.fromxml(element)
        )
        replication_time = (
            None if find(element, "ReplicationTime") is None
            else ReplicationTime.fromxml(element)
        )
        storage_class = findtext(element, "StorageClass")
        return cls(bucket_arn, access_control_translation, account,
                   encryption_config, metrics, replication_time, storage_class)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML under the given parent element."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Destination")
        # Optional children are emitted only when configured.
        if self._access_control_translation:
            self._access_control_translation.toxml(element)
        if self._account is not None:
            SubElement(element, "Account", self._account)
        SubElement(element, "Bucket", self._bucket_arn)
        if self._encryption_config:
            self._encryption_config.toxml(element)
        if self._metrics:
            self._metrics.toxml(element)
        if self._replication_time:
            self._replication_time.toxml(element)
        if self._storage_class:
            SubElement(element, "StorageClass", self._storage_class)
        return element
|
||||
|
||||
|
||||
# Type variable so Rule.fromxml() returns the class type.
I = TypeVar("I", bound="Rule")


class Rule(BaseRule):
    """Replication rule."""

    def __init__(
            self,
            destination: Destination,
            status: str,
            delete_marker_replication: DeleteMarkerReplication | None = None,
            existing_object_replication:
            ExistingObjectReplication | None = None,
            rule_filter: Filter | None = None,
            rule_id: str | None = None,
            prefix: str | None = None,
            priority: int | None = None,
            source_selection_criteria: SourceSelectionCriteria | None = None,
    ):
        if not destination:
            raise ValueError("destination must be provided")

        # check_status() validates the Enabled/Disabled value.
        check_status(status)

        # BaseRule stores the filter and rule ID.
        super().__init__(rule_filter, rule_id)

        self._destination = destination
        self._status = status
        # When a filter is set, supply a DeleteMarkerReplication element
        # (it defaults to Disabled status).
        if rule_filter and not delete_marker_replication:
            delete_marker_replication = DeleteMarkerReplication()
        self._delete_marker_replication = delete_marker_replication
        self._existing_object_replication = existing_object_replication
        self._prefix = prefix
        self._priority = priority
        self._source_selection_criteria = source_selection_criteria

    @property
    def destination(self) -> Destination:
        """Get destination."""
        return self._destination

    @property
    def status(self) -> str:
        """Get status."""
        return self._status

    @property
    def delete_marker_replication(self) -> DeleteMarkerReplication | None:
        """Get delete marker replication."""
        return self._delete_marker_replication

    @property
    def existing_object_replication(self) -> ExistingObjectReplication | None:
        """Get existing object replication."""
        return self._existing_object_replication

    @property
    def prefix(self) -> str | None:
        """Get prefix."""
        return self._prefix

    @property
    def priority(self) -> int | None:
        """Get priority."""
        return self._priority

    @property
    def source_selection_criteria(self) -> SourceSelectionCriteria | None:
        """Get source selection criteria."""
        return self._source_selection_criteria

    @classmethod
    def fromxml(cls: Type[I], element: ET.Element) -> I:
        """Create new object with values from XML element."""
        # Optional child elements become None when absent.
        delete_marker_replication = (
            None if find(element, "DeleteMarkerReplication") is None
            else DeleteMarkerReplication.fromxml(element)
        )
        destination = Destination.fromxml(element)
        existing_object_replication = (
            None if find(element, "ExistingObjectReplication") is None
            else ExistingObjectReplication.fromxml(element)
        )
        # BaseRule.parsexml() extracts the filter and rule ID.
        rule_filter, rule_id = cls.parsexml(element)
        prefix = findtext(element, "Prefix")
        priority = findtext(element, "Priority")
        source_selection_criteria = (
            None if find(element, "SourceSelectionCriteria") is None
            else SourceSelectionCriteria.fromxml(element)
        )
        status = cast(str, findtext(element, "Status", True))

        return cls(
            destination,
            status,
            delete_marker_replication,
            existing_object_replication,
            rule_filter,
            rule_id,
            prefix,
            # <Priority> text is converted to int when present.
            int(priority) if priority else None,
            source_selection_criteria,
        )

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML under the given parent element."""
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Rule")
        if self._delete_marker_replication:
            self._delete_marker_replication.toxml(element)
        self._destination.toxml(element)
        if self._existing_object_replication:
            self._existing_object_replication.toxml(element)
        # BaseRule emits the filter and rule ID elements.
        super().toxml(element)
        if self._prefix is not None:
            SubElement(element, "Prefix", self._prefix)
        if self._priority is not None:
            SubElement(element, "Priority", str(self._priority))
        if self._source_selection_criteria:
            self._source_selection_criteria.toxml(element)
        SubElement(element, "Status", self._status)
        return element
|
||||
|
||||
|
||||
J = TypeVar("J", bound="ReplicationConfig")


class ReplicationConfig:
    """Bucket replication configuration: an IAM role plus up to 1000 rules."""

    def __init__(self, role: str, rules: list[Rule]):
        if not rules:
            raise ValueError("rules must be provided")
        if len(rules) > 1000:
            raise ValueError("more than 1000 rules are not supported")
        self._role = role
        self._rules = rules

    @property
    def role(self) -> str:
        """Get role."""
        return self._role

    @property
    def rules(self) -> list[Rule]:
        """Get rules."""
        return self._rules

    @classmethod
    def fromxml(cls: Type[J], element: ET.Element) -> J:
        """Create new object with values from XML element."""
        role = cast(str, findtext(element, "Role", True))
        rules = [Rule.fromxml(tag) for tag in findall(element, "Rule")]
        return cls(role, rules)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML; the passed element is ignored and a fresh
        root element is returned."""
        root = Element("ReplicationConfiguration")
        SubElement(root, "Role", self._role)
        for rule in self._rules:
            rule.toxml(root)
        return root
|
||||
76
venv/lib/python3.12/site-packages/minio/retention.py
Normal file
76
venv/lib/python3.12/site-packages/minio/retention.py
Normal file
@@ -0,0 +1,76 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutObjectRetention and GetObjectRetention APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import COMPLIANCE, GOVERNANCE
|
||||
from .time import from_iso8601utc, to_iso8601utc
|
||||
from .xml import Element, SubElement, findtext
|
||||
|
||||
# Type variable so Retention.fromxml() returns the class type.
A = TypeVar("A", bound="Retention")


class Retention:
    """Object retention configuration."""

    def __init__(self, mode: str, retain_until_date: datetime):
        if mode not in [GOVERNANCE, COMPLIANCE]:
            raise ValueError(f"mode must be {GOVERNANCE} or {COMPLIANCE}")
        if not isinstance(retain_until_date, datetime):
            raise ValueError(
                "retain until date must be datetime type",
            )
        self._mode = mode
        self._retain_until_date = retain_until_date

    @property
    def mode(self) -> str:
        """Get retention mode (GOVERNANCE or COMPLIANCE)."""
        return self._mode

    @property
    def retain_until_date(self) -> datetime:
        """Get retain until date."""
        return self._retain_until_date

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        mode = cast(str, findtext(element, "Mode", True))
        # <RetainUntilDate> is mandatory and parsed as ISO8601 UTC time.
        retain_until_date = cast(
            datetime,
            from_iso8601utc(
                cast(str, findtext(element, "RetainUntilDate", True)),
            ),
        )
        return cls(mode, retain_until_date)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML; the passed element is ignored and a fresh
        root element is returned."""
        element = Element("Retention")
        SubElement(element, "Mode", self._mode)
        SubElement(
            element,
            "RetainUntilDate",
            to_iso8601utc(self._retain_until_date),
        )
        return element
|
||||
430
venv/lib/python3.12/site-packages/minio/select.py
Normal file
430
venv/lib/python3.12/site-packages/minio/select.py
Normal file
@@ -0,0 +1,430 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of SelectObjectContent API."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from abc import ABCMeta
|
||||
from binascii import crc32
|
||||
from io import BytesIO
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .error import MinioException
|
||||
from .xml import Element, SubElement, findtext
|
||||
|
||||
# Valid <CompressionType> values.
COMPRESSION_TYPE_NONE = "NONE"
COMPRESSION_TYPE_GZIP = "GZIP"
COMPRESSION_TYPE_BZIP2 = "BZIP2"

# Valid <FileHeaderInfo> values for CSV input.
FILE_HEADER_INFO_USE = "USE"
FILE_HEADER_INFO_IGNORE = "IGNORE"
FILE_HEADER_INFO_NONE = "NONE"

# Valid <Type> values for JSON input.
JSON_TYPE_DOCUMENT = "DOCUMENT"
JSON_TYPE_LINES = "LINES"

# Valid <QuoteFields> values for CSV output.
QUOTE_FIELDS_ALWAYS = "ALWAYS"
QUOTE_FIELDS_ASNEEDED = "ASNEEDED"


class InputSerialization(metaclass=ABCMeta):
    """Abstract base for input serialization of SelectObjectContent.

    NOTE: the original code set ``__metaclass__ = ABCMeta``, which is
    Python 2 syntax and has no effect in Python 3; the metaclass is now
    declared with the Python 3 ``metaclass=`` keyword.
    """

    def __init__(self, compression_type):
        # compression_type: optional; one of NONE, GZIP or BZIP2.
        if (
                compression_type is not None and
                compression_type not in [
                    COMPRESSION_TYPE_NONE,
                    COMPRESSION_TYPE_GZIP,
                    COMPRESSION_TYPE_BZIP2,
                ]
        ):
            raise ValueError(
                f"compression type must be {COMPRESSION_TYPE_NONE}, "
                f"{COMPRESSION_TYPE_GZIP} or {COMPRESSION_TYPE_BZIP2}"
            )
        self._compression_type = compression_type

    def toxml(self, element):
        """Convert to XML, adding <CompressionType> when configured."""
        if self._compression_type is not None:
            SubElement(element, "CompressionType", self._compression_type)
        return element
|
||||
|
||||
|
||||
class CSVInputSerialization(InputSerialization):
    """CSV input serialization of SelectObjectContent request."""

    def __init__(self, compression_type=None,
                 allow_quoted_record_delimiter=None, comments=None,
                 field_delimiter=None, file_header_info=None,
                 quote_character=None, quote_escape_character=None,
                 record_delimiter=None):
        super().__init__(compression_type)
        self._allow_quoted_record_delimiter = allow_quoted_record_delimiter
        self._comments = comments
        self._field_delimiter = field_delimiter
        # file_header_info, when given, must be USE, IGNORE or NONE.
        if (
                file_header_info is not None and
                file_header_info not in [
                    FILE_HEADER_INFO_USE,
                    FILE_HEADER_INFO_IGNORE,
                    FILE_HEADER_INFO_NONE,
                ]
        ):
            raise ValueError(
                f"file header info must be {FILE_HEADER_INFO_USE}, "
                f"{FILE_HEADER_INFO_IGNORE} or {FILE_HEADER_INFO_NONE}"
            )
        self._file_header_info = file_header_info
        self._quote_character = quote_character
        self._quote_escape_character = quote_escape_character
        self._record_delimiter = record_delimiter

    def toxml(self, element):
        """Convert to XML under the given parent element.

        Adds the base-class <CompressionType> and a <CSV> child carrying
        only the options that were configured.  Returns None.
        """
        super().toxml(element)
        element = SubElement(element, "CSV")
        if self._allow_quoted_record_delimiter is not None:
            SubElement(
                element,
                "AllowQuotedRecordDelimiter",
                self._allow_quoted_record_delimiter,
            )
        if self._comments is not None:
            SubElement(element, "Comments", self._comments)
        if self._field_delimiter is not None:
            SubElement(element, "FieldDelimiter", self._field_delimiter)
        if self._file_header_info is not None:
            SubElement(element, "FileHeaderInfo", self._file_header_info)
        if self._quote_character is not None:
            SubElement(element, "QuoteCharacter", self._quote_character)
        if self._quote_escape_character is not None:
            SubElement(
                element,
                "QuoteEscapeCharacter",
                self._quote_escape_character,
            )
        if self._record_delimiter is not None:
            SubElement(element, "RecordDelimiter", self._record_delimiter)
|
||||
|
||||
|
||||
class JSONInputSerialization(InputSerialization):
    """JSON input serialization of SelectObjectContent request."""

    def __init__(self, compression_type=None, json_type=None):
        super().__init__(compression_type)
        # json_type, when given, must be DOCUMENT or LINES.
        valid = (
            json_type is None or
            json_type in (JSON_TYPE_DOCUMENT, JSON_TYPE_LINES)
        )
        if not valid:
            raise ValueError(
                f"json type must be {JSON_TYPE_DOCUMENT} or {JSON_TYPE_LINES}"
            )
        self._json_type = json_type

    def toxml(self, element):
        """Convert to XML under the given parent element."""
        super().toxml(element)
        json_element = SubElement(element, "JSON")
        if self._json_type is not None:
            SubElement(json_element, "Type", self._json_type)
|
||||
|
||||
|
||||
class ParquetInputSerialization(InputSerialization):
    """Parquet input serialization (no tunable options)."""

    def __init__(self):
        # Parquet input never carries a compression-type setting.
        super().__init__(None)

    def toxml(self, element):
        """Convert to XML under the given parent element."""
        super().toxml(element)
        parquet = SubElement(element, "Parquet")
        return parquet
|
||||
|
||||
|
||||
class CSVOutputSerialization:
    """CSV output serialization of SelectObjectContent request."""

    def __init__(self, field_delimiter=None, quote_character=None,
                 quote_escape_character=None, quote_fields=None,
                 record_delimiter=None):
        self._field_delimiter = field_delimiter
        self._quote_character = quote_character
        self._quote_escape_character = quote_escape_character
        # quote_fields, when given, must be ALWAYS or ASNEEDED.
        if (
                quote_fields is not None and
                quote_fields not in [
                    QUOTE_FIELDS_ALWAYS, QUOTE_FIELDS_ASNEEDED,
                ]
        ):
            raise ValueError(
                f"quote fields must be {QUOTE_FIELDS_ALWAYS} or "
                f"{QUOTE_FIELDS_ASNEEDED}"
            )
        self._quote_fields = quote_fields
        self._record_delimiter = record_delimiter

    def toxml(self, element):
        """Convert to XML: adds a <CSV> child under the given parent
        carrying only the configured options.  Returns None."""
        element = SubElement(element, "CSV")
        if self._field_delimiter is not None:
            SubElement(element, "FieldDelimiter", self._field_delimiter)
        if self._quote_character is not None:
            SubElement(element, "QuoteCharacter", self._quote_character)
        if self._quote_escape_character is not None:
            SubElement(
                element,
                "QuoteEscapeCharacter",
                self._quote_escape_character,
            )
        if self._quote_fields is not None:
            SubElement(element, "QuoteFields", self._quote_fields)
        if self._record_delimiter is not None:
            SubElement(element, "RecordDelimiter", self._record_delimiter)
|
||||
|
||||
|
||||
class JSONOutputSerialization:
    """JSON output serialization of SelectObjectContent request."""

    def __init__(self, record_delimiter=None):
        self._record_delimiter = record_delimiter

    def toxml(self, element):
        """Convert to XML under the given parent element."""
        json_element = SubElement(element, "JSON")
        delimiter = self._record_delimiter
        if delimiter is not None:
            SubElement(json_element, "RecordDelimiter", delimiter)
|
||||
|
||||
|
||||
class SelectRequest:
    """Select object content request."""

    def __init__(self, expression, input_serialization, output_serialization,
                 request_progress=False, scan_start_range=None,
                 scan_end_range=None):
        # SQL expression to run against the object.
        self._expression = expression
        if not isinstance(
                input_serialization,
                (
                    CSVInputSerialization,
                    JSONInputSerialization,
                    ParquetInputSerialization,
                ),
        ):
            raise ValueError(
                "input serialization must be CSVInputSerialization, "
                "JSONInputSerialization or ParquetInputSerialization type",
            )
        self._input_serialization = input_serialization
        if not isinstance(
                output_serialization,
                (CSVOutputSerialization, JSONOutputSerialization),
        ):
            raise ValueError(
                "output serialization must be CSVOutputSerialization or "
                "JSONOutputSerialization type",
            )
        self._output_serialization = output_serialization
        self._request_progress = request_progress
        self._scan_start_range = scan_start_range
        self._scan_end_range = scan_end_range

    def toxml(self, element):
        """Convert to XML.

        The passed element is ignored; a fresh root element is returned.
        """
        element = Element("SelectObjectContentRequest")
        SubElement(element, "Expression", self._expression)
        # Only SQL expressions are supported.
        SubElement(element, "ExpressionType", "SQL")
        self._input_serialization.toxml(
            SubElement(element, "InputSerialization"),
        )
        self._output_serialization.toxml(
            SubElement(element, "OutputSerialization"),
        )
        if self._request_progress:
            SubElement(
                SubElement(element, "RequestProgress"), "Enabled", "true",
            )
        # <ScanRange> is emitted only when at least one bound is set.
        if self._scan_start_range or self._scan_end_range:
            tag = SubElement(element, "ScanRange")
            if self._scan_start_range:
                SubElement(tag, "Start", self._scan_start_range)
            if self._scan_end_range:
                SubElement(tag, "End", self._scan_end_range)
        return element
|
||||
|
||||
|
||||
def _read(reader, size):
|
||||
"""Wrapper to RawIOBase.read() to error out on short reads."""
|
||||
data = reader.read(size)
|
||||
if len(data) != size:
|
||||
raise IOError("insufficient data")
|
||||
return data
|
||||
|
||||
|
||||
def _int(data):
|
||||
"""Convert byte data to big-endian int."""
|
||||
return int.from_bytes(data, byteorder="big")
|
||||
|
||||
|
||||
def _crc32(data):
|
||||
"""Wrapper to binascii.crc32()."""
|
||||
return crc32(data) & 0xffffffff
|
||||
|
||||
|
||||
def _decode_header(data):
    """Decode event-stream headers from *data* into a str-to-str dict."""
    reader = BytesIO(data)
    headers = {}
    while True:
        length_byte = reader.read(1)
        if not length_byte:
            break  # end of the header block
        name = _read(reader, _int(length_byte))
        # Header value type must be 7 (string) per the event-stream spec.
        if _int(_read(reader, 1)) != 7:
            raise IOError("header value type is not 7")
        value = _read(reader, _int(_read(reader, 2)))
        headers[name.decode()] = value.decode()
    return headers
|
||||
|
||||
|
||||
class Stats:
    """Progress/Stats information."""

    def __init__(self, data):
        # *data* is the raw XML bytes of a Progress or Stats event payload.
        element = ET.fromstring(data.decode())
        # Values are kept as parsed text; findtext may return None when
        # a field is absent from the event.
        self._bytes_scanned = findtext(element, "BytesScanned")
        self._bytes_processed = findtext(element, "BytesProcessed")
        self._bytes_returned = findtext(element, "BytesReturned")

    @property
    def bytes_scanned(self):
        """Get bytes scanned."""
        return self._bytes_scanned

    @property
    def bytes_processed(self):
        """Get bytes processed."""
        return self._bytes_processed

    @property
    def bytes_returned(self):
        """Get bytes returned."""
        return self._bytes_returned
|
||||
|
||||
|
||||
class SelectObjectReader:
    """
    BufferedIOBase compatible reader represents response data of
    Minio.select_object_content() API.
    """

    def __init__(self, response):
        # *response* is the HTTP response carrying the AWS event-stream
        # encoded select results.
        self._response = response
        self._stats = None  # latest Progress/Stats event, if any
        self._payload = None  # pending bytes of the last Records event

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # Always release the connection when leaving a `with` block.
        return self.close()

    def readable(self):  # pylint: disable=no-self-use
        """Return this is readable."""
        return True

    def writeable(self):  # pylint: disable=no-self-use
        """Return this is not writeable."""
        # NOTE(review): the io protocol spells this `writable()`; confirm
        # whether any caller relies on this spelling before renaming.
        return False

    def close(self):
        """Close response and release network resources."""
        self._response.close()
        self._response.release_conn()

    def stats(self):
        """Get stats information."""
        return self._stats

    def _read(self):
        """Read and decode one message; return the Records payload length.

        Returns 0 at end-of-stream; recurses past keep-alive (Cont),
        empty, Progress and Stats messages.
        """
        if self._response.isclosed():
            return 0

        # Message layout: 8-byte prelude (total length, header length),
        # 4-byte prelude CRC, headers + payload, 4-byte message CRC.
        prelude = _read(self._response, 8)
        prelude_crc = _read(self._response, 4)
        if _crc32(prelude) != _int(prelude_crc):
            raise IOError(
                f"prelude CRC mismatch; expected: {_crc32(prelude)}, "
                f"got: {_int(prelude_crc)}"
            )

        total_length = _int(prelude[:4])
        # Remaining bytes = total - prelude(8) - prelude CRC(4) - CRC(4).
        data = _read(self._response, total_length - 8 - 4 - 4)
        message_crc = _int(_read(self._response, 4))
        if _crc32(prelude + prelude_crc + data) != message_crc:
            raise IOError(
                f"message CRC mismatch; "
                f"expected: {_crc32(prelude + prelude_crc + data)}, "
                f"got: {message_crc}"
            )

        header_length = _int(prelude[4:])
        headers = _decode_header(data[:header_length])

        if headers.get(":message-type") == "error":
            raise MinioException(
                f"{headers.get(':error-code')}: "
                f"{headers.get(':error-message')}"
            )

        if headers.get(":event-type") == "End":
            return 0  # stream finished normally

        payload_length = total_length - header_length - 16
        if headers.get(":event-type") == "Cont" or payload_length < 1:
            # Keep-alive or empty message; skip ahead to the next one.
            return self._read()

        payload = data[header_length:header_length+payload_length]

        if headers.get(":event-type") in ["Progress", "Stats"]:
            # Remember the latest statistics, then keep reading.
            self._stats = Stats(payload)
            return self._read()

        if headers.get(":event-type") == "Records":
            self._payload = payload
            return len(payload)

        raise MinioException(
            f"unknown event-type {headers.get(':event-type')}",
        )

    def stream(self, num_bytes=32*1024):
        """
        Stream extracted payload from response data. Upon completion, caller
        should call self.close() to release network resources.
        """
        while self._read() > 0:
            while self._payload:
                result = self._payload
                if num_bytes < len(self._payload):
                    result = self._payload[:num_bytes]
                # Yield at most *num_bytes* and keep the remainder pending.
                self._payload = self._payload[len(result):]
                yield result
|
||||
344
venv/lib/python3.12/site-packages/minio/signer.py
Normal file
344
venv/lib/python3.12/site-packages/minio/signer.py
Normal file
@@ -0,0 +1,344 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2015-2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
"""
|
||||
minio.signer
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
This module implements all helpers for AWS Signature version '4' support.
|
||||
|
||||
:copyright: (c) 2015 by MinIO, Inc.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import hashlib
|
||||
import hmac
|
||||
import re
|
||||
from collections import OrderedDict
|
||||
from datetime import datetime
|
||||
from typing import Mapping, cast
|
||||
from urllib.parse import SplitResult
|
||||
|
||||
from . import time
|
||||
from .credentials import Credentials
|
||||
from .helpers import DictType, queryencode, sha256_hash
|
||||
|
||||
SIGN_V4_ALGORITHM = 'AWS4-HMAC-SHA256'
|
||||
_MULTI_SPACE_REGEX = re.compile(r"( +)")
|
||||
|
||||
|
||||
def _hmac_hash(
|
||||
key: bytes,
|
||||
data: bytes,
|
||||
hexdigest: bool = False,
|
||||
) -> bytes | str:
|
||||
"""Return HMacSHA256 digest of given key and data."""
|
||||
|
||||
hasher = hmac.new(key, data, hashlib.sha256)
|
||||
return hasher.hexdigest() if hexdigest else hasher.digest()
|
||||
|
||||
|
||||
def _get_scope(date: datetime, region: str, service_name: str) -> str:
    """Build the SigV4 credential scope string."""
    parts = (time.to_signer_date(date), region, service_name, "aws4_request")
    return "/".join(parts)
|
||||
|
||||
|
||||
def _get_canonical_headers(
|
||||
headers: Mapping[str, str | list[str] | tuple[str]],
|
||||
) -> tuple[str, str]:
|
||||
"""Get canonical headers."""
|
||||
|
||||
ordered_headers = {}
|
||||
for key, values in headers.items():
|
||||
key = key.lower()
|
||||
if key not in (
|
||||
"authorization",
|
||||
"user-agent",
|
||||
):
|
||||
values = values if isinstance(values, (list, tuple)) else [values]
|
||||
ordered_headers[key] = ",".join([
|
||||
_MULTI_SPACE_REGEX.sub(" ", value) for value in values
|
||||
])
|
||||
|
||||
ordered_headers = OrderedDict(sorted(ordered_headers.items()))
|
||||
signed_headers = ";".join(ordered_headers.keys())
|
||||
canonical_headers = "\n".join(
|
||||
[f"{key}:{value}" for key, value in ordered_headers.items()],
|
||||
)
|
||||
return canonical_headers, signed_headers
|
||||
|
||||
|
||||
def _get_canonical_query_string(query: str) -> str:
|
||||
"""Get canonical query string."""
|
||||
|
||||
query = query or ""
|
||||
return "&".join(
|
||||
[
|
||||
"=".join(pair) for pair in sorted(
|
||||
[params.split("=") for params in query.split("&")],
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def _get_canonical_request_hash(
        method: str,
        url: SplitResult,
        headers: Mapping[str, str | list[str] | tuple[str]],
        content_sha256: str,
) -> tuple[str, str]:
    """Return (SHA-256 of the canonical request, signed header list)."""
    canonical_headers, signed_headers = _get_canonical_headers(headers)

    # CanonicalRequest =
    #   Method \n URI \n Query \n Headers \n\n SignedHeaders \n PayloadHash
    canonical_request = "\n".join([
        method,
        url.path or "/",
        _get_canonical_query_string(url.query),
        canonical_headers + "\n",
        signed_headers,
        content_sha256,
    ])
    return sha256_hash(canonical_request), signed_headers
|
||||
|
||||
|
||||
def _get_string_to_sign(
        date: datetime,
        scope: str,
        canonical_request_hash: str,
) -> str:
    """Compose the SigV4 string-to-sign."""
    amz_date = time.to_amz_date(date)
    return f"AWS4-HMAC-SHA256\n{amz_date}\n{scope}\n{canonical_request_hash}"
|
||||
|
||||
|
||||
def _get_signing_key(
        secret_key: str,
        date: datetime,
        region: str,
        service_name: str,
) -> bytes:
    """Derive the SigV4 signing key by chained HMAC over scope components."""
    key = ("AWS4" + secret_key).encode()
    for component in (
            time.to_signer_date(date).encode(),
            region.encode(),
            service_name.encode(),
            b"aws4_request",
    ):
        key = cast(bytes, _hmac_hash(key, component))
    return key
|
||||
|
||||
|
||||
def _get_signature(signing_key: bytes, string_to_sign: str) -> str:
|
||||
"""Get signature."""
|
||||
|
||||
return cast(
|
||||
str,
|
||||
_hmac_hash(signing_key, string_to_sign.encode(), hexdigest=True),
|
||||
)
|
||||
|
||||
|
||||
def _get_authorization(
|
||||
access_key: str,
|
||||
scope: str,
|
||||
signed_headers: str,
|
||||
signature: str,
|
||||
) -> str:
|
||||
"""Get authorization."""
|
||||
return (
|
||||
f"AWS4-HMAC-SHA256 Credential={access_key}/{scope}, "
|
||||
f"SignedHeaders={signed_headers}, Signature={signature}"
|
||||
)
|
||||
|
||||
|
||||
def _sign_v4(
        service_name: str,
        method: str,
        url: SplitResult,
        region: str,
        headers: DictType,
        credentials: Credentials,
        content_sha256: str,
        date: datetime,
) -> DictType:
    """Sign the request for *service_name*, setting its Authorization header.

    Mutates and returns *headers*.
    """
    scope = _get_scope(date, region, service_name)
    request_hash, signed_headers = _get_canonical_request_hash(
        method, url, headers, content_sha256,
    )
    signature = _get_signature(
        _get_signing_key(credentials.secret_key, date, region, service_name),
        _get_string_to_sign(date, scope, request_hash),
    )
    headers["Authorization"] = _get_authorization(
        credentials.access_key, scope, signed_headers, signature,
    )
    return headers
|
||||
|
||||
|
||||
def sign_v4_s3(
        method: str,
        url: SplitResult,
        region: str,
        headers: DictType,
        credentials: Credentials,
        content_sha256: str,
        date: datetime,
) -> DictType:
    """Do signature V4 of given request for S3 service."""
    return _sign_v4(
        "s3", method, url, region, headers, credentials, content_sha256, date,
    )
|
||||
|
||||
|
||||
def sign_v4_sts(
        method: str,
        url: SplitResult,
        region: str,
        headers: DictType,
        credentials: Credentials,
        content_sha256: str,
        date: datetime,
) -> DictType:
    """Do signature V4 of given request for STS service."""
    return _sign_v4(
        "sts", method, url, region, headers, credentials, content_sha256, date,
    )
|
||||
|
||||
|
||||
def _get_presign_canonical_request_hash(  # pylint: disable=invalid-name
        method: str,
        url: SplitResult,
        access_key: str,
        scope: str,
        date: datetime,
        expires: int,
) -> tuple[str, SplitResult]:
    """Return (canonical request hash, URL with presign query appended)."""
    canonical_headers, signed_headers = "host:" + url.netloc, "host"

    # Append the standard presign parameters to any existing query string.
    presign_params = (
        "X-Amz-Algorithm=AWS4-HMAC-SHA256"
        f"&X-Amz-Credential={queryencode(access_key + '/' + scope)}"
        f"&X-Amz-Date={time.to_amz_date(date)}"
        f"&X-Amz-Expires={expires}"
        f"&X-Amz-SignedHeaders={signed_headers}"
    )
    query = (url.query + "&" if url.query else "") + presign_params
    url = url._replace(query=query)

    # CanonicalRequest uses UNSIGNED-PAYLOAD since the body is not known
    # at presign time.
    canonical_request = "\n".join([
        method,
        url.path or "/",
        _get_canonical_query_string(query),
        canonical_headers + "\n",
        signed_headers,
        "UNSIGNED-PAYLOAD",
    ])
    return sha256_hash(canonical_request), url
|
||||
|
||||
|
||||
def presign_v4(
        method: str,
        url: SplitResult,
        region: str,
        credentials: Credentials,
        date: datetime,
        expires: int,
) -> SplitResult:
    """Presign *url* with SigV4 query parameters and return the new URL."""
    scope = _get_scope(date, region, "s3")
    request_hash, url = _get_presign_canonical_request_hash(
        method, url, credentials.access_key, scope, date, expires,
    )
    signature = _get_signature(
        _get_signing_key(credentials.secret_key, date, region, "s3"),
        _get_string_to_sign(date, scope, request_hash),
    )
    return url._replace(
        query=f"{url.query}&X-Amz-Signature={queryencode(signature)}",
    )
|
||||
|
||||
|
||||
def get_credential_string(access_key: str, date: datetime, region: str) -> str:
    """Get credential string of given access key, date and region."""
    return "/".join(
        [access_key, time.to_signer_date(date), region, "s3", "aws4_request"],
    )
|
||||
|
||||
|
||||
def post_presign_v4(
        data: str,
        secret_key: str,
        date: datetime,
        region: str,
) -> str:
    """Sign the presigned-POST policy *data* with SigV4."""
    signing_key = _get_signing_key(secret_key, date, region, "s3")
    return _get_signature(signing_key, data)
|
||||
111
venv/lib/python3.12/site-packages/minio/sse.py
Normal file
111
venv/lib/python3.12/site-packages/minio/sse.py
Normal file
@@ -0,0 +1,111 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
|
||||
# (C) 2018 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
minio.sse
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains core API parsers.
|
||||
|
||||
:copyright: (c) 2018 by MinIO, Inc.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import base64
|
||||
import json
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from typing import Any, cast
|
||||
|
||||
|
||||
class Sse:
    """Server-side encryption base class."""
    # NOTE(review): `__metaclass__` is Python 2 syntax and has no effect on
    # Python 3, so @abstractmethod below is not enforced at instantiation.
    __metaclass__ = ABCMeta

    @abstractmethod
    def headers(self) -> dict[str, str]:
        """Return headers."""

    def tls_required(self) -> bool:  # pylint: disable=no-self-use
        """Return TLS required to use this server-side encryption."""
        return True

    def copy_headers(self) -> dict[str, str]:  # pylint: disable=no-self-use
        """Return copy headers."""
        return {}
|
||||
|
||||
|
||||
class SseCustomerKey(Sse):
    """ Server-side encryption - customer key type."""

    def __init__(self, key: bytes):
        # SSE-C requires a raw 256-bit (32-byte) key.
        if len(key) != 32:
            raise ValueError(
                "SSE-C keys need to be 256 bit base64 encoded",
            )
        b64key = base64.b64encode(key).decode()
        # Imported lazily to avoid a circular import with helpers.
        from .helpers import \
            md5sum_hash  # pylint: disable=import-outside-toplevel
        md5key = cast(str, md5sum_hash(key))
        self._headers: dict[str, str] = {
            "X-Amz-Server-Side-Encryption-Customer-Algorithm": "AES256",
            "X-Amz-Server-Side-Encryption-Customer-Key": b64key,
            "X-Amz-Server-Side-Encryption-Customer-Key-MD5": md5key,
        }
        # Separate header set used when this key protects a copy source.
        self._copy_headers: dict[str, str] = {
            "X-Amz-Copy-Source-Server-Side-Encryption-Customer-Algorithm":
                "AES256",
            "X-Amz-Copy-Source-Server-Side-Encryption-Customer-Key": b64key,
            "X-Amz-Copy-Source-Server-Side-Encryption-Customer-Key-MD5":
                md5key,
        }

    def headers(self) -> dict[str, str]:
        """Return a copy of the SSE-C request headers."""
        return self._headers.copy()

    def copy_headers(self) -> dict[str, str]:
        """Return a copy of the SSE-C copy-source headers."""
        return self._copy_headers.copy()
|
||||
|
||||
|
||||
class SseKMS(Sse):
    """Server-side encryption - KMS type."""

    def __init__(self, key: str, context: dict[str, Any]):
        # *key* is the KMS master key ID used for encryption.
        self._headers = {
            "X-Amz-Server-Side-Encryption-Aws-Kms-Key-Id": key,
            "X-Amz-Server-Side-Encryption": "aws:kms"
        }
        if context:
            # Encryption context is sent as base64-encoded JSON.
            encoded = base64.b64encode(
                json.dumps(context).encode("utf-8"),
            ).decode()
            self._headers["X-Amz-Server-Side-Encryption-Context"] = encoded

    def headers(self) -> dict[str, str]:
        """Return a copy of the SSE-KMS request headers."""
        return dict(self._headers)
|
||||
|
||||
|
||||
class SseS3(Sse):
    """Server-side encryption - S3 type."""

    def headers(self) -> dict[str, str]:
        """Return the SSE-S3 request header."""
        return {"X-Amz-Server-Side-Encryption": "AES256"}

    def tls_required(self) -> bool:
        """SSE-S3 sends no key material, so TLS is not mandatory."""
        return False
|
||||
118
venv/lib/python3.12/site-packages/minio/sseconfig.py
Normal file
118
venv/lib/python3.12/site-packages/minio/sseconfig.py
Normal file
@@ -0,0 +1,118 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutBucketEncryption and GetBucketEncryption APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from abc import ABCMeta
|
||||
from typing import Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .xml import Element, SubElement, find, findtext
|
||||
|
||||
AES256 = "AES256"
|
||||
AWS_KMS = "aws:kms"
|
||||
|
||||
A = TypeVar("A", bound="Rule")
|
||||
|
||||
|
||||
class Rule:
    """Server-side encryption rule. """
    # NOTE(review): `__metaclass__` is Python 2 syntax; it has no effect on
    # Python 3.
    __metaclass__ = ABCMeta

    def __init__(
            self,
            sse_algorithm: str,
            kms_master_key_id: str | None = None,
    ):
        # Algorithm is AES256 (SSE-S3) or aws:kms (SSE-KMS).
        self._sse_algorithm = sse_algorithm
        self._kms_master_key_id = kms_master_key_id

    @property
    def sse_algorithm(self) -> str:
        """Get SSE algorithm."""
        return self._sse_algorithm

    @property
    def kms_master_key_id(self) -> str | None:
        """Get KMS master key ID."""
        return self._kms_master_key_id

    @classmethod
    def new_sse_s3_rule(cls: Type[A]) -> A:
        """Create SSE-S3 rule."""
        return cls(AES256)

    @classmethod
    def new_sse_kms_rule(
            cls: Type[A],
            kms_master_key_id: str | None = None,
    ) -> A:
        """Create new SSE-KMS rule."""
        return cls(AWS_KMS, kms_master_key_id)

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        element = cast(
            ET.Element,
            find(element, "ApplyServerSideEncryptionByDefault", True),
        )
        return cls(
            cast(str, findtext(element, "SSEAlgorithm", True)),
            findtext(element, "KMSMasterKeyID"),
        )

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML.

        Appends a <Rule> child under *element* and returns it.
        """
        if element is None:
            raise ValueError("element must be provided")
        element = SubElement(element, "Rule")
        tag = SubElement(element, "ApplyServerSideEncryptionByDefault")
        SubElement(tag, "SSEAlgorithm", self._sse_algorithm)
        if self._kms_master_key_id is not None:
            SubElement(tag, "KMSMasterKeyID", self._kms_master_key_id)
        return element
|
||||
|
||||
|
||||
B = TypeVar("B", bound="SSEConfig")
|
||||
|
||||
|
||||
class SSEConfig:
    """server-side encryption configuration."""

    def __init__(self, rule: Rule):
        if not rule:
            raise ValueError("rule must be provided")
        self._rule = rule

    @property
    def rule(self) -> Rule:
        """Get rule."""
        return self._rule

    @classmethod
    def fromxml(cls: Type[B], element: ET.Element) -> B:
        """Create new object with values from XML element."""
        element = cast(ET.Element, find(element, "Rule", True))
        return cls(Rule.fromxml(element))

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML.

        The passed *element* is ignored; a fresh root element is created
        and returned.
        """
        element = Element("ServerSideEncryptionConfiguration")
        self._rule.toxml(element)
        return element
|
||||
56
venv/lib/python3.12/site-packages/minio/tagging.py
Normal file
56
venv/lib/python3.12/site-packages/minio/tagging.py
Normal file
@@ -0,0 +1,56 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Tagging for bucket and object."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from typing import Type, TypeVar, cast
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import Tags
|
||||
from .xml import Element, SubElement, find
|
||||
|
||||
A = TypeVar("A", bound="Tagging")
|
||||
|
||||
|
||||
class Tagging:
    """Tagging for buckets and objects."""

    def __init__(self, tags: Tags | None):
        self._tags = tags

    @property
    def tags(self) -> Tags | None:
        """Get tags."""
        return self._tags

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        element = cast(ET.Element, find(element, "TagSet", True))
        # An empty <TagSet> yields None rather than an empty Tags object.
        tags = (
            None if find(element, "Tag") is None
            else Tags.fromxml(element)
        )
        return cls(tags)

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML.

        The passed *element* is ignored; a fresh <Tagging> root is created
        and returned.
        """
        element = Element("Tagging")
        if self._tags:
            self._tags.toxml(SubElement(element, "TagSet"))
        return element
|
||||
123
venv/lib/python3.12/site-packages/minio/time.py
Normal file
123
venv/lib/python3.12/site-packages/minio/time.py
Normal file
@@ -0,0 +1,123 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Time formatter for S3 APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import time as ctime
|
||||
from datetime import datetime, timezone
|
||||
|
||||
try:
|
||||
from datetime import UTC # type: ignore[attr-defined]
|
||||
_UTC_IMPORTED = True
|
||||
except ImportError:
|
||||
_UTC_IMPORTED = False
|
||||
|
||||
_WEEK_DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
|
||||
_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct",
|
||||
"Nov", "Dec"]
|
||||
|
||||
|
||||
def _to_utc(value: datetime) -> datetime:
|
||||
"""Convert to UTC time if value is not naive."""
|
||||
return (
|
||||
value.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
if value.tzinfo else value
|
||||
)
|
||||
|
||||
|
||||
def from_iso8601utc(value: str | None) -> datetime | None:
    """Parse UTC ISO-8601 formatted string to an aware datetime."""
    if value is None:
        return None

    try:
        # Prefer the fractional-seconds form; fall back to whole seconds.
        parsed = datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ")
    except ValueError:
        parsed = datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
    return parsed.replace(tzinfo=timezone.utc)
|
||||
|
||||
|
||||
def to_iso8601utc(value: datetime | None) -> str | None:
    """Format datetime into a UTC ISO-8601 string with millisecond precision."""
    if value is None:
        return None

    if value.tzinfo:
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    # Truncate microseconds to three digits (milliseconds).
    millis = value.strftime("%f")[:3]
    return value.strftime("%Y-%m-%dT%H:%M:%S.") + millis + "Z"
|
||||
|
||||
|
||||
def from_http_header(value: str) -> datetime:
    """Parse HTTP header date formatted string to datetime.

    Expects the fixed 29-character IMF-fixdate form
    "Wdy, DD Mon YYYY HH:MM:SS GMT" and returns an aware UTC datetime.
    Raises ValueError on any format violation.
    """
    if len(value) != 29:
        raise ValueError(
            f"time data {value} does not match HTTP header format")

    # Validate and capture the named weekday prefix.
    if value[0:3] not in _WEEK_DAYS or value[3] != ",":
        raise ValueError(
            f"time data {value} does not match HTTP header format")
    weekday = _WEEK_DAYS.index(value[0:3])

    day = datetime.strptime(value[4:8], " %d ").day

    # Month is matched against the fixed English abbreviations rather than
    # strptime's %b, which is locale-dependent.
    if value[8:11] not in _MONTHS:
        raise ValueError(
            f"time data {value} does not match HTTP header format")
    month = _MONTHS.index(value[8:11])

    time = datetime.strptime(value[11:], " %Y %H:%M:%S GMT")
    time = time.replace(day=day, month=month+1, tzinfo=timezone.utc)

    # Cross-check the named weekday against the computed calendar date.
    if weekday != time.weekday():
        raise ValueError(
            f"time data {value} does not match HTTP header format")

    return time
|
||||
|
||||
|
||||
def to_http_header(value: datetime) -> str:
    """Format datetime into an HTTP header (IMF-fixdate) string."""
    value = _to_utc(value)
    return (
        f"{_WEEK_DAYS[value.weekday()]},"
        f"{value.strftime(' %d ')}"
        f"{_MONTHS[value.month - 1]}"
        f"{value.strftime(' %Y %H:%M:%S GMT')}"
    )
|
||||
|
||||
|
||||
def to_amz_date(value: datetime) -> str:
    """Format datetime into an AMZ date string (UTC, compact ISO form)."""
    if value.tzinfo:
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return value.strftime("%Y%m%dT%H%M%SZ")
|
||||
|
||||
|
||||
def utcnow() -> datetime:
    """Timezone-aware wrapper to datetime.utcnow()."""
    # Prefer datetime.UTC when available (datetime.utcnow() is deprecated
    # on newer Pythons).
    now = datetime.now(UTC) if _UTC_IMPORTED else datetime.utcnow()
    return now.replace(tzinfo=timezone.utc)
|
||||
|
||||
|
||||
def to_signer_date(value: datetime) -> str:
    """Format datetime into a SignatureV4 date string (YYYYMMDD, UTC)."""
    if value.tzinfo:
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return value.strftime("%Y%m%d")
|
||||
|
||||
|
||||
def to_float(value: datetime) -> float:
    """Convert datetime into a float epoch value with microseconds."""
    seconds = ctime.mktime(value.timetuple())
    return seconds + value.microsecond * 1e-6
|
||||
72
venv/lib/python3.12/site-packages/minio/versioningconfig.py
Normal file
72
venv/lib/python3.12/site-packages/minio/versioningconfig.py
Normal file
@@ -0,0 +1,72 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Request/response of PutBucketVersioning and GetBucketVersioning APIs."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
from typing import Type, TypeVar
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .commonconfig import DISABLED, ENABLED
|
||||
from .xml import Element, SubElement, findtext
|
||||
|
||||
OFF = "Off"
SUSPENDED = "Suspended"

A = TypeVar("A", bound="VersioningConfig")


class VersioningConfig:
    """Bucket versioning configuration for Put/GetBucketVersioning APIs."""

    def __init__(
            self,
            status: str | None = None,
            mfa_delete: str | None = None,
    ):
        # Validate eagerly so an invalid configuration never reaches the
        # server; None means "not set" and is always accepted.
        if status is not None and status not in (ENABLED, SUSPENDED):
            raise ValueError(f"status must be {ENABLED} or {SUSPENDED}")
        if mfa_delete is not None and mfa_delete not in (ENABLED, DISABLED):
            raise ValueError(f"MFA delete must be {ENABLED} or {DISABLED}")
        self._status = status
        self._mfa_delete = mfa_delete

    @property
    def status(self) -> str:
        """Get status; reports OFF when no status was set."""
        return self._status or OFF

    @property
    def mfa_delete(self) -> str | None:
        """Get MFA delete setting, or None when not set."""
        return self._mfa_delete

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create new object with values from XML element."""
        return cls(
            findtext(element, "Status"),
            findtext(element, "MFADelete"),
        )

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert to XML. The passed element is ignored; a fresh root is
        built (signature matches the ToXmlType protocol)."""
        root = Element("VersioningConfiguration")
        if self._status:
            SubElement(root, "Status", self._status)
        if self._mfa_delete:
            SubElement(root, "MFADelete", self._mfa_delete)
        return root
|
||||
137
venv/lib/python3.12/site-packages/minio/xml.py
Normal file
137
venv/lib/python3.12/site-packages/minio/xml.py
Normal file
@@ -0,0 +1,137 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
|
||||
# 2020 MinIO, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""XML utility module."""
|
||||
|
||||
from __future__ import absolute_import, annotations
|
||||
|
||||
import io
|
||||
from typing import Type, TypeVar
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from typing_extensions import Protocol
|
||||
|
||||
_S3_NAMESPACE = "http://s3.amazonaws.com/doc/2006-03-01/"


def Element(  # pylint: disable=invalid-name
    tag: str,
    namespace: str = _S3_NAMESPACE,
) -> ET.Element:
    """Create ElementTree.Element with tag and namespace.

    Passing an empty namespace yields an element with no attributes.
    """
    attrib = {"xmlns": namespace} if namespace else {}
    return ET.Element(tag, attrib)
|
||||
|
||||
|
||||
def SubElement(  # pylint: disable=invalid-name
    parent: ET.Element, tag: str, text: str | None = None
) -> ET.Element:
    """Create ElementTree.SubElement on parent with tag and optional text."""
    child = ET.SubElement(parent, tag)
    if text is not None:
        child.text = text
    return child
|
||||
|
||||
|
||||
def _get_namespace(element: ET.Element) -> str:
    """Extract the namespace URI from a ``{namespace}tag`` style tag.

    Returns an empty string when the tag carries no namespace braces.
    """
    open_idx = element.tag.find("{")
    close_idx = element.tag.find("}")
    if open_idx < 0 or close_idx < 0:
        return ""
    return element.tag[open_idx + 1:close_idx]
|
||||
|
||||
|
||||
def findall(element: ET.Element, name: str) -> list[ET.Element]:
    """Namespace aware ElementTree.Element.findall()."""
    ns = _get_namespace(element)
    if ns:
        # Bind the element's own namespace under the "ns" prefix.
        return element.findall(f"ns:{name}", {"ns": ns})
    return element.findall(name, {})
|
||||
|
||||
|
||||
def find(
    element: ET.Element,
    name: str,
    strict: bool = False,
) -> ET.Element | None:
    """Namespace aware ElementTree.Element.find().

    With strict=True, a missing child raises ValueError instead of
    returning None.
    """
    ns = _get_namespace(element)
    path = f"ns:{name}" if ns else name
    result = element.find(path, {"ns": ns} if ns else {})
    if result is None and strict:
        raise ValueError(f"XML element <{name}> not found")
    return result
|
||||
|
||||
|
||||
def findtext(
    element: ET.Element,
    name: str,
    strict: bool = False,
) -> str | None:
    """Namespace aware ElementTree.Element.findtext().

    Returns None when the child is absent (unless strict, which raises
    ValueError via find()); a present-but-empty child yields "".
    """
    child = find(element, name, strict=strict)
    if child is None:
        return None
    return child.text or ""
|
||||
|
||||
|
||||
# Type variable for the object type produced by `fromxml`.
A = TypeVar("A")


class FromXmlType(Protocol):
    """Typing stub for classes providing a `fromxml` alternate constructor."""

    @classmethod
    def fromxml(cls: Type[A], element: ET.Element) -> A:
        """Create python object with values from XML element."""


# Type variable constrained to classes implementing the FromXmlType protocol;
# used by unmarshal() below.
B = TypeVar("B", bound=FromXmlType)
|
||||
|
||||
|
||||
def unmarshal(cls: Type[B], xmlstring: str) -> B:
    """Unmarshal given XML string to an object of passed class."""
    root = ET.fromstring(xmlstring)
    return cls.fromxml(root)
|
||||
|
||||
|
||||
def getbytes(element: ET.Element) -> bytes:
    """Serialize an ElementTree.Element to bytes (no XML declaration)."""
    buffer = io.BytesIO()
    tree = ET.ElementTree(element)
    tree.write(buffer, encoding=None, xml_declaration=False)
    return buffer.getvalue()
|
||||
|
||||
|
||||
class ToXmlType(Protocol):
    """Typing stub for classes providing a `toxml` serializer method."""

    def toxml(self, element: ET.Element | None) -> ET.Element:
        """Convert python object to ElementTree.Element."""
|
||||
|
||||
def marshal(obj: ToXmlType) -> bytes:
    """Serialize an object with a `toxml` method to XML bytes."""
    root = obj.toxml(None)
    return getbytes(root)
|
||||
Reference in New Issue
Block a user