Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

run pre-commit on s3transfer #9385

Merged
merged 1 commit into from
Mar 25, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions awscli/s3transfer/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ def __call__(self, bytes_amount):


"""

import concurrent.futures
import functools
import logging
Expand All @@ -134,13 +135,12 @@ def __call__(self, bytes_amount):
import string
import threading

import s3transfer.compat
from botocore.compat import six # noqa: F401
from botocore.exceptions import IncompleteReadError
from botocore.vendored.requests.packages.urllib3.exceptions import (
ReadTimeoutError,
)

import s3transfer.compat
from s3transfer.exceptions import RetriesExceededError, S3UploadFailedError

__author__ = 'Amazon Web Services'
Expand Down Expand Up @@ -681,7 +681,6 @@ def __init__(


class S3Transfer:

ALLOWED_DOWNLOAD_ARGS = [
'VersionId',
'SSECustomerAlgorithm',
Expand Down
4 changes: 1 addition & 3 deletions awscli/s3transfer/bandwidth.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,7 @@ def __init__(self, requested_amt, retry_time):
"""
self.requested_amt = requested_amt
self.retry_time = retry_time
msg = 'Request amount {} exceeded the amount available. Retry in {}'.format(
requested_amt, retry_time
)
msg = f'Request amount {requested_amt} exceeded the amount available. Retry in {retry_time}'
super().__init__(msg)


Expand Down
10 changes: 5 additions & 5 deletions awscli/s3transfer/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,11 @@
]

FULL_OBJECT_CHECKSUM_ARGS = [
'ChecksumCRC32',
'ChecksumCRC32C',
'ChecksumCRC64NVME',
'ChecksumSHA1',
'ChecksumSHA256',
'ChecksumCRC32',
'ChecksumCRC32C',
'ChecksumCRC64NVME',
'ChecksumSHA1',
'ChecksumSHA256',
]

USER_AGENT = 's3transfer/%s' % s3transfer.__version__
Expand Down
13 changes: 6 additions & 7 deletions awscli/s3transfer/crt.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@
from botocore.exceptions import NoCredentialsError
from botocore.useragent import register_feature_id
from botocore.utils import ArnParser, InvalidArnException, is_s3express_bucket

from s3transfer.constants import FULL_OBJECT_CHECKSUM_ARGS, MB
from s3transfer.exceptions import TransferNotDoneError
from s3transfer.futures import BaseTransferFuture, BaseTransferMeta
Expand Down Expand Up @@ -628,7 +627,7 @@ class CRTTransferCoordinator:
"""A helper class for managing CRTTransferFuture"""

def __init__(
self, transfer_id=None, s3_request=None, exception_translator=None
self, transfer_id=None, s3_request=None, exception_translator=None
):
self.transfer_id = transfer_id
self._exception_translator = exception_translator
Expand Down Expand Up @@ -858,9 +857,8 @@ def _default_get_make_request_args(

arn_handler = _S3ArnParamHandler()
if (
(accesspoint_arn_details := arn_handler.handle_arn(call_args.bucket))
and accesspoint_arn_details['region'] == ""
):
accesspoint_arn_details := arn_handler.handle_arn(call_args.bucket)
) and accesspoint_arn_details['region'] == "":
# Configure our region to `*` to propagate in `x-amz-region-set`
# for multi-region support in MRAP accesspoints.
# use_double_uri_encode and should_normalize_uri_path are defaulted to be True
Expand Down Expand Up @@ -928,6 +926,7 @@ class _S3ArnParamHandler:
purposes. This should be safe to remove once we properly integrate auth
resolution from Botocore into the CRT transfer integration.
"""

_RESOURCE_REGEX = re.compile(
r'^(?P<resource_type>accesspoint|outpost)[/:](?P<resource_name>.+)$'
)
Expand All @@ -948,9 +947,9 @@ def _get_arn_details_from_bucket(self, bucket):
self._add_resource_type_and_name(arn_details)
return arn_details
except InvalidArnException:
pass
pass
return None

def _add_resource_type_and_name(self, arn_details):
match = self._RESOURCE_REGEX.match(arn_details['resource'])
if match:
Expand Down
4 changes: 1 addition & 3 deletions awscli/s3transfer/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,9 +307,7 @@ def _get_download_output_manager_cls(self, transfer_future, osutil):
if download_manager_cls.is_compatible(fileobj, osutil):
return download_manager_cls
raise RuntimeError(
'Output {} of type: {} is not supported.'.format(
fileobj, type(fileobj)
)
f'Output {fileobj} of type: {type(fileobj)} is not supported.'
)

def _submit(
Expand Down
10 changes: 3 additions & 7 deletions awscli/s3transfer/futures.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
from concurrent import futures

from botocore.context import get_context

from s3transfer.compat import MAXINT
from s3transfer.exceptions import CancelledError, TransferNotDoneError
from s3transfer.utils import FunctionContainer, TaskSemaphore
Expand Down Expand Up @@ -177,9 +176,7 @@ def __init__(self, transfer_id=None):
self._failure_cleanups_lock = threading.Lock()

def __repr__(self):
return '{}(transfer_id={})'.format(
self.__class__.__name__, self.transfer_id
)
return f'{self.__class__.__name__}(transfer_id={self.transfer_id})'

@property
def exception(self):
Expand Down Expand Up @@ -318,9 +315,7 @@ def submit(self, executor, task, tag=None):
:returns: A future representing the submitted task
"""
logger.debug(
"Submitting task {} to executor {} for transfer request: {}.".format(
task, executor, self.transfer_id
)
f"Submitting task {task} to executor {executor} for transfer request: {self.transfer_id}."
)
future = executor.submit(task, tag=tag)
# Add this created future to the list of associated future just
Expand Down Expand Up @@ -509,6 +504,7 @@ def add_done_callback(self, fn):
than concurrent.futures.Future.add_done_callback that requires
a single argument for the future.
"""

# The done callback for concurrent.futures.Future will always pass a
# the future in as the only argument. So we need to create the
# proper signature wrapper that will invoke the callback provided.
Expand Down
13 changes: 6 additions & 7 deletions awscli/s3transfer/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
import threading

from botocore.useragent import register_feature_id

from s3transfer.bandwidth import BandwidthLimiter, LeakyBucket
from s3transfer.constants import (
ALLOWED_DOWNLOAD_ARGS,
Expand Down Expand Up @@ -192,12 +191,12 @@ class TransferManager:
]

ALLOWED_UPLOAD_ARGS = (
_ALLOWED_SHARED_ARGS
+ [
'ChecksumType',
'MpuObjectSize',
]
+ FULL_OBJECT_CHECKSUM_ARGS
_ALLOWED_SHARED_ARGS
+ [
'ChecksumType',
'MpuObjectSize',
]
+ FULL_OBJECT_CHECKSUM_ARGS
)

ALLOWED_COPY_ARGS = _ALLOWED_SHARED_ARGS + [
Expand Down
7 changes: 1 addition & 6 deletions awscli/s3transfer/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
import logging

from botocore.context import start_as_current_context

from s3transfer.utils import get_callbacks

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -98,11 +97,7 @@ def __repr__(self):
main_kwargs_to_display = self._get_kwargs_with_params_to_include(
self._main_kwargs, params_to_display
)
return '{}(transfer_id={}, {})'.format(
self.__class__.__name__,
self._transfer_coordinator.transfer_id,
main_kwargs_to_display,
)
return f'{self.__class__.__name__}(transfer_id={self._transfer_coordinator.transfer_id}, {main_kwargs_to_display})'

@property
def transfer_id(self):
Expand Down
8 changes: 3 additions & 5 deletions awscli/s3transfer/upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -557,9 +557,7 @@ def _get_upload_input_manager_cls(self, transfer_future):
if upload_manager_cls.is_compatible(fileobj):
return upload_manager_cls
raise RuntimeError(
'Input {} of type: {} is not supported.'.format(
fileobj, type(fileobj)
)
f'Input {fileobj} of type: {type(fileobj)} is not supported.'
)

def _submit(
Expand Down Expand Up @@ -691,7 +689,7 @@ def _submit_multipart_request(
'client': client,
'bucket': call_args.bucket,
'key': call_args.key,
'extra_args':create_multipart_extra_args,
'extra_args': create_multipart_extra_args,
},
),
)
Expand Down Expand Up @@ -830,7 +828,7 @@ def _main(
UploadId=upload_id,
PartNumber=part_number,
Body=body,
**extra_args
**extra_args,
)
etag = response['ETag']
part_metadata = {'ETag': etag, 'PartNumber': part_number}
Expand Down
17 changes: 7 additions & 10 deletions awscli/s3transfer/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,16 +24,15 @@
from botocore.exceptions import IncompleteReadError, ReadTimeoutError
from botocore.httpchecksum import DEFAULT_CHECKSUM_ALGORITHM, AwsChunkedWrapper
from botocore.utils import is_s3express_bucket

from s3transfer.compat import SOCKET_ERROR, fallocate, rename_file
from s3transfer.constants import FULL_OBJECT_CHECKSUM_ARGS

MAX_PARTS = 10000
# The maximum file size you can upload via S3 per request.
# See: http://docs.aws.amazon.com/AmazonS3/latest/dev/UploadingObjects.html
# and: http://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html
MAX_SINGLE_UPLOAD_SIZE = 5 * (1024 ** 3)
MIN_UPLOAD_CHUNKSIZE = 5 * (1024 ** 2)
MAX_SINGLE_UPLOAD_SIZE = 5 * (1024**3)
MIN_UPLOAD_CHUNKSIZE = 5 * (1024**2)
logger = logging.getLogger(__name__)


Expand Down Expand Up @@ -194,9 +193,7 @@ def __init__(self, func, *args, **kwargs):
self._kwargs = kwargs

def __repr__(self):
return 'Function: {} with args {} and kwargs {}'.format(
self._func, self._args, self._kwargs
)
return f'Function: {self._func} with args {self._args} and kwargs {self._kwargs}'

def __call__(self):
return self._func(*self._args, **self._kwargs)
Expand Down Expand Up @@ -825,7 +822,7 @@ def add_s3express_defaults(bucket, extra_args):


def set_default_checksum_algorithm(extra_args):
"""Set the default algorithm if not specified by the user."""
if any(checksum in extra_args for checksum in FULL_OBJECT_CHECKSUM_ARGS):
return
extra_args.setdefault("ChecksumAlgorithm", DEFAULT_CHECKSUM_ALGORITHM)
"""Set the default algorithm if not specified by the user."""
if any(checksum in extra_args for checksum in FULL_OBJECT_CHECKSUM_ARGS):
return
extra_args.setdefault("ChecksumAlgorithm", DEFAULT_CHECKSUM_ALGORITHM)
26 changes: 12 additions & 14 deletions tests/functional/s3transfer/test_copy.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
# language governing permissions and limitations under the License.
from botocore.exceptions import ClientError
from botocore.stub import Stubber

from s3transfer.manager import TransferConfig, TransferManager
from s3transfer.utils import MIN_UPLOAD_CHUNKSIZE

from tests import BaseGeneralInterfaceTest, FileSizeProvider


Expand Down Expand Up @@ -84,7 +84,6 @@ def add_successful_copy_responses(
expected_create_mpu_params=None,
expected_complete_mpu_params=None,
):

# Add all responses needed to do the copy of the object.
# Should account for both ranged and nonranged downloads.
stubbed_responses = self.create_stubbed_responses()[1:]
Expand All @@ -97,9 +96,9 @@ def add_successful_copy_responses(

# Add the expected create multipart upload params.
if expected_create_mpu_params:
stubbed_responses[0][
'expected_params'
] = expected_create_mpu_params
stubbed_responses[0]['expected_params'] = (
expected_create_mpu_params
)

# Add any expected copy parameters.
if expected_copy_params:
Expand All @@ -111,9 +110,9 @@ def add_successful_copy_responses(

# Add the expected complete multipart upload params.
if expected_complete_mpu_params:
stubbed_responses[-1][
'expected_params'
] = expected_complete_mpu_params
stubbed_responses[-1]['expected_params'] = (
expected_complete_mpu_params
)

# Add the responses to the stubber.
for stubbed_response in stubbed_responses:
Expand Down Expand Up @@ -353,7 +352,7 @@ def create_stubbed_responses(self):
]

def add_get_head_response_with_default_expected_params(
self, extra_expected_params=None
self, extra_expected_params=None
):
expected_params = {
'Bucket': 'mysourcebucket',
Expand All @@ -366,7 +365,7 @@ def add_get_head_response_with_default_expected_params(
self.stubber.add_response(**response)

def add_create_multipart_response_with_default_expected_params(
self, extra_expected_params=None
self, extra_expected_params=None
):
expected_params = {'Bucket': self.bucket, 'Key': self.key}
if extra_expected_params:
Expand All @@ -376,7 +375,7 @@ def add_create_multipart_response_with_default_expected_params(
self.stubber.add_response(**response)

def add_upload_part_copy_responses_with_default_expected_params(
self, extra_expected_params=None
self, extra_expected_params=None
):
ranges = [
'bytes=0-5242879',
Expand All @@ -400,7 +399,7 @@ def add_upload_part_copy_responses_with_default_expected_params(
checksum_member = 'Checksum%s' % name.upper()
response = upload_part_response['service_response']
response['CopyPartResult'][checksum_member] = 'sum%s==' % (
i + 1
i + 1
)
else:
expected_params.update(extra_expected_params)
Expand All @@ -409,7 +408,7 @@ def add_upload_part_copy_responses_with_default_expected_params(
self.stubber.add_response(**upload_part_response)

def add_complete_multipart_response_with_default_expected_params(
self, extra_expected_params=None
self, extra_expected_params=None
):
expected_params = {
'Bucket': self.bucket,
Expand Down Expand Up @@ -497,7 +496,6 @@ def _get_expected_params(self):
def _add_params_to_expected_params(
self, add_copy_kwargs, operation_types, new_params
):

expected_params_to_update = []
for operation_type in operation_types:
add_copy_kwargs_key = 'expected_' + operation_type + '_params'
Expand Down
Loading
Loading