diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5285b367..c29c54bc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,21 +5,13 @@ repos:
       - id: check-yaml
       - id: end-of-file-fixer
       - id: trailing-whitespace
-  - repo: 'https://github.com/asottile/pyupgrade'
-    rev: v3.15.0
-    hooks:
-      - id: pyupgrade
-        args:
-          - '--py38-plus'
   - repo: 'https://github.com/PyCQA/isort'
    rev: 5.12.0
    hooks:
      - id: isort
-  - repo: 'https://github.com/psf/black'
-    rev: 23.11.0
-    hooks:
-      - id: black
-  - repo: 'https://github.com/pycqa/flake8'
-    rev: 6.1.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.4.8
     hooks:
-      - id: flake8
+      - id: ruff
+        args: [ --fix ]
+      - id: ruff-format
diff --git a/CHANGES.rst b/CHANGES.rst
index 6ecdcb13..bccb9c7a 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,9 +1,10 @@
 Changes
 -------

-2.13.3 (2024-08-18)
+2.13.3 (2024-08-19)
 ^^^^^^^^^^^^^^^^^^^
 * fix ``create_waiter_with_client()``
+* relax botocore dependency specification

 2.13.2 (2024-07-18)
 ^^^^^^^^^^^^^^^^^^^
diff --git a/Makefile b/Makefile
index 91c59f4e..01bf871f 100644
--- a/Makefile
+++ b/Makefile
@@ -3,7 +3,7 @@
 # ?= conditional assign, so users can pass options on the CLI instead of manually editing this file
 FLAGS?=

-pre-commit flake: checkrst
+pre-commit: checkrst
 	pre-commit run --all

 test: pre-commit
diff --git a/aiobotocore/client.py b/aiobotocore/client.py
index 762ae520..5f3ece32 100644
--- a/aiobotocore/client.py
+++ b/aiobotocore/client.py
@@ -111,7 +111,7 @@ async def _create_client_class(self, service_name, service_model):
         bases = [AioBaseClient]
         service_id = service_model.service_id.hyphenize()
         await self._event_emitter.emit(
-            'creating-client-class.%s' % service_id,
+            f'creating-client-class.{service_id}',
             class_attributes=class_attributes,
             base_classes=bases,
         )
@@ -189,7 +189,7 @@ def _register_legacy_retries(self, client):
         handler = self._retry_handler_factory.create_retry_handler(
             retry_config, endpoint_prefix
         )
-        unique_id = 'retry-config-%s' % service_event_name
+        unique_id = f'retry-config-{service_event_name}'
         client.meta.events.register(
             f"needs-retry.{service_event_name}", handler, unique_id=unique_id
         )
@@ -301,8 +301,8 @@ def _get_client_args(

 class AioBaseClient(BaseClient):
     async def _async_getattr(self, item):
-        event_name = 'getattr.{}.{}'.format(
-            self._service_model.service_id.hyphenize(), item
+        event_name = (
+            f'getattr.{self._service_model.service_id.hyphenize()}.{item}'
         )
         handler, event_response = await self.meta.events.emit_until_response(
             event_name, client=self
@@ -315,9 +315,7 @@ def __getattr__(self, item):
         # deferred attrgetter (See #803), it would resolve in hasattr always returning
         # true. This ends up breaking ddtrace for example when it tries to set a pin.
         raise AttributeError(
-            "'{}' object has no attribute '{}'".format(
-                self.__class__.__name__, item
-            )
+            f"'{self.__class__.__name__}' object has no attribute '{item}'"
         )

     async def close(self):
@@ -372,9 +370,7 @@ async def _make_api_call(self, operation_name, api_params):

         service_id = self._service_model.service_id.hyphenize()
         handler, event_response = await self.meta.events.emit_until_response(
-            'before-call.{service_id}.{operation_name}'.format(
-                service_id=service_id, operation_name=operation_name
-            ),
+            f'before-call.{service_id}.{operation_name}',
             model=operation_model,
             params=request_dict,
             request_signer=self._request_signer,
@@ -393,9 +389,7 @@ async def _make_api_call(self, operation_name, api_params):
             )

         await self.meta.events.emit(
-            'after-call.{service_id}.{operation_name}'.format(
-                service_id=service_id, operation_name=operation_name
-            ),
+            f'after-call.{service_id}.{operation_name}',
             http_response=http,
             parsed=parsed_response,
             model=operation_model,
@@ -421,10 +415,7 @@ async def _make_request(
             )
         except Exception as e:
             await self.meta.events.emit(
-                'after-call-error.{service_id}.{operation_name}'.format(
-                    service_id=self._service_model.service_id.hyphenize(),
-                    operation_name=operation_model.name,
-                ),
+                f'after-call-error.{self._service_model.service_id.hyphenize()}.{operation_model.name}',
                 exception=e,
                 context=request_context,
             )
@@ -614,13 +605,13 @@ def get_waiter(self, waiter_name):
         """
         config = self._get_waiter_config()
         if not config:
-            raise ValueError("Waiter does not exist: %s" % waiter_name)
+            raise ValueError(f"Waiter does not exist: {waiter_name}")
         model = waiter.WaiterModel(config)
         mapping = {}
         for name in model.waiter_names:
             mapping[xform_name(name)] = name
         if waiter_name not in mapping:
-            raise ValueError("Waiter does not exist: %s" % waiter_name)
+            raise ValueError(f"Waiter does not exist: {waiter_name}")

         return waiter.create_waiter_with_client(
             mapping[waiter_name], model, self
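Note: the client.py hunks above, like most hunks in this patch, are the mechanical output of ruff's pyupgrade-derived rules (UP031 for %-interpolation, UP032 for str.format()), which rewrite both styles into f-strings. A minimal sketch of the equivalence, using made-up event-name values rather than anything from this patch:

    # Illustration only; 'dynamodb' and 'GetItem' are hypothetical values.
    service_id = 'dynamodb'
    operation_name = 'GetItem'

    before = 'before-call.{service_id}.{operation_name}'.format(
        service_id=service_id, operation_name=operation_name
    )
    after = f'before-call.{service_id}.{operation_name}'

    assert before == after == 'before-call.dynamodb.GetItem'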
diff --git a/aiobotocore/credentials.py b/aiobotocore/credentials.py
index 885b71c8..8d0db499 100644
--- a/aiobotocore/credentials.py
+++ b/aiobotocore/credentials.py
@@ -796,7 +796,7 @@ async def _resolve_credentials_from_source(
                 provider=credential_source,
                 error_msg=(
                     'No credentials found in credential_source referenced '
-                    'in profile %s' % profile_name
+                    f'in profile {profile_name}'
                 ),
             )
         return credentials
diff --git a/aiobotocore/discovery.py b/aiobotocore/discovery.py
index 09361831..b9ac6b3f 100644
--- a/aiobotocore/discovery.py
+++ b/aiobotocore/discovery.py
@@ -33,8 +33,7 @@ async def describe_endpoint(self, **kwargs):
         if not self._always_discover and not discovery_required:
             # Discovery set to only run on required operations
             logger.debug(
-                'Optional discovery disabled. Skipping discovery for Operation: %s'
-                % operation
+                f'Optional discovery disabled. Skipping discovery for Operation: {operation}'
             )
             return None

diff --git a/aiobotocore/endpoint.py b/aiobotocore/endpoint.py
index b968d386..1956f86d 100644
--- a/aiobotocore/endpoint.py
+++ b/aiobotocore/endpoint.py
@@ -80,9 +80,7 @@ async def create_request(self, params, operation_model=None):
                 ]
             )
             service_id = operation_model.service_model.service_id.hyphenize()
-            event_name = 'request-created.{service_id}.{op_name}'.format(
-                service_id=service_id, op_name=operation_model.name
-            )
+            event_name = f'request-created.{service_id}.{operation_model.name}'
             await self._event_emitter.emit(
                 event_name,
                 request=request,
@@ -124,9 +122,9 @@ async def _send_request(self, request_dict, operation_model):
         ):
             # We want to share num retries, not num attempts.
             total_retries = attempts - 1
-            success_response[1]['ResponseMetadata'][
-                'RetryAttempts'
-            ] = total_retries
+            success_response[1]['ResponseMetadata']['RetryAttempts'] = (
+                total_retries
+            )
         if exception is not None:
             raise exception
         else:
@@ -201,9 +199,9 @@ async def _do_get_response(self, request, operation_model, context):
             )

             http_response_record_dict = response_dict.copy()
-            http_response_record_dict[
-                'streaming'
-            ] = operation_model.has_streaming_output
+            http_response_record_dict['streaming'] = (
+                operation_model.has_streaming_output
+            )
             history_recorder.record('HTTP_RESPONSE', http_response_record_dict)

             protocol = operation_model.metadata['protocol']
@@ -307,7 +305,7 @@ def create_endpoint(
         if not is_valid_endpoint_url(
             endpoint_url
         ) and not is_valid_ipv6_endpoint_url(endpoint_url):
-            raise ValueError("Invalid endpoint: %s" % endpoint_url)
+            raise ValueError(f"Invalid endpoint: {endpoint_url}")

         if proxies is None:
             proxies = self._get_proxies(endpoint_url)
diff --git a/aiobotocore/httpchecksum.py b/aiobotocore/httpchecksum.py
index e9861e0b..efd2a873 100644
--- a/aiobotocore/httpchecksum.py
+++ b/aiobotocore/httpchecksum.py
@@ -79,7 +79,7 @@ async def handle_checksum_body(
         return

     for algorithm in algorithms:
-        header_name = "x-amz-checksum-%s" % algorithm
+        header_name = f"x-amz-checksum-{algorithm}"
         # If the header is not found, check the next algorithm
         if header_name not in headers:
             continue
@@ -113,7 +113,7 @@ async def handle_checksum_body(

 def _handle_streaming_response(http_response, response, algorithm):
     checksum_cls = _CHECKSUM_CLS.get(algorithm)
-    header_name = "x-amz-checksum-%s" % algorithm
+    header_name = f"x-amz-checksum-{algorithm}"
     return StreamingChecksumBody(
         http_response.raw,
         response["headers"].get("content-length"),
@@ -124,18 +124,15 @@ def _handle_streaming_response(http_response, response, algorithm):

 async def _handle_bytes_response(http_response, response, algorithm):
     body = await http_response.content
-    header_name = "x-amz-checksum-%s" % algorithm
+    header_name = f"x-amz-checksum-{algorithm}"
     checksum_cls = _CHECKSUM_CLS.get(algorithm)
     checksum = checksum_cls()
     checksum.update(body)
     expected = response["headers"][header_name]
     if checksum.digest() != base64.b64decode(expected):
         error_msg = (
-            "Expected checksum %s did not match calculated checksum: %s"
-            % (
-                expected,
-                checksum.b64digest(),
-            )
+            f"Expected checksum {expected} did not match calculated "
+            f"checksum: {checksum.b64digest()}"
         )
         raise FlexibleChecksumError(error_msg=error_msg)
     return body
@@ -157,7 +154,7 @@ def apply_request_checksum(request):
             _apply_request_trailer_checksum(request)
         else:
             raise FlexibleChecksumError(
-                error_msg="Unknown checksum variant: %s" % algorithm["in"]
+                error_msg="Unknown checksum variant: {}".format(algorithm["in"])
             )

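Note: the httpchecksum.py hunks only reword header and error strings, but the flow around them is easy to lose in a diff: the response body's digest is recomputed locally and compared against the base64-encoded value the service returned in the x-amz-checksum-* header. A rough standalone sketch of that comparison, assuming SHA-256 (the real code resolves the checksum class from _CHECKSUM_CLS):

    import base64
    import hashlib

    def verify_body(body: bytes, headers: dict, algorithm: str = 'sha256'):
        # Mirrors _handle_bytes_response: recompute the digest, then
        # compare it with the base64-decoded header value.
        header_name = f'x-amz-checksum-{algorithm}'
        expected = headers[header_name]
        if hashlib.sha256(body).digest() != base64.b64decode(expected):
            raise ValueError(f'checksum mismatch, expected {expected}')
        return body

    header = base64.b64encode(hashlib.sha256(b'payload').digest()).decode()
    verify_body(b'payload', {'x-amz-checksum-sha256': header})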
diff --git a/aiobotocore/httpsession.py b/aiobotocore/httpsession.py
index 4c527fb5..67dcb822 100644
--- a/aiobotocore/httpsession.py
+++ b/aiobotocore/httpsession.py
@@ -173,9 +173,9 @@ def _create_connector(self, proxy_url):
     async def _get_session(self, proxy_url):
         if not (session := self._sessions.get(proxy_url)):
             connector = self._create_connector(proxy_url)
-            self._sessions[
-                proxy_url
-            ] = session = await self._exit_stack.enter_async_context(
+            self._sessions[proxy_url] = (
+                session
+            ) = await self._exit_stack.enter_async_context(
                 aiohttp.ClientSession(
                     connector=connector,
                     timeout=self._timeout,
diff --git a/aiobotocore/regions.py b/aiobotocore/regions.py
index b91b95dd..967dd5bf 100644
--- a/aiobotocore/regions.py
+++ b/aiobotocore/regions.py
@@ -25,7 +25,7 @@ async def construct_endpoint(
             operation_model, call_args, request_context
         )
         LOG.debug(
-            'Calling endpoint provider with parameters: %s' % provider_params
+            f'Calling endpoint provider with parameters: {provider_params}'
         )
         try:
             provider_result = self._provider.resolve_endpoint(
@@ -39,7 +39,7 @@ async def construct_endpoint(
                 raise
             else:
                 raise botocore_exception from ex
-        LOG.debug('Endpoint provider result: %s' % provider_result.url)
+        LOG.debug(f'Endpoint provider result: {provider_result.url}')

         # The endpoint provider does not support non-secure transport.
         if not self._use_ssl and provider_result.url.startswith('https://'):
@@ -98,7 +98,7 @@ async def _get_customized_builtins(
         customized_builtins = copy.copy(self._builtins)
         # Handlers are expected to modify the builtins dict in place.
         await self._event_emitter.emit(
-            'before-endpoint-resolution.%s' % service_id,
+            f'before-endpoint-resolution.{service_id}',
             builtins=customized_builtins,
             model=operation_model,
             params=call_args,
diff --git a/aiobotocore/retries/adaptive.py b/aiobotocore/retries/adaptive.py
index 45f2e130..0adca11a 100644
--- a/aiobotocore/retries/adaptive.py
+++ b/aiobotocore/retries/adaptive.py
@@ -1,4 +1,5 @@
 """An async reimplementation of the blocking elements from botocore.retries.adaptive."""
+
 import asyncio
 import logging

diff --git a/aiobotocore/retries/bucket.py b/aiobotocore/retries/bucket.py
index c154a950..c7dfd146 100644
--- a/aiobotocore/retries/bucket.py
+++ b/aiobotocore/retries/bucket.py
@@ -1,4 +1,5 @@
 """An async reimplementation of the blocking elements from botocore.retries.bucket."""
+
 import asyncio

 from botocore.exceptions import CapacityNotAvailableError
diff --git a/aiobotocore/retries/standard.py b/aiobotocore/retries/standard.py
index ad56d7bf..da4c0efb 100644
--- a/aiobotocore/retries/standard.py
+++ b/aiobotocore/retries/standard.py
@@ -40,9 +40,9 @@ def register_retry_handler(client, max_attempts=DEFAULT_MAX_ATTEMPTS):
         retry_quota=retry_quota,
     )

-    unique_id = 'retry-config-%s' % service_event_name
+    unique_id = f'retry-config-{service_event_name}'
     client.meta.events.register(
-        'needs-retry.%s' % service_event_name,
+        f'needs-retry.{service_event_name}',
         handler.needs_retry,
         unique_id=unique_id,
     )
@@ -82,9 +82,7 @@ async def should_retry(self, context):


 class AioStandardRetryConditions(StandardRetryConditions):
-    def __init__(
-        self, max_attempts=DEFAULT_MAX_ATTEMPTS
-    ):  # noqa: E501, lgtm [py/missing-call-to-init]
+    def __init__(self, max_attempts=DEFAULT_MAX_ATTEMPTS):  # noqa: E501, lgtm [py/missing-call-to-init]
         # Note: This class is for convenience so you can have the
         # standard retry condition in a single class.
         self._max_attempts_checker = MaxAttemptsChecker(max_attempts)
diff --git a/aiobotocore/retryhandler.py b/aiobotocore/retryhandler.py
index df7cbb18..f73216bd 100644
--- a/aiobotocore/retryhandler.py
+++ b/aiobotocore/retryhandler.py
@@ -130,9 +130,9 @@ async def _call(
             if attempt_number >= self._max_attempts:
                 # explicitly set MaxAttemptsReached
                 if response is not None and 'ResponseMetadata' in response[1]:
-                    response[1]['ResponseMetadata'][
-                        'MaxAttemptsReached'
-                    ] = True
+                    response[1]['ResponseMetadata']['MaxAttemptsReached'] = (
+                        True
+                    )
                 logger.debug(
                     "Reached the maximum number of retry attempts: %s",
                     attempt_number,
diff --git a/aiobotocore/session.py b/aiobotocore/session.py
index 8d31a92d..be40b7e0 100644
--- a/aiobotocore/session.py
+++ b/aiobotocore/session.py
@@ -53,7 +53,7 @@ def _set_user_agent_for_session(self):
         self.user_agent_name = 'aiobotocore'
         self.user_agent_version = __version__
-        self.user_agent_extra = 'botocore/%s' % botocore_version
+        self.user_agent_extra = f'botocore/{botocore_version}'

     def _create_token_resolver(self):
         return create_token_resolver(self)
@@ -108,7 +108,7 @@ async def get_service_data(self, service_name, api_version=None):
         )
         service_id = EVENT_ALIASES.get(service_name, service_name)
         await self._events.emit(
-            'service-data-loaded.%s' % service_id,
+            f'service-data-loaded.{service_id}',
             service_data=service_data,
             service_name=service_name,
             session=self,
diff --git a/aiobotocore/signers.py b/aiobotocore/signers.py
index dbabce87..ba2679e2 100644
--- a/aiobotocore/signers.py
+++ b/aiobotocore/signers.py
@@ -45,9 +45,7 @@ async def sign(

         # Allow mutating request before signing
         await self._event_emitter.emit(
-            'before-sign.{}.{}'.format(
-                self._service_id.hyphenize(), operation_name
-            ),
+            f'before-sign.{self._service_id.hyphenize()}.{operation_name}',
             request=request,
             signing_name=signing_name,
             region_name=self._region_name,
@@ -111,9 +109,7 @@ async def _choose_signer(self, operation_name, signing_type, context):
             signature_version += suffix

         handler, response = await self._event_emitter.emit_until_response(
-            'choose-signer.{}.{}'.format(
-                self._service_id.hyphenize(), operation_name
-            ),
+            f'choose-signer.{self._service_id.hyphenize()}.{operation_name}',
             signing_name=signing_name,
             region_name=region_name,
             signature_version=signature_version,
diff --git a/aiobotocore/stub.py b/aiobotocore/stub.py
index 9423a822..e2049c8a 100644
--- a/aiobotocore/stub.py
+++ b/aiobotocore/stub.py
@@ -7,8 +7,8 @@ class AioStubber(Stubber):
     def _add_response(self, method, service_response, expected_params):
         if not hasattr(self.client, method):
             raise ValueError(
-                "Client %s does not have method: %s"
-                % (self.client.meta.service_model.service_name, method)
+                f"Client {self.client.meta.service_model.service_name} "
+                f"does not have method: {method}"
             )  # pragma: no cover

         # Create a successful http response
diff --git a/aiobotocore/tokens.py b/aiobotocore/tokens.py
index 40efc7a1..33c0ee41 100644
--- a/aiobotocore/tokens.py
+++ b/aiobotocore/tokens.py
@@ -24,9 +24,7 @@ def create_token_resolver(session):


 class AioDeferredRefreshableToken(DeferredRefreshableToken):
-    def __init__(
-        self, method, refresh_using, time_fetcher=_utc_now
-    ):  # noqa: E501, lgtm [py/missing-call-to-init]
+    def __init__(self, method, refresh_using, time_fetcher=_utc_now):  # noqa: E501, lgtm [py/missing-call-to-init]
         self._time_fetcher = time_fetcher
         self._refresh_using = refresh_using
         self.method = method
diff --git a/aiobotocore/utils.py b/aiobotocore/utils.py
index b97318af..a540de69 100644
--- a/aiobotocore/utils.py
+++ b/aiobotocore/utils.py
@@ -478,17 +478,16 @@ async def redirect_from_error(

         if new_region is None:
             logger.debug(
-                "S3 client configured for region %s but the bucket %s is not "
-                "in that region and the proper region could not be "
-                "automatically determined." % (client_region, bucket)
+                f"S3 client configured for region {client_region} but the "
+                f"bucket {bucket} is not in that region and the proper region "
+                "could not be automatically determined."
             )
             return

         logger.debug(
-            "S3 client configured for region %s but the bucket %s is in region"
-            " %s; Please configure the proper region to avoid multiple "
-            "unnecessary redirects and signing attempts."
-            % (client_region, bucket, new_region)
+            f"S3 client configured for region {client_region} but the bucket {bucket} "
+            f"is in region {new_region}; Please configure the proper region to "
+            f"avoid multiple unnecessary redirects and signing attempts."
         )
         # Adding the new region to _cache will make construct_endpoint() to
         # use the new region as value for the AWS::Region builtin parameter.
@@ -615,17 +614,16 @@ async def redirect_from_error(

         if new_region is None:
             logger.debug(
-                "S3 client configured for region %s but the bucket %s is not "
+                f"S3 client configured for region {client_region} but the bucket {bucket} is not "
                 "in that region and the proper region could not be "
-                "automatically determined." % (client_region, bucket)
+                "automatically determined."
             )
             return

         logger.debug(
-            "S3 client configured for region %s but the bucket %s is in region"
-            " %s; Please configure the proper region to avoid multiple "
+            f"S3 client configured for region {client_region} but the bucket {bucket} is in region"
+            f" {new_region}; Please configure the proper region to avoid multiple "
             "unnecessary redirects and signing attempts."
-            % (client_region, bucket, new_region)
         )
         endpoint = self._endpoint_resolver.resolve('s3', new_region)
         endpoint = endpoint['endpoint_url']
@@ -670,9 +668,7 @@ async def get_bucket_region(self, bucket, response):


 class AioContainerMetadataFetcher(ContainerMetadataFetcher):
-    def __init__(
-        self, session=None, sleep=asyncio.sleep
-    ):  # noqa: E501, lgtm [py/missing-call-to-init]
+    def __init__(self, session=None, sleep=asyncio.sleep):  # noqa: E501, lgtm [py/missing-call-to-init]
         if session is None:
             session = _RefCountedSession(timeout=self.TIMEOUT_SECONDS)
         self._session = session
diff --git a/aiobotocore/waiter.py b/aiobotocore/waiter.py
index 989c53a5..0a5ce660 100644
--- a/aiobotocore/waiter.py
+++ b/aiobotocore/waiter.py
@@ -58,8 +58,7 @@ async def wait(self, **kwargs):

     # Rename the waiter class based on the type of waiter.
     waiter_class_name = str(
-        '%s.Waiter.%s'
-        % (get_service_module_name(client.meta.service_model), waiter_name)
+        f'{get_service_module_name(client.meta.service_model)}.Waiter.{waiter_name}'
     )

     # Create the new waiter class
@@ -109,8 +108,7 @@ async def wait(self, **kwargs):
                     # can just handle here by raising an exception.
                     raise WaiterError(
                         name=self.name,
-                        reason='An error occurred (%s): %s'
-                        % (
+                        reason='An error occurred ({}): {}'.format(
                             response['Error'].get('Code', 'Unknown'),
                             response['Error'].get('Message', 'Unknown'),
                         ),
@@ -122,9 +120,7 @@ async def wait(self, **kwargs):
                 )
                 return response
             if current_state == 'failure':
-                reason = 'Waiter encountered a terminal failure state: %s' % (
-                    acceptor.explanation
-                )
+                reason = f'Waiter encountered a terminal failure state: {acceptor.explanation}'
                 raise WaiterError(
                     name=self.name,
                     reason=reason,
@@ -135,8 +131,8 @@ async def wait(self, **kwargs):
                     reason = 'Max attempts exceeded'
                 else:
                     reason = (
-                        'Max attempts exceeded. Previously accepted state: %s'
-                        % (acceptor.explanation)
+                        f'Max attempts exceeded. Previously accepted state: '
+                        f'{acceptor.explanation}'
                     )
                 raise WaiterError(
                     name=self.name,
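Note: the waiter internals patched above back the public get_waiter() API. A hedged usage sketch; the bucket name is made up and reachable credentials are required:

    import asyncio

    from aiobotocore.session import get_session

    async def main():
        session = get_session()
        async with session.create_client('s3') as client:
            # get_waiter() raises the reworded ValueError above when the
            # waiter name is unknown.
            waiter = client.get_waiter('bucket_exists')
            await waiter.wait(Bucket='example-bucket')  # hypothetical bucket

    asyncio.run(main())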
+quote-style = "preserve" +indent-style = "space" +skip-magic-trailing-comma = false +line-ending = "auto" + +docstring-code-format = false +docstring-code-line-length = "dynamic" [tool.coverage.report] exclude_also = [ diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 8e50a8ee..00000000 --- a/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -[flake8] -ignore = E203,E226,E501,E731,W503,W504 -exclude = .git,__pycache__,.pytest_cache,.venv,*.egg-info,build,dist diff --git a/setup.py b/setup.py index 155f8db1..cd600536 100644 --- a/setup.py +++ b/setup.py @@ -7,15 +7,15 @@ # NOTE: When updating botocore make sure to update awscli/boto3 versions below install_requires = [ # pegged to also match items in `extras_require` - 'botocore>=1.34.70,<1.34.132', + 'botocore>=1.34.70,<1.34.163', 'aiohttp>=3.9.2,<4.0.0', 'wrapt>=1.10.10, <2.0.0', 'aioitertools>=0.5.1,<1.0.0', ] extras_require = { - 'awscli': ['awscli>=1.32.70,<1.33.14'], - 'boto3': ['boto3>=1.34.70,<1.34.132'], + 'awscli': ['awscli>=1.32.70,<1.33.45'], + 'boto3': ['boto3>=1.34.70,<1.34.163'], } diff --git a/tests/boto_tests/test_credentials.py b/tests/boto_tests/test_credentials.py index 54ee44eb..518dd3be 100644 --- a/tests/boto_tests/test_credentials.py +++ b/tests/boto_tests/test_credentials.py @@ -3,6 +3,7 @@ https://github.com/boto/botocore/blob/develop/tests/unit/test_credentials.py and adapted to work with asyncio and pytest """ + import binascii import os import sys @@ -625,9 +626,7 @@ async def test_assumerolewebidentprovider_no_cache(): # From class TestContainerProvider(BaseEnvVar): def full_url(url): - return 'http://{}{}'.format( - credentials.AioContainerMetadataFetcher.IP_ADDRESS, url - ) + return f'http://{credentials.AioContainerMetadataFetcher.IP_ADDRESS}{url}' # From class TestEnvVar(BaseEnvVar): @@ -965,9 +964,9 @@ def _create_assume_role_response(credentials, expiration=None): def _create_random_credentials(): return Credentials( - 'fake-%s' % random_chars(15), - 'fake-%s' % random_chars(35), - 'fake-%s' % random_chars(45), + f'fake-{random_chars(15)}', + f'fake-{random_chars(35)}', + f'fake-{random_chars(45)}', ) @@ -1094,9 +1093,7 @@ def assume_role_setup(base_assume_role_test_setup): credential_process = os.path.join( current_dir, 'utils', 'credentialprocess.py' ) - self.credential_process = '{} {}'.format( - sys.executable, credential_process - ) + self.credential_process = f'{sys.executable} {credential_process}' yield self diff --git a/tests/conftest.py b/tests/conftest.py index 76fe117a..f04afa68 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -80,9 +80,7 @@ async def assert_num_uploads_found( await asyncio.sleep(2) pytest.fail( - "Expected to see {} uploads, instead saw: {}".format( - num_uploads, amount_seen - ) + f"Expected to see {num_uploads} uploads, instead saw: {amount_seen}" ) diff --git a/tests/test_basic_s3.py b/tests/test_basic_s3.py index fb92ad36..7ad3fb3a 100644 --- a/tests/test_basic_s3.py +++ b/tests/test_basic_s3.py @@ -95,7 +95,7 @@ async def test_can_delete_urlencoded_object( @pytest.mark.moto async def test_can_paginate(s3_client, bucket_name, create_object): for i in range(5): - key_name = 'key%s' % i + key_name = f'key{i}' await create_object(key_name) paginator = s3_client.get_paginator('list_objects') @@ -113,7 +113,7 @@ async def test_can_paginate_with_page_size( s3_client, bucket_name, create_object ): for i in range(5): - key_name = 'key%s' % i + key_name = f'key{i}' await create_object(key_name) paginator = s3_client.get_paginator('list_objects') @@ -133,7 +133,7 @@ 
async def test_can_paginate_with_page_size( async def test_can_search_paginate(s3_client, bucket_name, create_object): keys = [] for i in range(5): - key_name = 'key%s' % i + key_name = f'key{i}' keys.append(key_name) await create_object(key_name) @@ -147,7 +147,7 @@ async def test_can_search_paginate(s3_client, bucket_name, create_object): @pytest.mark.moto async def test_can_paginate_iterator(s3_client, bucket_name, create_object): for i in range(5): - key_name = 'key%s' % i + key_name = f'key{i}' await create_object(key_name) paginator = s3_client.get_paginator('list_objects') @@ -169,7 +169,7 @@ async def test_result_key_iters(s3_client, bucket_name, create_object): for i in range(5): key_name = f'key/{i}/{i}' await create_object(key_name) - key_name2 = 'key/%s' % i + key_name2 = f'key/{i}' await create_object(key_name2) paginator = s3_client.get_paginator('list_objects') @@ -565,7 +565,7 @@ async def test_presign_sigv4( ) msg = ( "Host was suppose to be the us-east-1 endpoint, " - "instead got: %s" % presigned_url + f"instead got: {presigned_url}" ) assert presigned_url.startswith( f'https://{bucket_name}.s3.amazonaws.com/{key}' diff --git a/tests/test_config.py b/tests/test_config.py index 8630aa4e..c42fb76a 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -139,8 +139,7 @@ def test_merge(): @pytest.mark.moto @pytest.mark.asyncio async def test_config_http_session_cls(): - class SuccessExc(Exception): - ... + class SuccessExc(Exception): ... class MyHttpSession(AIOHTTPSession): async def send(self, request): diff --git a/tests/test_patches.py b/tests/test_patches.py index e11f7c8d..f00543bc 100644 --- a/tests/test_patches.py +++ b/tests/test_patches.py @@ -149,7 +149,8 @@ 'eeb7c4730ac86aec37de53b2be0779490b05f50b', }, ClientCreator._create_client_class: { - 'fcecaf8d4f2c1ac3c5d0eb50c573233ef86d641d' + 'fcecaf8d4f2c1ac3c5d0eb50c573233ef86d641d', + '320c74206570605ff55ca32535b3f6108a7d8c1f', }, ClientCreator._register_endpoint_discovery: { '483c6c8e035810d1b76110fc1956de76943c2f18' @@ -173,12 +174,17 @@ '9ec4ff68599544b4f46067b3783287862d38fb50' }, ClientCreator._register_legacy_retries: { - '000b2f2a122602e2e741ec2e89308dc2e2b67329' + '000b2f2a122602e2e741ec2e89308dc2e2b67329', + '06864c8ecac153fa36d9d57f0057cb95909ee7cd', }, BaseClient._make_api_call: { '2cb11088d36a89cf9f5c41508bce908acbde24c4', + '490a95d7f829ce1fc2e3fbb6169ee63f04d6aeeb', + }, + BaseClient._make_request: { + 'cfd8bbf19ea132134717cdf9c460694ddacdbf58', + 'bdfa0e9843e97bee2da16f17c6a7685ad4a0a44b', }, - BaseClient._make_request: {'cfd8bbf19ea132134717cdf9c460694ddacdbf58'}, BaseClient._convert_to_request_dict: { '5e0a374926b6ee1a8715963ab551e126506e7fc9' }, @@ -191,6 +197,7 @@ }, BaseClient.get_waiter: { '4a4aeabe53af25d3737204187a31f930230864b4', + '40484242690c0679c3b13fddf0d6c9a26f7b25d2', }, BaseClient.__getattr__: {'3ec17f468f50789fa633d6041f40b66a2f593e77'}, # config.py @@ -321,7 +328,8 @@ 'a470795f6ba451cf99ce7456fef24777f8087654' }, AssumeRoleProvider._resolve_credentials_from_source: { - 'de41138b36bfc74d7f8a21f6002b55279d3de017' + 'de41138b36bfc74d7f8a21f6002b55279d3de017', + '2f083f5c046de432194a2925fafc9478a5977586', }, CanonicalNameCredentialSourcer.source_credentials: { '602930a78e0e64e3b313a046aab5edc3bcf5c2d9' @@ -344,17 +352,27 @@ }, # endpoint.py convert_to_response_dict: {'5b7701c1f5b3cb2daa6eb307cdbdbbb2e9d33e5f'}, - Endpoint.create_request: {'37d0fbd02f91aef6c0499a2d0a725bf067c3ce8b'}, - Endpoint._send_request: {'5d40748a95c3005728e6548b402b90cb57d6f575'}, + 
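Note: the test_basic_s3.py changes are again pure f-string conversions; the async pagination API they exercise is unchanged. For reference, a sketch of that API (the bucket name is hypothetical):

    import asyncio

    from aiobotocore.session import get_session

    async def list_keys():
        session = get_session()
        async with session.create_client('s3') as client:
            paginator = client.get_paginator('list_objects')
            async for page in paginator.paginate(Bucket='example-bucket'):
                for obj in page.get('Contents', []):
                    print(obj['Key'])

    asyncio.run(list_keys())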
diff --git a/tests/test_config.py b/tests/test_config.py
index 8630aa4e..c42fb76a 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -139,8 +139,7 @@ def test_merge():
 @pytest.mark.moto
 @pytest.mark.asyncio
 async def test_config_http_session_cls():
-    class SuccessExc(Exception):
-        ...
+    class SuccessExc(Exception): ...

     class MyHttpSession(AIOHTTPSession):
         async def send(self, request):
diff --git a/tests/test_patches.py b/tests/test_patches.py
index e11f7c8d..f00543bc 100644
--- a/tests/test_patches.py
+++ b/tests/test_patches.py
@@ -149,7 +149,8 @@
         'eeb7c4730ac86aec37de53b2be0779490b05f50b',
     },
     ClientCreator._create_client_class: {
-        'fcecaf8d4f2c1ac3c5d0eb50c573233ef86d641d'
+        'fcecaf8d4f2c1ac3c5d0eb50c573233ef86d641d',
+        '320c74206570605ff55ca32535b3f6108a7d8c1f',
     },
     ClientCreator._register_endpoint_discovery: {
         '483c6c8e035810d1b76110fc1956de76943c2f18'
@@ -173,12 +174,17 @@
         '9ec4ff68599544b4f46067b3783287862d38fb50'
     },
     ClientCreator._register_legacy_retries: {
-        '000b2f2a122602e2e741ec2e89308dc2e2b67329'
+        '000b2f2a122602e2e741ec2e89308dc2e2b67329',
+        '06864c8ecac153fa36d9d57f0057cb95909ee7cd',
     },
     BaseClient._make_api_call: {
         '2cb11088d36a89cf9f5c41508bce908acbde24c4',
+        '490a95d7f829ce1fc2e3fbb6169ee63f04d6aeeb',
+    },
+    BaseClient._make_request: {
+        'cfd8bbf19ea132134717cdf9c460694ddacdbf58',
+        'bdfa0e9843e97bee2da16f17c6a7685ad4a0a44b',
     },
-    BaseClient._make_request: {'cfd8bbf19ea132134717cdf9c460694ddacdbf58'},
     BaseClient._convert_to_request_dict: {
         '5e0a374926b6ee1a8715963ab551e126506e7fc9'
     },
@@ -191,6 +197,7 @@
     },
     BaseClient.get_waiter: {
         '4a4aeabe53af25d3737204187a31f930230864b4',
+        '40484242690c0679c3b13fddf0d6c9a26f7b25d2',
     },
     BaseClient.__getattr__: {'3ec17f468f50789fa633d6041f40b66a2f593e77'},
     # config.py
@@ -321,7 +328,8 @@
         'a470795f6ba451cf99ce7456fef24777f8087654'
     },
     AssumeRoleProvider._resolve_credentials_from_source: {
-        'de41138b36bfc74d7f8a21f6002b55279d3de017'
+        'de41138b36bfc74d7f8a21f6002b55279d3de017',
+        '2f083f5c046de432194a2925fafc9478a5977586',
     },
     CanonicalNameCredentialSourcer.source_credentials: {
         '602930a78e0e64e3b313a046aab5edc3bcf5c2d9'
@@ -344,17 +352,27 @@
     },
     # endpoint.py
     convert_to_response_dict: {'5b7701c1f5b3cb2daa6eb307cdbdbbb2e9d33e5f'},
-    Endpoint.create_request: {'37d0fbd02f91aef6c0499a2d0a725bf067c3ce8b'},
-    Endpoint._send_request: {'5d40748a95c3005728e6548b402b90cb57d6f575'},
+    Endpoint.create_request: {
+        '37d0fbd02f91aef6c0499a2d0a725bf067c3ce8b',
+        'f78944c88453219ed3f0f5c8eefccdc69faf3046',
+    },
+    Endpoint._send_request: {
+        '5d40748a95c3005728e6548b402b90cb57d6f575',
+        '0158d0b6c492fde95947b49c34fd0927e5d145a1',
+    },
     Endpoint._get_response: {'bbf10e6e07147d50e09d7205bf0883bd673a8bf3'},
-    Endpoint._do_get_response: {'5afcfe76196406903afb24e05e3dd0feeac1a23d'},
+    Endpoint._do_get_response: {
+        '5afcfe76196406903afb24e05e3dd0feeac1a23d',
+        '4a1cab721ecf2777419ba070c38db77ae3b6df1e',
+    },
     Endpoint._needs_retry: {'f718e2ff874763a677648fe6f87cc65e4cec2792'},
     Endpoint._send: {'644c7e5bb88fecaa0b2a204411f8c7e69cc90bf1'},
     Endpoint._add_modeled_error_fields: {
         'd0390647f2d7a4a325be048dcda4dcc7f42fdd17'
     },
     EndpointCreator.create_endpoint: {
-        '863e17b1299f9fda2cef5be3297d470d1bfa86ae'
+        '863e17b1299f9fda2cef5be3297d470d1bfa86ae',
+        'a49253f3c80fc69f66fd64c8549f6daa47f7c6ad',
     },
     # eventstream.py
     EventStream._create_raw_event_generator: {
@@ -412,13 +430,15 @@
     create_parser: {'37e9f1c3b60de17f477a9b79eae8e1acaa7c89d7'},
     # regions.py
     EndpointRulesetResolver.construct_endpoint: {
-        'ccbed61e316a0e92e1d0f67c554ee15efa4ee6b8'
+        'ccbed61e316a0e92e1d0f67c554ee15efa4ee6b8',
+        'ab22bb1ec171713e548567fbe84dd88a3d5f4b76',
     },
     EndpointRulesetResolver._get_provider_params: {
         'e17f8fce4a5d8adba932cb85e588f369845ce534'
     },
     EndpointRulesetResolver._get_customized_builtins: {
-        '41085e0e1ac19915c24339f25b8d966708905fd0'
+        '41085e0e1ac19915c24339f25b8d966708905fd0',
+        '51f00c9f658df0cd0e140aaed24b517954f9300c',
     },
     # response.py
     StreamingBody: {'73cb1276dfb509331b964d3d5ed69e5efa008de5'},
@@ -437,7 +457,10 @@
     },
     Session.get_credentials: {'718da08b630569e631f93aedd65f1d9215bfc30b'},
     get_session: {'c47d588f5da9b8bde81ccc26eaef3aee19ddd901'},
-    Session.get_service_data: {'3879b969c0c2b1d5b454006a1025deb4322ae804'},
+    Session.get_service_data: {
+        '3879b969c0c2b1d5b454006a1025deb4322ae804',
+        'c6b755f2eff7277ccc6023552d306ff823280404',
+    },
     Session.get_service_model: {'1c8f93e6fb9913e859e43aea9bc2546edbea8365'},
     Session.get_available_regions: {
         '9fb4df0b7d082a74d524a4a15aaf92a2717e0358'
@@ -449,12 +472,16 @@
     RequestSigner.handler: {'371909df136a0964ef7469a63d25149176c2b442'},
     RequestSigner.sign: {
         '2f1f45a6fcfcca1be2c5e292c9e1b80453e7fa57',
+        'deb7af66bc099801810eee5b00db83da2563d0a3',
     },
     RequestSigner.get_auth: {'4f8099bef30f9a72fa3bcaa1bd3d22c4fbd224a8'},
     RequestSigner.get_auth_instance: {
         'b78756b9d4f7a5bce9630195d761bab557677225',
     },
-    RequestSigner._choose_signer: {'bd0e9784029b8aa182b5aec73910d94cb67c36b0'},
+    RequestSigner._choose_signer: {
+        'bd0e9784029b8aa182b5aec73910d94cb67c36b0',
+        '23861aeaf48509e59c131547d98cdeb4fc4af1d3',
+    },
     RequestSigner.generate_presigned_url: {
         '417682868eacc10bf4c65f3dfbdba7d20d9250db'
     },
@@ -545,13 +572,15 @@
         'e7e5a8ce541110eb79bf98414171d3a1c137e32b'
     },
     S3RegionRedirectorv2.redirect_from_error: {
-        'ac37ca2ca48f7bde42d9659c01d5bd5bc08a78f9'
+        'ac37ca2ca48f7bde42d9659c01d5bd5bc08a78f9',
+        'bc01047b596b1d1113df7cc2481af9cca2a49267',
     },
     S3RegionRedirectorv2.get_bucket_region: {
         'b5bbc8b010576668dc2812d657c4b48af79e8f99'
     },
     S3RegionRedirector.redirect_from_error: {
-        '3863b2c6472513b7896bfccc9dfd2567c472f441'
+        '3863b2c6472513b7896bfccc9dfd2567c472f441',
+        'e1d93a4a85dfbfa810b9249da0b22ce14744b99d',
     },
     S3RegionRedirector.get_bucket_region: {
         'b5bbc8b010576668dc2812d657c4b48af79e8f99'
     },
@@ -585,8 +614,14 @@
     NormalizedOperationMethod.__call__: {
         '79723632d023739aa19c8a899bc2b814b8ab12ff'
     },
-    Waiter.wait: {'735608297a2a3d4572e6705daafcf4fc8556fc03'},
-    create_waiter_with_client: {'e6ea06674b6fdf9157c95757a12b3c9c35af531c'},
+    Waiter.wait: {
+        '735608297a2a3d4572e6705daafcf4fc8556fc03',
+        '00d3990fb22fee667235f4035a9754cda0ebd4d8',
+    },
+    create_waiter_with_client: {
+        'e6ea06674b6fdf9157c95757a12b3c9c35af531c',
+        '13d661baedd3db97e5291331ceebd4bbafaccf18',
+    },
     # handlers.py
     inject_presigned_url_rds: {'b5d45b339686346e81b255d4e8c36e76d3fe6a78'},
     inject_presigned_url_ec2: {'48e09a5e4e95577e716be30f2d2706949261a07f'},
@@ -603,7 +638,8 @@
         'd87eff9008356a6aaa9b7078f23ba7a9ff0c7a60'
     },
     EndpointDiscoveryManager.describe_endpoint: {
-        'b2f1b29177cf30f299e61b85ddec09eaa070e54e'
+        'b2f1b29177cf30f299e61b85ddec09eaa070e54e',
+        'cbd237b874daef01cf7be82fef30516557ba17f9',
     },
     EndpointDiscoveryManager._refresh_current_endpoints: {
         'f8a51047c8f395d9458a904e778a3ac156a11911'
@@ -617,7 +653,8 @@
     },
     # retries/standard.py
     standard.register_retry_handler: {
-        'da0ae35712211bc38938e93c4af8b7aeb999084e'
+        'da0ae35712211bc38938e93c4af8b7aeb999084e',
+        '09035248202bfd94c7ae7c532e515ec6fe2fab4b',
     },
     standard.RetryHandler.needs_retry: {
         '89a4148d7f4af9d2795d1d0189293528aa668b59'
     },
@@ -648,17 +685,27 @@
     # httpchecksum.py
     handle_checksum_body: {
         '898cee7a7a5e5a02af7e0e65dcbb8122257b85df',
+        '6f15cc120818413e89aac088d130c729ba3d422c',
+    },
+    _handle_streaming_response: {
+        '7ce971e012f9d4b04889f0af83f67281ed6a9e6e',
+        '2fb57a6073db3052ca22c335e779cd991357088d',
     },
-    _handle_streaming_response: {'7ce971e012f9d4b04889f0af83f67281ed6a9e6e'},
     StreamingChecksumBody: {
         '2c6eb22268d46abae261ce386eb2deabbc3a0dcd',
     },
-    _handle_bytes_response: {'0761c4590c6addbe8c674e40fca9f7dd375a184b'},
+    _handle_bytes_response: {
+        '0761c4590c6addbe8c674e40fca9f7dd375a184b',
+        '11dce986975df44f08ff61d7e86cba4f92f7c19f',
+    },
     AwsChunkedWrapper._make_chunk: {
         '097361692f0fd6c863a17dd695739629982ef7e4'
     },
     AwsChunkedWrapper.__iter__: {'261e26d1061655555fe3dcb2689d963e43f80fb0'},
-    apply_request_checksum: {'bcc044f0655f30769994efab72b29e76d73f7e39'},
+    apply_request_checksum: {
+        'bcc044f0655f30769994efab72b29e76d73f7e39',
+        '5ebac6a8f1475a6b42b356135bc5f5840ac07a55',
+    },
     _apply_request_trailer_checksum: {
         '28cdf19282be7cd2c99a734831ec4f489648bcc7'
     },
@@ -679,7 +726,8 @@
         'e599399167b1f278e4cd839170f887d60eea5bfa'
     },
     retryhandler.MaxAttemptsDecorator.__call__: {
-        '24b442126f0ff730be0ae64dc7158929d4d2fca7'
+        '24b442126f0ff730be0ae64dc7158929d4d2fca7',
+        '631b9963bec06a9d1264bbddc3ad003ae4896945',
     },
     retryhandler.MaxAttemptsDecorator._should_retry: {
         '581273f875bb779a9ff796df8c8597ec551abf97',
@@ -693,7 +741,10 @@
     retryhandler.CRC32Checker._check_response: {
         '3ee7afd0bb1a3bf53934d77e44f619962c52b0c9'
     },
-    stub.Stubber: {'bccf23c3733cc656b909f5130cba80dbc9540b05'},
+    stub.Stubber: {
+        'bccf23c3733cc656b909f5130cba80dbc9540b05',
+        '7c01f505134b5ea3f4886e2288ea7f389577efd5',
+    },
 }


@@ -724,9 +775,7 @@ def test_patches():

             if digest not in digests:
                 print(
-                    "Digest of {}:{} not found in: {}".format(
-                        obj.__qualname__, digest, digests
-                    )
+                    f"Digest of {obj.__qualname__}:{digest} not found in: {digests}"
                 )
                 success = False
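Note: the test_patches.py hunks register a second accepted digest per patched upstream function, since the relaxed botocore pin now spans two upstream implementations. The 40-hex-character strings suggest SHA-1 over the function source; a sketch of that guard mechanism, under the assumption that test_patches.py hashes inspect.getsource() this way:

    import hashlib
    import inspect

    from botocore.client import BaseClient

    # If botocore changes _make_api_call, this digest changes, the guard
    # test fails, and the matching aiobotocore override gets re-reviewed.
    source = inspect.getsource(BaseClient._make_api_call)
    print(hashlib.sha1(source.encode('utf-8')).hexdigest())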