Skip to content

Commit

Permalink
Merge pull request #530 from aiven/chore/add-pyupgrade
Browse files Browse the repository at this point in the history
chore: Add pyupgrade pre-commit hook
  • Loading branch information
tvainika authored Feb 1, 2023
2 parents 1981895 + c1f67ab commit b58b9d0
Show file tree
Hide file tree
Showing 21 changed files with 52 additions and 43 deletions.
6 changes: 6 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,12 @@ repos:
exclude: ^vendor/|^tests/.*/fixtures/.*|^tests/integration/test_data/.*
- id: debug-statements

- repo: https://github.com/asottile/pyupgrade
rev: "v3.3.1"
hooks:
- id: pyupgrade
args: ["--py37-plus"]

- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
Expand Down
2 changes: 1 addition & 1 deletion karapace/auth.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ async def close(self) -> None:

def _load_authfile(self) -> None:
try:
with open(self._auth_filename, "r") as authfile:
with open(self._auth_filename) as authfile:
authdata = json.load(authfile)

users = {
Expand Down
2 changes: 1 addition & 1 deletion karapace/kafka_rest_apis/admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def cluster_metadata(self, topics: List[str] = None, retries: int = 0) -> dict:
metadata_version = self._matching_api_version(MetadataRequest)
if metadata_version > 6 or metadata_version < 1:
raise UnrecognizedBrokerVersion(
"Kafka Admin interface cannot determine the controller using MetadataRequest_v{}.".format(metadata_version)
f"Kafka Admin interface cannot determine the controller using MetadataRequest_v{metadata_version}."
)
request = MetadataRequest[1](topics=topics)
future = self._send_request_to_least_loaded_node(request)
Expand Down
2 changes: 1 addition & 1 deletion karapace/master_coordinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@


def get_member_url(scheme: str, host: str, port: str) -> str:
    """Build the advertised member URL, e.g. ``http://host:8081``.

    :param scheme: URL scheme (``http`` or ``https``).
    :param host: hostname or IP address of the member.
    :param port: port the member listens on (kept as ``str`` per callers).
    :return: the formatted ``scheme://host:port`` URL.
    """
    # NOTE: the diff artifact left a duplicate, unreachable `.format()` return
    # here; both forms produce the same string, so only the f-string is kept.
    return f"{scheme}://{host}:{port}"


def get_member_configuration(*, host: str, port: int, scheme: str, master_eligibility: bool) -> JsonData:
Expand Down
1 change: 0 additions & 1 deletion karapace/protobuf/proto_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
# Ported from square/wire:
# wire-library/wire-schema/src/commonMain/kotlin/com/squareup/wire/schema/internal/parser/ProtoParser.kt

from builtins import str
from enum import Enum
from karapace.protobuf.enum_constant_element import EnumConstantElement
from karapace.protobuf.enum_element import EnumElement
Expand Down
6 changes: 3 additions & 3 deletions karapace/protobuf/protobuf_to_dict.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ def _get_field_mapping(pb, dict_value, strict):
continue
if key not in pb.DESCRIPTOR.fields_by_name:
if strict:
raise KeyError("%s does not have a field called %s" % (type(pb), key))
raise KeyError(f"{type(pb)} does not have a field called {key}")
continue
field_mapping.append((pb.DESCRIPTOR.fields_by_name[key], value, getattr(pb, key, None)))

Expand All @@ -208,7 +208,7 @@ def _get_field_mapping(pb, dict_value, strict):
# pylint: disable=protected-access
if ext_num not in pb._extensions_by_number:
if strict:
raise KeyError("%s does not have a extension with number %s. Perhaps you forgot to import it?" % (pb, key))
raise KeyError(f"{pb} does not have a extension with number {key}. Perhaps you forgot to import it?")
continue
# pylint: disable=protected-access

Expand Down Expand Up @@ -305,7 +305,7 @@ def _string_to_enum(field, input_value, strict=False):
except KeyError:
if strict:
# pylint: disable=raise-missing-from
raise KeyError("`%s` is not a valid value for field `%s`" % (input_value, field.name))
raise KeyError(f"`{input_value}` is not a valid value for field `{field.name}`")
return _string_to_enum(field, input_value.upper(), strict=True)
return input_value

Expand Down
8 changes: 4 additions & 4 deletions karapace/rapu.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import re
import time

SERVER_NAME = "Karapace/{}".format(__version__)
SERVER_NAME = f"Karapace/{__version__}"
JSON_CONTENT_TYPE = "application/json"

SCHEMA_CONTENT_TYPES = [
Expand Down Expand Up @@ -105,7 +105,7 @@ def get_header(self, header, default_value=None):
return self._header_cache[upper_cased]

def __repr__(self):
return "HTTPRequest(url=%s query=%s method=%s json=%r)" % (self.url, self.query, self.method, self.json)
return f"HTTPRequest(url={self.url} query={self.query} method={self.method} json={self.json!r})"


class HTTPResponse(Exception):
Expand Down Expand Up @@ -164,7 +164,7 @@ def __init__(
) -> None:
self.app_name = app_name
self.config = config
self.app_request_metric = "{}_request".format(app_name)
self.app_request_metric = f"{app_name}_request"
self.app = aiohttp.web.Application()
self.log = logging.getLogger(self.app_name)
self.stats = StatsClient(config=config)
Expand Down Expand Up @@ -351,7 +351,7 @@ async def _handle_request(
# On 204 - NO CONTENT there is no point of calculating cache headers
if is_success(status):
if resp_bytes:
etag = '"{}"'.format(hashlib.md5(resp_bytes).hexdigest())
etag = f'"{hashlib.md5(resp_bytes).hexdigest()}"'
else:
etag = '""'
if_none_match = request.headers.get("if-none-match")
Expand Down
4 changes: 2 additions & 2 deletions karapace/schema_backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ def restore_backup(self) -> None:
self.init_producer()
LOG.info("Starting backup restore for topic: %r", self.topic_name)

with open(self.backup_location, mode="r", encoding="utf8") as fp:
with open(self.backup_location, encoding="utf8") as fp:
if _check_backup_file_version(fp) == BackupVersion.V2:
self._restore_backup_version_2(fp)
else:
Expand All @@ -209,7 +209,7 @@ def _restore_backup_version_1_single_array(self, fp: IO) -> None:

def _restore_backup_version_2(self, fp: IO) -> None:
for line in fp:
hex_key, hex_value = [val.strip() for val in line.split("\t")] # strip to remove the linefeed
hex_key, hex_value = (val.strip() for val in line.split("\t")) # strip to remove the linefeed

key = base64.b16decode(hex_key).decode("utf8") if hex_key != "null" else hex_key
value = base64.b16decode(hex_value.strip()).decode("utf8") if hex_value != "null" else hex_value
Expand Down
8 changes: 4 additions & 4 deletions karapace/schema_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
import time

LOG = logging.getLogger(__name__)
X_REGISTRY_VERSION_HEADER = ("X-Registry-Version", f"karapace-{__version__}".encode("utf8"))
X_REGISTRY_VERSION_HEADER = ("X-Registry-Version", f"karapace-{__version__}".encode())


def _resolve_version(subject_data: SubjectData, version: Version) -> ResolvedVersion:
Expand Down Expand Up @@ -489,7 +489,7 @@ def send_config_message(
"keytype": "CONFIG",
}
)
value = '{{"compatibilityLevel":"{}"}}'.format(compatibility_level.value)
value = f'{{"compatibilityLevel":"{compatibility_level.value}"}}'
return self.send_kafka_message(key, value)

def send_config_subject_delete_message(self, subject: Subject) -> FutureRecordMetadata:
Expand All @@ -500,7 +500,7 @@ def send_config_subject_delete_message(self, subject: Subject) -> FutureRecordMe
"keytype": "CONFIG",
}
)
return self.send_kafka_message(key, "".encode("utf-8"))
return self.send_kafka_message(key, b"")

def send_delete_subject_message(self, subject: Subject, version: Version) -> FutureRecordMetadata:
key = self.key_formatter.format_key(
Expand All @@ -510,5 +510,5 @@ def send_delete_subject_message(self, subject: Subject, version: Version) -> Fut
"keytype": "DELETE_SUBJECT",
}
)
value = '{{"subject":"{}","version":{}}}'.format(subject, version)
value = f'{{"subject":"{subject}","version":{version}}}'
return self.send_kafka_message(key, value)
2 changes: 1 addition & 1 deletion karapace/serialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,7 +215,7 @@ async def deserialize(self, bytes_: bytes) -> dict:
# we should probably check for compatibility here
start_byte, schema_id = struct.unpack(HEADER_FORMAT, byte_arr)
if start_byte != START_BYTE:
raise InvalidMessageHeader("Start byte is %x and should be %x" % (start_byte, START_BYTE))
raise InvalidMessageHeader(f"Start byte is {start_byte:x} and should be {START_BYTE:x}")
try:
schema = await self.get_schema_for_id(schema_id)
if schema is None:
Expand Down
4 changes: 2 additions & 2 deletions karapace/statsd.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,12 +72,12 @@ def _send(self, metric: str, metric_type: bytes, value: Any, tags: Optional[Dict
tag_value = tag_value.astimezone(datetime.timezone.utc).replace(tzinfo=None)
tag_value = tag_value.isoformat()[:19].replace("-", "").replace(":", "") + "Z"
elif isinstance(tag_value, datetime.timedelta):
tag_value = "{}s".format(int(tag_value.total_seconds()))
tag_value = f"{int(tag_value.total_seconds())}s"
elif not isinstance(tag_value, str):
tag_value = str(tag_value)
if " " in tag_value or ":" in tag_value or "|" in tag_value or "=" in tag_value:
tag_value = "INVALID"
parts.insert(1, ",{}={}".format(tag, tag_value).encode("utf-8"))
parts.insert(1, f",{tag}={tag_value}".encode())

self._socket.sendto(b"".join(parts), self._dest_addr)
except Exception: # pylint: disable=broad-except
Expand Down
2 changes: 1 addition & 1 deletion karapace/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ def default_json_serialization(
if isinstance(obj, MappingProxyType):
return dict(obj)

assert_never("Object of type {!r} is not JSON serializable".format(obj.__class__.__name__))
assert_never(f"Object of type {obj.__class__.__name__!r} is not JSON serializable")


SEPARATORS = (",", ":")
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
import version

readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
with open(readme_path, mode="r", encoding="utf8") as fp:
with open(readme_path, encoding="utf8") as fp:
readme_text = fp.read()

version_for_setup_py = version.get_project_version("karapace/version.py")
Expand Down
10 changes: 7 additions & 3 deletions tests/integration/test_rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,9 @@ async def test_admin_client(admin_client, producer):
topic_names = [new_topic(admin_client) for i in range(10, 13)]
topic_info = admin_client.cluster_metadata()
retrieved_names = list(topic_info["topics"].keys())
assert set(topic_names).difference(set(retrieved_names)) == set(), "Returned value %r differs from written one %r" % (
assert (
set(topic_names).difference(set(retrieved_names)) == set()
), "Returned value {!r} differs from written one {!r}".format(
retrieved_names,
topic_names,
)
Expand Down Expand Up @@ -440,9 +442,11 @@ async def test_partitions(rest_async_client, admin_client, producer):
for _ in range(5):
producer.send(topic_name, value=b"foo_val").get()
offset_res = await rest_async_client.get(f"/topics/{topic_name}/partitions/0/offsets", headers=header)
assert offset_res.ok, "Status code %r is not expected: %r" % (offset_res.status_code, offset_res.json())
assert offset_res.ok, f"Status code {offset_res.status_code!r} is not expected: {offset_res.json()!r}"
data = offset_res.json()
assert data == {"beginning_offset": 0, "end_offset": 5}, "Unexpected offsets for topic %r: %r" % (topic_name, data)
assert data == {"beginning_offset": 0, "end_offset": 5}, "Unexpected offsets for topic {!r}: {!r}".format(
topic_name, data
)
res = await rest_async_client.get("/topics/fooo/partitions/0/offsets", headers=header)
assert res.status_code == 404
assert res.json()["error_code"] == 40401
Expand Down
4 changes: 2 additions & 2 deletions tests/integration/test_rest_consumer.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,7 @@ async def test_consume(rest_async_client, admin_client, producer, trail):
# avro to be handled in a separate testcase ??
values = {
"json": [json.dumps({"foo": f"bar{i}"}).encode("utf-8") for i in range(3)],
"binary": [f"val{i}".encode("utf-8") for i in range(3)],
"binary": [f"val{i}".encode() for i in range(3)],
}
deserializers = {"binary": base64.b64decode, "json": lambda x: json.dumps(x).encode("utf-8")}
group_name = "consume_group"
Expand Down Expand Up @@ -283,7 +283,7 @@ async def test_consume(rest_async_client, admin_client, producer, trail):
async def test_consume_timeout(rest_async_client, admin_client, producer):
values = {
"json": [json.dumps({"foo": f"bar{i}"}).encode("utf-8") for i in range(3)],
"binary": [f"val{i}".encode("utf-8") for i in range(3)],
"binary": [f"val{i}".encode() for i in range(3)],
}
deserializers = {"binary": base64.b64decode, "json": lambda x: json.dumps(x).encode("utf-8")}
group_name = "consume_group"
Expand Down
8 changes: 4 additions & 4 deletions tests/integration/test_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -1176,15 +1176,15 @@ async def test_schema_versions_deleting(registry_async_client: Client, trail: st
await assert_schema_versions(registry_async_client, trail, schema_id_2, schema_2_versions)

# Deleting one version, the other still found
res = await registry_async_client.delete("subjects/{}/versions/{}".format(subject, version_1))
res = await registry_async_client.delete(f"subjects/{subject}/versions/{version_1}")
assert res.status_code == 200
assert res.json() == version_1

await assert_schema_versions(registry_async_client, trail, schema_id_1, [])
await assert_schema_versions(registry_async_client, trail, schema_id_2, schema_2_versions)

# Deleting the subject, the schema version 2 cannot be found anymore
res = await registry_async_client.delete("subjects/{}".format(subject))
res = await registry_async_client.delete(f"subjects/{subject}")
assert res.status_code == 200
assert res.json() == [version_2]

Expand Down Expand Up @@ -1227,15 +1227,15 @@ async def test_schema_delete_latest_version(registry_async_client: Client, trail
await assert_schema_versions(registry_async_client, trail, schema_id_2, schema_2_versions)

# Deleting latest version, the other still found
res = await registry_async_client.delete("subjects/{}/versions/latest".format(subject))
res = await registry_async_client.delete(f"subjects/{subject}/versions/latest")
assert res.status_code == 200
assert res.json() == version_2

await assert_schema_versions(registry_async_client, trail, schema_id_1, schema_1_versions)
await assert_schema_versions(registry_async_client, trail, schema_id_2, [])

# Deleting the latest version, no schemas left
res = await registry_async_client.delete("subjects/{}/versions/latest".format(subject))
res = await registry_async_client.delete(f"subjects/{subject}/versions/latest")
assert res.status_code == 200
assert res.json() == version_1

Expand Down
6 changes: 3 additions & 3 deletions tests/integration/test_schema_backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ async def test_backup_get(
assert os.path.exists(backup_location)

lines = 0
with open(backup_location, "r", encoding="utf8") as fp:
with open(backup_location, encoding="utf8") as fp:
version_line = fp.readline()
assert version_line.rstrip() == "/V2"
for line in fp:
Expand Down Expand Up @@ -95,10 +95,10 @@ async def test_backup_restore_and_get_non_schema_topic(
assert os.path.exists(backup_location)

restore_file_content = None
with open(restore_location, "r", encoding="utf8") as fp:
with open(restore_location, encoding="utf8") as fp:
restore_file_content = fp.read()
backup_file_content = None
with open(backup_location, "r", encoding="utf8") as fp:
with open(backup_location, encoding="utf8") as fp:
backup_file_content = fp.read()
assert restore_file_content is not None
assert backup_file_content is not None
Expand Down
2 changes: 1 addition & 1 deletion tests/integration/utils/cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ async def start_schema_registry_cluster(

# None is considered a valid value, and it represents the lack of user
# configuration, so this will generate one for the cluster
group_ids = set(config.get("group_id") for config in config_templates)
group_ids = {config.get("group_id") for config in config_templates}
assert len(group_ids) == 1, f"All configurations entries must have the same group_id value, got: {group_ids}"

group_id = new_random_name("group_id")
Expand Down
8 changes: 4 additions & 4 deletions tests/integration/utils/kafka_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,10 +116,10 @@ def kafka_java_args(
msg = f"Couldn't find kafka installation at {kafka_description.install_dir} to run integration tests."
assert kafka_description.install_dir.exists(), msg
java_args = [
"-Xmx{}M".format(heap_mb),
"-Xms{}M".format(heap_mb),
"-Dkafka.logs.dir={}/logs".format(logs_dir),
"-Dlog4j.configuration=file:{}".format(log4j_properties_path),
f"-Xmx{heap_mb}M",
f"-Xms{heap_mb}M",
f"-Dkafka.logs.dir={logs_dir}/logs",
f"-Dlog4j.configuration=file:{log4j_properties_path}",
"-cp",
str(kafka_description.install_dir / "libs" / "*"),
"kafka.Kafka",
Expand Down
2 changes: 1 addition & 1 deletion tests/integration/utils/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,5 +79,5 @@ def port_is_listening(hostname: str, port: int, ipv6: bool) -> bool:
s.connect((hostname, port))
s.close()
return True
except socket.error:
except OSError:
return False
6 changes: 3 additions & 3 deletions version.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ def save_version(new_ver, old_ver, version_file):
version_file = os.path.join(os.path.dirname(__file__), version_file)
if not old_ver or new_ver != old_ver:
with open(version_file, mode="w", encoding="utf8") as fp:
fp.write('"""{}"""\n__version__ = "{}"\n'.format(__doc__, new_ver))
fp.write(f'"""{__doc__}"""\n__version__ = "{new_ver}"\n')
return True


Expand All @@ -37,14 +37,14 @@ def get_project_version(version_file: str) -> str:
else:
git_ver = git_out.splitlines()[0].strip().decode("utf-8")
if "." not in git_ver:
git_ver = "0.0.1-0-unknown-{}".format(git_ver)
git_ver = f"0.0.1-0-unknown-{git_ver}"
version = git_ver

if save_version(version, file_ver, version_file):
return version

if not file_ver:
raise Exception("version not available from git or from file {!r}".format(version_file))
raise Exception(f"version not available from git or from file {version_file!r}")

return file_ver

Expand Down

0 comments on commit b58b9d0

Please sign in to comment.