chore: pre-commit autoupdate (#1832)
* chore: pre-commit autoupdate

updates:
- https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit
- [github.com/astral-sh/ruff-pre-commit: v0.0.272 → v0.0.277](astral-sh/ruff-pre-commit@v0.0.272...v0.0.277)
- [github.com/psf/black: 23.3.0 → 23.7.0](psf/black@23.3.0...23.7.0)

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update annotations

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Edgar R. M <[email protected]>
pre-commit-ci[bot] and edgarrmondragon authored Jul 11, 2023
1 parent 011d552 commit b02d698
Showing 18 changed files with 68 additions and 88 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -42,8 +42,8 @@ repos:
- id: check-github-workflows
- id: check-readthedocs

- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.272
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.277
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
@@ -53,7 +53,7 @@ repos:
)$
- repo: https://github.com/psf/black
rev: 23.3.0
rev: 23.7.0
hooks:
- id: black
exclude: |
4 changes: 2 additions & 2 deletions samples/sample_tap_countries/countries_streams.py
@@ -30,7 +30,7 @@ class CountriesStream(CountriesAPIStream):
"""Countries API stream."""

name = "countries"
primary_keys = ["code"]
primary_keys = ("code",)
query = """
countries {
code
@@ -81,7 +81,7 @@ class ContinentsStream(CountriesAPIStream):
"""Continents stream from the Countries API."""

name = "continents"
primary_keys = ["code"]
primary_keys = ("code",)
schema_filepath = SCHEMAS_DIR / "continents.json"
query = """
continents {
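A note on the recurring change in the sample taps: primary_keys moves from a list literal to a tuple. The commit message doesn't spell out the motivation, but a likely driver is the ruff upgrade flagging mutable class-level defaults; a tuple can't be mutated in place, so sharing it across instances is safe. A minimal standalone sketch (hypothetical classes, not SDK code):

class StreamDefinition:
    # Immutable default: every instance can share this tuple safely.
    primary_keys: tuple[str, ...] = ("code",)


class MutableStreamDefinition:
    # Mutable class-level default: an in-place append on one instance
    # is visible from every other instance.
    primary_keys: list[str] = ["code"]


a, b = MutableStreamDefinition(), MutableStreamDefinition()
a.primary_keys.append("name")
assert b.primary_keys == ["code", "name"]  # the shared list leaked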
4 changes: 2 additions & 2 deletions samples/sample_tap_gitlab/gitlab_graphql_streams.py
@@ -34,7 +34,7 @@ class GraphQLCurrentUserStream(GitlabGraphQLStream):
"""Gitlab Current User stream."""

name = "currentuser"
primary_keys = ["id"]
primary_keys = ("id",)
replication_key = None
schema_filepath = SCHEMAS_DIR / "currentuser.json"
query = """
@@ -48,7 +48,7 @@ class GraphQLProjectsStream(GitlabGraphQLStream):
"""Gitlab Projects stream."""

name = "projects"
primary_keys = ["id"]
primary_keys = ("id",)
replication_key = None
schema_filepath = SCHEMAS_DIR / "projects-graphql.json"

12 changes: 6 additions & 6 deletions samples/sample_tap_gitlab/gitlab_rest_streams.py
@@ -97,7 +97,7 @@ class ProjectsStream(ProjectBasedStream):

name = "projects"
path = "/projects/{project_id}?statistics=1"
primary_keys = ["id"]
primary_keys = ("id",)
replication_key = "last_activity_at"
is_sorted = True
schema_filepath = SCHEMAS_DIR / "projects.json"
@@ -108,7 +108,7 @@ class ReleasesStream(ProjectBasedStream):

name = "releases"
path = "/projects/{project_id}/releases"
primary_keys = ["project_id", "tag_name"]
primary_keys = ("project_id", "tag_name")
replication_key = None
schema_filepath = SCHEMAS_DIR / "releases.json"

@@ -118,7 +118,7 @@ class IssuesStream(ProjectBasedStream):

name = "issues"
path = "/projects/{project_id}/issues?scope=all&updated_after={start_date}"
primary_keys = ["id"]
primary_keys = ("id",)
replication_key = "updated_at"
is_sorted = False
schema_filepath = SCHEMAS_DIR / "issues.json"
@@ -131,7 +131,7 @@ class CommitsStream(ProjectBasedStream):
path = (
"/projects/{project_id}/repository/commits?since={start_date}&with_stats=true"
)
primary_keys = ["id"]
primary_keys = ("id",)
replication_key = "created_at"
is_sorted = False
schema_filepath = SCHEMAS_DIR / "commits.json"
@@ -146,7 +146,7 @@ class EpicsStream(ProjectBasedStream):

name = "epics"
path = "/groups/{group_id}/epics?updated_after={start_date}"
primary_keys = ["id"]
primary_keys = ("id",)
replication_key = "updated_at"
is_sorted = True
schema = PropertiesList(
@@ -187,7 +187,7 @@ class EpicIssuesStream(GitlabStream):

name = "epic_issues"
path = "/groups/{group_id}/epics/{epic_iid}/issues"
primary_keys = ["id"]
primary_keys = ("id",)
replication_key = None
schema_filepath = SCHEMAS_DIR / "epic_issues.json"
parent_stream_type = EpicsStream # Stream should wait for parents to complete.
8 changes: 4 additions & 4 deletions samples/sample_tap_google_analytics/ga_tap_stream.py
@@ -32,8 +32,8 @@ class SampleGoogleAnalyticsStream(RESTStream):
rest_method = "POST"

# Child class overrides:
dimensions: list[str] = []
metrics: list[str] = []
dimensions: tuple[str] = ()
metrics: tuple[str] = ()

@property
def authenticator(self) -> GoogleJWTAuthenticator:
@@ -86,5 +86,5 @@ class GASimpleSampleStream(SampleGoogleAnalyticsStream):
name = "simple_sample"
schema_filepath = SCHEMAS_DIR / "simple-sample.json"

dimensions = ["ga:date"]
metrics = ["ga:users", "ga:sessions"]
dimensions = ("ga:date",)
metrics = ("ga:users", "ga:sessions")
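One small typing note on the hunk above: tuple[str] strictly describes a tuple of exactly one string, while tuple[str, ...] is the open-ended form that matches what list[str] expressed before. A short illustration (hypothetical aliases, not from the commit):

from typing import get_args

OneString = tuple[str]          # exactly one str element
ManyStrings = tuple[str, ...]   # any number of str elements

print(get_args(OneString))      # (<class 'str'>,)
print(get_args(ManyStrings))    # (<class 'str'>, Ellipsis)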
35 changes: 12 additions & 23 deletions singer_sdk/helpers/_typing.py
@@ -108,10 +108,13 @@ def is_object_type(property_schema: dict) -> bool | None:
"""Return true if the JSON Schema type is an object or None if detection fails."""
if "anyOf" not in property_schema and "type" not in property_schema:
return None # Could not detect data type
for property_type in property_schema.get("anyOf", [property_schema.get("type")]):
if "object" in property_type or property_type == "object":
return True
return False
return any(
"object" in property_type or property_type == "object"
for property_type in property_schema.get(
"anyOf",
[property_schema.get("type")],
)
)


def is_uniform_list(property_schema: dict) -> bool | None:
@@ -410,9 +413,7 @@ def _conform_record_data_types(  # noqa: PLR0912
return input_object, unmapped_properties

for property_name, elem in input_object.items():
property_path = (
property_name if parent is None else parent + "." + property_name
)
property_path = property_name if parent is None else f"{parent}.{property_name}"
if property_name not in schema["properties"]:
unmapped_properties.append(property_path)
continue
@@ -466,15 +467,15 @@ def _conform_record_data_types(  # noqa: PLR0912
return output_object, unmapped_properties


def _conform_primitive_property( # noqa: PLR0911, C901
def _conform_primitive_property( # noqa: PLR0911
elem: t.Any,
property_schema: dict,
) -> t.Any:
"""Converts a primitive (i.e. not object or array) to a json compatible type."""
if isinstance(elem, (datetime.datetime, pendulum.DateTime)):
return to_json_compatible(elem)
if isinstance(elem, datetime.date):
return elem.isoformat() + "T00:00:00+00:00"
return f"{elem.isoformat()}T00:00:00+00:00"
if isinstance(elem, datetime.timedelta):
epoch = datetime.datetime.fromtimestamp(0, UTC)
timedelta_from_epoch = epoch + elem
@@ -485,19 +486,7 @@ def _conform_primitive_property(  # noqa: PLR0911, C901
return str(elem)
if isinstance(elem, bytes):
# for BIT value, treat 0 as False and anything else as True
bit_representation: bool
if is_boolean_type(property_schema):
bit_representation = elem != b"\x00"
return bit_representation
return elem.hex()
return elem != b"\x00" if is_boolean_type(property_schema) else elem.hex()
if is_boolean_type(property_schema):
boolean_representation: bool | None
if elem is None:
boolean_representation = None
elif elem == 0:
boolean_representation = False
else:
boolean_representation = True
return boolean_representation

return None if elem is None else elem != 0
return elem
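The boolean branch of _conform_primitive_property collapses a multi-line if/elif chain into a single conditional expression; the two forms should behave identically. A standalone sketch of that equivalence, using a stand-in _is_boolean helper (hypothetical, not the SDK's is_boolean_type):

def _is_boolean(property_schema: dict) -> bool:
    # Stand-in for the SDK helper; assumes "type" holds a list of JSON Schema type names.
    return "boolean" in property_schema.get("type", [])


def conform_old(elem, property_schema):
    if _is_boolean(property_schema):
        if elem is None:
            boolean_representation = None
        elif elem == 0:
            boolean_representation = False
        else:
            boolean_representation = True
        return boolean_representation
    return elem


def conform_new(elem, property_schema):
    if _is_boolean(property_schema):
        return None if elem is None else elem != 0
    return elem


schema = {"type": ["boolean", "null"]}
for value in (None, 0, 1, 5):
    assert conform_old(value, schema) == conform_new(value, schema)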
2 changes: 1 addition & 1 deletion singer_sdk/plugin_base.py
@@ -74,7 +74,7 @@ class PluginBase(metaclass=abc.ABCMeta):
#: The package name of the plugin. e.g meltanolabs-tap-foo
package_name: str | None = None

config_jsonschema: dict = {}
config_jsonschema: t.ClassVar[dict] = {}
# A JSON Schema object defining the config options that this tap will accept.

_config: dict
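The ClassVar annotation here (and on the test classes further down) is the idiomatic answer to ruff's check for mutable class attributes (RUF012): it declares that the mutable dict is intentionally a class-level value rather than a forgotten per-instance default. A hedged sketch of the pattern, not the SDK class itself:

import typing as t


class PluginBase:
    # A class-level mapping shared by all instances; ClassVar makes the
    # intent explicit to type checkers and to ruff.
    config_jsonschema: t.ClassVar[dict] = {}


class MyTap(PluginBase):
    config_jsonschema: t.ClassVar[dict] = {
        "properties": {"api_key": {"type": "string"}},
    }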
44 changes: 16 additions & 28 deletions singer_sdk/streams/core.py
@@ -97,9 +97,6 @@ class Stream(metaclass=abc.ABCMeta):

ignore_parent_replication_key: bool = False

# Internal API cost aggregator
_sync_costs: dict[str, int] = {}

selected_by_default: bool = True
"""Whether this stream is selected by default in the catalog."""

@@ -143,6 +140,7 @@ def __init__(
self._mask: singer.SelectionMask | None = None
self._schema: dict
self._is_state_flushed: bool = True
self._sync_costs: dict[str, int] = {}
self.child_streams: list[Stream] = []
if schema:
if isinstance(schema, (PathLike, str)):
@@ -296,11 +294,10 @@ def has_selected_descendents(self) -> bool:
Returns:
True if any child streams are selected, recursively.
"""
for child in self.child_streams or []:
if child.selected or child.has_selected_descendents:
return True

return False
return any(
child.selected or child.has_selected_descendents
for child in self.child_streams or []
)

@final
@property
@@ -405,10 +402,7 @@ def get_replication_key_signpost(
Returns:
Max allowable bookmark value for this stream's replication key.
"""
if self.is_timestamp_replication_key:
return utc_now()

return None
return utc_now() if self.is_timestamp_replication_key else None

@property
def schema_filepath(self) -> Path | None:
@@ -435,9 +429,7 @@ def primary_keys(self) -> list[str] | None:
Returns:
A list of primary key(s) for the stream.
"""
if not self._primary_keys:
return []
return self._primary_keys
return self._primary_keys or []

@primary_keys.setter
def primary_keys(self, new_value: list[str] | None) -> None:
@@ -479,9 +471,7 @@ def replication_key(self) -> str | None:
Returns:
Replication key for the stream.
"""
if not self._replication_key:
return None
return self._replication_key
return self._replication_key or None

@replication_key.setter
def replication_key(self, new_value: str | None) -> None:
@@ -706,11 +696,12 @@ def partitions(self) -> list[dict] | None:
Returns:
A list of partition key dicts (if applicable), otherwise `None`.
"""
result: list[dict] = []
for partition_state in (
get_state_partitions_list(self.tap_state, self.name) or []
):
result.append(partition_state["context"])
result: list[dict] = [
partition_state["context"]
for partition_state in (
get_state_partitions_list(self.tap_state, self.name) or []
)
]
return result or None

# Private bookmarking methods
@@ -781,13 +772,12 @@ def _generate_schema_messages(
# Don't emit schema if the stream's records are all ignored.
continue

schema_message = singer.SchemaMessage(
yield singer.SchemaMessage(
stream_map.stream_alias,
stream_map.transformed_schema,
stream_map.transformed_key_properties,
bookmark_keys,
)
yield schema_message

def _write_schema_message(self) -> None:
"""Write out a SCHEMA message with the stream schema."""
@@ -830,15 +820,13 @@ def _generate_record_messages(
mapped_record = stream_map.transform(record)
# Emit record if not filtered
if mapped_record is not None:
record_message = singer.RecordMessage(
yield singer.RecordMessage(
stream=stream_map.stream_alias,
record=mapped_record,
version=None,
time_extracted=utc_now(),
)

yield record_message

def _write_record_message(self, record: dict) -> None:
"""Write out a RECORD message.
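Moving _sync_costs out of the class body and into __init__ (the test runner's message lists in singer_sdk/testing/runners.py get the same treatment further down) fixes a classic shared-state pitfall: a dict defined on the class is one object shared by every instance. A standalone sketch of the failure mode and the fix (hypothetical classes, not the SDK):

class SharedCosts:
    _sync_costs: dict[str, int] = {}   # one dict, shared by all instances


class PerInstanceCosts:
    def __init__(self) -> None:
        self._sync_costs: dict[str, int] = {}   # fresh dict per instance


a, b = SharedCosts(), SharedCosts()
a._sync_costs["http"] = 1
assert b._sync_costs == {"http": 1}    # leaked into an unrelated instance

c, d = PerInstanceCosts(), PerInstanceCosts()
c._sync_costs["http"] = 1
assert d._sync_costs == {}             # isolated, as intended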
2 changes: 1 addition & 1 deletion singer_sdk/streams/rest.py
@@ -59,7 +59,7 @@ class RESTStream(Stream, t.Generic[_TToken], metaclass=abc.ABCMeta):
records_jsonpath: str = "$[*]"

#: Response code reference for rate limit retries
extra_retry_statuses: list[int] = [HTTPStatus.TOO_MANY_REQUESTS]
extra_retry_statuses: t.Sequence[int] = [HTTPStatus.TOO_MANY_REQUESTS]

#: Optional JSONPath expression to extract a pagination token from the API response.
#: Example: `"$.next_page"`
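Widening extra_retry_statuses from list[int] to t.Sequence[int] signals that the default is meant to be read, not mutated in place; a subclass that needs more retryable statuses can supply its own sequence. A small sketch under that reading (the subclass is hypothetical, not from the commit):

import typing as t
from http import HTTPStatus


class BaseStream:
    extra_retry_statuses: t.Sequence[int] = [HTTPStatus.TOO_MANY_REQUESTS]


class FlakyGatewayStream(BaseStream):
    # Override with a new sequence rather than mutating the inherited default.
    extra_retry_statuses: t.Sequence[int] = (
        HTTPStatus.TOO_MANY_REQUESTS,
        HTTPStatus.BAD_GATEWAY,
    )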
5 changes: 2 additions & 3 deletions singer_sdk/testing/factory.py
@@ -1,5 +1,4 @@
"""Test Class Factory."""

from __future__ import annotations

import typing as t
@@ -22,8 +21,8 @@
class BaseTestClass:
"""Base test class."""

params: dict = {}
param_ids: dict = {}
params: t.ClassVar[dict] = {}
param_ids: t.ClassVar[dict] = {}


class TapTestClassFactory:
12 changes: 6 additions & 6 deletions singer_sdk/testing/runners.py
@@ -17,12 +17,6 @@
class SingerTestRunner(metaclass=abc.ABCMeta):
"""Base Singer Test Runner."""

raw_messages: list[dict] = []
schema_messages: list[dict] = []
record_messages: list[dict] = []
state_messages: list[dict] = []
records: defaultdict = defaultdict(list)

def __init__(
self,
singer_class: type[Tap] | type[Target],
@@ -44,6 +38,12 @@ def __init__(
self.default_kwargs = kwargs
self.suite_config = suite_config or SuiteConfig()

self.raw_messages: list[dict] = []
self.schema_messages: list[dict] = []
self.record_messages: list[dict] = []
self.state_messages: list[dict] = []
self.records: defaultdict = defaultdict(list)

@staticmethod
def _clean_sync_output(raw_records: str) -> list[dict]:
"""Clean sync output.
2 changes: 1 addition & 1 deletion singer_sdk/testing/templates.py
@@ -156,7 +156,7 @@ class StreamTestTemplate(TestTemplate):
"""Base Tap Stream test template."""

plugin_type = "stream"
required_kwargs = ["stream"]
required_kwargs: t.ClassVar[list[str]] = ["stream"]

@property
def id(self) -> str: # noqa: A003