
Commit

chore: upgrade mypy (#19227)
ktmud authored Mar 17, 2022
1 parent c345029 commit 92cd0a1
Showing 9 changed files with 16 additions and 23 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
```diff
@@ -20,7 +20,7 @@ repos:
     hooks:
       - id: isort
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.910
+    rev: v0.941
     hooks:
      - id: mypy
        additional_dependencies: [types-all]
```
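For orientation, the hook pins mypy at v0.941 and pulls third-party stubs via `types-all`; re-checking the whole repo is a matter of `pre-commit run mypy --all-files`. A minimal sketch of the kind of code the upgraded hook flags (illustrative only, not part of this commit):

```python
from typing import Optional


def greet(name: Optional[str]) -> str:
    # mypy rejects this: "name" may be None, and str + None is not allowed.
    return "Hello, " + name
```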
4 changes: 2 additions & 2 deletions RELEASING/changelog.py
```diff
@@ -381,12 +381,12 @@ def change_log(
     with open(csv, "w") as csv_file:
         log_items = list(logs)
         field_names = log_items[0].keys()
-        writer = lib_csv.DictWriter(  # type: ignore
+        writer = lib_csv.DictWriter(
             csv_file,
             delimiter=",",
             quotechar='"',
             quoting=lib_csv.QUOTE_ALL,
-            fieldnames=field_names,  # type: ignore
+            fieldnames=field_names,
         )
         writer.writeheader()
         for log in logs:
```
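Both ignores around `csv.DictWriter` could be dropped, presumably because the stdlib stubs shipped with mypy 0.941 accept the `dict_keys` view passed as `fieldnames`. A standalone sketch of the same pattern (file name and rows invented for illustration):

```python
import csv

rows = [{"pr_number": 19227, "title": "chore: upgrade mypy"}]

with open("changelog.csv", "w", newline="") as csv_file:
    field_names = rows[0].keys()  # a dict_keys view, not a list
    writer = csv.DictWriter(
        csv_file,
        delimiter=",",
        quotechar='"',
        quoting=csv.QUOTE_ALL,
        fieldnames=field_names,  # no cast or ignore needed here
    )
    writer.writeheader()
    writer.writerows(rows)
```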
11 changes: 4 additions & 7 deletions superset/charts/data/api.py
```diff
@@ -306,16 +306,13 @@ def _run_async(
         Execute command as an async query.
         """
+        # First, look for the chart query results in the cache.
         result = None
         try:
             result = command.run(force_cached=True)
+            if result is not None:
+                return self._send_chart_response(result)
         except ChartDataCacheLoadError:
-            result = None  # type: ignore
-
-        already_cached_result = result is not None
-
-        # If the chart query has already been cached, return it immediately.
-        if already_cached_result:
-            return self._send_chart_response(result)
+            pass
 
         # Otherwise, kick off a background job to run the chart query.
         # Clients will either poll or be notified of query completion,
```
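The refactor returns from inside the `try` as soon as a cached result is found and simply swallows the cache-miss error, which removes the sentinel reassignment that needed a `type: ignore`. A generic sketch of that cache-first, early-return shape (names are placeholders, not Superset's API):

```python
from typing import Any, Callable, Optional


class CacheMiss(Exception):
    """Raised when a cached result cannot be loaded."""


def serve(load_cached: Callable[[], Optional[Any]], enqueue_job: Callable[[], Any]) -> Any:
    # First, try to answer straight from the cache.
    try:
        result = load_cached()
        if result is not None:
            return result
    except CacheMiss:
        pass
    # Otherwise, fall back to scheduling the query asynchronously.
    return enqueue_job()
```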
2 changes: 1 addition & 1 deletion superset/cli/main.py
```diff
@@ -45,7 +45,7 @@ def make_shell_context() -> Dict[str, Any]:
 
 # add sub-commands
 for load, module_name, is_pkg in pkgutil.walk_packages(
-    cli.__path__, cli.__name__ + "."  # type: ignore
+    cli.__path__, cli.__name__ + "."
 ):
     module = importlib.import_module(module_name)
     for attribute in module.__dict__.values():
```
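The ignore on `cli.__path__` is gone, presumably because the newer mypy understands a package's `__path__` attribute here. The discovery loop itself is plain `pkgutil`; a self-contained sketch, using a stdlib package as a stand-in for `superset.cli`:

```python
import importlib
import pkgutil

import logging as pkg  # stand-in package; main.py walks superset.cli instead

# Import every sub-module of the package, the same way main.py discovers CLI commands.
for _loader, module_name, _is_pkg in pkgutil.walk_packages(pkg.__path__, pkg.__name__ + "."):
    module = importlib.import_module(module_name)
    print("discovered", module_name, "with", len(module.__dict__), "attributes")
```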
2 changes: 1 addition & 1 deletion superset/models/helpers.py
```diff
@@ -221,7 +221,7 @@ def import_from_dict(
         if not obj:
             is_new_obj = True
             # Create new DB object
-            obj = cls(**dict_rep)  # type: ignore
+            obj = cls(**dict_rep)
             logger.info("Importing new %s %s", obj.__tablename__, str(obj))
             if cls.export_parent and parent:
                 setattr(obj, cls.export_parent, parent)
```
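Instantiating the model with `cls(**dict_rep)` likewise no longer needs an ignore. A minimal sketch of the instantiate-from-dict pattern (class and field names are made up):

```python
from typing import Any, Dict, Type, TypeVar

T = TypeVar("T", bound="Importable")


class Importable:
    def __init__(self, **kwargs: Any) -> None:
        for key, value in kwargs.items():
            setattr(self, key, value)


def import_from_dict(cls: Type[T], dict_rep: Dict[str, Any]) -> T:
    # Calling a Type[T] with keyword expansion, as helpers.py does for the DB model.
    return cls(**dict_rep)


obj = import_from_dict(Importable, {"table_name": "my_table", "schema": "public"})
```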
3 changes: 1 addition & 2 deletions superset/reports/commands/alert.py
```diff
@@ -77,8 +77,7 @@ def run(self) -> bool:
             threshold = json.loads(self._report_schedule.validator_config_json)[
                 "threshold"
             ]
-
-            return OPERATOR_FUNCTIONS[operator](self._result, threshold)
+            return OPERATOR_FUNCTIONS[operator](self._result, threshold)  # type: ignore
         except (KeyError, json.JSONDecodeError) as ex:
             raise AlertValidatorConfigError() from ex
 
```
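Note that this hunk adds an ignore rather than removing one: `threshold` comes back from `json.loads` typed as `Any`, and presumably the stricter checker no longer accepts the operands of the comparison call. A hedged sketch of the operator-dispatch idea (`OPERATOR_FUNCTIONS` below is a stand-in, not Superset's real mapping):

```python
import json
import operator
from typing import Callable, Dict

OPERATOR_FUNCTIONS: Dict[str, Callable[[float, float], bool]] = {
    ">": operator.gt,
    ">=": operator.ge,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
}

config = '{"op": ">=", "threshold": 10}'
parsed = json.loads(config)  # json.loads returns Any
result = 12.0
print(OPERATOR_FUNCTIONS[parsed["op"]](result, parsed["threshold"]))  # True
```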
2 changes: 1 addition & 1 deletion superset/reports/notifications/base.py
```diff
@@ -50,7 +50,7 @@ class BaseNotification:  # pylint: disable=too-few-public-methods
     """
 
     def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
-        super().__init_subclass__(*args, **kwargs)  # type: ignore
+        super().__init_subclass__(*args, **kwargs)
         cls.plugins.append(cls)
 
     def __init__(
```
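`__init_subclass__` is how notification classes register themselves in `cls.plugins`; the newer mypy accepts the `super()` call without an ignore. A simplified, self-contained version of that registration pattern:

```python
from typing import Any, List, Type


class BasePlugin:
    plugins: List[Type["BasePlugin"]] = []

    def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
        super().__init_subclass__(*args, **kwargs)
        cls.plugins.append(cls)


class EmailPlugin(BasePlugin):
    ...


class SlackPlugin(BasePlugin):
    ...


print([p.__name__ for p in BasePlugin.plugins])  # ['EmailPlugin', 'SlackPlugin']
```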
2 changes: 1 addition & 1 deletion superset/utils/async_query_manager.py
```diff
@@ -71,7 +71,7 @@ class AsyncQueryManager:
 
     def __init__(self) -> None:
         super().__init__()
-        self._redis: redis.Redis
+        self._redis: redis.Redis  # type: ignore
         self._stream_prefix: str = ""
         self._stream_limit: Optional[int]
         self._stream_limit_firehose: Optional[int]
```
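Here the annotation-only attribute declaration gains an ignore; a guess is that the upgraded redis stubs make `redis.Redis` generic, which the project's strict settings reject without type parameters. The declare-now, assign-later shape itself is ordinary Python (a simplified stand-in, not the real `AsyncQueryManager`):

```python
from typing import Any, Optional


class Manager:
    def __init__(self) -> None:
        # Declare attributes up front; real values are injected later by init_app().
        self._connection: Any
        self._stream_prefix: str = ""
        self._stream_limit: Optional[int] = None

    def init_app(self, connection: Any, stream_limit: Optional[int] = None) -> None:
        self._connection = connection
        self._stream_limit = stream_limit


manager = Manager()
manager.init_app(connection=object(), stream_limit=1000)
```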
11 changes: 4 additions & 7 deletions superset/views/core.py
```diff
@@ -653,14 +653,11 @@ def explore_json(
                     force=force,
                 )
                 payload = viz_obj.get_payload()
+                # If the chart query has already been cached, return it immediately.
+                if payload is not None:
+                    return self.send_data_payload_response(viz_obj, payload)
             except CacheLoadError:
-                payload = None  # type: ignore
-
-            already_cached_result = payload is not None
-
-            # If the chart query has already been cached, return it immediately.
-            if already_cached_result:
-                return self.send_data_payload_response(viz_obj, payload)
+                pass
 
             # Otherwise, kick off a background job to run the chart query.
             # Clients will either poll or be notified of query completion,
```
