Skip to content

Commit

Permalink
refactor: break date_parser out of superset.utils.core (#12408)
Browse files Browse the repository at this point in the history
  • Loading branch information
zhaoyongjie authored and villebro committed Jan 13, 2021
1 parent 90915db commit 35c15b8
Show file tree
Hide file tree
Showing 11 changed files with 753 additions and 704 deletions.
8 changes: 2 additions & 6 deletions superset/common/query_object.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,8 @@
from superset.exceptions import QueryObjectValidationError
from superset.typing import Metric
from superset.utils import pandas_postprocessing
from superset.utils.core import (
DTTM_ALIAS,
get_since_until,
json_int_dttm_ser,
parse_human_timedelta,
)
from superset.utils.core import DTTM_ALIAS, json_int_dttm_ser
from superset.utils.date_parser import get_since_until, parse_human_timedelta
from superset.views.utils import get_time_range_endpoints

config = app.config
Expand Down
9 changes: 5 additions & 4 deletions superset/connectors/druid/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@
from superset.models.helpers import AuditMixinNullable, ImportExportMixin, QueryResult
from superset.typing import FilterValues, Granularity, Metric, QueryObjectDict
from superset.utils import core as utils
from superset.utils.date_parser import parse_human_datetime, parse_human_timedelta

try:
import requests
Expand Down Expand Up @@ -777,7 +778,7 @@ def granularity(
granularity["timeZone"] = timezone

if origin:
dttm = utils.parse_human_datetime(origin)
dttm = parse_human_datetime(origin)
assert dttm
granularity["origin"] = dttm.isoformat()

Expand All @@ -795,7 +796,7 @@ def granularity(
else:
granularity["type"] = "duration"
granularity["duration"] = (
utils.parse_human_timedelta(period_name).total_seconds() # type: ignore
parse_human_timedelta(period_name).total_seconds() # type: ignore
* 1000
)
return granularity
Expand Down Expand Up @@ -938,7 +939,7 @@ def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]:
)
# TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
if self.fetch_values_from:
from_dttm = utils.parse_human_datetime(self.fetch_values_from)
from_dttm = parse_human_datetime(self.fetch_values_from)
assert from_dttm
else:
from_dttm = datetime(1970, 1, 1)
Expand Down Expand Up @@ -1426,7 +1427,7 @@ def query(self, query_obj: QueryObjectDict) -> QueryResult:
time_offset = DruidDatasource.time_offset(query_obj["granularity"])

def increment_timestamp(ts: str) -> datetime:
dt = utils.parse_human_datetime(ts).replace(tzinfo=DRUID_TZ)
dt = parse_human_datetime(ts).replace(tzinfo=DRUID_TZ)
return dt + timedelta(milliseconds=time_offset)

if DTTM_ALIAS in df.columns and time_offset:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
from sqlalchemy.ext.declarative import declarative_base

from superset import db
from superset.utils.core import parse_human_timedelta
from superset.utils.date_parser import parse_human_timedelta

revision = "3dda56f1c4c6"
down_revision = "bddc498dd179"
Expand Down
2 changes: 1 addition & 1 deletion superset/tasks/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.models.tags import Tag, TaggedObject
from superset.utils.core import parse_human_datetime
from superset.utils.date_parser import parse_human_datetime
from superset.views.utils import build_extra_filters

logger = get_task_logger(__name__)
Expand Down
Loading

0 comments on commit 35c15b8

Please sign in to comment.