diff --git a/UPDATING.md b/UPDATING.md
index cfee3dcfd436f..22674e49a3f2e 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -29,6 +29,9 @@ assists people when migrating to a new version.

 ### Breaking Changes

+- [19168](https://github.com/apache/superset/pull/19168): The Celery upgrade to 5.X has breaking changes in its command-line invocation.
+  Please follow: https://docs.celeryq.dev/en/stable/whatsnew-5.2.html#step-1-adjust-your-command-line-invocation
+  Consider migrating your Celery config if you haven't already: https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map
 - [19049](https://github.com/apache/superset/pull/19049): APP_ICON_WIDTH has been removed from the config. Superset should now be able to handle different logo sizes without having to explicitly set an APP_ICON_WIDTH. This might affect the size of existing custom logos as the UI will now resize them according to the specified space of maximum 148px and not according to the value of APP_ICON_WIDTH.
 - [19274](https://github.com/apache/superset/pull/19274): The `PUBLIC_ROLE_LIKE_GAMMA` config key has been removed, set `PUBLIC_ROLE_LIKE` = "Gamma" to have the same functionality.
 - [19273](https://github.com/apache/superset/pull/19273): The `SUPERSET_CELERY_WORKERS` and `SUPERSET_WORKERS` config keys have been removed. Configure celery directly using `CELERY_CONFIG` on Superset
diff --git a/docs/docs/installation/alerts-reports.mdx b/docs/docs/installation/alerts-reports.mdx
index 3ddb35caa1747..8ab37cc90529a 100644
--- a/docs/docs/installation/alerts-reports.mdx
+++ b/docs/docs/installation/alerts-reports.mdx
@@ -89,12 +89,12 @@ REDIS_HOST = "redis-superset"
 REDIS_PORT = "6379"

 class CeleryConfig:
-    BROKER_URL = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT)
-    CELERY_IMPORTS = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", )
-    CELERY_RESULT_BACKEND = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT)
-    CELERYD_PREFETCH_MULTIPLIER = 10
-    CELERY_ACKS_LATE = True
-    CELERY_ANNOTATIONS = {
+    broker_url = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT)
+    imports = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", )
+    result_backend = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT)
+    worker_prefetch_multiplier = 10
+    task_acks_late = True
+    task_annotations = {
         'sql_lab.get_sql_results': {
             'rate_limit': '100/s',
         },
@@ -105,7 +105,7 @@ class CeleryConfig:
             'ignore_result': True,
         },
     }
-    CELERYBEAT_SCHEDULE = {
+    beat_schedule = {
         'reports.scheduler': {
             'task': 'reports.scheduler',
             'schedule': crontab(minute='*', hour='*'),
diff --git a/docs/docs/installation/async-queries-celery.mdx b/docs/docs/installation/async-queries-celery.mdx
index 7c19a9405cf09..b742d8c6b52c2 100644
--- a/docs/docs/installation/async-queries-celery.mdx
+++ b/docs/docs/installation/async-queries-celery.mdx
@@ -23,16 +23,16 @@ and web server processes should have the same configuration.

 ```python
 class CeleryConfig(object):
-    BROKER_URL = 'redis://localhost:6379/0'
-    CELERY_IMPORTS = (
+    broker_url = 'redis://localhost:6379/0'
+    imports = (
         'superset.sql_lab',
         'superset.tasks',
     )
-    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
-    CELERYD_LOG_LEVEL = 'DEBUG'
-    CELERYD_PREFETCH_MULTIPLIER = 10
-    CELERY_ACKS_LATE = True
-    CELERY_ANNOTATIONS = {
+    result_backend = 'redis://localhost:6379/0'
+    worker_log_level = 'DEBUG'
+    worker_prefetch_multiplier = 10
+    task_acks_late = True
+    task_annotations = {
         'sql_lab.get_sql_results': {
             'rate_limit': '100/s',
         },
@@ -43,7 +43,7 @@ class CeleryConfig(object):
             'ignore_result': True,
         },
     }
-    CELERYBEAT_SCHEDULE = {
+    beat_schedule = {
         'email_reports.schedule_hourly': {
             'task': 'email_reports.schedule_hourly',
             'schedule': crontab(minute=1, hour='*'),
diff --git a/docs/docs/installation/cache.mdx b/docs/docs/installation/cache.mdx
index 2cf56c1812937..df2fc1471d29d 100644
--- a/docs/docs/installation/cache.mdx
+++ b/docs/docs/installation/cache.mdx
@@ -85,11 +85,11 @@ from s3cache.s3cache import S3Cache
 ...

 class CeleryConfig(object):
-    BROKER_URL = "redis://localhost:6379/0"
-    CELERY_IMPORTS = ("superset.sql_lab", "superset.tasks", "superset.tasks.thumbnails")
-    CELERY_RESULT_BACKEND = "redis://localhost:6379/0"
-    CELERYD_PREFETCH_MULTIPLIER = 10
-    CELERY_ACKS_LATE = True
+    broker_url = "redis://localhost:6379/0"
+    imports = ("superset.sql_lab", "superset.tasks", "superset.tasks.thumbnails")
+    result_backend = "redis://localhost:6379/0"
+    worker_prefetch_multiplier = 10
+    task_acks_late = True

 CELERY_CONFIG = CeleryConfig

diff --git a/docs/docs/installation/running-on-kubernetes.mdx b/docs/docs/installation/running-on-kubernetes.mdx
index d87359f146089..b009c722e622f 100644
--- a/docs/docs/installation/running-on-kubernetes.mdx
+++ b/docs/docs/installation/running-on-kubernetes.mdx
@@ -344,12 +344,10 @@ configOverrides:
     from celery.schedules import crontab

     class CeleryConfig(object):
-      BROKER_URL = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
-      CELERY_IMPORTS = ('superset.sql_lab', )
-      CELERY_RESULT_BACKEND = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
-      CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}
-      CELERY_IMPORTS = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", )
-      CELERY_ANNOTATIONS = {
+      broker_url = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
+      imports = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", )
+      result_backend = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
+      task_annotations = {
         'sql_lab.get_sql_results': {
           'rate_limit': '100/s',
         },
@@ -360,7 +358,7 @@ configOverrides:
         'ignore_result': True,
         },
       }
-      CELERYBEAT_SCHEDULE = {
+      beat_schedule = {
         'reports.scheduler': {
           'task': 'reports.scheduler',
           'schedule': crontab(minute='*', hour='*'),
diff --git a/requirements/base.txt b/requirements/base.txt
index dd2c70aa03de2..fb16470fd399a 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -11,7 +11,7 @@ aiohttp==3.7.4.post0
     # via slackclient
 alembic==1.6.5
     # via flask-migrate
-amqp==2.6.1
+amqp==5.1.0
     # via kombu
 apispec[yaml]==3.3.2
     # via flask-appbuilder
@@ -33,17 +33,27 @@ brotli==1.0.9
     # via flask-compress
 cachelib==0.4.1
     # via apache-superset
-celery==4.4.7
+celery==5.2.2
     # via apache-superset
 cffi==1.14.6
     # via cryptography
 chardet==4.0.0
     # via aiohttp
-click==7.1.2
+click==8.0.4
     # via
     #   apache-superset
+    #   celery
+    #   click-didyoumean
+    #   click-plugins
+    #   click-repl
     #   flask
     #   flask-appbuilder
+click-didyoumean==0.3.0
+    # via celery
+click-plugins==1.1.1
+    # via celery
+click-repl==0.2.0
+    # via celery
 colorama==0.4.4
     # via
     #   apache-superset
@@ -56,15 +66,13 @@ croniter==1.0.15
     # via apache-superset
 cryptography==3.4.7
     # via apache-superset
-defusedxml==0.7.1
-    # via python3-openid
 deprecation==2.1.0
     # via apache-superset
 dnspython==2.1.0
     # via email-validator
 email-validator==1.1.3
     # via flask-appbuilder
-flask==1.1.4
+flask==2.0.3
     # via
     #   apache-superset
     #   flask-appbuilder
@@ -74,10 +82,9 @@ flask==1.1.4
     #   flask-jwt-extended
     #   flask-login
     #   flask-migrate
-    #   flask-openid
     #   flask-sqlalchemy
     #   flask-wtf
-flask-appbuilder==3.4.5
+flask-appbuilder==4.0.0
     # via apache-superset
 flask-babel==1.0.0
     # via flask-appbuilder
@@ -85,14 +92,12 @@ flask-caching==1.10.1
     # via apache-superset
 flask-compress==1.10.1
     # via apache-superset
-flask-jwt-extended==3.25.1
+flask-jwt-extended==4.3.1
     # via flask-appbuilder
 flask-login==0.4.1
     # via flask-appbuilder
 flask-migrate==3.1.0
     # via apache-superset
-flask-openid==1.3.0
-    # via flask-appbuilder
 flask-sqlalchemy==2.5.1
     # via
     #   flask-appbuilder
@@ -123,18 +128,17 @@ idna==3.2
     #   yarl
 isodate==0.6.0
     # via apache-superset
-itsdangerous==1.1.0
+itsdangerous==2.1.1
     # via
-    #   apache-superset
     #   flask
     #   flask-wtf
-jinja2==2.11.3
+jinja2==3.0.3
     # via
     #   flask
     #   flask-babel
 jsonschema==3.2.0
     # via flask-appbuilder
-kombu==4.6.11
+kombu==5.2.4
     # via celery
 korean-lunar-calendar==0.2.1
     # via holidays
@@ -180,11 +184,13 @@ polyline==1.4.0
     # via apache-superset
 prison==0.2.1
     # via flask-appbuilder
+prompt-toolkit==3.0.28
+    # via click-repl
 pyarrow==5.0.0
     # via apache-superset
 pycparser==2.20
     # via cffi
-pyjwt==1.7.1
+pyjwt==2.2.0
     # via
     #   apache-superset
     #   flask-appbuilder
@@ -213,9 +219,7 @@ python-editor==1.0.4
     # via alembic
 python-geohash==0.8.5
     # via apache-superset
-python3-openid==3.2.0
-    # via flask-openid
-pytz==2021.1
+pytz==2021.3
     # via
     #   babel
     #   celery
@@ -237,7 +241,7 @@ simplejson==3.17.3
 six==1.16.0
     # via
     #   bleach
-    #   flask-jwt-extended
+    #   click-repl
     #   flask-talisman
     #   holidays
     #   isodate
@@ -273,13 +277,16 @@ typing-extensions==3.10.0.0
     #   apache-superset
 urllib3==1.26.6
     # via selenium
-vine==1.3.0
+vine==5.0.0
     # via
     #   amqp
     #   celery
+    #   kombu
+wcwidth==0.2.5
+    # via prompt-toolkit
 webencodings==0.5.1
     # via bleach
-werkzeug==1.0.1
+werkzeug==2.0.3
     # via
     #   flask
     #   flask-jwt-extended
diff --git a/requirements/integration.in b/requirements/integration.in
index 763cb936e0f0a..eff495d881314 100644
--- a/requirements/integration.in
+++ b/requirements/integration.in
@@ -18,4 +18,4 @@ pip-compile-multi!=1.5.9
 pre-commit
 tox
 py>=1.10.0
-click==7.1.2
+click
diff --git a/requirements/integration.txt b/requirements/integration.txt
index 79431b5dd57c5..edc39fb151267 100644
--- a/requirements/integration.txt
+++ b/requirements/integration.txt
@@ -1,4 +1,4 @@
-# SHA1:03eb2d96afe21f1bda1ab33b4cf84e670a1efe21
+# SHA1:8e2dd1e795bcad7451376b3653eb03465e4f05d3
 #
 # This file is autogenerated by pip-compile-multi
 # To update, run:
@@ -9,7 +9,7 @@ backports.entry-points-selectable==1.1.0
     # via virtualenv
 cfgv==3.3.0
     # via pre-commit
-click==7.1.2
+click==8.0.4
     # via
     #   -r requirements/integration.in
     #   pip-compile-multi
diff --git a/requirements/testing.txt b/requirements/testing.txt
index a02d250526edc..3b1ce021873f5 100644
--- a/requirements/testing.txt
+++ b/requirements/testing.txt
@@ -116,8 +116,6 @@ pexpect==4.8.0
     # via ipython
 pickleshare==0.7.5
     # via ipython
-prompt-toolkit==3.0.19
-    # via ipython
 proto-plus==1.19.7
     # via
     #   google-cloud-bigquery
@@ -178,8 +176,6 @@ trino==0.306
     # via sqlalchemy-trino
 typing-inspect==0.7.1
     # via libcst
-wcwidth==0.2.5
-    # via prompt-toolkit
 websocket-client==1.2.0
     # via docker
diff --git a/scripts/tests/run.sh b/scripts/tests/run.sh
index 9f78318b72b51..24233010107dd 100755
--- a/scripts/tests/run.sh
+++ b/scripts/tests/run.sh
@@ -138,5 +138,5 @@ fi

 if [ $RUN_TESTS -eq 1 ]
 then
-  pytest -x -s "${TEST_MODULE}"
+  pytest --durations=0 --maxfail=1 "${TEST_MODULE}"
 fi
diff --git a/setup.cfg b/setup.cfg
index 7f2f83c18e7db..6f667677ec810 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -41,7 +41,7 @@ disallow_untyped_calls = true
 disallow_untyped_defs = true
 ignore_missing_imports = true
 no_implicit_optional = true
-warn_unused_ignores = true
+warn_unused_ignores = false

 [mypy-superset.migrations.versions.*]
 ignore_errors = true
diff --git a/setup.py b/setup.py
index 980fde8a83639..977d1b3e7ae63 100644
--- a/setup.py
+++ b/setup.py
@@ -70,15 +70,15 @@ def get_git_sha() -> str:
         "backoff>=1.8.0",
         "bleach>=3.0.2, <4.0.0",
         "cachelib>=0.4.1,<0.5",
-        "celery>=4.3.0, <5.0.0, !=4.4.1",
-        "click<8",
+        "celery>=5.2.2, <6.0.0",
+        "click>=8.0.3",
         "colorama",
         "croniter>=0.3.28",
         "cron-descriptor",
         "cryptography>=3.3.2",
         "deprecation>=2.1.0, <2.2.0",
-        "flask>=1.1.0, <2.0.0",
-        "flask-appbuilder>=3.4.5, <4.0.0",
+        "flask>=2.0.0, <3.0.0",
+        "flask-appbuilder>=4.0.0, <5.0.0",
         "flask-caching>=1.10.0",
         "flask-compress",
         "flask-talisman",
@@ -90,7 +90,6 @@ def get_git_sha() -> str:
         "gunicorn>=20.1.0",
         "holidays==0.10.3",  # PINNED! https://github.com/dr-prodigy/python-holidays/issues/406
         "humanize",
-        "itsdangerous>=1.0.0, <2.0.0",  # https://github.com/apache/superset/pull/14627
         "isodate",
         "markdown>=3.0",
         "msgpack>=1.0.0, <1.1",
@@ -104,7 +103,7 @@ def get_git_sha() -> str:
         "python-geohash",
         "pyarrow>=5.0.0, <6.0",
         "pyyaml>=5.4",
-        "PyJWT>=1.7.1, <2",
+        "PyJWT>=2.0.0, <2.3.0",
         "redis",
         "selenium>=3.141.0",
         "simplejson>=3.15.0",
diff --git a/superset/cli/importexport.py b/superset/cli/importexport.py
index 8ca86939f2065..4bc3ee4a2e3c3 100755
--- a/superset/cli/importexport.py
+++ b/superset/cli/importexport.py
@@ -64,7 +64,9 @@ def export_dashboards(dashboard_file: Optional[str] = None) -> None:
     from superset.dashboards.commands.export import ExportDashboardsCommand
     from superset.models.dashboard import Dashboard

-    g.user = security_manager.find_user(username="admin")
+    g.user = security_manager.find_user(  # pylint: disable=assigning-non-slot
+        username="admin"
+    )

     dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
     timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
@@ -96,7 +98,9 @@ def export_datasources(datasource_file: Optional[str] = None) -> None:
     from superset.connectors.sqla.models import SqlaTable
     from superset.datasets.commands.export import ExportDatasetsCommand

-    g.user = security_manager.find_user(username="admin")
+    g.user = security_manager.find_user(  # pylint: disable=assigning-non-slot
+        username="admin"
+    )

     dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
     timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
@@ -135,7 +139,9 @@ def import_dashboards(path: str, username: Optional[str]) -> None:
     )

     if username is not None:
-        g.user = security_manager.find_user(username=username)
+        g.user = security_manager.find_user(  # pylint: disable=assigning-non-slot
+            username=username
+        )
     if is_zipfile(path):
         with ZipFile(path) as bundle:
             contents = get_contents_from_bundle(bundle)
@@ -299,7 +305,9 @@ def import_dashboards(path: str, recursive: bool, username: str) -> None:
     elif path_object.exists() and recursive:
         files.extend(path_object.rglob("*.json"))
     if username is not None:
-        g.user = security_manager.find_user(username=username)
+        g.user = security_manager.find_user(  # pylint: disable=assigning-non-slot
+            username=username
+        )
     contents = {}
     for path_ in files:
         with open(path_) as file:
diff --git a/superset/config.py b/superset/config.py
index bd32fc54a32b2..ee7a4d16e1383 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -300,8 +300,6 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:
 # { 'name': 'Yahoo', 'url': 'https://open.login.yahoo.com/' },
 # { 'name': 'Flickr', 'url': 'https://www.flickr.com/' },

-AUTH_STRICT_RESPONSE_CODES = True
-
 # ---------------------------------------------------
 # Roles config
 # ---------------------------------------------------
@@ -747,13 +745,13 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:


 class CeleryConfig:  # pylint: disable=too-few-public-methods
-    BROKER_URL = "sqla+sqlite:///celerydb.sqlite"
-    CELERY_IMPORTS = ("superset.sql_lab", "superset.tasks")
-    CELERY_RESULT_BACKEND = "db+sqlite:///celery_results.sqlite"
-    CELERYD_LOG_LEVEL = "DEBUG"
-    CELERYD_PREFETCH_MULTIPLIER = 1
-    CELERY_ACKS_LATE = False
-    CELERY_ANNOTATIONS = {
+    broker_url = "sqla+sqlite:///celerydb.sqlite"
+    imports = ("superset.sql_lab", "superset.tasks")
+    result_backend = "db+sqlite:///celery_results.sqlite"
+    worker_log_level = "DEBUG"
+    worker_prefetch_multiplier = 1
+    task_acks_late = False
+    task_annotations = {
         "sql_lab.get_sql_results": {"rate_limit": "100/s"},
         "email_reports.send": {
             "rate_limit": "1/s",
@@ -762,7 +760,7 @@ class CeleryConfig:  # pylint: disable=too-few-public-methods
             "ignore_result": True,
         },
     }
-    CELERYBEAT_SCHEDULE = {
+    beat_schedule = {
         "email_reports.schedule_hourly": {
             "task": "email_reports.schedule_hourly",
             "schedule": crontab(minute=1, hour="*"),
diff --git a/superset/tasks/async_queries.py b/superset/tasks/async_queries.py
index fcd6f91ebf754..9b17a465c9fcd 100644
--- a/superset/tasks/async_queries.py
+++ b/superset/tasks/async_queries.py
@@ -47,13 +47,17 @@ def ensure_user_is_set(user_id: Optional[int]) -> None:
     user_is_not_set = not (hasattr(g, "user") and g.user is not None)

     if user_is_not_set and user_id is not None:
-        g.user = security_manager.get_user_by_id(user_id)
+        g.user = security_manager.get_user_by_id(  # pylint: disable=assigning-non-slot
+            user_id
+        )
     elif user_is_not_set:
-        g.user = security_manager.get_anonymous_user()
+        g.user = (  # pylint: disable=assigning-non-slot
+            security_manager.get_anonymous_user()
+        )


 def set_form_data(form_data: Dict[str, Any]) -> None:
-    g.form_data = form_data
+    g.form_data = form_data  # pylint: disable=assigning-non-slot


 def _create_query_context_from_form(form_data: Dict[str, Any]) -> QueryContext:
diff --git a/superset/utils/async_query_manager.py b/superset/utils/async_query_manager.py
index a026fd6f3f3d7..847df76987ac1 100644
--- a/superset/utils/async_query_manager.py
+++ b/superset/utils/async_query_manager.py
@@ -134,7 +134,11 @@ def validate_session(response: Response) -> Response:
                 session["async_user_id"] = user_id

             sub = str(user_id) if user_id else None
-            token = self.generate_jwt({"channel": async_channel_id, "sub": sub})
+            token = jwt.encode(
+                {"channel": async_channel_id, "sub": sub},
+                self._jwt_secret,
+                algorithm="HS256",
+            )

             response.set_cookie(
                 self._jwt_cookie_name,
@@ -146,21 +150,13 @@ def validate_session(response: Response) -> Response:

         return response

-    def generate_jwt(self, data: Dict[str, Any]) -> str:
-        encoded_jwt = jwt.encode(data, self._jwt_secret, algorithm="HS256")
-        return encoded_jwt.decode("utf-8")
-
-    def parse_jwt(self, token: str) -> Dict[str, Any]:
-        data = jwt.decode(token, self._jwt_secret, algorithms=["HS256"])
-        return data
-
     def parse_jwt_from_request(self, req: Request) -> Dict[str, Any]:
         token = req.cookies.get(self._jwt_cookie_name)
         if not token:
             raise AsyncQueryTokenException("Token not preset")

         try:
-            return self.parse_jwt(token)
+            return jwt.decode(token, self._jwt_secret, algorithms=["HS256"])
         except Exception as ex:
             logger.warning(ex)
             raise AsyncQueryTokenException("Failed to parse token") from ex
diff --git a/superset/views/core.py b/superset/views/core.py
index 5959584f1c2d3..c5232e7e2e798 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -1841,7 +1841,7 @@ def warm_up_cache(  # pylint: disable=too-many-locals,no-self-use
                 force=True,
             )

-            g.form_data = form_data
+            g.form_data = form_data  # pylint: disable=assigning-non-slot
             payload = obj.get_payload()
             delattr(g, "form_data")
             error = payload["errors"] or None
diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py
index f573e68075441..83ea63daf0b70 100644
--- a/tests/integration_tests/core_tests.py
+++ b/tests/integration_tests/core_tests.py
@@ -1254,8 +1254,10 @@ def test_get_select_star_not_allowed(self):
         self.assertEqual(resp.status_code, 403)

     @mock.patch("superset.views.core.results_backend_use_msgpack", False)
-    @mock.patch("superset.views.core.results_backend")
-    def test_display_limit(self, mock_results_backend):
+    def test_display_limit(self):
+        from superset.views import core
+
+        core.results_backend = mock.Mock()
         self.login()

         data = [{"col_0": i} for i in range(100)]
@@ -1284,7 +1286,7 @@ def test_display_limit(self, mock_results_backend):
             app.config["RESULTS_BACKEND_USE_MSGPACK"] = False
             serialized_payload = sql_lab._serialize_payload(payload, False)
             compressed = utils.zlib_compress(serialized_payload)
-            mock_results_backend.get.return_value = compressed
+            core.results_backend.get.return_value = compressed

             with mock.patch("superset.views.core.db") as mock_superset_db:
                 mock_superset_db.session.query().filter_by().one_or_none.return_value = (
diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py
index 8669da99f4a9e..8ee6bb59250e9 100644
--- a/tests/integration_tests/dashboards/api_tests.py
+++ b/tests/integration_tests/dashboards/api_tests.py
@@ -1348,10 +1348,8 @@ def test_export(self):
         dashboards_ids = get_dashboards_ids(db, ["world_health", "births"])
         uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboards_ids)}"

-        # freeze time to ensure filename is deterministic
-        with freeze_time("2020-01-01T00:00:00Z"):
-            rv = self.get_assert_metric(uri, "export")
-            headers = generate_download_headers("json")["Content-Disposition"]
+        rv = self.get_assert_metric(uri, "export")
+        headers = generate_download_headers("json")["Content-Disposition"]

         assert rv.status_code == 200
         assert rv.headers["Content-Disposition"] == headers
diff --git a/tests/integration_tests/security/api_tests.py b/tests/integration_tests/security/api_tests.py
index 86be5e7da58e9..f936219971517 100644
--- a/tests/integration_tests/security/api_tests.py
+++ b/tests/integration_tests/security/api_tests.py
@@ -92,7 +92,10 @@ def test_post_guest_token_authorized(self):
         self.assert200(response)
         token = json.loads(response.data)["token"]
         decoded_token = jwt.decode(
-            token, self.app.config["GUEST_TOKEN_JWT_SECRET"], audience=get_url_host()
+            token,
+            self.app.config["GUEST_TOKEN_JWT_SECRET"],
+            audience=get_url_host(),
+            algorithms=["HS256"],
         )
         self.assertEqual(user, decoded_token["user"])
         self.assertEqual(resource, decoded_token["resources"][0])
diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py
index c96b7449b9c1e..9028e589252c1 100644
--- a/tests/integration_tests/sqllab_tests.py
+++ b/tests/integration_tests/sqllab_tests.py
@@ -24,9 +24,9 @@ from parameterized import parameterized
 from random import random
 from unittest import mock
-from superset.extensions import db

 import prison
+from freezegun import freeze_time

 from superset import db, security_manager
 from superset.connectors.sqla.models import SqlaTable
 from superset.db_engine_specs import BaseEngineSpec
@@ -34,16 +34,12 @@ from superset import db, security_manager
 from superset.db_engine_specs.presto import PrestoEngineSpec
 from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
 from superset.exceptions import SupersetErrorException
-from superset.models.core import Database
 from superset.models.sql_lab import Query, SavedQuery
 from superset.result_set import SupersetResultSet
 from superset.sqllab.limiting_factor import LimitingFactor
 from superset.sql_lab import (
     cancel_query,
     execute_sql_statements,
-    execute_sql_statement,
-    get_sql_results,
-    SqlLabException,
     apply_limit_if_exists,
 )
 from superset.sql_parse import CtasMethod
@@ -157,8 +153,6 @@ def test_sql_json_to_saved_query_info(self):
         """
         SQLLab: Test SQLLab query execution info propagation to saved queries
         """
-        from freezegun import freeze_time
-
         self.login("admin")

         sql_statement = "SELECT * FROM birth_names LIMIT 10"
@@ -167,7 +161,7 @@ def test_sql_json_to_saved_query_info(self):
         db.session.add(saved_query)
         db.session.commit()

-        with freeze_time("2020-01-01T00:00:00Z"):
+        with freeze_time(datetime.now().isoformat(timespec="seconds")):
             self.run_sql(sql_statement, "1")
             saved_query_ = (
                 db.session.query(SavedQuery)
@@ -178,9 +172,9 @@ def test_sql_json_to_saved_query_info(self):
             )
             assert saved_query_.rows is not None
             assert saved_query_.last_run == datetime.now()
-            # Rollback changes
-            db.session.delete(saved_query_)
-            db.session.commit()
+        # Rollback changes
+        db.session.delete(saved_query_)
+        db.session.commit()

     @parameterized.expand([CtasMethod.TABLE, CtasMethod.VIEW])
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
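
A note on the Celery rename above: the diff consistently swaps the removed uppercase settings for the lowercase names from Celery's old-to-new settings map, which the new UPDATING.md entry links. The sketch below is illustrative only (the Redis URLs, import list, and beat entry are assumptions mirroring the docs examples, not Superset defaults), showing the new names alongside the old ones and the Celery 5 CLI change:

```python
# Minimal sketch of a new-style (lowercase) Celery 5 config; values here are
# illustrative assumptions, not Superset's defaults.
from celery import Celery
from celery.schedules import crontab


class CeleryConfig:
    broker_url = "redis://localhost:6379/0"        # was BROKER_URL
    result_backend = "redis://localhost:6379/0"    # was CELERY_RESULT_BACKEND
    imports = ("superset.sql_lab",)                # was CELERY_IMPORTS
    worker_prefetch_multiplier = 10                # was CELERYD_PREFETCH_MULTIPLIER
    task_acks_late = True                          # was CELERY_ACKS_LATE
    beat_schedule = {                              # was CELERYBEAT_SCHEDULE
        "reports.scheduler": {
            "task": "reports.scheduler",
            "schedule": crontab(minute="*", hour="*"),
        },
    }


app = Celery(__name__)
app.config_from_object(CeleryConfig)

# Celery 5 also moved global options before the sub-command, per step 1 of the
# linked "what's new" guide (app path shown for illustration):
#   celery worker --app=superset.tasks.celery_app:app   # Celery 4 style
#   celery --app=superset.tasks.celery_app:app worker   # Celery 5 style
```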
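
A note on the PyJWT bump: moving from `PyJWT>=1.7.1, <2` to `PyJWT>=2.0.0` is what drives the `jwt` changes in `async_query_manager.py` and the tests. In PyJWT 2.x, `encode()` returns `str` rather than `bytes` (so the removed `generate_jwt()` helper's `.decode("utf-8")` is no longer needed), and `decode()` requires an explicit `algorithms` list. A self-contained sketch, with an assumed secret and payload:

```python
# Sketch of the PyJWT 1.x -> 2.x behavior this diff adapts to; the secret and
# payload are illustrative, not Superset's.
import jwt

secret = "change-me"
payload = {"channel": "some-channel-id", "sub": "1"}

# PyJWT 2.x returns str from encode(); 1.x returned bytes, which is why the
# old helper called .decode("utf-8") on the result.
token = jwt.encode(payload, secret, algorithm="HS256")
assert isinstance(token, str)

# PyJWT 2.x makes the algorithms argument mandatory in decode(), hence the
# explicit algorithms=["HS256"] added in async_query_manager.py and the tests.
claims = jwt.decode(token, secret, algorithms=["HS256"])
assert claims == payload
```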