Revert #5991 #6035

Merged
merged 1 commit on Oct 4, 2018
52 changes: 2 additions & 50 deletions superset/cli.py
@@ -18,8 +18,7 @@
 import yaml
 
 from superset import (
-    app, dashboard_import_export_util, data, db,
-    dict_import_export_util, security_manager, utils,
+    app, data, db, dict_import_export_util, security_manager, utils,
 )
 
 config = app.config
@@ -225,53 +224,6 @@ def refresh_druid(datasource, merge):
     session.commit()
 
 
-@app.cli.command()
-@click.option(
-    '--path', '-p',
-    help='Path to a single JSON file or path containing multiple JSON files'
-         'files to import (*.json)')
-@click.option(
-    '--recursive', '-r',
-    help='recursively search the path for json files')
-def import_dashboards(path, recursive=False):
-    """Import dashboards from JSON"""
-    p = Path(path)
-    files = []
-    if p.is_file():
-        files.append(p)
-    elif p.exists() and not recursive:
-        files.extend(p.glob('*.json'))
-    elif p.exists() and recursive:
-        files.extend(p.rglob('*.json'))
-    for f in files:
-        logging.info('Importing dashboard from file %s', f)
-        try:
-            with f.open() as data_stream:
-                dashboard_import_export_util.import_dashboards(
-                    db.session, data_stream)
-        except Exception as e:
-            logging.error('Error when importing dashboard from file %s', f)
-            logging.error(e)
-
-
-@app.cli.command()
-@click.option(
-    '--dashboard-file', '-f', default=None,
-    help='Specify the the file to export to')
-@click.option(
-    '--print_stdout', '-p',
-    help='Print JSON to stdout')
-def export_dashboards(print_stdout, dashboard_file):
-    """Export dashboards to JSON"""
-    data = dashboard_import_export_util.export_dashboards(db.session)
-    if print_stdout or not dashboard_file:
-        print(data)
-    if dashboard_file:
-        logging.info('Exporting dashboards to %s', dashboard_file)
-        with open(dashboard_file, 'w') as data_stream:
-            data_stream.write(data)
-
-
 @app.cli.command()
 @click.option(
     '--path', '-p',
@@ -316,7 +268,7 @@ def import_datasources(path, sync, recursive=False):
     '--datasource-file', '-f', default=None,
     help='Specify the the file to export to')
 @click.option(
-    '--print_stdout', '-p',
+    '--print', '-p',
     help='Print YAML to stdout')
 @click.option(
     '--back-references', '-b',
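For anyone scripting dashboard imports outside the CLI after this revert, the path handling of the removed import_dashboards command (a single file, a non-recursive *.json glob, or a recursive rglob) is easy to reproduce on its own. A minimal standalone sketch of that walk; the collect_json_files helper name is illustrative and not part of Superset:

import logging
from pathlib import Path


def collect_json_files(path, recursive=False):
    # Same shape as the removed command: accept a single file, a directory
    # globbed non-recursively, or a directory walked recursively.
    p = Path(path)
    files = []
    if p.is_file():
        files.append(p)
    elif p.exists() and not recursive:
        files.extend(p.glob('*.json'))
    elif p.exists() and recursive:
        files.extend(p.rglob('*.json'))
    return files


for f in collect_json_files('dashboards', recursive=True):
    logging.info('Found dashboard export file %s', f)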
39 changes: 0 additions & 39 deletions superset/dashboard_import_export_util.py

This file was deleted.

16 changes: 13 additions & 3 deletions superset/views/core.py
@@ -34,8 +34,9 @@
 from werkzeug.utils import secure_filename
 
 from superset import (
-    app, appbuilder, cache, dashboard_import_export_util, db, results_backend,
-    security_manager, sql_lab, utils, viz)
+    app, appbuilder, cache, db, results_backend, security_manager, sql_lab, utils,
+    viz,
+)
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.connectors.sqla.models import AnnotationDatasource, SqlaTable
 from superset.exceptions import SupersetException
@@ -1237,7 +1238,16 @@ def import_dashboards(self):
         """Overrides the dashboards using json instances from the file."""
         f = request.files.get('file')
         if request.method == 'POST' and f:
-            dashboard_import_export_util.import_dashboards(db.session, f.stream)
+            current_tt = int(time.time())
+            data = json.loads(f.stream.read(), object_hook=utils.decode_dashboards)
+            # TODO: import DRUID datasources
+            for table in data['datasources']:
+                type(table).import_obj(table, import_time=current_tt)
+            db.session.commit()
+            for dashboard in data['dashboards']:
+                models.Dashboard.import_obj(
+                    dashboard, import_time=current_tt)
+            db.session.commit()
             return redirect('/dashboard/list/')
         return self.render_template('superset/import_dashboards.html')
 
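The inlined view code above leans on utils.decode_dashboards as a json.loads object_hook to rebuild model objects from tagged JSON before calling import_obj on them. A minimal, self-contained sketch of that object_hook pattern; the tag key and the Dashboard stand-in class below are illustrative, and Superset's actual keys and models may differ:

import json


class Dashboard:
    # Stand-in for a model class, used only to show the pattern.
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)


def decode_dashboards_sketch(obj):
    # json.loads calls this hook for every decoded dict; dicts carrying a
    # recognized tag key are rebuilt into richer objects, everything else
    # passes through unchanged.
    if '__Dashboard__' in obj:
        return Dashboard(**obj['__Dashboard__'])
    return obj


payload = '{"dashboards": [{"__Dashboard__": {"slug": "sales", "id": 7}}]}'
data = json.loads(payload, object_hook=decode_dashboards_sketch)
assert data['dashboards'][0].slug == 'sales'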
33 changes: 1 addition & 32 deletions tests/import_export_tests.py
@@ -10,7 +10,7 @@
 
 from sqlalchemy.orm.session import make_transient
 
-from superset import dashboard_import_export_util, db, utils
+from superset import db, utils
 from superset.connectors.druid.models import (
     DruidColumn, DruidDatasource, DruidMetric,
 )
@@ -149,9 +149,6 @@ def get_table_by_name(self, name):
         return db.session.query(SqlaTable).filter_by(
             table_name=name).first()
 
-    def get_num_dashboards(self):
-        return db.session.query(models.Dashboard).count()
-
     def assert_dash_equals(self, expected_dash, actual_dash,
                            check_position=True):
         self.assertEquals(expected_dash.slug, actual_dash.slug)
@@ -550,34 +547,6 @@ def test_import_druid_override_identical(self):
         self.assert_datasource_equals(
             copy_datasource, self.get_datasource(imported_id))
 
-    def test_export_dashboards_util(self):
-        dashboards_json_dump = dashboard_import_export_util.export_dashboards(
-            db.session)
-        dashboards_objects = json.loads(
-            dashboards_json_dump,
-            object_hook=utils.decode_dashboards,
-        )
-
-        exported_dashboards = dashboards_objects['dashboards']
-        for dashboard in exported_dashboards:
-            id_ = dashboard.id
-            dash = self.get_dash(id_)
-            self.assert_dash_equals(dash, dashboard)
-            self.assertEquals(
-                dash.id, json.loads(
-                    dashboard.json_metadata,
-                    object_hook=utils.decode_dashboards,
-                )['remote_id'],
-            )
-        numDasboards = self.get_num_dashboards()
-        self.assertEquals(numDasboards, len(exported_dashboards))
-
-        exported_tables = dashboards_objects['datasources']
-        for exported_table in exported_tables:
-            id_ = exported_table.id
-            table = self.get_table(id_)
-            self.assert_table_equals(table, exported_table)
-
 
 if __name__ == '__main__':
     unittest.main()
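The removed test above verified the export/import round trip by checking that each exported dashboard carries its original primary key as remote_id inside json_metadata, so a later import can match it back to an existing record. A tiny standalone illustration of that bookkeeping, using plain json and hypothetical values rather than Superset models:

import json

# Hypothetical exported metadata: the exporter stashes the source system's
# primary key as "remote_id" so the importer can reconcile the dashboard
# with a row it may already have.
json_metadata = json.dumps({'remote_id': 42})

# On the importing side, remote_id is read back and compared against the
# id known locally, which is what the deleted test asserted.
assert json.loads(json_metadata)['remote_id'] == 42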