This is an automated email from the ASF dual-hosted git repository.

beto pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new bbfd69a  [utils.py] gathering/refactoring into a "utils/" folder (#6095)
bbfd69a is described below

commit bbfd69a138e97c062774c8f5997c9ecfce395fe4
Author: Maxime Beauchemin <maximebeauche...@gmail.com>
AuthorDate: Tue Oct 16 17:59:34 2018 -0700

    [utils.py] gathering/refactoring into a "utils/" folder (#6095)
    
    * [utils] gathering/refactoring into a "utils/" folder
    
    Moving the current utils.py to utils/core.py, and moving other *util*
    modules under this new "utils/" package as well.
    
    Next steps include eroding "utils/core.py" by breaking it down
    into smaller modules.
    
    * Improve tests
    
    * Make loading examples in scope for tests
    
    * Remove test class attrs examples_loaded and requires_examples
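    
    As a minimal, illustrative sketch of what the rename means for call
    sites (import paths taken from the diff below):
    
        # before this commit: helpers lived in the flat superset/utils.py
        from superset import utils
        from superset.utils import DTTM_ALIAS
    
        # after this commit: utils.py becomes superset/utils/core.py
        from superset.utils import core as utils
        from superset.utils.core import DTTM_ALIAS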
---
 superset/__init__.py                               | 12 ++---
 superset/cli.py                                    | 16 ++++---
 superset/connectors/base/models.py                 |  2 +-
 superset/connectors/druid/models.py                | 11 ++---
 superset/connectors/druid/views.py                 |  3 +-
 superset/connectors/sqla/models.py                 | 22 +++++-----
 superset/connectors/sqla/views.py                  |  3 +-
 superset/data/__init__.py                          | 38 ++++++++--------
 superset/dataframe.py                              |  2 +-
 superset/db_engine_specs.py                        |  5 ++-
 ...9ee0e3_fix_wrong_constraint_on_table_columns.py |  9 ++--
 .../versions/1a1d627ebd8e_position_json.py         |  9 ++--
 .../versions/3b626e2a6783_sync_db_with_models.py   | 27 ++++++------
 ...1c4c6_migrate_num_period_compare_and_period_.py |  2 +-
 superset/migrations/versions/4736ec66ce19_.py      | 12 ++---
 .../versions/bddc498dd179_adhoc_filters.py         |  7 +--
 superset/migrations/versions/f231d82b9b26_.py      |  2 +-
 superset/models/core.py                            |  6 +--
 superset/models/helpers.py                         |  2 +-
 superset/models/sql_lab.py                         |  2 +-
 superset/sql_lab.py                                |  2 +-
 superset/utils/__init__.py                         |  0
 superset/{cache_util.py => utils/cache.py}         |  0
 superset/{utils.py => utils/core.py}               |  0
 .../dashboard_import_export.py}                    |  4 +-
 .../dict_import_export.py}                         |  0
 .../{import_util.py => utils/import_datasource.py} |  0
 superset/views/base.py                             |  3 +-
 superset/views/core.py                             | 31 +++++++------
 superset/viz.py                                    |  5 ++-
 tests/access_tests.py                              |  4 +-
 tests/base_tests.py                                | 23 ++--------
 tests/cache_tests.py                               |  5 ++-
 tests/celery_tests.py                              |  2 +-
 tests/core_tests.py                                |  9 ++--
 tests/dashboard_tests.py                           |  2 -
 tests/datasource_tests.py                          |  2 -
 tests/dict_import_export_tests.py                  |  2 +-
 tests/druid_tests.py                               |  5 ++-
 tests/email_tests.py                               |  8 ++--
 tests/import_export_tests.py                       |  9 ++--
 tests/load_examples_test.py                        | 51 ++++++++++++++++++++++
 tests/model_tests.py                               |  2 +-
 tests/sqllab_tests.py                              |  6 +--
 tests/utils_tests.py                               | 30 ++++++-------
 tests/viz_tests.py                                 |  4 +-
 tox.ini                                            |  3 +-
 47 files changed, 226 insertions(+), 178 deletions(-)

diff --git a/superset/__init__.py b/superset/__init__.py
index 7a1213c..b005452 100644
--- a/superset/__init__.py
+++ b/superset/__init__.py
@@ -13,9 +13,11 @@ from flask_migrate import Migrate
 from flask_wtf.csrf import CSRFProtect
 from werkzeug.contrib.fixers import ProxyFix
 
-from superset import config, utils
+from superset import config
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.security import SupersetSecurityManager
+from superset.utils.core import (
+    get_update_perms_flag, pessimistic_connection_handling, setup_cache)
 
 APP_DIR = os.path.dirname(__file__)
 CONFIG_MODULE = os.environ.get('SUPERSET_CONFIG', 'superset.config')
@@ -112,10 +114,10 @@ if conf.get('WTF_CSRF_ENABLED'):
     for ex in csrf_exempt_list:
         csrf.exempt(ex)
 
-utils.pessimistic_connection_handling(db.engine)
+pessimistic_connection_handling(db.engine)
 
-cache = utils.setup_cache(app, conf.get('CACHE_CONFIG'))
-tables_cache = utils.setup_cache(app, conf.get('TABLE_NAMES_CACHE_CONFIG'))
+cache = setup_cache(app, conf.get('CACHE_CONFIG'))
+tables_cache = setup_cache(app, conf.get('TABLE_NAMES_CACHE_CONFIG'))
 
 migrate = Migrate(app, db, directory=APP_DIR + '/migrations')
 
@@ -183,7 +185,7 @@ appbuilder = AppBuilder(
     base_template='superset/base.html',
     indexview=MyIndexView,
     security_manager_class=custom_sm,
-    update_perms=utils.get_update_perms_flag(),
+    update_perms=get_update_perms_flag(),
 )
 
 security_manager = appbuilder.sm
diff --git a/superset/cli.py b/superset/cli.py
index a177301..51a1db4 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -12,9 +12,10 @@ import werkzeug.serving
 import yaml
 
 from superset import (
-    app, dashboard_import_export_util, data, db,
-    dict_import_export_util, security_manager, utils,
+    app, data, db, security_manager,
 )
+from superset.utils import (
+    core as utils, dashboard_import_export, dict_import_export)
 
 config = app.config
 celery_app = utils.get_celery_app(config)
@@ -241,7 +242,7 @@ def import_dashboards(path, recursive=False):
         logging.info('Importing dashboard from file %s', f)
         try:
             with f.open() as data_stream:
-                dashboard_import_export_util.import_dashboards(
+                dashboard_import_export.import_dashboards(
                     db.session, data_stream)
         except Exception as e:
             logging.error('Error when importing dashboard from file %s', f)
@@ -257,7 +258,7 @@ def import_dashboards(path, recursive=False):
     help='Print JSON to stdout')
 def export_dashboards(print_stdout, dashboard_file):
     """Export dashboards to JSON"""
-    data = dashboard_import_export_util.export_dashboards(db.session)
+    data = dashboard_import_export.export_dashboards(db.session)
     if print_stdout or not dashboard_file:
         print(data)
     if dashboard_file:
@@ -296,7 +297,7 @@ def import_datasources(path, sync, recursive=False):
         logging.info('Importing datasources from file %s', f)
         try:
             with f.open() as data_stream:
-                dict_import_export_util.import_from_dict(
+                dict_import_export.import_from_dict(
                     db.session,
                     yaml.safe_load(data_stream),
                     sync=sync_array)
@@ -321,7 +322,7 @@ def import_datasources(path, sync, recursive=False):
 def export_datasources(print_stdout, datasource_file,
                        back_references, include_defaults):
     """Export datasources to YAML"""
-    data = dict_import_export_util.export_to_dict(
+    data = dict_import_export.export_to_dict(
         session=db.session,
         recursive=True,
         back_references=back_references,
@@ -340,7 +341,7 @@ def export_datasources(print_stdout, datasource_file,
     help='Include parent back references')
 def export_datasource_schema(back_references):
     """Export datasource YAML schema to stdout"""
-    data = dict_import_export_util.export_schema_to_dict(
+    data = dict_import_export.export_schema_to_dict(
         back_references=back_references)
     yaml.safe_dump(data, stdout, default_flow_style=False)
 
@@ -416,6 +417,7 @@ def load_test_users():
 
     Syncs permissions for those users/roles
     """
+    print(Fore.GREEN + 'Loading a set of users for unit tests')
     load_test_users_run()
 
 
diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py
index c51e078..7f159a6 100644
--- a/superset/connectors/base/models.py
+++ b/superset/connectors/base/models.py
@@ -8,9 +8,9 @@ from sqlalchemy import (
 from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.orm import foreign, relationship
 
-from superset import utils
 from superset.models.core import Slice
 from superset.models.helpers import AuditMixinNullable, ImportMixin
+from superset.utils import core as utils
 
 
 class BaseDatasource(AuditMixinNullable, ImportMixin):
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index eebfad0..5dd7aa1 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -30,13 +30,14 @@ from sqlalchemy import (
 )
 from sqlalchemy.orm import backref, relationship
 
-from superset import conf, db, import_util, security_manager, utils
+from superset import conf, db, security_manager
 from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
 from superset.exceptions import MetricPermException, SupersetException
 from superset.models.helpers import (
     AuditMixinNullable, ImportMixin, QueryResult,
 )
-from superset.utils import (
+from superset.utils import core as utils, import_datasource
+from superset.utils.core import (
     DimSelector, DTTM_ALIAS, flasher,
 )
 
@@ -392,7 +393,7 @@ class DruidColumn(Model, BaseColumn):
                 DruidColumn.datasource_id == lookup_column.datasource_id,
                 DruidColumn.column_name == lookup_column.column_name).first()
 
-        return import_util.import_simple_obj(db.session, i_column, lookup_obj)
+        return import_datasource.import_simple_obj(db.session, i_column, lookup_obj)
 
 
 class DruidMetric(Model, BaseMetric):
@@ -444,7 +445,7 @@ class DruidMetric(Model, BaseMetric):
             return db.session.query(DruidMetric).filter(
                 DruidMetric.datasource_id == lookup_metric.datasource_id,
                 DruidMetric.metric_name == lookup_metric.metric_name).first()
-        return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
+        return import_datasource.import_simple_obj(db.session, i_metric, lookup_obj)
 
 
 class DruidDatasource(Model, BaseDatasource):
@@ -580,7 +581,7 @@ class DruidDatasource(Model, BaseDatasource):
         def lookup_cluster(d):
             return db.session.query(DruidCluster).filter_by(
                 cluster_name=d.cluster_name).one()
-        return import_util.import_datasource(
+        return import_datasource.import_datasource(
             db.session, i_datasource, lookup_cluster, lookup_datasource,
             import_time)
 
diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py
index f660882..f6b3bb0 100644
--- a/superset/connectors/druid/views.py
+++ b/superset/connectors/druid/views.py
@@ -10,9 +10,10 @@ from flask_appbuilder.security.decorators import has_access
 from flask_babel import gettext as __
 from flask_babel import lazy_gettext as _
 
-from superset import appbuilder, db, security_manager, utils
+from superset import appbuilder, db, security_manager
 from superset.connectors.base.views import DatasourceModelView
 from superset.connectors.connector_registry import ConnectorRegistry
+from superset.utils import core as utils
 from superset.views.base import (
     BaseSupersetView, DatasourceFilter, DeleteMixin,
     get_datasource_exist_error_msg, ListWidgetWithCheckboxes, SupersetModelView,
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 7dce50d..4b32586 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -17,13 +17,13 @@ from sqlalchemy.sql import column, literal_column, table, text
 from sqlalchemy.sql.expression import TextAsFrom
 import sqlparse
 
-from superset import app, db, import_util, security_manager, utils
+from superset import app, db, security_manager
 from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
 from superset.jinja_context import get_template_processor
 from superset.models.annotations import Annotation
 from superset.models.core import Database
 from superset.models.helpers import QueryResult
-from superset.utils import DTTM_ALIAS, QueryStatus
+from superset.utils import core as utils, import_datasource
 
 config = app.config
 
@@ -44,11 +44,11 @@ class AnnotationDatasource(BaseDatasource):
             qry = qry.filter(Annotation.start_dttm >= query_obj['from_dttm'])
         if query_obj['to_dttm']:
             qry = qry.filter(Annotation.end_dttm <= query_obj['to_dttm'])
-        status = QueryStatus.SUCCESS
+        status = utils.QueryStatus.SUCCESS
         try:
             df = pd.read_sql_query(qry.statement, db.engine)
         except Exception as e:
-            status = QueryStatus.FAILED
+            status = utils.QueryStatus.FAILED
             logging.exception(e)
             error_message = (
                 utils.error_msg_from_exception(e))
@@ -120,7 +120,7 @@ class TableColumn(Model, BaseColumn):
         pdf = self.python_date_format
         is_epoch = pdf in ('epoch_s', 'epoch_ms')
         if not self.expression and not time_grain and not is_epoch:
-            return column(self.column_name, type_=DateTime).label(DTTM_ALIAS)
+            return column(self.column_name, type_=DateTime).label(utils.DTTM_ALIAS)
 
         expr = self.expression or self.column_name
         if is_epoch:
@@ -134,7 +134,7 @@ class TableColumn(Model, BaseColumn):
             grain = self.table.database.grains_dict().get(time_grain)
             if grain:
                 expr = grain.function.format(col=expr)
-        return literal_column(expr, type_=DateTime).label(DTTM_ALIAS)
+        return literal_column(expr, type_=DateTime).label(utils.DTTM_ALIAS)
 
     @classmethod
     def import_obj(cls, i_column):
@@ -142,7 +142,7 @@ class TableColumn(Model, BaseColumn):
             return db.session.query(TableColumn).filter(
                 TableColumn.table_id == lookup_column.table_id,
                 TableColumn.column_name == lookup_column.column_name).first()
-        return import_util.import_simple_obj(db.session, i_column, lookup_obj)
+        return import_datasource.import_simple_obj(db.session, i_column, lookup_obj)
 
     def dttm_sql_literal(self, dttm):
         """Convert datetime object to a SQL expression string
@@ -243,7 +243,7 @@ class SqlMetric(Model, BaseMetric):
             return db.session.query(SqlMetric).filter(
                 SqlMetric.table_id == lookup_metric.table_id,
                 SqlMetric.metric_name == lookup_metric.metric_name).first()
-        return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
+        return import_datasource.import_simple_obj(db.session, i_metric, lookup_obj)
 
 
 class SqlaTable(Model, BaseDatasource):
@@ -776,13 +776,13 @@ class SqlaTable(Model, BaseDatasource):
     def query(self, query_obj):
         qry_start_dttm = datetime.now()
         sql = self.get_query_str(query_obj)
-        status = QueryStatus.SUCCESS
+        status = utils.QueryStatus.SUCCESS
         error_message = None
         df = None
         try:
             df = self.database.get_df(sql, self.schema)
         except Exception as e:
-            status = QueryStatus.FAILED
+            status = utils.QueryStatus.FAILED
             logging.exception(e)
             error_message = (
                 self.database.db_engine_spec.extract_error_message(e))
@@ -881,7 +881,7 @@ class SqlaTable(Model, BaseDatasource):
         def lookup_database(table):
             return db.session.query(Database).filter_by(
                 database_name=table.params_dict['database_name']).one()
-        return import_util.import_datasource(
+        return import_datasource.import_datasource(
             db.session, i_datasource, lookup_database, lookup_sqlatable,
             import_time)
 
diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py
index feaee36..e02bde8 100644
--- a/superset/connectors/sqla/views.py
+++ b/superset/connectors/sqla/views.py
@@ -9,8 +9,9 @@ from flask_babel import gettext as __
 from flask_babel import lazy_gettext as _
 from past.builtins import basestring
 
-from superset import appbuilder, db, security_manager, utils
+from superset import appbuilder, db, security_manager
 from superset.connectors.base.views import DatasourceModelView
+from superset.utils import core as utils
 from superset.views.base import (
     DatasourceFilter, DeleteMixin, get_datasource_exist_error_msg,
     ListWidgetWithCheckboxes, SupersetModelView, YamlExportMixin,
diff --git a/superset/data/__init__.py b/superset/data/__init__.py
index 945efb5..49873eb 100644
--- a/superset/data/__init__.py
+++ b/superset/data/__init__.py
@@ -7,15 +7,16 @@ import os
 import random
 import textwrap
 
-import pandas as pd
-from sqlalchemy import BigInteger, Date, DateTime, Float, String, Text
 import geohash
+import pandas as pd
 import polyline
+from sqlalchemy import BigInteger, Date, DateTime, Float, String, Text
 
-from superset import app, db, utils
+from superset import app, db
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.connectors.sqla.models import TableColumn
 from superset.models import core as models
+from superset.utils.core import get_or_create_main_db, readfile
 
 # Shortcuts
 DB = models.Database
@@ -26,7 +27,7 @@ TBL = ConnectorRegistry.sources['table']
 
 config = app.config
 
-DATA_FOLDER = os.path.join(config.get("BASE_DIR"), 'data')
+DATA_FOLDER = os.path.join(config.get('BASE_DIR'), 'data')
 
 misc_dash_slices = set()  # slices assembled in a "Misc Chart" dashboard
 
@@ -38,7 +39,8 @@ def update_slice_ids(layout_dict, slices):
     ]
     sorted_charts = sorted(charts, key=lambda k: k['meta']['chartId'])
     for i, chart_component in enumerate(sorted_charts):
-        chart_component['meta']['chartId'] = int(slices[i].id)
+        if i < len(slices):
+            chart_component['meta']['chartId'] = int(slices[i].id)
 
 
 def merge_slice(slc):
@@ -77,7 +79,7 @@ def load_energy():
     if not tbl:
         tbl = TBL(table_name=tbl_name)
     tbl.description = "Energy consumption"
-    tbl.database = utils.get_or_create_main_db()
+    tbl.database = get_or_create_main_db()
     db.session.merge(tbl)
     db.session.commit()
     tbl.fetch_metadata()
@@ -183,9 +185,9 @@ def load_world_bank_health_n_pop():
     tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
     if not tbl:
         tbl = TBL(table_name=tbl_name)
-    tbl.description = utils.readfile(os.path.join(DATA_FOLDER, 'countries.md'))
+    tbl.description = readfile(os.path.join(DATA_FOLDER, 'countries.md'))
     tbl.main_dttm_col = 'year'
-    tbl.database = utils.get_or_create_main_db()
+    tbl.database = get_or_create_main_db()
     tbl.filter_select_enabled = True
     db.session.merge(tbl)
     db.session.commit()
@@ -723,7 +725,7 @@ def load_birth_names():
     if not obj:
         obj = TBL(table_name='birth_names')
     obj.main_dttm_col = 'ds'
-    obj.database = utils.get_or_create_main_db()
+    obj.database = get_or_create_main_db()
     obj.filter_select_enabled = True
 
     if not any(col.column_name == 'num_california' for col in obj.columns):
@@ -1256,7 +1258,7 @@ def load_unicode_test_data():
     if not obj:
         obj = TBL(table_name='unicode_test')
     obj.main_dttm_col = 'dttm'
-    obj.database = utils.get_or_create_main_db()
+    obj.database = get_or_create_main_db()
     db.session.merge(obj)
     db.session.commit()
     obj.fetch_metadata()
@@ -1369,7 +1371,7 @@ def load_random_time_series_data():
     if not obj:
         obj = TBL(table_name='random_time_series')
     obj.main_dttm_col = 'ds'
-    obj.database = utils.get_or_create_main_db()
+    obj.database = get_or_create_main_db()
     db.session.merge(obj)
     db.session.commit()
     obj.fetch_metadata()
@@ -1432,7 +1434,7 @@ def load_country_map_data():
     if not obj:
         obj = TBL(table_name='birth_france_by_region')
     obj.main_dttm_col = 'dttm'
-    obj.database = utils.get_or_create_main_db()
+    obj.database = get_or_create_main_db()
     db.session.merge(obj)
     db.session.commit()
     obj.fetch_metadata()
@@ -1507,7 +1509,7 @@ def load_long_lat_data():
     if not obj:
         obj = TBL(table_name='long_lat')
     obj.main_dttm_col = 'datetime'
-    obj.database = utils.get_or_create_main_db()
+    obj.database = get_or_create_main_db()
     db.session.merge(obj)
     db.session.commit()
     obj.fetch_metadata()
@@ -1568,7 +1570,7 @@ def load_multiformat_time_series_data():
     if not obj:
         obj = TBL(table_name='multiformat_time_series')
     obj.main_dttm_col = 'ds'
-    obj.database = utils.get_or_create_main_db()
+    obj.database = get_or_create_main_db()
     dttm_and_expr_dict = {
         'ds': [None, None],
         'ds2': [None, None],
@@ -2391,7 +2393,7 @@ def load_flights():
     if not tbl:
         tbl = TBL(table_name=tbl_name)
     tbl.description = "Random set of flights in the US"
-    tbl.database = utils.get_or_create_main_db()
+    tbl.database = get_or_create_main_db()
     db.session.merge(tbl)
     db.session.commit()
     tbl.fetch_metadata()
@@ -2422,7 +2424,7 @@ def load_paris_iris_geojson():
     if not tbl:
         tbl = TBL(table_name=tbl_name)
     tbl.description = "Map of Paris"
-    tbl.database = utils.get_or_create_main_db()
+    tbl.database = get_or_create_main_db()
     db.session.merge(tbl)
     db.session.commit()
     tbl.fetch_metadata()
@@ -2452,7 +2454,7 @@ def load_sf_population_polygons():
     if not tbl:
         tbl = TBL(table_name=tbl_name)
     tbl.description = "Population density of San Francisco"
-    tbl.database = utils.get_or_create_main_db()
+    tbl.database = get_or_create_main_db()
     db.session.merge(tbl)
     db.session.commit()
     tbl.fetch_metadata()
@@ -2482,7 +2484,7 @@ def load_bart_lines():
     if not tbl:
         tbl = TBL(table_name=tbl_name)
     tbl.description = "BART lines"
-    tbl.database = utils.get_or_create_main_db()
+    tbl.database = get_or_create_main_db()
     db.session.merge(tbl)
     db.session.commit()
     tbl.fetch_metadata()
diff --git a/superset/dataframe.py b/superset/dataframe.py
index e3e75cd..d410d24 100644
--- a/superset/dataframe.py
+++ b/superset/dataframe.py
@@ -15,7 +15,7 @@ from pandas.core.common import _maybe_box_datetimelike
 from pandas.core.dtypes.dtypes import ExtensionDtype
 from past.builtins import basestring
 
-from superset.utils import JS_MAX_INTEGER
+from superset.utils.core import JS_MAX_INTEGER
 
 INFER_COL_TYPES_THRESHOLD = 95
 INFER_COL_TYPES_SAMPLE_SIZE = 100
diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py
index 20d57d6..8ab5c07 100644
--- a/superset/db_engine_specs.py
+++ b/superset/db_engine_specs.py
@@ -35,10 +35,11 @@ import sqlparse
 from tableschema import Table
 from werkzeug.utils import secure_filename
 
-from superset import app, cache_util, conf, db, sql_parse, utils
+from superset import app, conf, db, sql_parse
 from superset.exceptions import SupersetTemplateException
-from superset.utils import QueryStatus
+from superset.utils import cache as cache_util, core as utils
 
+QueryStatus = utils.QueryStatus
 config = app.config
 
 tracking_url_trans = conf.get('TRACKING_URL_TRANSFORMER')
diff --git a/superset/migrations/versions/1226819ee0e3_fix_wrong_constraint_on_table_columns.py b/superset/migrations/versions/1226819ee0e3_fix_wrong_constraint_on_table_columns.py
index 7874086..2b360ef 100644
--- a/superset/migrations/versions/1226819ee0e3_fix_wrong_constraint_on_table_columns.py
+++ b/superset/migrations/versions/1226819ee0e3_fix_wrong_constraint_on_table_columns.py
@@ -5,14 +5,15 @@ Revises: 956a063c52b3
 Create Date: 2016-05-27 15:03:32.980343
 
 """
+from alembic import op
+from superset import db
+from superset.utils.core import generic_find_constraint_name
+import logging
+
 # revision identifiers, used by Alembic.
 revision = '1226819ee0e3'
 down_revision = '956a063c52b3'
 
-from alembic import op
-from superset import db
-from superset.utils import generic_find_constraint_name
-import logging
 
 naming_convention = {
     "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
diff --git a/superset/migrations/versions/1a1d627ebd8e_position_json.py b/superset/migrations/versions/1a1d627ebd8e_position_json.py
index e344740..7448552 100644
--- a/superset/migrations/versions/1a1d627ebd8e_position_json.py
+++ b/superset/migrations/versions/1a1d627ebd8e_position_json.py
@@ -6,14 +6,15 @@ Create Date: 2018-08-13 11:30:07.101702
 
 """
 
-# revision identifiers, used by Alembic.
-revision = '1a1d627ebd8e'
-down_revision = '0c5070e96b57'
 
 from alembic import op
 import sqlalchemy as sa
 
-from superset.utils import MediumText
+from superset.utils.core import MediumText
+
+# revision identifiers, used by Alembic.
+revision = '1a1d627ebd8e'
+down_revision = '0c5070e96b57'
 
 
 def upgrade():
diff --git a/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py b/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py
index 03edfe5..0a0f802 100644
--- a/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py
+++ b/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py
@@ -8,18 +8,17 @@ Revises: 5e4a03ef0bf0
 Create Date: 2016-09-22 10:21:33.618976
 
 """
-
-# revision identifiers, used by Alembic.
-revision = '3b626e2a6783'
-down_revision = 'eca4694defa7'
-
 from alembic import op
 from superset import db
-from superset.utils import generic_find_constraint_name, table_has_constraint
+from superset.utils.core import generic_find_constraint_name
 import logging
 import sqlalchemy as sa
 from sqlalchemy.dialects import mysql
 
+# revision identifiers, used by Alembic.
+revision = '3b626e2a6783'
+down_revision = 'eca4694defa7'
+
 
 def upgrade():
     # cleanup after: https://github.com/airbnb/superset/pull/1078
@@ -31,11 +30,11 @@ def upgrade():
             table='slices', columns={'table_id'},
             referenced='tables', db=db)
 
-        with op.batch_alter_table("slices") as batch_op:
+        with op.batch_alter_table('slices') as batch_op:
             if slices_ibfk_1:
-                batch_op.drop_constraint(slices_ibfk_1, type_="foreignkey")
+                batch_op.drop_constraint(slices_ibfk_1, type_='foreignkey')
             if slices_ibfk_2:
-                batch_op.drop_constraint(slices_ibfk_2, type_="foreignkey")
+                batch_op.drop_constraint(slices_ibfk_2, type_='foreignkey')
             batch_op.drop_column('druid_datasource_id')
             batch_op.drop_column('table_id')
     except Exception as e:
@@ -43,7 +42,7 @@ def upgrade():
 
     # fixed issue: https://github.com/airbnb/superset/issues/466
     try:
-        with op.batch_alter_table("columns") as batch_op:
+        with op.batch_alter_table('columns') as batch_op:
             batch_op.create_foreign_key(
                 None, 'datasources', ['datasource_name'], ['datasource_name'])
     except Exception as e:
@@ -69,7 +68,7 @@ def downgrade():
         logging.warning(str(e))
 
     try:
-        with op.batch_alter_table("slices") as batch_op:
+        with op.batch_alter_table('slices') as batch_op:
             batch_op.add_column(sa.Column(
                 'table_id', mysql.INTEGER(display_width=11),
                 autoincrement=False, nullable=True))
@@ -88,15 +87,15 @@ def downgrade():
         fk_columns = generic_find_constraint_name(
             table='columns', columns={'datasource_name'},
             referenced='datasources', db=db)
-        with op.batch_alter_table("columns") as batch_op:
-            batch_op.drop_constraint(fk_columns, type_="foreignkey")
+        with op.batch_alter_table('columns') as batch_op:
+            batch_op.drop_constraint(fk_columns, type_='foreignkey')
     except Exception as e:
         logging.warning(str(e))
 
     op.add_column(
         'query', sa.Column('name', sa.String(length=256), nullable=True))
     try:
-        with op.batch_alter_table("query") as batch_op:
+        with op.batch_alter_table('query') as batch_op:
             batch_op.drop_constraint('client_id', type_='unique')
     except Exception as e:
         logging.warning(str(e))
diff --git a/superset/migrations/versions/3dda56f1c4c6_migrate_num_period_compare_and_period_.py b/superset/migrations/versions/3dda56f1c4c6_migrate_num_period_compare_and_period_.py
index 5f7704e..d52849c 100644
--- a/superset/migrations/versions/3dda56f1c4c6_migrate_num_period_compare_and_period_.py
+++ b/superset/migrations/versions/3dda56f1c4c6_migrate_num_period_compare_and_period_.py
@@ -17,7 +17,7 @@ from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy import Column, Integer, String, Text
 
 from superset import db
-from superset.utils import parse_human_timedelta
+from superset.utils.core import parse_human_timedelta
 
 revision = '3dda56f1c4c6'
 down_revision = 'bddc498dd179'
diff --git a/superset/migrations/versions/4736ec66ce19_.py b/superset/migrations/versions/4736ec66ce19_.py
index 36f1ed4..e314102 100644
--- a/superset/migrations/versions/4736ec66ce19_.py
+++ b/superset/migrations/versions/4736ec66ce19_.py
@@ -11,7 +11,7 @@ import logging
 from alembic import op
 import sqlalchemy as sa
 
-from superset.utils import (
+from superset.utils.core import (
     generic_find_fk_constraint_name,
     generic_find_fk_constraint_names,
     generic_find_uq_constraint_name,
@@ -203,8 +203,8 @@ def downgrade():
 
         # Re-create the foreign key associated with the cluster_name column.
         batch_op.create_foreign_key(
-                'fk_{}_datasource_id_datasources'.format(foreign),
-                'clusters',
-                ['cluster_name'],
-                ['cluster_name'],
-            )
+            'fk_{}_datasource_id_datasources'.format(foreign),
+            'clusters',
+            ['cluster_name'],
+            ['cluster_name'],
+        )
diff --git a/superset/migrations/versions/bddc498dd179_adhoc_filters.py b/superset/migrations/versions/bddc498dd179_adhoc_filters.py
index c22e059..e277843 100644
--- a/superset/migrations/versions/bddc498dd179_adhoc_filters.py
+++ b/superset/migrations/versions/bddc498dd179_adhoc_filters.py
@@ -20,7 +20,8 @@ from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy import Column, Integer, Text
 
 from superset import db
-from superset import utils
+from superset.utils.core import (
+    convert_legacy_filters_into_adhoc, split_adhoc_filters_into_base_filters)
 
 
 Base = declarative_base()
@@ -40,7 +41,7 @@ def upgrade():
     for slc in session.query(Slice).all():
         try:
             params = json.loads(slc.params)
-            utils.convert_legacy_filters_into_adhoc(params)
+            convert_legacy_filters_into_adhoc(params)
             slc.params = json.dumps(params, sort_keys=True)
         except Exception:
             pass
@@ -56,7 +57,7 @@ def downgrade():
     for slc in session.query(Slice).all():
         try:
             params = json.loads(slc.params)
-            utils.split_adhoc_filters_into_base_filters(params)
+            split_adhoc_filters_into_base_filters(params)
 
             if 'adhoc_filters' in params:
                 del params['adhoc_filters']
diff --git a/superset/migrations/versions/f231d82b9b26_.py b/superset/migrations/versions/f231d82b9b26_.py
index 5d3acbe..e96ed1d 100644
--- a/superset/migrations/versions/f231d82b9b26_.py
+++ b/superset/migrations/versions/f231d82b9b26_.py
@@ -8,7 +8,7 @@ Create Date: 2018-03-20 19:47:54.991259
 from alembic import op
 import sqlalchemy as sa
 
-from superset.utils import generic_find_uq_constraint_name
+from superset.utils.core import generic_find_uq_constraint_name
 
 # revision identifiers, used by Alembic.
 revision = 'f231d82b9b26'
diff --git a/superset/models/core.py b/superset/models/core.py
index 2bc0206..b568e40 100644
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -29,12 +29,12 @@ from sqlalchemy.schema import UniqueConstraint
 from sqlalchemy_utils import EncryptedType
 import sqlparse
 
-from superset import app, db, db_engine_specs, security_manager, utils
+from superset import app, db, db_engine_specs, security_manager
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.legacy import update_time_range
 from superset.models.helpers import AuditMixinNullable, ImportMixin
 from superset.models.user_attributes import UserAttribute
-from superset.utils import MediumText
+from superset.utils import core as utils
 from superset.viz import viz_types
 install_aliases()
 from urllib import parse  # noqa
@@ -358,7 +358,7 @@ class Dashboard(Model, AuditMixinNullable, ImportMixin):
     __tablename__ = 'dashboards'
     id = Column(Integer, primary_key=True)
     dashboard_title = Column(String(500))
-    position_json = Column(MediumText())
+    position_json = Column(utils.MediumText())
     description = Column(Text)
     css = Column(Text)
     json_metadata = Column(Text)
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 6cefc48..d45ff83 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -15,7 +15,7 @@ from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.orm.exc import MultipleResultsFound
 import yaml
 
-from superset.utils import QueryStatus
+from superset.utils.core import QueryStatus
 
 
 def json_to_dict(json_str):
diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py
index f36d3ff..c6d5846 100644
--- a/superset/models/sql_lab.py
+++ b/superset/models/sql_lab.py
@@ -14,7 +14,7 @@ from sqlalchemy.orm import backref, relationship
 
 from superset import security_manager
 from superset.models.helpers import AuditMixinNullable
-from superset.utils import QueryStatus, user_label
+from superset.utils.core import QueryStatus, user_label
 
 install_aliases()
 
diff --git a/superset/sql_lab.py b/superset/sql_lab.py
index b2dde13..d211f02 100644
--- a/superset/sql_lab.py
+++ b/superset/sql_lab.py
@@ -14,7 +14,7 @@ from sqlalchemy.pool import NullPool
 from superset import app, dataframe, db, results_backend, security_manager
 from superset.models.sql_lab import Query
 from superset.sql_parse import SupersetQuery
-from superset.utils import (
+from superset.utils.core import (
     get_celery_app,
     json_iso_dttm_ser,
     now_as_float,
diff --git a/superset/utils/__init__.py b/superset/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/superset/cache_util.py b/superset/utils/cache.py
similarity index 100%
rename from superset/cache_util.py
rename to superset/utils/cache.py
diff --git a/superset/utils.py b/superset/utils/core.py
similarity index 100%
rename from superset/utils.py
rename to superset/utils/core.py
diff --git a/superset/dashboard_import_export_util.py b/superset/utils/dashboard_import_export.py
similarity index 90%
rename from superset/dashboard_import_export_util.py
rename to superset/utils/dashboard_import_export.py
index 0b474b0..2711289 100644
--- a/superset/dashboard_import_export_util.py
+++ b/superset/utils/dashboard_import_export.py
@@ -8,15 +8,15 @@ import json
 import logging
 import time
 
-from superset import utils
 from superset.models.core import Dashboard
+from superset.utils.core import decode_dashboards
 
 
 def import_dashboards(session, data_stream, import_time=None):
     """Imports dashboards from a stream to databases"""
     current_tt = int(time.time())
     import_time = current_tt if import_time is None else import_time
-    data = json.loads(data_stream.read(), object_hook=utils.decode_dashboards)
+    data = json.loads(data_stream.read(), object_hook=decode_dashboards)
     # TODO: import DRUID datasources
     for table in data['datasources']:
         type(table).import_obj(table, import_time=import_time)
diff --git a/superset/dict_import_export_util.py b/superset/utils/dict_import_export.py
similarity index 100%
rename from superset/dict_import_export_util.py
rename to superset/utils/dict_import_export.py
diff --git a/superset/import_util.py b/superset/utils/import_datasource.py
similarity index 100%
rename from superset/import_util.py
rename to superset/utils/import_datasource.py
diff --git a/superset/views/base.py b/superset/views/base.py
index 2b3bb43..4cba8b2 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -15,9 +15,10 @@ from flask_babel import lazy_gettext as _
 import simplejson as json
 import yaml
 
-from superset import conf, db, security_manager, utils
+from superset import conf, db, security_manager
 from superset.exceptions import SupersetSecurityException
 from superset.translations.utils import get_language_pack
+from superset.utils import core as utils
 
 FRONTEND_CONF_KEYS = (
     'SUPERSET_WEBSERVER_TIMEOUT',
diff --git a/superset/views/core.py b/superset/views/core.py
index f7806e8..9122b53 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -28,8 +28,8 @@ from werkzeug.routing import BaseConverter
 from werkzeug.utils import secure_filename
 
 from superset import (
-    app, appbuilder, cache, dashboard_import_export_util, db, results_backend,
-    security_manager, sql_lab, utils, viz)
+    app, appbuilder, cache, db, results_backend,
+    security_manager, sql_lab, viz)
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.connectors.sqla.models import AnnotationDatasource, SqlaTable
 from superset.exceptions import SupersetException
@@ -40,9 +40,8 @@ import superset.models.core as models
 from superset.models.sql_lab import Query
 from superset.models.user_attributes import UserAttribute
 from superset.sql_parse import SupersetQuery
-from superset.utils import (
-    merge_extra_filters, merge_request_params, QueryStatus,
-)
+from superset.utils import core as utils
+from superset.utils import dashboard_import_export
 from .base import (
     api, BaseSupersetView,
     check_ownership,
@@ -56,6 +55,7 @@ config = app.config
 stats_logger = config.get('STATS_LOGGER')
 log_this = models.Log.log_this
 DAR = models.DatasourceAccessRequest
+QueryStatus = utils.QueryStatus
 
 
 ALL_DATASOURCE_ACCESS_ERR = __(
@@ -945,7 +945,10 @@ class Superset(BaseSupersetView):
             return json_error_response(ACCESS_REQUEST_MISSING_ERR)
 
         # check if you can approve
-        if security_manager.all_datasource_access() or g.user.id == datasource.owner_id:
+        if (
+                security_manager.all_datasource_access() or
+                check_ownership(datasource, raise_if_false=False)
+        ):
             # can by done by admin only
             if role_to_grant:
                 role = security_manager.find_role(role_to_grant)
@@ -1254,7 +1257,7 @@ class Superset(BaseSupersetView):
         """Overrides the dashboards using json instances from the file."""
         f = request.files.get('file')
         if request.method == 'POST' and f:
-            dashboard_import_export_util.import_dashboards(db.session, f.stream)
+            dashboard_import_export.import_dashboards(db.session, f.stream)
             return redirect('/dashboard/list/')
         return self.render_template('superset/import_dashboards.html')
 
@@ -1332,11 +1335,11 @@ class Superset(BaseSupersetView):
 
         # On explore, merge legacy and extra filters into the form data
         utils.convert_legacy_filters_into_adhoc(form_data)
-        merge_extra_filters(form_data)
+        utils.merge_extra_filters(form_data)
 
         # merge request url params
         if request.method == 'GET':
-            merge_request_params(form_data, request.args)
+            utils.merge_request_params(form_data, request.args)
 
         # handle save or overwrite
         action = request.args.get('action')
@@ -2451,7 +2454,7 @@ class Superset(BaseSupersetView):
                 db.session.query(Query)
                 .filter_by(client_id=client_id).one()
             )
-            query.status = utils.QueryStatus.STOPPED
+            query.status = QueryStatus.STOPPED
             db.session.commit()
         except Exception:
             pass
@@ -2673,8 +2676,8 @@ class Superset(BaseSupersetView):
         now = int(round(time.time() * 1000))
 
         unfinished_states = [
-            utils.QueryStatus.PENDING,
-            utils.QueryStatus.RUNNING,
+            QueryStatus.PENDING,
+            QueryStatus.RUNNING,
         ]
 
         queries_to_timeout = [
@@ -2693,10 +2696,10 @@ class Superset(BaseSupersetView):
                     Query.user_id == g.user.get_id(),
                     Query.client_id in queries_to_timeout,
                 ),
-            ).values(state=utils.QueryStatus.TIMED_OUT)
+            ).values(state=QueryStatus.TIMED_OUT)
 
             for client_id in queries_to_timeout:
-                dict_queries[client_id]['status'] = utils.QueryStatus.TIMED_OUT
+                dict_queries[client_id]['status'] = QueryStatus.TIMED_OUT
 
         return json_success(
             json.dumps(dict_queries, default=utils.json_int_dttm_ser))
diff --git a/superset/viz.py b/superset/viz.py
index 342d8e2..5fdc925 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -31,9 +31,10 @@ from past.builtins import basestring
 import polyline
 import simplejson as json
 
-from superset import app, cache, get_css_manifest_files, utils
+from superset import app, cache, get_css_manifest_files
 from superset.exceptions import NullValueException, SpatialException
-from superset.utils import (
+from superset.utils import core as utils
+from superset.utils.core import (
     DTTM_ALIAS,
     JS_MAX_INTEGER,
     merge_extra_filters,
diff --git a/tests/access_tests.py b/tests/access_tests.py
index f19e47b..29c903d 100644
--- a/tests/access_tests.py
+++ b/tests/access_tests.py
@@ -80,8 +80,6 @@ def create_access_request(session, ds_type, ds_name, role_name, user_name):
 
 class RequestAccessTests(SupersetTestCase):
 
-    requires_examples = False
-
     @classmethod
     def setUpClass(cls):
         security_manager.add_role('override_me')
@@ -317,7 +315,7 @@ class RequestAccessTests(SupersetTestCase):
 
         session.commit()
 
-    @mock.patch('superset.utils.send_MIME_email')
+    @mock.patch('superset.utils.core.send_MIME_email')
     def test_approve(self, mock_send_mime):
         if app.config.get('ENABLE_ACCESS_REQUEST'):
             session = db.session
diff --git a/tests/base_tests.py b/tests/base_tests.py
index e75ceea..b9b4649 100644
--- a/tests/base_tests.py
+++ b/tests/base_tests.py
@@ -1,44 +1,29 @@
 """Unit tests for Superset"""
 import json
-import logging
-import os
 import unittest
 
 from flask_appbuilder.security.sqla import models as ab_models
 from mock import Mock
 import pandas as pd
 
-from superset import app, cli, db, security_manager
+from superset import app, db, security_manager
 from superset.connectors.druid.models import DruidCluster, DruidDatasource
 from superset.connectors.sqla.models import SqlaTable
 from superset.models import core as models
-from superset.utils import get_main_database
-
+from superset.utils.core import get_main_database
 
 BASE_DIR = app.config.get('BASE_DIR')
 
 
 class SupersetTestCase(unittest.TestCase):
-    requires_examples = False
-    examples_loaded = False
 
     def __init__(self, *args, **kwargs):
-        if (
-            self.requires_examples and
-            not os.environ.get('examples_loaded')
-        ):
-            logging.info('Loading examples')
-            cli.load_examples_run(load_test_data=True)
-            logging.info('Done loading examples')
-            security_manager.sync_role_definitions()
-            os.environ['examples_loaded'] = '1'
-        else:
-            security_manager.sync_role_definitions()
         super(SupersetTestCase, self).__init__(*args, **kwargs)
         self.client = app.test_client()
         self.maxDiff = None
 
-        cli.load_test_users_run()
+    @classmethod
+    def create_druid_test_objects(cls):
         # create druid cluster and druid datasources
         session = db.session
         cluster = (
diff --git a/tests/cache_tests.py b/tests/cache_tests.py
index 8d64802..d5764ee 100644
--- a/tests/cache_tests.py
+++ b/tests/cache_tests.py
@@ -1,7 +1,8 @@
 """Unit tests for Superset with caching"""
 import json
 
-from superset import cache, db, utils
+from superset import cache, db
+from superset.utils.core import QueryStatus
 from .base_tests import SupersetTestCase
 
 
@@ -30,6 +31,6 @@ class CacheTests(SupersetTestCase):
             json_endpoint, {'form_data': json.dumps(slc.viz.form_data)})
         self.assertFalse(resp['is_cached'])
         self.assertTrue(resp_from_cache['is_cached'])
-        self.assertEqual(resp_from_cache['status'], utils.QueryStatus.SUCCESS)
+        self.assertEqual(resp_from_cache['status'], QueryStatus.SUCCESS)
         self.assertEqual(resp['data'], resp_from_cache['data'])
         self.assertEqual(resp['query'], resp_from_cache['query'])
diff --git a/tests/celery_tests.py b/tests/celery_tests.py
index 531cd29..f7dae14 100644
--- a/tests/celery_tests.py
+++ b/tests/celery_tests.py
@@ -11,7 +11,7 @@ from superset import app, db
 from superset.models.helpers import QueryStatus
 from superset.models.sql_lab import Query
 from superset.sql_parse import SupersetQuery
-from superset.utils import get_main_database
+from superset.utils.core import get_main_database
 from .base_tests import SupersetTestCase
 
 
diff --git a/tests/core_tests.py b/tests/core_tests.py
index 90efa7c..64a5e51 100644
--- a/tests/core_tests.py
+++ b/tests/core_tests.py
@@ -16,20 +16,19 @@ import pandas as pd
 import psycopg2
 import sqlalchemy as sqla
 
-from superset import dataframe, db, jinja_context, security_manager, sql_lab, utils
+from superset import dataframe, db, jinja_context, security_manager, sql_lab
 from superset.connectors.sqla.models import SqlaTable
 from superset.db_engine_specs import BaseEngineSpec
 from superset.models import core as models
 from superset.models.sql_lab import Query
-from superset.utils import get_main_database
+from superset.utils import core as utils
+from superset.utils.core import get_main_database
 from superset.views.core import DatabaseView
 from .base_tests import SupersetTestCase
 
 
 class CoreTests(SupersetTestCase):
 
-    requires_examples = True
-
     def __init__(self, *args, **kwargs):
         super(CoreTests, self).__init__(*args, **kwargs)
 
@@ -371,7 +370,7 @@ class CoreTests(SupersetTestCase):
 
         data = self.get_json_resp(
             '/superset/warm_up_cache?table_name=energy_usage&db_name=main')
-        assert len(data) == 4
+        assert len(data) > 0
 
     def test_shortner(self):
         self.login(username='admin')
diff --git a/tests/dashboard_tests.py b/tests/dashboard_tests.py
index 0bfbeaf..915d451 100644
--- a/tests/dashboard_tests.py
+++ b/tests/dashboard_tests.py
@@ -12,8 +12,6 @@ from .base_tests import SupersetTestCase
 
 class DashboardTests(SupersetTestCase):
 
-    requires_examples = True
-
     def __init__(self, *args, **kwargs):
         super(DashboardTests, self).__init__(*args, **kwargs)
 
diff --git a/tests/datasource_tests.py b/tests/datasource_tests.py
index b9f8693..64c57b5 100644
--- a/tests/datasource_tests.py
+++ b/tests/datasource_tests.py
@@ -7,8 +7,6 @@ from .fixtures.datasource import datasource_post
 
 class DatasourceTests(SupersetTestCase):
 
-    requires_examples = True
-
     def __init__(self, *args, **kwargs):
         super(DatasourceTests, self).__init__(*args, **kwargs)
 
diff --git a/tests/dict_import_export_tests.py b/tests/dict_import_export_tests.py
index bde8caa..a0ecc5c 100644
--- a/tests/dict_import_export_tests.py
+++ b/tests/dict_import_export_tests.py
@@ -9,7 +9,7 @@ from superset.connectors.druid.models import (
     DruidColumn, DruidDatasource, DruidMetric,
 )
 from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
-from superset.utils import get_main_database
+from superset.utils.core import get_main_database
 from .base_tests import SupersetTestCase
 
 DBREF = 'dict_import__export_test'
diff --git a/tests/druid_tests.py b/tests/druid_tests.py
index 9d13db9..9d3a20f 100644
--- a/tests/druid_tests.py
+++ b/tests/druid_tests.py
@@ -77,8 +77,9 @@ class DruidTests(SupersetTestCase):
 
     """Testing interactions with Druid"""
 
-    def __init__(self, *args, **kwargs):
-        super(DruidTests, self).__init__(*args, **kwargs)
+    @classmethod
+    def setUpClass(cls):
+        cls.create_druid_test_objects()
 
     def get_test_cluster_obj(self):
         return DruidCluster(
diff --git a/tests/email_tests.py b/tests/email_tests.py
index a7f215d..559372b 100644
--- a/tests/email_tests.py
+++ b/tests/email_tests.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """Unit tests for email service in Superset"""
 from email.mime.application import MIMEApplication
 from email.mime.multipart import MIMEMultipart
@@ -7,7 +8,8 @@ import unittest
 
 import mock
 
-from superset import app, utils
+from superset import app
+from superset.utils import core as utils
 
 send_email_test = mock.Mock()
 
@@ -16,7 +18,7 @@ class EmailSmtpTest(unittest.TestCase):
     def setUp(self):
         app.config['smtp_ssl'] = False
 
-    @mock.patch('superset.utils.send_MIME_email')
+    @mock.patch('superset.utils.core.send_MIME_email')
     def test_send_smtp(self, mock_send_mime):
         attachment = tempfile.NamedTemporaryFile()
         attachment.write(b'attachment')
@@ -35,7 +37,7 @@ class EmailSmtpTest(unittest.TestCase):
         mimeapp = MIMEApplication('attachment')
         assert msg.get_payload()[-1].get_payload() == mimeapp.get_payload()
 
-    @mock.patch('superset.utils.send_MIME_email')
+    @mock.patch('superset.utils.core.send_MIME_email')
     def test_send_bcc_smtp(self, mock_send_mime):
         attachment = tempfile.NamedTemporaryFile()
         attachment.write(b'attachment')
diff --git a/tests/import_export_tests.py b/tests/import_export_tests.py
index a327a47..fc26385 100644
--- a/tests/import_export_tests.py
+++ b/tests/import_export_tests.py
@@ -4,23 +4,19 @@ import unittest
 
 from sqlalchemy.orm.session import make_transient
 
-from superset import db, utils
+from superset import db
 from superset.connectors.druid.models import (
     DruidColumn, DruidDatasource, DruidMetric,
 )
 from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
 from superset.models import core as models
+from superset.utils import core as utils
 from .base_tests import SupersetTestCase
 
 
 class ImportExportTests(SupersetTestCase):
     """Testing export import functionality for dashboards"""
 
-    requires_examples = True
-
-    def __init__(self, *args, **kwargs):
-        super(ImportExportTests, self).__init__(*args, **kwargs)
-
     @classmethod
     def delete_imports(cls):
         # Imported data clean up
@@ -42,6 +38,7 @@ class ImportExportTests(SupersetTestCase):
     @classmethod
     def setUpClass(cls):
         cls.delete_imports()
+        cls.create_druid_test_objects()
 
     @classmethod
     def tearDownClass(cls):
diff --git a/tests/load_examples_test.py b/tests/load_examples_test.py
new file mode 100644
index 0000000..d2b1005
--- /dev/null
+++ b/tests/load_examples_test.py
@@ -0,0 +1,51 @@
+from superset import data
+from superset.cli import load_test_users_run
+from .base_tests import SupersetTestCase
+
+
+class SupersetDataFrameTestCase(SupersetTestCase):
+
+    def test_load_css_templates(self):
+        data.load_css_templates()
+
+    def test_load_energy(self):
+        data.load_energy()
+
+    def test_load_world_bank_health_n_pop(self):
+        data.load_world_bank_health_n_pop()
+
+    def test_load_birth_names(self):
+        data.load_birth_names()
+
+    def test_load_random_time_series_data(self):
+        data.load_random_time_series_data()
+
+    def test_load_country_map_data(self):
+        data.load_country_map_data()
+
+    def test_load_multiformat_time_series_data(self):
+        data.load_multiformat_time_series_data()
+
+    def test_load_paris_iris_geojson(self):
+        data.load_paris_iris_geojson()
+
+    def test_load_bart_lines(self):
+        data.load_bart_lines()
+
+    def test_load_multi_line(self):
+        data.load_multi_line()
+
+    def test_load_misc_dashboard(self):
+        data.load_misc_dashboard()
+
+    def test_load_unicode_test_data(self):
+        data.load_unicode_test_data()
+
+    def test_load_deck_dash(self):
+        data.load_long_lat_data()
+        data.load_flights()
+        data.load_sf_population_polygons()
+        data.load_deck_dash()
+
+    def test_load_test_users_run(self):
+        load_test_users_run()
diff --git a/tests/model_tests.py b/tests/model_tests.py
index 348643b..1bf824c 100644
--- a/tests/model_tests.py
+++ b/tests/model_tests.py
@@ -4,7 +4,7 @@ from sqlalchemy.engine.url import make_url
 
 from superset import app, db
 from superset.models.core import Database
-from superset.utils import get_main_database
+from superset.utils.core import get_main_database
 from .base_tests import SupersetTestCase
 
 
diff --git a/tests/sqllab_tests.py b/tests/sqllab_tests.py
index fca532e..ca2063c 100644
--- a/tests/sqllab_tests.py
+++ b/tests/sqllab_tests.py
@@ -5,11 +5,11 @@ import unittest
 
 from flask_appbuilder.security.sqla import models as ab_models
 
-from superset import db, security_manager, utils
+from superset import db, security_manager
 from superset.dataframe import SupersetDataFrame
 from superset.db_engine_specs import BaseEngineSpec
 from superset.models.sql_lab import Query
-from superset.utils import get_main_database
+from superset.utils.core import datetime_to_epoch, get_main_database
 from .base_tests import SupersetTestCase
 
 
@@ -115,7 +115,7 @@ class SqlLabTests(SupersetTestCase):
 
         data = self.get_json_resp(
             '/superset/queries/{}'.format(
-                int(utils.datetime_to_epoch(now)) - 1000))
+                int(datetime_to_epoch(now)) - 1000))
         self.assertEquals(1, len(data))
 
         self.logout()
diff --git a/tests/utils_tests.py b/tests/utils_tests.py
index 643da47..5882a02 100644
--- a/tests/utils_tests.py
+++ b/tests/utils_tests.py
@@ -7,7 +7,7 @@ from mock import patch
 import numpy
 
 from superset.exceptions import SupersetException
-from superset.utils import (
+from superset.utils.core import (
     base_json_conv,
     convert_legacy_filters_into_adhoc,
     datetime_f,
@@ -97,7 +97,7 @@ class UtilsTestCase(unittest.TestCase):
         assert isinstance(base_json_conv(Decimal('1.0')), float) is True
         assert isinstance(base_json_conv(uuid.uuid4()), str) is True
 
-    @patch('superset.utils.datetime')
+    @patch('superset.utils.core.datetime')
     def test_parse_human_timedelta(self, mock_now):
         mock_now.return_value = datetime(2016, 12, 1)
         self.assertEquals(parse_human_timedelta('now'), timedelta(0))
@@ -108,7 +108,7 @@ class UtilsTestCase(unittest.TestCase):
         got_str = zlib_decompress_to_string(blob)
         self.assertEquals(json_str, got_str)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_merge_extra_filters(self):
         # does nothing if no extra filters
         form_data = {'A': 1, 'B': 2, 'c': 'test'}
@@ -216,7 +216,7 @@ class UtilsTestCase(unittest.TestCase):
         merge_extra_filters(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_merge_extra_filters_ignores_empty_filters(self):
         form_data = {'extra_filters': [
             {'col': 'a', 'op': 'in', 'val': ''},
@@ -226,7 +226,7 @@ class UtilsTestCase(unittest.TestCase):
         merge_extra_filters(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_merge_extra_filters_ignores_nones(self):
         form_data = {
             'adhoc_filters': [
@@ -256,7 +256,7 @@ class UtilsTestCase(unittest.TestCase):
         merge_extra_filters(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_merge_extra_filters_ignores_equal_filters(self):
         form_data = {
             'extra_filters': [
@@ -301,7 +301,7 @@ class UtilsTestCase(unittest.TestCase):
         merge_extra_filters(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_merge_extra_filters_merges_different_val_types(self):
         form_data = {
             'extra_filters': [
@@ -402,7 +402,7 @@ class UtilsTestCase(unittest.TestCase):
         merge_extra_filters(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_merge_extra_filters_adds_unequal_lists(self):
         form_data = {
             'extra_filters': [
@@ -576,7 +576,7 @@ class UtilsTestCase(unittest.TestCase):
         self.assertEqual(instance.watcher, 4)
         self.assertEqual(result1, result8)
 
-    @patch('superset.utils.parse_human_datetime', mock_parse_human_datetime)
+    @patch('superset.utils.core.parse_human_datetime', mock_parse_human_datetime)
     def test_get_since_until(self):
         form_data = {}
         result = get_since_until(form_data)
@@ -623,7 +623,7 @@ class UtilsTestCase(unittest.TestCase):
         expected = datetime(2016, 11, 2), datetime(2016, 11, 8)
         self.assertEqual(result, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_convert_legacy_filters_into_adhoc_where(self):
         form_data = {
             'where': 'a = 1',
@@ -640,7 +640,7 @@ class UtilsTestCase(unittest.TestCase):
         convert_legacy_filters_into_adhoc(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_convert_legacy_filters_into_adhoc_filters(self):
         form_data = {
             'filters': [{'col': 'a', 'op': 'in', 'val': 'someval'}],
@@ -659,7 +659,7 @@ class UtilsTestCase(unittest.TestCase):
         convert_legacy_filters_into_adhoc(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_convert_legacy_filters_into_adhoc_having(self):
         form_data = {
             'having': 'COUNT(1) = 1',
@@ -676,7 +676,7 @@ class UtilsTestCase(unittest.TestCase):
         convert_legacy_filters_into_adhoc(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_convert_legacy_filters_into_adhoc_having_filters(self):
         form_data = {
             'having_filters': [{'col': 'COUNT(1)', 'op': '==', 'val': 1}],
@@ -695,7 +695,7 @@ class UtilsTestCase(unittest.TestCase):
         convert_legacy_filters_into_adhoc(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_convert_legacy_filters_into_adhoc_present_and_empty(self):
         form_data = {
             'adhoc_filters': [],
@@ -713,7 +713,7 @@ class UtilsTestCase(unittest.TestCase):
         convert_legacy_filters_into_adhoc(form_data)
         self.assertEquals(form_data, expected)
 
-    @patch('superset.utils.to_adhoc', mock_to_adhoc)
+    @patch('superset.utils.core.to_adhoc', mock_to_adhoc)
     def test_convert_legacy_filters_into_adhoc_present_and_nonempty(self):
         form_data = {
             'adhoc_filters': [
diff --git a/tests/viz_tests.py b/tests/viz_tests.py
index fb11027..e4232c4 100644
--- a/tests/viz_tests.py
+++ b/tests/viz_tests.py
@@ -6,7 +6,7 @@ import pandas as pd
 
 from superset import app
 from superset.exceptions import SpatialException
-from superset.utils import DTTM_ALIAS
+from superset.utils.core import DTTM_ALIAS
 import superset.viz as viz
 from .base_tests import SupersetTestCase
 from .utils import load_fixture
@@ -996,7 +996,7 @@ class BaseDeckGLVizTestCase(SupersetTestCase):
         with self.assertRaises(SpatialException):
             test_viz_deckgl.parse_coordinates('fldkjsalkj,fdlaskjfjadlksj')
 
-    @patch('superset.utils.uuid.uuid4')
+    @patch('superset.utils.core.uuid.uuid4')
     def test_filter_nulls(self, mock_uuid4):
         mock_uuid4.return_value = uuid.UUID('12345678123456781234567812345678')
         test_form_data = {
diff --git a/tox.ini b/tox.ini
index 2e62db0..53949a3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -27,7 +27,8 @@ require-code = true
 [testenv]
 commands =
     {toxinidir}/superset/bin/superset db upgrade
-    nosetests {posargs}
+    nosetests tests/load_examples_test.py
+    nosetests -e load_examples_test {posargs}
 deps =
     -rrequirements.txt
     -rrequirements-dev.txt
