diff --git a/superset/assets/visualizations/deckgl/layers/scatter.jsx b/superset/assets/visualizations/deckgl/layers/scatter.jsx
index 087df3d7ef363..26d320c502713 100644
--- a/superset/assets/visualizations/deckgl/layers/scatter.jsx
+++ b/superset/assets/visualizations/deckgl/layers/scatter.jsx
@@ -14,7 +14,7 @@ import { unitToRadius } from '../../../javascripts/modules/geo';
 import sandboxedEval from '../../../javascripts/modules/sandbox';
 
 function getStep(timeGrain) {
-  // grain in microseconds
+  // grain in milliseconds
   const MINUTE = 60 * 1000;
   const HOUR = 60 * MINUTE;
   const DAY = 24 * HOUR;
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index a16baf1039e65..3a4f63cfeb95c 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -130,7 +130,8 @@ def get_datasources(self):
         return json.loads(requests.get(endpoint).text)
 
     def get_druid_version(self):
-        endpoint = self.get_base_coordinator_url() + '/status'
+        endpoint = self.get_base_url(
+            self.coordinator_host, self.coordinator_port) + '/status'
         return json.loads(requests.get(endpoint).text)['version']
 
     def refresh_datasources(
@@ -155,16 +156,15 @@ def refresh_datasources(
 
     def refresh(self, datasource_names, merge_flag, refreshAll):
         """
-        Fetches metadata for the specified datasources andm
+        Fetches metadata for the specified datasources and
         merges to the Superset database
         """
         session = db.session
         ds_list = (
             session.query(DruidDatasource)
-            .filter(or_(DruidDatasource.datasource_name == name
-                        for name in datasource_names))
+            .filter(DruidDatasource.cluster_name == self.cluster_name)
+            .filter(DruidDatasource.datasource_name.in_(datasource_names))
         )
-
         ds_map = {ds.name: ds for ds in ds_list}
         for ds_name in datasource_names:
             datasource = ds_map.get(ds_name, None)
@@ -1066,10 +1066,7 @@ def run_query(  # noqa / druid
         if not is_timeseries:
             granularity = 'all'
 
-        if (
-                granularity == 'all' or
-                timeseries_limit is None or
-                timeseries_limit == 0):
+        if granularity == 'all':
             phase = 1
         inner_from_dttm = inner_from_dttm or from_dttm
         inner_to_dttm = inner_to_dttm or to_dttm
@@ -1114,6 +1111,7 @@ def run_query(  # noqa / druid
         order_direction = 'descending' if order_desc else 'ascending'
 
         if columns:
+            columns.append('__time')
             del qry['post_aggregations']
             del qry['aggregations']
             qry['dimensions'] = columns
@@ -1148,7 +1146,8 @@ def run_query(  # noqa / druid
 
             client.topn(**pre_qry)
             logging.info('Phase 1 Complete')
-            query_str += '// Two phase query\n// Phase 1\n'
+            if phase == 2:
+                query_str += '// Two phase query\n// Phase 1\n'
             query_str += json.dumps(
                 client.query_builder.last_query.query_dict, indent=2)
             query_str += '\n'
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 3b636e8fa2870..ef8c68ebb33e6 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -432,8 +432,6 @@ def get_from_clause(self, template_processor=None, db_engine_spec=None):
             from_sql = self.sql
             if template_processor:
                 from_sql = template_processor.process_template(from_sql)
-            if db_engine_spec:
-                from_sql = db_engine_spec.escape_sql(from_sql)
             from_sql = sqlparse.format(from_sql, strip_comments=True)
             return TextAsFrom(sa.text(from_sql), []).alias('expr_qry')
         return self.get_sqla_table()
diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py
index 4d373dff5a0de..9fc5f445db6bf 100644
--- a/superset/db_engine_specs.py
+++ b/superset/db_engine_specs.py
@@ -145,11 +145,6 @@ def _allowed_file(filename):
 
         BaseEngineSpec.df_to_db(**df_to_db_kwargs)
 
-    @classmethod
-    def escape_sql(cls, sql):
-        """Escapes the raw SQL"""
-        return sql
-
     @classmethod
     def convert_dttm(cls, target_type, dttm):
         return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@@ -558,10 +553,6 @@ def adjust_database_uri(cls, uri, selected_schema=None):
             uri.database = database
         return uri
 
-    @classmethod
-    def escape_sql(cls, sql):
-        return re.sub(r'%%|%', '%%', sql)
-
     @classmethod
     def convert_dttm(cls, target_type, dttm):
         tt = target_type.upper()
diff --git a/superset/views/core.py b/superset/views/core.py
index 4330e7d7eea41..b737b66ce2e6d 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -1303,9 +1303,9 @@ def explore(self, datasource_type=None, datasource_id=None):
             if datasource_type == 'table' \
             else datasource.datasource_name
         if slc:
-            title = '[slice] ' + slc.slice_name
+            title = slc.slice_name
         else:
-            title = '[explore] ' + table_name
+            title = 'Explore - ' + table_name
         return self.render_template(
             'superset/basic.html',
             bootstrap_data=json.dumps(bootstrap_data),
@@ -2070,7 +2070,7 @@ def dashboard(**kwargs):  # noqa
             'superset/dashboard.html',
             entry='dashboard',
             standalone_mode=standalone_mode,
-            title='[dashboard] ' + dash.dashboard_title,
+            title=dash.dashboard_title,
             bootstrap_data=json.dumps(bootstrap_data),
         )
 
diff --git a/superset/viz.py b/superset/viz.py
index 9225e5da8f65b..d232e9ac52290 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -276,9 +276,10 @@ def cache_key(self, query_obj):
         for k in ['from_dttm', 'to_dttm']:
             del cache_dict[k]
 
-        for k in ['since', 'until', 'datasource']:
+        for k in ['since', 'until']:
             cache_dict[k] = self.form_data.get(k)
 
+        cache_dict['datasource'] = self.datasource.uid
         json_data = self.json_dumps(cache_dict, sort_keys=True)
         return hashlib.md5(json_data.encode('utf-8')).hexdigest()
 
@@ -2039,7 +2040,7 @@ def get_properties(self, d):
             'radius': self.fixed_value if self.fixed_value else d.get(self.metric),
             'cat_color': d.get(self.dim) if self.dim else None,
             'position': d.get('spatial'),
-            '__timestamp': d.get('__timestamp'),
+            '__timestamp': d.get(DTTM_ALIAS) or d.get('__time'),
         }
 
     def get_data(self, df):
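Note on the `refresh()` hunk in `superset/connectors/druid/models.py`: replacing the generator-fed `or_(...)` with `DruidDatasource.datasource_name.in_(datasource_names)` compiles to a single SQL `IN` clause, and the added `cluster_name` filter keeps a same-named datasource on another cluster from being matched. Below is a minimal runnable sketch of the resulting query shape, assuming SQLAlchemy 1.4+; the `Datasource` model, table name, and values are illustrative stand-ins, not Superset's actual code:

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Datasource(Base):
    """Illustrative stand-in for DruidDatasource (not Superset's model)."""
    __tablename__ = 'datasources'
    id = Column(Integer, primary_key=True)
    cluster_name = Column(String)
    datasource_name = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    names = ['ds_a', 'ds_b']
    # One cluster-name equality filter plus a single IN (...) clause,
    # mirroring the shape of the patched refresh() query.
    ds_list = (
        session.query(Datasource)
        .filter(Datasource.cluster_name == 'main')
        .filter(Datasource.datasource_name.in_(names))
    )
    # Printing the query shows the compiled SELECT with a combined
    # WHERE cluster_name = ... AND datasource_name IN (...) clause.
    print(ds_list)
```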
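Note on the `cache_key` hunk in `superset/viz.py`: keying on `self.datasource.uid` directly, rather than whatever `form_data.get('datasource')` happens to hold, guarantees that identical form data against different datasources hashes to different cache keys. A reduced runnable sketch of the scheme; the standalone `cache_key` helper and the uid strings are made up for illustration:

```python
import hashlib
import json

def cache_key(form_data, datasource_uid):
    # Mirror the patched viz.cache_key: user-facing time fields come from
    # form_data, but the datasource identity is taken from the model itself.
    cache_dict = {k: form_data.get(k) for k in ['since', 'until']}
    cache_dict['datasource'] = datasource_uid
    json_data = json.dumps(cache_dict, sort_keys=True)
    return hashlib.md5(json_data.encode('utf-8')).hexdigest()

# Same form data, different datasources -> different cache keys.
fd = {'since': '7 days ago', 'until': 'now'}
assert cache_key(fd, '1__druid') != cache_key(fd, '2__druid')
```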