
Commit

Merge pull request apache#23 from john-bodley/john-bodley-cherry-pick-fixes

cherry pick fixes
john-bodley authored Mar 13, 2018
2 parents 0bed225 + 503afdd commit a95a1e6
Showing 6 changed files with 16 additions and 27 deletions.
2 changes: 1 addition & 1 deletion superset/assets/visualizations/deckgl/layers/scatter.jsx
@@ -14,7 +14,7 @@ import { unitToRadius } from '../../../javascripts/modules/geo';
 import sandboxedEval from '../../../javascripts/modules/sandbox';
 
 function getStep(timeGrain) {
-  // grain in microseconds
+  // grain in milliseconds
   const MINUTE = 60 * 1000;
   const HOUR = 60 * MINUTE;
   const DAY = 24 * HOUR;
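The change is comment-only: 60 * 1000 is one minute in milliseconds (JavaScript timestamps are millisecond-based), so the arithmetic was already correct and only the annotation was wrong. A minimal sketch of the unit reasoning, in Python for consistency with the rest of the diff; the grain keys are illustrative assumptions, not the exact JSX mapping:

    # Time-grain steps in milliseconds, mirroring the constants in getStep().
    MINUTE = 60 * 1000   # 60 s * 1000 ms/s: one minute in milliseconds
    HOUR = 60 * MINUTE
    DAY = 24 * HOUR

    # Hypothetical lookup showing how a grain would map to a playback step.
    STEP_BY_GRAIN = {'PT1M': MINUTE, 'PT1H': HOUR, 'P1D': DAY}

    def get_step(time_grain, default=DAY):
        return STEP_BY_GRAIN.get(time_grain, default)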
19 changes: 9 additions & 10 deletions superset/connectors/druid/models.py
@@ -130,7 +130,8 @@ def get_datasources(self):
         return json.loads(requests.get(endpoint).text)
 
     def get_druid_version(self):
-        endpoint = self.get_base_coordinator_url() + '/status'
+        endpoint = self.get_base_url(
+            self.coordinator_host, self.coordinator_port) + '/status'
         return json.loads(requests.get(endpoint).text)['version']
 
     def refresh_datasources(
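get_druid_version now builds the status endpoint from the generic get_base_url helper plus the coordinator host and port, instead of a coordinator-specific URL method. A hedged sketch of what such a helper plausibly does; the real implementation in superset/connectors/druid/models.py may differ:

    import re

    def get_base_url(host, port):
        # Assume a scheme-less host means plain HTTP, then join host:port.
        if not re.match(r'http(s)?://', host):
            host = 'http://' + host
        return '{0}:{1}'.format(host, port)

    # get_base_url('localhost', 8081) + '/status'
    #   -> 'http://localhost:8081/status'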
@@ -155,16 +156,15 @@ def refresh_datasources(
     def refresh(self, datasource_names, merge_flag, refreshAll):
         """
-        Fetches metadata for the specified datasources andm
+        Fetches metadata for the specified datasources and
         merges to the Superset database
         """
         session = db.session
         ds_list = (
             session.query(DruidDatasource)
-            .filter(or_(DruidDatasource.datasource_name == name
-                        for name in datasource_names))
             .filter(DruidDatasource.cluster_name == self.cluster_name)
+            .filter(DruidDatasource.datasource_name.in_(datasource_names))
         )
 
         ds_map = {ds.name: ds for ds in ds_list}
         for ds_name in datasource_names:
             datasource = ds_map.get(ds_name, None)
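The dropped filter passed a generator expression to or_(), but or_() takes *clauses, so the generator arrives as a single opaque argument rather than one equality clause per name and, under standard SQLAlchemy semantics, never builds the intended OR. The IN filter states the intent directly. A self-contained sketch with a toy table standing in for DruidDatasource:

    import sqlalchemy as sa

    t = sa.table('datasources', sa.column('name'))
    datasource_names = ['ds_a', 'ds_b']

    # Broken shape: one generator object, not N comparisons.
    # sa.or_(t.c.name == n for n in datasource_names)

    # Correct if an explicit OR were wanted: unpack the clauses.
    explicit_or = sa.or_(*[t.c.name == n for n in datasource_names])

    # What the hunk uses: a single IN clause.
    in_clause = t.c.name.in_(datasource_names)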
@@ -1066,10 +1066,7 @@ def run_query( # noqa / druid
         if not is_timeseries:
             granularity = 'all'
 
-        if (
-                granularity == 'all' or
-                timeseries_limit is None or
-                timeseries_limit == 0):
+        if granularity == 'all':
             phase = 1
         inner_from_dttm = inner_from_dttm or from_dttm
         inner_to_dttm = inner_to_dttm or to_dttm
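Previously a missing or zero series limit also forced a single-phase query; after this hunk only granularity == 'all' does, and the later hunk guards the '// Two phase query' annotation on the same flag so single-phase queries are no longer mislabeled. A condensed sketch of the decision as it stands; the phase = 2 default is an assumption from the surrounding code:

    def choose_phase(is_timeseries, granularity):
        phase = 2  # assumed default: two-phase (top series first, then detail)
        if not is_timeseries:
            granularity = 'all'
        if granularity == 'all':
            phase = 1  # no time bucketing, so a single query suffices
        return phase, granularity

    # choose_phase(False, 'PT1H') -> (1, 'all')
    # choose_phase(True, 'PT1H')  -> (2, 'PT1H')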
@@ -1114,6 +1111,7 @@ def run_query( # noqa / druid
         order_direction = 'descending' if order_desc else 'ascending'
 
         if columns:
+            columns.append('__time')
             del qry['post_aggregations']
             del qry['aggregations']
             qry['dimensions'] = columns
@@ -1148,7 +1146,8 @@ def run_query( # noqa / druid
 
             client.topn(**pre_qry)
             logging.info('Phase 1 Complete')
-            query_str += '// Two phase query\n// Phase 1\n'
+            if phase == 2:
+                query_str += '// Two phase query\n// Phase 1\n'
             query_str += json.dumps(
                 client.query_builder.last_query.query_dict, indent=2)
             query_str += '\n'
2 changes: 0 additions & 2 deletions superset/connectors/sqla/models.py
@@ -432,8 +432,6 @@ def get_from_clause(self, template_processor=None, db_engine_spec=None):
             from_sql = self.sql
             if template_processor:
                 from_sql = template_processor.process_template(from_sql)
-            if db_engine_spec:
-                from_sql = db_engine_spec.escape_sql(from_sql)
             from_sql = sqlparse.format(from_sql, strip_comments=True)
             return TextAsFrom(sa.text(from_sql), []).alias('expr_qry')
         return self.get_sqla_table()
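With the engine-spec hook gone, a subquery-backed table goes straight from Jinja templating to comment stripping. A condensed, self-contained sketch of the surviving flow; the enclosing if self.sql: guard and the imports are assumed from context outside the hunk (TextAsFrom is the 2018-era SQLAlchemy construct):

    import sqlalchemy as sa
    import sqlparse
    from sqlalchemy.sql.expression import TextAsFrom

    def get_from_clause(self, template_processor=None, db_engine_spec=None):
        if self.sql:
            from_sql = self.sql
            if template_processor:
                from_sql = template_processor.process_template(from_sql)
            # escape_sql hook removed: raw SQL is no longer engine-escaped here
            from_sql = sqlparse.format(from_sql, strip_comments=True)
            return TextAsFrom(sa.text(from_sql), []).alias('expr_qry')
        return self.get_sqla_table()

Note that db_engine_spec stays in the signature per the hunk header even though it is now unused here.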
9 changes: 0 additions & 9 deletions superset/db_engine_specs.py
@@ -145,11 +145,6 @@ def _allowed_file(filename):
 
         BaseEngineSpec.df_to_db(**df_to_db_kwargs)
 
-    @classmethod
-    def escape_sql(cls, sql):
-        """Escapes the raw SQL"""
-        return sql
-
     @classmethod
     def convert_dttm(cls, target_type, dttm):
         return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@@ -558,10 +553,6 @@ def adjust_database_uri(cls, uri, selected_schema=None):
             uri.database = database
         return uri
 
-    @classmethod
-    def escape_sql(cls, sql):
-        return re.sub(r'%%|%', '%%', sql)
-
     @classmethod
     def convert_dttm(cls, target_type, dttm):
         tt = target_type.upper()
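Both escape_sql definitions are deleted: the base-class identity hook above and this MySQL override, which doubled percent signs so raw SQL would survive the DBAPI's paramstyle interpolation. Reproduced below for illustration, with behavior inferred from the regex in the hunk:

    import re

    def escape_sql(sql):
        # Matching '%%' before '%' keeps the substitution idempotent:
        # already-doubled signs are rewritten to themselves.
        return re.sub(r'%%|%', '%%', sql)

    assert escape_sql("LIKE 'a%'") == "LIKE 'a%%'"
    assert escape_sql(escape_sql("LIKE 'a%'")) == "LIKE 'a%%'"  # idempotent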
6 changes: 3 additions & 3 deletions superset/views/core.py
@@ -1303,9 +1303,9 @@ def explore(self, datasource_type=None, datasource_id=None):
             if datasource_type == 'table' \
             else datasource.datasource_name
         if slc:
-            title = '[slice] ' + slc.slice_name
+            title = slc.slice_name
         else:
-            title = '[explore] ' + table_name
+            title = 'Explore - ' + table_name
         return self.render_template(
             'superset/basic.html',
             bootstrap_data=json.dumps(bootstrap_data),
@@ -2070,7 +2070,7 @@ def dashboard(**kwargs): # noqa
             'superset/dashboard.html',
             entry='dashboard',
             standalone_mode=standalone_mode,
-            title='[dashboard] ' + dash.dashboard_title,
+            title=dash.dashboard_title,
             bootstrap_data=json.dumps(bootstrap_data),
         )

5 changes: 3 additions & 2 deletions superset/viz.py
@@ -276,9 +276,10 @@ def cache_key(self, query_obj):
         for k in ['from_dttm', 'to_dttm']:
             del cache_dict[k]
 
-        for k in ['since', 'until', 'datasource']:
+        for k in ['since', 'until']:
             cache_dict[k] = self.form_data.get(k)
 
+        cache_dict['datasource'] = self.datasource.uid
         json_data = self.json_dumps(cache_dict, sort_keys=True)
         return hashlib.md5(json_data.encode('utf-8')).hexdigest()
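Two properties of the cache key follow from this hunk: relative time ranges stay cacheable because the raw since/until strings are hashed instead of the resolved from_dttm/to_dttm values, and the datasource component now comes from the server-side datasource object rather than the client-supplied form data. A condensed sketch; the surrounding cache_dict setup is assumed:

    import hashlib
    import json

    def cache_key(form_data, datasource_uid, cache_dict):
        # Hash the raw 'since'/'until' strings, not resolved datetimes,
        # so a range like 'Last week' produces a stable key.
        for k in ('since', 'until'):
            cache_dict[k] = form_data.get(k)
        # Key on the datasource uid, not form_data['datasource'].
        cache_dict['datasource'] = datasource_uid
        payload = json.dumps(cache_dict, sort_keys=True)
        return hashlib.md5(payload.encode('utf-8')).hexdigest()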

@@ -2039,7 +2040,7 @@ def get_properties(self, d):
             'radius': self.fixed_value if self.fixed_value else d.get(self.metric),
             'cat_color': d.get(self.dim) if self.dim else None,
             'position': d.get('spatial'),
-            '__timestamp': d.get('__timestamp'),
+            '__timestamp': d.get(DTTM_ALIAS) or d.get('__time'),
         }
 
     def get_data(self, df):
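This pairs with the columns.append('__time') hunk above: Druid raw-column results carry their timestamp under Druid's native '__time' key, while SQL-backed results use Superset's DTTM_ALIAS, so the scatter layer falls back from one to the other. A minimal sketch; the DTTM_ALIAS value shown is an assumption based on Superset convention:

    DTTM_ALIAS = '__timestamp'  # assumed value of Superset's timestamp alias

    def row_timestamp(d):
        # Prefer the SQL-side alias; fall back to Druid's '__time' column.
        return d.get(DTTM_ALIAS) or d.get('__time')

    # row_timestamp({'__time': 1520899200000}) -> 1520899200000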
