fix(chart): Supporting custom SQL as temporal x-axis column with filter #25126
@@ -1007,6 +1007,8 @@ def adhoc_column_to_sqla(  # pylint: disable=too-many-locals
                qry = sa.select([sqla_column]).limit(1).select_from(tbl)
                sql = self.database.compile_sqla_query(qry)
                col_desc = get_columns_description(self.database, self.schema, sql)
+               if not col_desc:
+                   raise SupersetGenericDBErrorException("Column not found")
                is_dttm = col_desc[0]["is_dttm"]  # type: ignore
            except SupersetGenericDBErrorException as ex:
                raise ColumnNotFoundException(message=str(ex)) from ex

Review comments on this hunk:

- Dealing with the case where `col_desc` comes back empty.
- kgabryje: I think we should handle this on the frontend and not let users save custom SQL that is an empty string.
- @kgabryje I agree - however, since it would be a lot of work to clean up old metadata, I think we can maybe have both?
- I think we should handle this edge case in the backend anyway. It would be nice to prevent users from saving empty custom SQL in the frontend and show a friendly message, but for now, if the custom SQL is an arbitrary string the API responds with a 500 whose error message comes directly from the DB engine, which is "fine" and readable for users; only the empty string throws an unhandled error.
- We can create a separate PR for the UI messaging and prevention, though.
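To make the failure mode concrete, here is a minimal, self-contained sketch of the logic in the hunk above. The exception classes are local stand-ins for Superset's `SupersetGenericDBErrorException` and `ColumnNotFoundException`, and `resolve_is_dttm` is a hypothetical helper, not a real Superset function:

```python
from typing import Any


class GenericDBError(Exception):
    """Stand-in for SupersetGenericDBErrorException."""


class ColumnNotFound(Exception):
    """Stand-in for ColumnNotFoundException."""


def resolve_is_dttm(col_desc: list[dict[str, Any]]) -> bool:
    """Mirror the hunk above: guard the empty case before indexing col_desc[0]."""
    try:
        if not col_desc:  # the guard added in this PR
            raise GenericDBError("Column not found")
        return col_desc[0]["is_dttm"]
    except GenericDBError as ex:
        raise ColumnNotFound(str(ex)) from ex


print(resolve_is_dttm([{"is_dttm": True}]))  # True: a normal, non-empty description
try:
    # An empty custom SQL expression yields an empty description list; without
    # the guard, indexing col_desc[0] would die with an IndexError (an opaque 500).
    resolve_is_dttm([])
except ColumnNotFound as ex:
    print(ex)  # Column not found
```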
In the test suite, `backend` is added to the imports so the new test can branch its expected row count per database:

@@ -51,6 +51,7 @@
 from superset.superset_typing import AdhocColumn
 from superset.utils.core import (
     AnnotationType,
+    backend,
     get_example_default_schema,
     AdhocMetricExpressionType,
     ExtraFiltersReasonType,
The new test case:

@@ -943,6 +944,71 @@ def test_chart_data_get(self):
         assert data["result"][0]["status"] == "success"
         assert data["result"][0]["rowcount"] == 2
 
+    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
+    def test_chart_data_get_with_x_axis_using_custom_sql(self):
+        """
+        Chart data API: Test GET endpoint
+        """
+        chart = db.session.query(Slice).filter_by(slice_name="Genders").one()
+        chart.query_context = json.dumps(
+            {
+                "datasource": {"id": chart.table.id, "type": "table"},
+                "force": False,
+                "queries": [
+                    {
+                        "time_range": "1900-01-01T00:00:00 : 2000-01-01T00:00:00",
+                        "granularity": "ds",
+                        "filters": [
+                            {"col": "ds", "op": "TEMPORAL_RANGE", "val": "No filter"}
+                        ],
+                        "extras": {
+                            "having": "",
+                            "where": "",
+                        },
+                        "applied_time_extras": {},
+                        "columns": [
+                            {
+                                "columnType": "BASE_AXIS",
+                                "datasourceWarning": False,
+                                "expressionType": "SQL",
+                                "label": "My column",
+                                "sqlExpression": "ds",
+                                "timeGrain": "P1W",
+                            }
+                        ],
+                        "metrics": ["sum__num"],
+                        "orderby": [["sum__num", False]],
+                        "annotation_layers": [],
+                        "row_limit": 50000,
+                        "timeseries_limit": 0,
+                        "order_desc": True,
+                        "url_params": {},
+                        "custom_params": {},
+                        "custom_form_data": {},
+                    }
+                ],
+                "form_data": {
+                    "x_axis": {
+                        "datasourceWarning": False,
+                        "expressionType": "SQL",
+                        "label": "My column",
+                        "sqlExpression": "ds",
+                    }
+                },
+                "result_format": "json",
+                "result_type": "full",
+            }
+        )
+        rv = self.get_assert_metric(f"api/v1/chart/{chart.id}/data/", "get_data")
+        assert rv.mimetype == "application/json"
+        data = json.loads(rv.data.decode("utf-8"))
+        assert data["result"][0]["status"] == "success"
+
+        if backend() == "presto":
+            assert data["result"][0]["rowcount"] == 41
+        else:
+            assert data["result"][0]["rowcount"] == 40
+
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     def test_chart_data_get_forced(self):
         """

Comment on lines +960 to +978 (the "granularity", "filters", and "columns" portion of the payload):

- Basically, "granularity" and the axis column shouldn't be sent concurrently from the frontend. "granularity" is a legacy Druid concept (one that has since been wrapped under the hood in "modern" Druid), so my original design was to remove "granularity" from Superset entirely; "granularity" simply means a temporal column in columns (or dimensions).
- The "time_range" and "filters" in the query_object follow the same logic as before.
- I'm trying to fix a bug related to granularity. Basically, the temporal granularity column should be renamed to "__timestamp", as the legacy time series line chart did (the alias after GROUP BY should not be the same as the raw column name in ClickHouse), but the current chart doesn't do that, so granularity behaves a little differently from other temporal columns (at least for users). If "granularity" is going to be removed, should this bug be fixed on the frontend? It seems the time column as x-axis has been completely unusable for a very long time; thanks to this PR, custom SQL now works.
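To make the reviewer's point concrete, here is an illustrative pair of trimmed query payloads (field values copied from the test above; this is not the full query_object schema). The legacy `granularity` field and a `BASE_AXIS` column both describe the temporal x-axis, and per the comment they should not both drive the same query:

```python
# Legacy shape: a Druid-era "granularity" field names the temporal column.
legacy_granularity_query = {
    "granularity": "ds",
    "time_range": "1900-01-01T00:00:00 : 2000-01-01T00:00:00",
    "metrics": ["sum__num"],
}

# Modern shape: the x-axis is expressed as a BASE_AXIS column, including the
# custom SQL expression case that this PR makes work with temporal filters.
base_axis_query = {
    "columns": [
        {
            "columnType": "BASE_AXIS",
            "expressionType": "SQL",
            "label": "My column",
            "sqlExpression": "ds",
            "timeGrain": "P1W",
        }
    ],
    "filters": [{"col": "ds", "op": "TEMPORAL_RANGE", "val": "No filter"}],
    "metrics": ["sum__num"],
}
```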
Review discussion on the `# type: ignore` annotation:

- I'm curious why we need `# type: ignore` here?
- The `x_axis` comes from `form_data`, and its type is `Any`.
- It may be a good idea to broaden the scope of the type guard to `Any` so it can handle arbitrary types.
- I think the type guard is better scoped down rather than scoped up, right?
- @zephyring @villebro I think this PR introduced another potential bug with an ad-hoc column as the x-axis: a probe query should be sent to ensure the x-axis is a temporal column.
- Hi @zhaoyongjie, can you share more on what potential bug this may have introduced?
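A sketch of the type-guard trade-off discussed above: accept `Any` at the boundary, but narrow only when the value actually looks like an ad-hoc SQL column. The names here (`AdhocColumnLike`, `is_adhoc_column_like`) are hypothetical, not Superset's real helpers:

```python
from typing import Any, TypedDict, TypeGuard


class AdhocColumnLike(TypedDict, total=False):
    label: str
    sqlExpression: str
    expressionType: str


def is_adhoc_column_like(column: Any) -> TypeGuard[AdhocColumnLike]:
    """Take arbitrary input (Any) but only narrow on the expected dict shape."""
    return (
        isinstance(column, dict)
        and column.get("expressionType") == "SQL"
        and isinstance(column.get("sqlExpression"), str)
    )


x_axis: Any = {"expressionType": "SQL", "sqlExpression": "ds", "label": "My column"}
if is_adhoc_column_like(x_axis):
    # The type checker now treats x_axis as AdhocColumnLike, so its fields can
    # be read without a `# type: ignore`.
    print(x_axis["sqlExpression"])
```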