Revert "feat: add rolling window support to 'Big Number with Trendlin…
Browse files Browse the repository at this point in the history
…e' viz (#9107)"

This reverts commit c04d616.
Erik Ritter authored Mar 16, 2020
1 parent f1370c5 commit 04ccc8b
Showing 7 changed files with 45 additions and 146 deletions.
9 changes: 0 additions & 9 deletions superset-frontend/src/explore/controlPanels/BigNumber.js
@@ -17,7 +17,6 @@
* under the License.
*/
import { t } from '@superset-ui/translation';
import React from 'react';

export default {
controlPanelSections: [
@@ -44,14 +43,6 @@ export default {
['subheader_font_size'],
],
},
{
label: t('Advanced Analytics'),
expanded: false,
controlSetRows: [
[<h1 className="section-header">{t('Rolling Window')}</h1>],
['rolling_type', 'rolling_periods', 'min_periods'],
],
},
],
controlOverrides: {
y_axis_format: {
2 changes: 1 addition & 1 deletion superset-frontend/src/explore/controlPanels/sections.jsx
@@ -75,7 +75,7 @@ export const NVD3TimeSeries = [
'of query results',
),
controlSetRows: [
[<h1 className="section-header">{t('Rolling Window')}</h1>],
[<h1 className="section-header">{t('Moving Average')}</h1>],
['rolling_type', 'rolling_periods', 'min_periods'],
[<h1 className="section-header">{t('Time Comparison')}</h1>],
['time_compare', 'comparison_type'],
2 changes: 1 addition & 1 deletion superset-frontend/src/explore/controls.jsx
@@ -1126,7 +1126,7 @@ export const controls = {

rolling_type: {
type: 'SelectControl',
label: t('Rolling Function'),
label: t('Rolling'),
default: 'None',
choices: formatSelectOptions(['None', 'mean', 'sum', 'std', 'cumsum']),
description: t(
45 changes: 13 additions & 32 deletions superset/examples/birth_names.py
@@ -106,23 +106,22 @@ def load_birth_names(only_metadata=False, force=False):
obj.fetch_metadata()
tbl = obj

metrics = [
{
"expressionType": "SIMPLE",
"column": {"column_name": "num", "type": "BIGINT"},
"aggregate": "SUM",
"label": "Births",
"optionName": "metric_11",
}
]
metric = "sum__num"

defaults = {
"compare_lag": "10",
"compare_suffix": "o10Y",
"limit": "25",
"granularity_sqla": "ds",
"groupby": [],
"metric": "sum__num",
"metrics": [
{
"expressionType": "SIMPLE",
"column": {"column_name": "num", "type": "BIGINT"},
"aggregate": "SUM",
"label": "Births",
"optionName": "metric_11",
}
],
"row_limit": config["ROW_LIMIT"],
"since": "100 years ago",
"until": "now",
@@ -145,17 +144,14 @@ def load_birth_names(only_metadata=False, force=False):
granularity_sqla="ds",
compare_lag="5",
compare_suffix="over 5Y",
metric=metric,
),
),
Slice(
slice_name="Genders",
viz_type="pie",
datasource_type="table",
datasource_id=tbl.id,
params=get_slice_json(
defaults, viz_type="pie", groupby=["gender"], metric=metric
),
params=get_slice_json(defaults, viz_type="pie", groupby=["gender"]),
),
Slice(
slice_name="Trends",
@@ -169,7 +165,6 @@ def load_birth_names(only_metadata=False, force=False):
granularity_sqla="ds",
rich_tooltip=True,
show_legend=True,
metrics=metrics,
),
),
Slice(
@@ -220,7 +215,6 @@ def load_birth_names(only_metadata=False, force=False):
adhoc_filters=[gen_filter("gender", "girl")],
row_limit=50,
timeseries_limit_metric="sum__num",
metrics=metrics,
),
),
Slice(
@@ -237,7 +231,6 @@ def load_birth_names(only_metadata=False, force=False):
rotation="square",
limit="100",
adhoc_filters=[gen_filter("gender", "girl")],
metric=metric,
),
),
Slice(
@@ -250,7 +243,6 @@ def load_birth_names(only_metadata=False, force=False):
groupby=["name"],
adhoc_filters=[gen_filter("gender", "boy")],
row_limit=50,
metrics=metrics,
),
),
Slice(
@@ -267,7 +259,6 @@ def load_birth_names(only_metadata=False, force=False):
rotation="square",
limit="100",
adhoc_filters=[gen_filter("gender", "boy")],
metric=metric,
),
),
Slice(
@@ -285,7 +276,6 @@ def load_birth_names(only_metadata=False, force=False):
time_grain_sqla="P1D",
viz_type="area",
x_axis_forma="smart_date",
metrics=metrics,
),
),
Slice(
@@ -303,7 +293,6 @@ def load_birth_names(only_metadata=False, force=False):
time_grain_sqla="P1D",
viz_type="area",
x_axis_forma="smart_date",
metrics=metrics,
),
),
]
@@ -325,15 +314,14 @@ def load_birth_names(only_metadata=False, force=False):
},
metric_2="sum__num",
granularity_sqla="ds",
metrics=metrics,
),
),
Slice(
slice_name="Num Births Trend",
viz_type="line",
datasource_type="table",
datasource_id=tbl.id,
params=get_slice_json(defaults, viz_type="line", metrics=metrics),
params=get_slice_json(defaults, viz_type="line"),
),
Slice(
slice_name="Daily Totals",
@@ -347,7 +335,6 @@ def load_birth_names(only_metadata=False, force=False):
since="40 years ago",
until="now",
viz_type="table",
metrics=metrics,
),
),
Slice(
@@ -410,7 +397,6 @@ def load_birth_names(only_metadata=False, force=False):
datasource_id=tbl.id,
params=get_slice_json(
defaults,
metrics=metrics,
groupby=["name"],
row_limit=50,
timeseries_limit_metric={
@@ -431,7 +417,6 @@ def load_birth_names(only_metadata=False, force=False):
datasource_id=tbl.id,
params=get_slice_json(
defaults,
metric=metric,
viz_type="big_number_total",
granularity_sqla="ds",
adhoc_filters=[gen_filter("gender", "girl")],
@@ -444,11 +429,7 @@ def load_birth_names(only_metadata=False, force=False):
datasource_type="table",
datasource_id=tbl.id,
params=get_slice_json(
defaults,
viz_type="pivot_table",
groupby=["name"],
columns=["state"],
metrics=metrics,
defaults, viz_type="pivot_table", groupby=["name"], columns=["state"]
),
),
]
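
Most of the churn in this file comes from moving the metric definitions back into the shared defaults dict rather than passing metric=/metrics= to every slice. For reference, the two metric forms that appear in the hunks above are a saved-metric name and an ad-hoc SIMPLE metric. A minimal sketch with the values copied from the diff (illustration only, not part of the commit):

# Illustration only -- the two metric forms used by the birth_names example.
# A saved metric is referenced by name:
saved_metric = "sum__num"

# An ad-hoc metric is a SIMPLE expression built from a column and an aggregate:
adhoc_metric = {
    "expressionType": "SIMPLE",
    "column": {"column_name": "num", "type": "BIGINT"},
    "aggregate": "SUM",
    "label": "Births",
    "optionName": "metric_11",
}

# After the revert both live in the shared defaults, so individual slices no
# longer need explicit metric=/metrics= keyword arguments:
defaults_fragment = {"metric": saved_metric, "metrics": [adhoc_metric]}
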
32 changes: 13 additions & 19 deletions superset/examples/world_bank.py
@@ -97,32 +97,31 @@ def load_world_bank_health_n_pop(
db.session.commit()
tbl.fetch_metadata()

metric = "sum__SP_POP_TOTL"
metrics = ["sum__SP_POP_TOTL"]
secondary_metric = {
"aggregate": "SUM",
"column": {
"column_name": "SP_RUR_TOTL",
"optionName": "_col_SP_RUR_TOTL",
"type": "DOUBLE",
},
"expressionType": "SIMPLE",
"hasCustomLabel": True,
"label": "Rural Population",
}

defaults = {
"compare_lag": "10",
"compare_suffix": "o10Y",
"limit": "25",
"granularity_sqla": "year",
"groupby": [],
"metric": "sum__SP_POP_TOTL",
"metrics": ["sum__SP_POP_TOTL"],
"row_limit": config["ROW_LIMIT"],
"since": "2014-01-01",
"until": "2014-01-02",
"time_range": "2014-01-01 : 2014-01-02",
"markup_type": "markdown",
"country_fieldtype": "cca3",
"secondary_metric": {
"aggregate": "SUM",
"column": {
"column_name": "SP_RUR_TOTL",
"optionName": "_col_SP_RUR_TOTL",
"type": "DOUBLE",
},
"expressionType": "SIMPLE",
"hasCustomLabel": True,
"label": "Rural Population",
},
"entity": "country_code",
"show_bubbles": True,
}
@@ -208,7 +207,6 @@ def load_world_bank_health_n_pop(
viz_type="world_map",
metric="sum__SP_RUR_TOTL_ZS",
num_period_compare="10",
secondary_metric=secondary_metric,
),
),
Slice(
@@ -266,8 +264,6 @@ def load_world_bank_health_n_pop(
groupby=["region", "country_name"],
since="2011-01-01",
until="2011-01-01",
metric=metric,
secondary_metric=secondary_metric,
),
),
Slice(
@@ -281,7 +277,6 @@ def load_world_bank_health_n_pop(
until="now",
viz_type="area",
groupby=["region"],
metrics=metrics,
),
),
Slice(
@@ -297,7 +292,6 @@ def load_world_bank_health_n_pop(
x_ticks_layout="staggered",
viz_type="box_plot",
groupby=["region"],
metrics=metrics,
),
),
Slice(
50 changes: 17 additions & 33 deletions superset/viz.py
@@ -178,26 +178,6 @@ def run_extra_queries(self):
"""
pass

def apply_rolling(self, df):
fd = self.form_data
rolling_type = fd.get("rolling_type")
rolling_periods = int(fd.get("rolling_periods") or 0)
min_periods = int(fd.get("min_periods") or 0)

if rolling_type in ("mean", "std", "sum") and rolling_periods:
kwargs = dict(window=rolling_periods, min_periods=min_periods)
if rolling_type == "mean":
df = df.rolling(**kwargs).mean()
elif rolling_type == "std":
df = df.rolling(**kwargs).std()
elif rolling_type == "sum":
df = df.rolling(**kwargs).sum()
elif rolling_type == "cumsum":
df = df.cumsum()
if min_periods:
df = df[min_periods:]
return df

def get_samples(self):
query_obj = self.query_obj()
query_obj.update(
@@ -1121,18 +1101,6 @@ def query_obj(self):
self.form_data["metric"] = metric
return d

def get_data(self, df: pd.DataFrame) -> VizData:
df = df.pivot_table(
index=DTTM_ALIAS,
columns=[],
values=self.metric_labels,
fill_value=0,
aggfunc=sum,
)
df = self.apply_rolling(df)
df[DTTM_ALIAS] = df.index
return super().get_data(df)


class BigNumberTotalViz(BaseViz):

@@ -1257,7 +1225,23 @@ def process_data(self, df: pd.DataFrame, aggregate: bool = False) -> VizData:
dfs.sort_values(ascending=False, inplace=True)
df = df[dfs.index]

df = self.apply_rolling(df)
rolling_type = fd.get("rolling_type")
rolling_periods = int(fd.get("rolling_periods") or 0)
min_periods = int(fd.get("min_periods") or 0)

if rolling_type in ("mean", "std", "sum") and rolling_periods:
kwargs = dict(window=rolling_periods, min_periods=min_periods)
if rolling_type == "mean":
df = df.rolling(**kwargs).mean()
elif rolling_type == "std":
df = df.rolling(**kwargs).std()
elif rolling_type == "sum":
df = df.rolling(**kwargs).sum()
elif rolling_type == "cumsum":
df = df.cumsum()
if min_periods:
df = df[min_periods:]

if fd.get("contribution"):
dft = df.T
df = (dft / dft.sum()).T
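
The block reinstated above is the same rolling-window logic that the reverted commit had factored out into the removed apply_rolling helper further up. A minimal standalone sketch of what it does, using toy data and hard-coded values standing in for form_data (illustration only, not Superset code):

# Illustration only -- mirrors the reinstated rolling/cumsum branch above,
# with toy data and hard-coded values in place of form_data lookups.
import pandas as pd

df = pd.DataFrame({"births": [10, 20, 30, 40, 50]})

rolling_type = "mean"   # control panel choices: None, mean, sum, std, cumsum
rolling_periods = 3     # window size
min_periods = 2         # rows required before a window produces a value

if rolling_type in ("mean", "std", "sum") and rolling_periods:
    kwargs = dict(window=rolling_periods, min_periods=min_periods)
    if rolling_type == "mean":
        df = df.rolling(**kwargs).mean()
    elif rolling_type == "std":
        df = df.rolling(**kwargs).std()
    elif rolling_type == "sum":
        df = df.rolling(**kwargs).sum()
elif rolling_type == "cumsum":
    df = df.cumsum()
if min_periods:
    df = df[min_periods:]  # drop the first min_periods rows, as the viz code does

print(df)  # births: 20.0, 30.0, 40.0 -- means over full 3-row windows
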