Skip to content

Commit

Permalink
fix(extra-filters): add logic for identifying applied extra filters (#…
Browse files Browse the repository at this point in the history
…11325)

* fix: use dashboard id for stable cache key (#11293)

* fix: button translations missing (#11187)

* button translations missing

* blank space before text

* feat: update time_compare description and choices (#11294)

* feat: update time_compare description and choices

* Update sections.jsx

* fix(extra-filters): add logic for identifying applied extra filters

* lint

Co-authored-by: Jesse Yang <jesse.yang@airbnb.com>
Co-authored-by: rubenSastre <ruben.sastre@decathlon.com>
Co-authored-by: Erik Ritter <erik.ritter@airbnb.com>
  • Loading branch information
4 people authored Oct 19, 2020
1 parent a123b24 commit 53293e3
Show file tree
Hide file tree
Showing 15 changed files with 200 additions and 28 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ describe('QueryAndSaveButtons', () => {

it('renders buttons with correct text', () => {
expect(wrapper.find(Button).contains('Run')).toBe(true);
expect(wrapper.find(Button).contains(' Save')).toBe(true);
expect(wrapper.find(Button).contains('Save')).toBe(true);
});

it('calls onQuery when query button is clicked', () => {
Expand Down
2 changes: 1 addition & 1 deletion superset-frontend/src/components/Menu/Menu.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ export function Menu({
<Nav className="navbar-right">
{!navbarRight.user_is_anonymous && <NewMenu />}
{settings && settings.length > 0 && (
<NavDropdown id="settings-dropdown" title="Settings">
<NavDropdown id="settings-dropdown" title={t('Settings')}>
{flatSettings.map((section, index) => {
if (section === '-') {
return (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ export default class PublishedStatus extends React.Component {
this.togglePublished();
}}
>
Draft
{t('Draft')}
</Label>
</TooltipWrapper>
);
Expand All @@ -80,7 +80,7 @@ export default class PublishedStatus extends React.Component {
placement="bottom"
tooltip={draftDivTooltip}
>
<Label>Draft</Label>
<Label>{t('Draft')}</Label>
</TooltipWrapper>
);
}
Expand All @@ -98,7 +98,7 @@ export default class PublishedStatus extends React.Component {
this.togglePublished();
}}
>
Published
{t('Published')}
</Label>
</TooltipWrapper>
);
Expand Down
4 changes: 2 additions & 2 deletions superset-frontend/src/explore/components/QueryAndSaveBtns.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ export default function QueryAndSaveBtns({
buttonSize="small"
disabled={!canAdd}
>
<i className="fa fa-stop-circle-o" /> Stop
<i className="fa fa-stop-circle-o" /> {t('Stop')}
</Button>
) : (
<Button
Expand Down Expand Up @@ -116,7 +116,7 @@ export default function QueryAndSaveBtns({
onClick={onSave}
data-test="query-save-button"
>
<i className="fa fa-plus-circle" /> Save
<i className="fa fa-plus-circle" /> {t('Save')}
</Button>
</ButtonGroup>
{errorMessage && (
Expand Down
4 changes: 3 additions & 1 deletion superset-frontend/src/explore/controlPanels/sections.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -213,12 +213,14 @@ export const NVD3TimeSeries = [
'30 days',
'52 weeks',
'1 year',
'104 weeks',
'2 years',
]),
description: t(
'Overlay one or more timeseries from a ' +
'relative time period. Expects relative time deltas ' +
'in natural language (example: 24 hours, 7 days, ' +
'56 weeks, 365 days)',
'52 weeks, 365 days). Free text is supported.',
),
},
},
Expand Down
18 changes: 18 additions & 0 deletions superset/charts/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -705,6 +705,11 @@ class ChartDataExtrasSchema(Schema):


class ChartDataQueryObjectSchema(Schema):
applied_time_extras = fields.Dict(
description="A mapping of temporal extras that have been applied to the query",
required=False,
example={"__time_range": "1 year ago : now"},
)
filters = fields.List(fields.Nested(ChartDataFilterSchema), required=False)
granularity = fields.String(
description="Name of temporal column used for time filtering. For legacy Druid "
Expand Down Expand Up @@ -816,6 +821,13 @@ class ChartDataQueryObjectSchema(Schema):
"as `having_druid`.",
deprecated=True,
)
druid_time_origin = fields.String(
description="Starting point for time grain counting on legacy Druid "
"datasources. Used to change e.g. Monday/Sunday first-day-of-week. "
"This field is deprecated and should be passed to `extras` "
"as `druid_time_origin`.",
allow_none=True,
)


class ChartDataDatasourceSchema(Schema):
Expand Down Expand Up @@ -894,6 +906,12 @@ class ChartDataResponseResult(Schema):
description="Amount of rows in result set", allow_none=False,
)
data = fields.List(fields.Dict(), description="A list with results")
applied_filters = fields.List(
fields.Dict(), description="A list with applied filters"
)
rejected_filters = fields.List(
fields.Dict(), description="A list with rejected filters"
)


class ChartDataResponseSchema(Schema):
Expand Down
18 changes: 17 additions & 1 deletion superset/common/query_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
import logging
import math
from datetime import datetime, timedelta
from typing import Any, ClassVar, Dict, List, Optional, Union
from typing import Any, cast, ClassVar, Dict, List, Optional, Union

import numpy as np
import pandas as pd
Expand Down Expand Up @@ -160,6 +160,22 @@ def get_single_payload(self, query_obj: QueryObject) -> Dict[str, Any]:
if status != utils.QueryStatus.FAILED:
payload["data"] = self.get_data(df)
del payload["df"]

filters = query_obj.filter
filter_columns = cast(List[str], [flt.get("col") for flt in filters])
columns = set(self.datasource.column_names)
applied_time_columns, rejected_time_columns = utils.get_time_filter_status(
self.datasource, query_obj.applied_time_extras
)
payload["applied_filters"] = [
{"column": col} for col in filter_columns if col in columns
] + applied_time_columns
payload["rejected_filters"] = [
{"reason": "not_in_datasource", "column": col}
for col in filter_columns
if col not in columns
] + rejected_time_columns

if self.result_type == utils.ChartDataResultType.RESULTS:
return {"data": payload["data"]}
return payload
Expand Down
4 changes: 4 additions & 0 deletions superset/common/query_object.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ class DeprecatedField(NamedTuple):
DeprecatedField(old_name="where", new_name="where"),
DeprecatedField(old_name="having", new_name="having"),
DeprecatedField(old_name="having_filters", new_name="having_druid"),
DeprecatedField(old_name="druid_time_origin", new_name="druid_time_origin"),
)


Expand All @@ -59,6 +60,7 @@ class QueryObject:
and druid. The query objects are constructed on the client.
"""

applied_time_extras: Dict[str, str]
granularity: Optional[str]
from_dttm: Optional[datetime]
to_dttm: Optional[datetime]
Expand All @@ -79,6 +81,7 @@ class QueryObject:

def __init__(
self,
applied_time_extras: Optional[Dict[str, str]] = None,
granularity: Optional[str] = None,
metrics: Optional[List[Union[Dict[str, Any], str]]] = None,
groupby: Optional[List[str]] = None,
Expand All @@ -100,6 +103,7 @@ def __init__(
metrics = metrics or []
extras = extras or {}
is_sip_38 = is_feature_enabled("SIP_38_VIZ_REARCHITECTURE")
self.applied_time_extras = applied_time_extras or {}
self.granularity = granularity
self.from_dttm, self.to_dttm = utils.get_since_until(
relative_start=extras.get(
Expand Down
4 changes: 2 additions & 2 deletions superset/models/dashboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ class Dashboard( # pylint: disable=too-many-instance-attributes
]

def __repr__(self) -> str:
return f"Dashboard<{self.slug or self.id}>"
return f"Dashboard<{self.id or self.slug}>"

@property
def table_names(self) -> str:
Expand Down Expand Up @@ -253,7 +253,7 @@ def data(self) -> Dict[str, Any]:

@cache.memoize(
# manage cache version manually
make_name=lambda fname: f"{fname}-v2",
make_name=lambda fname: f"{fname}-v2.1",
timeout=config["DASHBOARD_CACHE_TIMEOUT"],
unless=lambda: not is_feature_enabled("DASHBOARD_CACHE"),
)
Expand Down
4 changes: 2 additions & 2 deletions superset/templates/appbuilder/general/widgets/search.html
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
</table>
<div class="filter-action" style="display:none">
<button type="submit" class="btn btn-sm btn-primary" id="search-action">
Search&nbsp;&nbsp;<i class="fa fa-search"></i>
{{_("Search")}}&nbsp;&nbsp;<i class="fa fa-search"></i>
</button>
</div>
</div>
Expand All @@ -54,7 +54,7 @@
$('.filter-action').toggle(true);
} else {
$('.filter-action').toggle(true);
$('.filter-action > button').html('Refresh&nbsp;&nbsp;<i class="fa fa-refresh"></i>');
$('.filter-action > button').html('{{_("Refresh")}}&nbsp;&nbsp;<i class="fa fa-refresh"></i>');
}
}

Expand Down
89 changes: 86 additions & 3 deletions superset/utils/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -910,6 +910,8 @@ def merge_extra_filters( # pylint: disable=too-many-branches
# that are external to the slice definition. We use those for dynamic
# interactive filters like the ones emitted by the "Filter Box" visualization.
# Note extra_filters only support simple filters.
applied_time_extras: Dict[str, str] = {}
form_data["applied_time_extras"] = applied_time_extras
if "extra_filters" in form_data:
# __from and __to are special extra_filters that target time
# boundaries. The rest of extra_filters are simple
Expand Down Expand Up @@ -948,9 +950,13 @@ def get_filter_key(f: Dict[str, Any]) -> str:
]:
filtr["isExtra"] = True
# Pull out time filters/options and merge into form data
if date_options.get(filtr["col"]):
if filtr.get("val"):
form_data[date_options[filtr["col"]]] = filtr["val"]
filter_column = filtr["col"]
time_extra = date_options.get(filter_column)
if time_extra:
time_extra_value = filtr.get("val")
if time_extra_value:
form_data[time_extra] = time_extra_value
applied_time_extras[filter_column] = time_extra_value
elif filtr["val"]:
# Merge column filters
filter_key = get_filter_key(filtr)
Expand Down Expand Up @@ -1567,5 +1573,82 @@ class RowLevelSecurityFilterType(str, Enum):
BASE = "Base"


class ExtraFiltersTimeColumnType(str, Enum):
    """Special pseudo-column keys used by dashboard extra filters to target
    temporal query options rather than real datasource columns.

    These values appear as keys in the ``applied_time_extras`` mapping that
    ``get_time_filter_status`` inspects. Inheriting from ``str`` keeps the
    members JSON-serializable as their plain string values.
    """

    GRANULARITY = "__granularity"  # Druid-only time granularity
    TIME_COL = "__time_col"  # name of the temporal column to filter on
    TIME_GRAIN = "__time_grain"  # time grain (e.g. day, week)
    TIME_ORIGIN = "__time_origin"  # Druid-only time-grain origin offset
    TIME_RANGE = "__time_range"  # the time range filter itself


def is_test() -> bool:
    """Return True if Superset is running in a test environment.

    Reads the ``SUPERSET_TESTENV`` environment variable and interprets it
    with the same truth-value set as ``distutils.util.strtobool`` (which is
    deprecated and removed in Python 3.12), but returns a real ``bool``
    instead of ``strtobool``'s 0/1 int.

    :raises ValueError: if the variable holds an unrecognized truth value,
        mirroring ``strtobool``'s behavior
    """
    value = os.environ.get("SUPERSET_TESTENV", "false").lower()
    if value in ("y", "yes", "t", "true", "on", "1"):
        return True
    if value in ("n", "no", "f", "false", "off", "0"):
        return False
    raise ValueError(f"invalid truth value {value!r}")


def get_time_filter_status(
    datasource: "BaseDatasource", applied_time_extras: Dict[str, str],
) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
    """Partition applied time extras into applied and rejected filter entries.

    For each temporal extra present in ``applied_time_extras``, decide whether
    the datasource can honor it:

    - ``__time_col`` is applied only if the referenced column is a temporal
      column of the datasource.
    - ``__time_grain`` and ``__time_range`` require the datasource to have at
      least one temporal column.
    - ``__time_origin`` and ``__granularity`` are Druid-only concepts and are
      rejected for non-Druid datasources.

    :param datasource: the datasource the query targets
    :param applied_time_extras: mapping of extra-filter key to its value
    :return: tuple of (applied, rejected) entries; each entry has a "column"
        key, and rejected entries additionally carry a "reason" key
    """
    temporal_columns = {
        col.column_name for col in datasource.columns if col.is_temporal
    }
    applied: List[Dict[str, str]] = []
    rejected: List[Dict[str, str]] = []

    # __time_col names a specific column; it must be one of the datasource's
    # temporal columns to be applicable.
    time_column = applied_time_extras.get(ExtraFiltersTimeColumnType.TIME_COL)
    if time_column:
        if time_column in temporal_columns:
            applied.append({"column": ExtraFiltersTimeColumnType.TIME_COL})
        else:
            rejected.append(
                {
                    "reason": "not_in_datasource",
                    "column": ExtraFiltersTimeColumnType.TIME_COL,
                }
            )

    # Time grain and time range only make sense when there is at least one
    # temporal column to apply them to.
    for extra in (
        ExtraFiltersTimeColumnType.TIME_GRAIN,
        ExtraFiltersTimeColumnType.TIME_RANGE,
    ):
        if extra in applied_time_extras:
            if temporal_columns:
                applied.append({"column": extra})
            else:
                rejected.append(
                    {"reason": "no_temporal_column", "column": extra}
                )

    # Time origin and granularity are only supported by legacy Druid
    # datasources.
    for extra in (
        ExtraFiltersTimeColumnType.TIME_ORIGIN,
        ExtraFiltersTimeColumnType.GRANULARITY,
    ):
        if extra in applied_time_extras:
            if datasource.type == "druid":
                applied.append({"column": extra})
            else:
                rejected.append(
                    {"reason": "not_druid_datasource", "column": extra}
                )

    return applied, rejected
1 change: 1 addition & 0 deletions superset/views/dashboard/mixin.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ class DashboardMixin: # pylint: disable=too-few-public-methods
"slug": _("Slug"),
"charts": _("Charts"),
"owners": _("Owners"),
"published": _("Published"),
"creator": _("Creator"),
"modified": _("Modified"),
"position_json": _("Position JSON"),
Expand Down
8 changes: 6 additions & 2 deletions superset/viz.py
Original file line number Diff line number Diff line change
Expand Up @@ -485,14 +485,18 @@ def get_payload(self, query_obj: Optional[QueryObjectDict] = None) -> VizPayload
filters = self.form_data.get("filters", [])
filter_columns = [flt.get("col") for flt in filters]
columns = set(self.datasource.column_names)
applied_time_extras = self.form_data.get("applied_time_extras", {})
applied_time_columns, rejected_time_columns = utils.get_time_filter_status(
self.datasource, applied_time_extras
)
payload["applied_filters"] = [
{"column": col} for col in filter_columns if col in columns
]
] + applied_time_columns
payload["rejected_filters"] = [
{"reason": "not_in_datasource", "column": col}
for col in filter_columns
if col not in columns
]
] + rejected_time_columns

return payload

Expand Down
24 changes: 24 additions & 0 deletions tests/charts/api_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -818,6 +818,30 @@ def test_chart_data_simple(self):
data = json.loads(rv.data.decode("utf-8"))
self.assertEqual(data["result"][0]["rowcount"], 45)

def test_chart_data_applied_time_extras(self):
    """
    Chart data API: Test chart data query with applied time extras
    """
    self.login(username="admin")
    table = self.get_table_by_name("birth_names")
    payload = get_query_context(table.name, table.id, table.type)
    # Request a time-range extra (applicable) and a Druid-only time-origin
    # extra (expected to be rejected for a SQL datasource).
    payload["queries"][0]["applied_time_extras"] = {
        "__time_range": "100 years ago : now",
        "__time_origin": "now",
    }
    response = self.post_assert_metric(CHART_DATA_URI, payload, "data")
    self.assertEqual(response.status_code, 200)
    result = json.loads(response.data.decode("utf-8"))["result"][0]
    self.assertEqual(
        result["applied_filters"],
        [{"column": "gender"}, {"column": "__time_range"}],
    )
    self.assertEqual(
        result["rejected_filters"],
        [{"column": "__time_origin", "reason": "not_druid_datasource"}],
    )
    self.assertEqual(result["rowcount"], 45)

def test_chart_data_limit_offset(self):
"""
Chart data API: Test chart data query with limit and offset
Expand Down
Loading

0 comments on commit 53293e3

Please sign in to comment.