fix(cache): respect default cache timeout on v1 chart data requests #21441

Merged: 4 commits, Sep 13, 2022. Changes shown from 1 commit.
8 changes: 7 additions & 1 deletion superset/charts/schemas.py
@@ -1194,6 +1194,12 @@ class ChartDataQueryContextSchema(Schema):
query_context_factory: Optional[QueryContextFactory] = None
datasource = fields.Nested(ChartDataDatasourceSchema)
queries = fields.List(fields.Nested(ChartDataQueryObjectSchema))
custom_cache_timeout = fields.Integer(
description="Override the default cache timeout",
required=False,
allow_none=True,
)

force = fields.Boolean(
description="Should the queries be forced to load from the source. "
"Default: `false`",
@@ -1255,7 +1261,7 @@ class ChartDataResponseResult(Schema):
)
cache_timeout = fields.Integer(
description="Cache timeout in following order: custom timeout, datasource "
"timeout, default config timeout.",
"timeout, default config timeout, cache config default timeout.",
required=True,
allow_none=True,
)
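For reference, the new `custom_cache_timeout` field is sent alongside the other query context fields on a v1 chart data request. Below is a minimal sketch of such a request; the datasource id, metric, host, and token are illustrative placeholders, not values from this PR.

```python
# Hypothetical v1 chart data request using the new field; the datasource id,
# metric, host, and token are placeholders for illustration only.
import requests

payload = {
    "datasource": {"id": 1, "type": "table"},
    "force": False,
    # Per-request override of the cache timeout (seconds).
    "custom_cache_timeout": 600,
    "queries": [{"metrics": ["count"], "filters": []}],
    "result_format": "json",
    "result_type": "full",
}

# Assumes a running Superset instance and a valid access token.
resp = requests.post(
    "http://localhost:8088/api/v1/chart/data",
    json=payload,
    headers={"Authorization": "Bearer <access_token>"},
)
print(resp.json()["result"][0]["cache_timeout"])
```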
6 changes: 6 additions & 0 deletions superset/common/query_context_processor.py
@@ -434,6 +434,12 @@ def get_cache_timeout(self) -> int:
cache_timeout_rv = self._query_context.get_cache_timeout()
if cache_timeout_rv:
return cache_timeout_rv
if (
data_cache_timeout := config["DATA_CACHE_CONFIG"].get(
"CACHE_DEFAULT_TIMEOUT"
)
) is not None:
return data_cache_timeout
return config["CACHE_DEFAULT_TIMEOUT"]

def cache_key(self, **extra: Any) -> str:
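The fallback added above slots in between the query-context timeout and the global default. A standalone sketch of the resulting resolution order, using a plain dict in place of Superset's config object (values here are illustrative):

```python
from typing import Optional

# Illustrative stand-in for Superset's config; the timeout values are made up.
config = {
    "CACHE_DEFAULT_TIMEOUT": 86400,
    "DATA_CACHE_CONFIG": {"CACHE_DEFAULT_TIMEOUT": 3600},
}


def resolve_cache_timeout(query_context_timeout: Optional[int]) -> int:
    """Mirror the lookup order after this PR:
    1. custom/datasource timeout from the query context, if set
    2. DATA_CACHE_CONFIG's CACHE_DEFAULT_TIMEOUT, if not None
    3. the global CACHE_DEFAULT_TIMEOUT
    """
    if query_context_timeout:
        return query_context_timeout
    data_cache_timeout = config["DATA_CACHE_CONFIG"].get("CACHE_DEFAULT_TIMEOUT")
    if data_cache_timeout is not None:
        return data_cache_timeout
    return config["CACHE_DEFAULT_TIMEOUT"]


assert resolve_cache_timeout(None) == 3600  # falls through to the data cache default
assert resolve_cache_timeout(600) == 600    # explicit timeout wins
```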
4 changes: 4 additions & 0 deletions tests/common/query_context_generator.py
@@ -248,6 +248,8 @@ class QueryContextGenerator:
def generate(
self,
query_name: str,
force: bool = False,
custom_cache_timeout: Optional[int] = None,
add_postprocessing_operations: bool = False,
add_time_offsets: bool = False,
table_id=1,
@@ -259,6 +261,8 @@ def generate(
table = self.get_table(table_name, table_id, table_type)
return {
"datasource": {"id": table.id, "type": table.type},
"force": force,
"custom_cache_timeout": custom_cache_timeout,
"queries": [
get_query_object(
query_name,
60 changes: 60 additions & 0 deletions tests/integration_tests/charts/data/api_tests.py
@@ -210,6 +210,66 @@ def test_with_row_limit_as_samples__rowcount_as_row_limit(self):
self.assert_row_count(rv, expected_row_count)
assert "GROUP BY" not in rv.json["result"][0]["query"]

@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch(
"superset.common.query_context_processor.config",
{
**app.config,
"CACHE_DEFAULT_TIMEOUT": 1234,
"DATA_CACHE_CONFIG": {
**app.config["DATA_CACHE_CONFIG"],
"CACHE_DEFAULT_TIMEOUT": None,
},
},
)
def test_cache_default_timeout(self):
query_context = get_query_context("birth_names", force=True)
rv = self.post_assert_metric(
CHART_DATA_URI,
query_context,
"data",
)
data = json.loads(rv.data.decode("utf-8"))
assert data["result"][0]["cache_timeout"] == 1234

@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch(
"superset.common.query_context_processor.config",
{
**app.config,
"CACHE_DEFAULT_TIMEOUT": 100000,
"DATA_CACHE_CONFIG": {
**app.config["DATA_CACHE_CONFIG"],
"CACHE_DEFAULT_TIMEOUT": 3456,
},
},
)
def test_data_cache_default_timeout(self):
query_context = get_query_context("birth_names", force=True)
rv = self.post_assert_metric(
CHART_DATA_URI,
query_context,
"data",
)
data = json.loads(rv.data.decode("utf-8"))
assert data["result"][0]["cache_timeout"] == 3456

@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_custom_cache_timeout(self):
query_context = get_query_context(
"birth_names",
force=True,
custom_cache_timeout=5678,
)
rv = self.post_assert_metric(
CHART_DATA_URI,
query_context,
"data",
)
data = json.loads(rv.data.decode("utf-8"))
assert data["result"][0]["cache_timeout"] == 5678

def test_with_incorrect_result_type__400(self):
self.query_context_payload["result_type"] = "qwerty"
rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data")
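The three tests above patch the processor's config to cover each step of the fallback chain. In a deployment, the equivalent settings live in `superset_config.py`; a hedged sketch with illustrative values (the Redis backend is an assumption, any Flask-Caching backend works):

```python
# superset_config.py (illustrative values only)
CACHE_DEFAULT_TIMEOUT = 60 * 60 * 24          # global fallback: 24 hours

DATA_CACHE_CONFIG = {
    "CACHE_TYPE": "RedisCache",               # assumption: Redis is available
    "CACHE_DEFAULT_TIMEOUT": 60 * 60,         # chart data cache default: 1 hour
    "CACHE_KEY_PREFIX": "superset_data_",
    "CACHE_REDIS_URL": "redis://localhost:6379/1",
}
```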
6 changes: 6 additions & 0 deletions tests/integration_tests/fixtures/query_context.py
@@ -27,6 +27,8 @@ def get_table(self, name, id_, type_):

def get_query_context(
query_name: str,
force: bool = False,
custom_cache_timeout: Optional[int] = None,
add_postprocessing_operations: bool = False,
add_time_offsets: bool = False,
form_data: Optional[Dict[str, Any]] = None,
@@ -37,13 +39,17 @@ def get_query_context(
generated by the "Boy Name Cloud" chart in the examples.
:param query_name: name of an example query, which is always in the format
of `datasource_name[:test_case_name]`, where `:test_case_name` is optional.
:param force: force cache refresh
:param custom_cache_timeout: Custom cache timeout
:param add_postprocessing_operations: Add post-processing operations to QueryObject
:param add_time_offsets: Add time offsets to QueryObject(advanced analytics)
:param form_data: chart metadata
:return: Request payload
"""
return QueryContextGeneratorInteg().generate(
query_name=query_name,
force=force,
custom_cache_timeout=custom_cache_timeout,
add_postprocessing_operations=add_postprocessing_operations,
add_time_offsets=add_time_offsets,
form_data=form_data,