Commit e03c418

fix tests

villebro committed Sep 13, 2022
1 parent 551d777 commit e03c418
Showing 4 changed files with 66 additions and 72 deletions.
2 changes: 1 addition & 1 deletion superset/charts/schemas.py
@@ -1261,7 +1261,7 @@ class ChartDataResponseResult(Schema):
     )
     cache_timeout = fields.Integer(
         description="Cache timeout in following order: custom timeout, datasource "
-        "timeout, default config timeout, cache config default timeout.",
+        "timeout, cache default timeout, config default cache timeout.",
         required=True,
         allow_none=True,
     )
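The reworded description spells out the fallback order for the effective chart-data cache timeout: the first value that is not None wins, ending at the global config default. Below is a minimal, hypothetical sketch of that order for reference; resolve_cache_timeout is an invented name, and the actual logic lives in superset/common/query_context_processor.py and may differ in detail.

# Hypothetical sketch only; mirrors the documented fallback order.
from typing import Optional


def resolve_cache_timeout(
    custom_timeout: Optional[int],
    datasource_timeout: Optional[int],
    data_cache_default: Optional[int],
    config_default_timeout: int,
) -> int:
    # custom timeout -> datasource timeout -> cache default -> config default
    for candidate in (custom_timeout, datasource_timeout, data_cache_default):
        if candidate is not None:
            return candidate
    return config_default_timeout


# Mirrors test_cache_default_timeout below: DATA_CACHE_CONFIG's default is None,
# so the global CACHE_DEFAULT_TIMEOUT (1234) applies.
assert resolve_cache_timeout(None, None, None, 1234) == 1234
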
4 changes: 0 additions & 4 deletions tests/common/query_context_generator.py
@@ -248,8 +248,6 @@ class QueryContextGenerator:
     def generate(
         self,
         query_name: str,
-        force: bool = False,
-        custom_cache_timeout: Optional[int] = None,
         add_postprocessing_operations: bool = False,
         add_time_offsets: bool = False,
         table_id=1,
@@ -261,8 +259,6 @@ def generate(
         table = self.get_table(table_name, table_id, table_type)
         return {
             "datasource": {"id": table.id, "type": table.type},
-            "force": force,
-            "custom_cache_timeout": custom_cache_timeout,
             "queries": [
                 get_query_object(
                     query_name,
126 changes: 65 additions & 61 deletions tests/integration_tests/charts/data/api_tests.py
@@ -21,7 +21,7 @@
 import copy
 from datetime import datetime
 from io import BytesIO
-from typing import Optional
+from typing import Any, Dict, Optional
 from unittest import mock
 from zipfile import ZipFile
 
@@ -210,66 +210,6 @@ def test_with_row_limit_as_samples__rowcount_as_row_limit(self):
         self.assert_row_count(rv, expected_row_count)
         assert "GROUP BY" not in rv.json["result"][0]["query"]
 
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    @mock.patch(
-        "superset.common.query_context_processor.config",
-        {
-            **app.config,
-            "CACHE_DEFAULT_TIMEOUT": 1234,
-            "DATA_CACHE_CONFIG": {
-                **app.config["DATA_CACHE_CONFIG"],
-                "CACHE_DEFAULT_TIMEOUT": None,
-            },
-        },
-    )
-    def test_cache_default_timeout(self):
-        query_context = get_query_context("birth_names", force=True)
-        rv = self.post_assert_metric(
-            CHART_DATA_URI,
-            query_context,
-            "data",
-        )
-        data = json.loads(rv.data.decode("utf-8"))
-        assert data["result"][0]["cache_timeout"] == 1234
-
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    @mock.patch(
-        "superset.common.query_context_processor.config",
-        {
-            **app.config,
-            "CACHE_DEFAULT_TIMEOUT": 100000,
-            "DATA_CACHE_CONFIG": {
-                **app.config["DATA_CACHE_CONFIG"],
-                "CACHE_DEFAULT_TIMEOUT": 3456,
-            },
-        },
-    )
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_data_cache_default_timeout(self):
-        query_context = get_query_context("birth_names", force=True)
-        rv = self.post_assert_metric(
-            CHART_DATA_URI,
-            query_context,
-            "data",
-        )
-        data = json.loads(rv.data.decode("utf-8"))
-        assert data["result"][0]["cache_timeout"] == 3456
-
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_custom_cache_timeout(self):
-        query_context = get_query_context(
-            "birth_names",
-            force=True,
-            custom_cache_timeout=5678,
-        )
-        rv = self.post_assert_metric(
-            CHART_DATA_URI,
-            query_context,
-            "data",
-        )
-        data = json.loads(rv.data.decode("utf-8"))
-        assert data["result"][0]["cache_timeout"] == 5678
-
     def test_with_incorrect_result_type__400(self):
         self.query_context_payload["result_type"] = "qwerty"
         rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data")
@@ -975,3 +915,67 @@ def test_chart_data_with_adhoc_column(self):
         unique_genders = {row["male_or_female"] for row in data}
         assert unique_genders == {"male", "female"}
         assert result["applied_filters"] == [{"column": "male_or_female"}]
+
+
+@pytest.fixture()
+def physical_query_context(physical_dataset) -> Dict[str, Any]:
+    return {
+        "datasource": {
+            "type": physical_dataset.type,
+            "id": physical_dataset.id,
+        },
+        "queries": [
+            {
+                "columns": ["col1"],
+                "metrics": ["count"],
+                "orderby": [["col1", True]],
+            }
+        ],
+        "result_type": ChartDataResultType.FULL,
+        "force": True,
+    }
+
+
+@mock.patch(
+    "superset.common.query_context_processor.config",
+    {
+        **app.config,
+        "CACHE_DEFAULT_TIMEOUT": 1234,
+        "DATA_CACHE_CONFIG": {
+            **app.config["DATA_CACHE_CONFIG"],
+            "CACHE_DEFAULT_TIMEOUT": None,
+        },
+    },
+)
+def test_cache_default_timeout(test_client, login_as_admin, physical_query_context):
+    rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
+    data = json.loads(rv.data.decode("utf-8"))
+    assert data["result"][0]["cache_timeout"] == 1234
+
+
+def test_custom_cache_timeout(test_client, login_as_admin, physical_query_context):
+    physical_query_context["custom_cache_timeout"] = 5678
+    rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
+    data = json.loads(rv.data.decode("utf-8"))
+    assert data["result"][0]["cache_timeout"] == 5678
+
+
+@mock.patch(
+    "superset.common.query_context_processor.config",
+    {
+        **app.config,
+        "CACHE_DEFAULT_TIMEOUT": 100000,
+        "DATA_CACHE_CONFIG": {
+            **app.config["DATA_CACHE_CONFIG"],
+            "CACHE_DEFAULT_TIMEOUT": 3456,
+        },
+    },
+)
+def test_data_cache_default_timeout(
+    test_client,
+    login_as_admin,
+    physical_query_context,
+):
+    rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
+    data = json.loads(rv.data.decode("utf-8"))
+    assert data["result"][0]["cache_timeout"] == 3456
6 changes: 0 additions & 6 deletions tests/integration_tests/fixtures/query_context.py
@@ -27,8 +27,6 @@ def get_table(self, name, id_, type_):
 
 def get_query_context(
     query_name: str,
-    force: bool = False,
-    custom_cache_timeout: Optional[int] = None,
     add_postprocessing_operations: bool = False,
     add_time_offsets: bool = False,
     form_data: Optional[Dict[str, Any]] = None,
@@ -39,17 +37,13 @@ def get_query_context(
     generated by the "Boy Name Cloud" chart in the examples.
     :param query_name: name of an example query, which is always in the format
     of `datasource_name[:test_case_name]`, where `:test_case_name` is optional.
-    :param force: force cache refresh
-    :param custom_cache_timeout: Custom cache timeout
     :param add_postprocessing_operations: Add post-processing operations to QueryObject
     :param add_time_offsets: Add time offsets to QueryObject(advanced analytics)
    :param form_data: chart metadata
     :return: Request payload
     """
     return QueryContextGeneratorInteg().generate(
         query_name=query_name,
-        force=force,
-        custom_cache_timeout=custom_cache_timeout,
         add_postprocessing_operations=add_postprocessing_operations,
         add_time_offsets=add_time_offsets,
         form_data=form_data,
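With force and custom_cache_timeout removed from the fixture signature, cache behaviour is controlled through the request payload itself. An illustrative call under the trimmed-down signature, using values that appear in the tests above:

# Illustrative usage only; mirrors how the integration tests now build payloads.
from tests.integration_tests.fixtures.query_context import get_query_context

query_context = get_query_context("birth_names")
# Force a cache refresh and set a custom timeout directly on the payload,
# as the new chart data API tests do via their physical_query_context fixture.
query_context["force"] = True
query_context["custom_cache_timeout"] = 5678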
