From f5e1dcc118de5921f287d3721df452c8e848d47d Mon Sep 17 00:00:00 2001 From: Dave Smith Date: Mon, 29 Apr 2019 12:53:59 -0700 Subject: [PATCH 1/9] Merge latest from master into lyft-release-sp8 (#7405) * filter out all nan series (#7313) * improve not rich tooltip (#7345) * Create issue_label_bot.yaml (#7341) * fix: do not save colors without a color scheme (#7347) * [wtforms] Strip leading/trailing whitespace (#7084) * [schema] Updating the datasources schema (#5451) * limit tables/views returned if schema is not provided (#7358) * limit tables/views returned if schema is not provided * fix typo * improve code performance * handle the case when table name or view name does not present a schema * Add type anno (#7342) * Updated local dev instructions to include missing step * First pass at type annotations * [schema] Updating the base column schema (#5452) * Update 937d04c16b64_update_datasources.py (#7361) * Feature flag for client cache (#7348) * Feature flag for client cache * Fix integration test * Revert "Fix integration test" This reverts commit 58434ab98a015d6e96db4a97f26255aa282d989d. 
* Feature flag for client cache * Fix integration tests * Add feature flag to config.py * Add another feature check * Fix more integration tests * Fix raw HTML in SliceAdder (#7338) * remove backendSync.json (#7331) * [bubbles] issue when using duplicated metrics (#7087) * SUPERSET-7: Docker compose config version breaks on Ubuntu 16.04 (#7359) * SUPERSET-8: Update text in docs copyright footer (#7360) * SUPERSET-7: Docker compose config version breaks on Ubuntu 16.04 * SUPERSET-8: Extra text in docs copyright footer * [schema] Adding commits and removing unnecessary foreign-key definitions (#7371) * Store last selected dashboard in sessionStorage (#7181) * Store last selected dashboard in sessionStorage * Fix tests * [schema] Updating the base metric schema (#5453) * Fix NoneType bug & fill the test recipients with original recipients if empty (#7365) --- .github/issue_label_bot.yaml | 5 + .travis.yml | 2 - UPDATING.md | 20 +- contrib/docker/docker-compose.yml | 2 +- docs/conf.py | 4 +- superset/assets/backendSync.json | 3796 ----------------- .../cypress/integration/dashboard/controls.js | 4 +- .../integration/dashboard/edit_mode.js | 4 +- .../cypress/integration/dashboard/filter.js | 4 +- .../cypress/integration/dashboard/load.js | 4 +- .../cypress/integration/dashboard/save.js | 4 +- .../explore/components/SaveModal_spec.jsx | 30 +- superset/assets/src/chart/Chart.jsx | 3 +- superset/assets/src/chart/chartAction.js | 5 +- .../src/dashboard/actions/sliceEntities.js | 1 + .../src/dashboard/components/Header.jsx | 2 +- .../src/dashboard/components/SaveModal.jsx | 2 +- .../src/dashboard/components/SliceAdder.jsx | 3 +- .../src/explore/components/SaveModal.jsx | 14 +- superset/assets/src/featureFlags.ts | 1 + superset/config.py | 5 +- superset/connectors/base/models.py | 4 +- superset/connectors/druid/models.py | 14 +- superset/connectors/sqla/models.py | 4 +- .../7f2635b51f5d_update_base_columns.py | 137 + .../937d04c16b64_update_datasources.py | 52 + 
.../versions/d94d33dbe938_form_strip.py | 193 + .../e9df189e5c7e_update_base_metrics.py | 169 + superset/models/core.py | 5 + superset/models/helpers.py | 7 +- superset/utils/cache.py | 2 +- superset/utils/core.py | 64 +- superset/views/base.py | 27 + superset/views/core.py | 17 +- superset/views/schedules.py | 9 +- superset/viz.py | 14 +- tests/dict_import_export_tests.py | 6 +- tests/import_export_tests.py | 4 +- tests/sqllab_tests.py | 2 +- tox.ini | 2 + 40 files changed, 756 insertions(+), 3891 deletions(-) create mode 100644 .github/issue_label_bot.yaml delete mode 100644 superset/assets/backendSync.json create mode 100644 superset/migrations/versions/7f2635b51f5d_update_base_columns.py create mode 100644 superset/migrations/versions/937d04c16b64_update_datasources.py create mode 100644 superset/migrations/versions/d94d33dbe938_form_strip.py create mode 100644 superset/migrations/versions/e9df189e5c7e_update_base_metrics.py diff --git a/.github/issue_label_bot.yaml b/.github/issue_label_bot.yaml new file mode 100644 index 0000000000000..8af21514c3d9c --- /dev/null +++ b/.github/issue_label_bot.yaml @@ -0,0 +1,5 @@ +# for Issue Label Bot https://github.com/marketplace/issue-label-bot +label-alias: + bug: '#bug' + feature_request: '#enhancement' + question: '#question' diff --git a/.travis.yml b/.travis.yml index 32af2be1f4d64..9d4c523ed67e9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,8 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -cache: - pip: true jobs: include: - language: python diff --git a/UPDATING.md b/UPDATING.md index bec2d4204bb43..f8d64ab48ec9a 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -21,11 +21,27 @@ under the License. This file documents any backwards-incompatible changes in Superset and assists people when migrating to a new version. 
+## Superset 0.34.0 + +* [5451](https://github.com/apache/incubator-superset/pull/5451): a change +which adds missing non-nullable fields to the `datasources` table. Depending on +the integrity of the data, manual intervention may be required. + +* [5452](https://github.com/apache/incubator-superset/pull/5452): a change +which adds missing non-nullable fields and uniqueness constraints to the +`columns`and `table_columns` tables. Depending on the integrity of the data, +manual intervention may be required. + +* [5453](https://github.com/apache/incubator-superset/pull/5453): a change +which adds missing non-nullable fields and uniqueness constraints to the metrics +and sql_metrics tables. Depending on the integrity of the data, manual +intervention may be required. + ## Superset 0.32.0 * `npm run backend-sync` is deprecated and no longer needed, will fail if called -* [5445](https://github.com/apache/incubator-superset/pull/5445) : a change -which prevents encoding of empty string from form data in the datanbase. +* [5445](https://github.com/apache/incubator-superset/pull/5445): a change +which prevents encoding of empty string from form data in the database. This involves a non-schema changing migration which does potentially impact a large number of records. Scheduled downtime may be advised. diff --git a/contrib/docker/docker-compose.yml b/contrib/docker/docker-compose.yml index b4d3f1836a53c..cd9b31e074951 100644 --- a/contrib/docker/docker-compose.yml +++ b/contrib/docker/docker-compose.yml @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -version: '3' +version: '2' services: redis: image: redis:3.2 diff --git a/docs/conf.py b/docs/conf.py index 186b790eb5f5b..80d6de17cbeb4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -65,7 +65,7 @@ # General information about the project. 
project = "Apache Superset" -copyright = 'Copyright © 2018 The Apache Software Foundation, Licensed under the Apache License, Version 2.0.' +copyright = 'Copyright © 2019 The Apache Software Foundation, Licensed under the Apache License, Version 2.0.' author = u'Apache Superset Dev' # The version info for the project you're documenting, acts as replacement for @@ -194,7 +194,7 @@ html_show_sphinx = False # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -html_show_copyright = True +html_show_copyright = False # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the diff --git a/superset/assets/backendSync.json b/superset/assets/backendSync.json deleted file mode 100644 index 2285f50ba6159..0000000000000 --- a/superset/assets/backendSync.json +++ /dev/null @@ -1,3796 +0,0 @@ -{ - "controls": { - "metrics": { - "type": "MetricsControl", - "multi": true, - "label": "Metrics", - "validators": [ - null - ], - "description": "One or many metrics to display" - }, - "metric": { - "type": "MetricsControl", - "multi": false, - "label": "Metric", - "validators": [ - null - ], - "description": "One or many metrics to display" - }, - "datasource": { - "type": "DatasourceControl", - "label": "Datasource", - "default": null, - "description": null - }, - "viz_type": { - "type": "VizTypeControl", - "label": "Visualization Type", - "default": "table", - "description": "The type of visualization to display" - }, - "percent_metrics": { - "type": "MetricsControl", - "multi": true, - "label": "Percentage Metrics", - "validators": [], - "default": [], - "description": "Metrics for which percentage of total are to be displayed" - }, - "y_axis_bounds": { - "type": "BoundsControl", - "label": "Y Axis Bounds", - "renderTrigger": true, - "default": [ - null, - null - ], - "description": "Bounds for the Y-axis. 
When left empty, the bounds are dynamically defined based on the min/max of the data. Note that this feature will only expand the axis range. It won't narrow the data's extent." - }, - "order_by_cols": { - "type": "SelectControl", - "multi": true, - "label": "Ordering", - "default": [], - "description": "One or many metrics to display" - }, - "color_picker": { - "label": "Fixed Color", - "description": "Use this to define a static color for all circles", - "type": "ColorPickerControl", - "default": { - "r": 0, - "g": 122, - "b": 135, - "a": 1 - }, - "renderTrigger": true - }, - "target_color_picker": { - "label": "Target Color", - "description": "Color of the target location", - "type": "ColorPickerControl", - "default": { - "r": 0, - "g": 122, - "b": 135, - "a": 1 - }, - "renderTrigger": true - }, - "legend_position": { - "label": "Legend Position", - "description": "Choose the position of the legend", - "type": "SelectControl", - "clearable": false, - "default": "tr", - "choices": [ - [ - null, - "None" - ], - [ - "tl", - "Top left" - ], - [ - "tr", - "Top right" - ], - [ - "bl", - "Bottom left" - ], - [ - "br", - "Bottom right" - ] - ], - "renderTrigger": true - }, - "fill_color_picker": { - "label": "Fill Color", - "description": " Set the opacity to 0 if you do not want to override the color specified in the GeoJSON", - "type": "ColorPickerControl", - "default": { - "r": 0, - "g": 122, - "b": 135, - "a": 1 - }, - "renderTrigger": true - }, - "stroke_color_picker": { - "label": "Stroke Color", - "description": " Set the opacity to 0 if you do not want to override the color specified in the GeoJSON", - "type": "ColorPickerControl", - "default": { - "r": 0, - "g": 122, - "b": 135, - "a": 1 - }, - "renderTrigger": true - }, - "metric_2": { - "type": "MetricsControl", - "multi": false, - "label": "Right Axis Metric", - "validators": [ - null - ], - "description": "Choose a metric for right axis", - "clearable": true - }, - "stacked_style": { - "type": 
"SelectControl", - "label": "Stacked Style", - "renderTrigger": true, - "choices": [ - [ - "stack", - "stack" - ], - [ - "stream", - "stream" - ], - [ - "expand", - "expand" - ] - ], - "default": "stack", - "description": "" - }, - "sort_x_axis": { - "type": "SelectControl", - "label": "Sort X Axis", - "choices": [ - [ - "alpha_asc", - "Axis ascending" - ], - [ - "alpha_desc", - "Axis descending" - ], - [ - "value_asc", - "sum(value) ascending" - ], - [ - "value_desc", - "sum(value) descending" - ] - ], - "clearable": false, - "default": "alpha_asc" - }, - "sort_y_axis": { - "type": "SelectControl", - "label": "Sort Y Axis", - "choices": [ - [ - "alpha_asc", - "Axis ascending" - ], - [ - "alpha_desc", - "Axis descending" - ], - [ - "value_asc", - "sum(value) ascending" - ], - [ - "value_desc", - "sum(value) descending" - ] - ], - "clearable": false, - "default": "alpha_asc" - }, - "linear_color_scheme": { - "type": "ColorSchemeControl", - "label": "Linear Color Scheme", - "default": "blue_white_yellow", - "clearable": false, - "description": "", - "renderTrigger": true, - "isLinear": true - }, - "normalize_across": { - "type": "SelectControl", - "label": "Normalize Across", - "choices": [ - [ - "heatmap", - "heatmap" - ], - [ - "x", - "x" - ], - [ - "y", - "y" - ] - ], - "default": "heatmap", - "description": "Color will be rendered based on a ratio of the cell against the sum of across this criteria" - }, - "horizon_color_scale": { - "type": "SelectControl", - "renderTrigger": true, - "label": "Value Domain", - "choices": [ - [ - "series", - "series" - ], - [ - "overall", - "overall" - ], - [ - "change", - "change" - ] - ], - "default": "series", - "description": "series: Treat each series independently; overall: All series use the same scale; change: Show changes compared to the first data point in each series" - }, - "canvas_image_rendering": { - "type": "SelectControl", - "label": "Rendering", - "renderTrigger": true, - "choices": [ - [ - "pixelated", - 
"pixelated (Sharp)" - ], - [ - "auto", - "auto (Smooth)" - ] - ], - "default": "pixelated", - "description": "image-rendering CSS attribute of the canvas object that defines how the browser scales up the image" - }, - "xscale_interval": { - "type": "SelectControl", - "label": "XScale Interval", - "renderTrigger": true, - "choices": [ - [ - 1, - "1" - ], - [ - 2, - "2" - ], - [ - 3, - "3" - ], - [ - 4, - "4" - ], - [ - 5, - "5" - ], - [ - 6, - "6" - ], - [ - 7, - "7" - ], - [ - 8, - "8" - ], - [ - 9, - "9" - ], - [ - 10, - "10" - ], - [ - 11, - "11" - ], - [ - 12, - "12" - ], - [ - 13, - "13" - ], - [ - 14, - "14" - ], - [ - 15, - "15" - ], - [ - 16, - "16" - ], - [ - 17, - "17" - ], - [ - 18, - "18" - ], - [ - 19, - "19" - ], - [ - 20, - "20" - ], - [ - 21, - "21" - ], - [ - 22, - "22" - ], - [ - 23, - "23" - ], - [ - 24, - "24" - ], - [ - 25, - "25" - ], - [ - 26, - "26" - ], - [ - 27, - "27" - ], - [ - 28, - "28" - ], - [ - 29, - "29" - ], - [ - 30, - "30" - ], - [ - 31, - "31" - ], - [ - 32, - "32" - ], - [ - 33, - "33" - ], - [ - 34, - "34" - ], - [ - 35, - "35" - ], - [ - 36, - "36" - ], - [ - 37, - "37" - ], - [ - 38, - "38" - ], - [ - 39, - "39" - ], - [ - 40, - "40" - ], - [ - 41, - "41" - ], - [ - 42, - "42" - ], - [ - 43, - "43" - ], - [ - 44, - "44" - ], - [ - 45, - "45" - ], - [ - 46, - "46" - ], - [ - 47, - "47" - ], - [ - 48, - "48" - ], - [ - 49, - "49" - ], - [ - 50, - "50" - ] - ], - "default": "1", - "clearable": false, - "description": "Number of steps to take between ticks when displaying the X scale" - }, - "yscale_interval": { - "type": "SelectControl", - "label": "YScale Interval", - "choices": [ - [ - 1, - "1" - ], - [ - 2, - "2" - ], - [ - 3, - "3" - ], - [ - 4, - "4" - ], - [ - 5, - "5" - ], - [ - 6, - "6" - ], - [ - 7, - "7" - ], - [ - 8, - "8" - ], - [ - 9, - "9" - ], - [ - 10, - "10" - ], - [ - 11, - "11" - ], - [ - 12, - "12" - ], - [ - 13, - "13" - ], - [ - 14, - "14" - ], - [ - 15, - "15" - ], - [ - 16, - "16" - ], - [ - 17, - "17" - 
], - [ - 18, - "18" - ], - [ - 19, - "19" - ], - [ - 20, - "20" - ], - [ - 21, - "21" - ], - [ - 22, - "22" - ], - [ - 23, - "23" - ], - [ - 24, - "24" - ], - [ - 25, - "25" - ], - [ - 26, - "26" - ], - [ - 27, - "27" - ], - [ - 28, - "28" - ], - [ - 29, - "29" - ], - [ - 30, - "30" - ], - [ - 31, - "31" - ], - [ - 32, - "32" - ], - [ - 33, - "33" - ], - [ - 34, - "34" - ], - [ - 35, - "35" - ], - [ - 36, - "36" - ], - [ - 37, - "37" - ], - [ - 38, - "38" - ], - [ - 39, - "39" - ], - [ - 40, - "40" - ], - [ - 41, - "41" - ], - [ - 42, - "42" - ], - [ - 43, - "43" - ], - [ - 44, - "44" - ], - [ - 45, - "45" - ], - [ - 46, - "46" - ], - [ - 47, - "47" - ], - [ - 48, - "48" - ], - [ - 49, - "49" - ], - [ - 50, - "50" - ] - ], - "default": "1", - "clearable": false, - "renderTrigger": true, - "description": "Number of steps to take between ticks when displaying the Y scale" - }, - "include_time": { - "type": "CheckboxControl", - "label": "Include Time", - "description": "Whether to include the time granularity as defined in the time section", - "default": false - }, - "autozoom": { - "type": "CheckboxControl", - "label": "Auto Zoom", - "default": true, - "renderTrigger": true, - "description": "When checked, the map will zoom to your data after each query" - }, - "show_perc": { - "type": "CheckboxControl", - "label": "Show percentage", - "renderTrigger": true, - "description": "Whether to include the percentage in the tooltip", - "default": true - }, - "bar_stacked": { - "type": "CheckboxControl", - "label": "Stacked Bars", - "renderTrigger": true, - "default": false, - "description": null - }, - "pivot_margins": { - "type": "CheckboxControl", - "label": "Show totals", - "renderTrigger": false, - "default": true, - "description": "Display total row/column" - }, - "show_markers": { - "type": "CheckboxControl", - "label": "Show Markers", - "renderTrigger": true, - "default": false, - "description": "Show data points as circle markers on the lines" - }, - 
"show_bar_value": { - "type": "CheckboxControl", - "label": "Bar Values", - "default": false, - "renderTrigger": true, - "description": "Show the value on top of the bar" - }, - "order_bars": { - "type": "CheckboxControl", - "label": "Sort Bars", - "default": false, - "renderTrigger": true, - "description": "Sort bars by x labels." - }, - "combine_metric": { - "type": "CheckboxControl", - "label": "Combine Metrics", - "default": false, - "description": "Display metrics side by side within each column, as opposed to each column being displayed side by side for each metric." - }, - "show_controls": { - "type": "CheckboxControl", - "label": "Extra Controls", - "renderTrigger": true, - "default": false, - "description": "Whether to show extra controls or not. Extra controls include things like making mulitBar charts stacked or side by side." - }, - "reduce_x_ticks": { - "type": "CheckboxControl", - "label": "Reduce X ticks", - "renderTrigger": true, - "default": false, - "description": "Reduces the number of X-axis ticks to be rendered. If true, the x-axis will not overflow and labels may be missing. If false, a minimum width will be applied to columns and the width may overflow into an horizontal scroll." 
- }, - "include_series": { - "type": "CheckboxControl", - "label": "Include Series", - "renderTrigger": true, - "default": false, - "description": "Include series name as an axis" - }, - "secondary_metric": { - "type": "MetricsControl", - "multi": false, - "label": "Color Metric", - "validators": [], - "default": null, - "description": "A metric to use for color" - }, - "select_country": { - "type": "SelectControl", - "label": "Country Name", - "default": "France", - "choices": [ - [ - "Belgium", - "Belgium" - ], - [ - "Brazil", - "Brazil" - ], - [ - "Bulgaria", - "Bulgaria" - ], - [ - "China", - "China" - ], - [ - "Egypt", - "Egypt" - ], - [ - "France", - "France" - ], - [ - "Germany", - "Germany" - ], - [ - "Italy", - "Italy" - ], - [ - "Japan", - "Japan" - ], - [ - "Morocco", - "Morocco" - ], - [ - "Myanmar", - "Myanmar" - ], - [ - "Netherlands", - "Netherlands" - ], - [ - "Portugal", - "Portugal" - ], - [ - "Russia", - "Russia" - ], - [ - "Singapore", - "Singapore" - ], - [ - "Spain", - "Spain" - ], - [ - "Thailand", - "Thailand" - ], - [ - "Timorleste", - "Timorleste" - ], - [ - "Uk", - "Uk" - ], - [ - "Ukraine", - "Ukraine" - ], - [ - "Usa", - "Usa" - ], - [ - "Zambia", - "Zambia" - ] - ], - "description": "The name of the country that Superset should display" - }, - "country_fieldtype": { - "type": "SelectControl", - "label": "Country Field Type", - "default": "cca2", - "choices": [ - [ - "name", - "Full name" - ], - [ - "cioc", - "code International Olympic Committee (cioc)" - ], - [ - "cca2", - "code ISO 3166-1 alpha-2 (cca2)" - ], - [ - "cca3", - "code ISO 3166-1 alpha-3 (cca3)" - ] - ], - "description": "The country code standard that Superset should expect to find in the [country] column" - }, - "freq": { - "type": "SelectControl", - "label": "Frequency", - "default": "W-MON", - "freeForm": true, - "clearable": false, - "choices": [ - [ - "AS", - "Year (freq=AS)" - ], - [ - "52W-MON", - "52 weeks starting Monday (freq=52W-MON)" - ], - [ - "W-SUN", - "1 
week starting Sunday (freq=W-SUN)" - ], - [ - "W-MON", - "1 week starting Monday (freq=W-MON)" - ], - [ - "D", - "Day (freq=D)" - ], - [ - "4W-MON", - "4 weeks (freq=4W-MON)" - ] - ], - "description": "The periodicity over which to pivot time. Users can provide\n \"Pandas\" offset alias.\n Click on the info bubble for more details on accepted \"freq\" expressions." - }, - "groupby": { - "type": "SelectControl", - "multi": true, - "label": "Group by", - "default": [], - "includeTime": false, - "description": "One or many controls to group by", - "valueKey": "column_name" - }, - "dimension": { - "type": "SelectControl", - "multi": false, - "label": "Dimension", - "default": null, - "includeTime": false, - "description": "Select a dimension", - "valueKey": "column_name" - }, - "columns": { - "type": "SelectControl", - "multi": true, - "label": "Columns", - "default": [], - "includeTime": false, - "description": "One or many controls to pivot as columns", - "valueKey": "column_name" - }, - "all_columns": { - "type": "SelectControl", - "multi": true, - "label": "Columns", - "default": [], - "description": "Columns to display", - "valueKey": "column_name" - }, - "spatial": { - "type": "SpatialControl", - "label": "Longitude & Latitude", - "validators": [ - null - ], - "description": "Point to your spatial columns" - }, - "start_spatial": { - "type": "SpatialControl", - "label": "Start Longitude & Latitude", - "validators": [ - null - ], - "description": "Point to your spatial columns" - }, - "end_spatial": { - "type": "SpatialControl", - "label": "End Longitude & Latitude", - "validators": [ - null - ], - "description": "Point to your spatial columns" - }, - "longitude": { - "type": "SelectControl", - "label": "Longitude", - "default": 1, - "validators": [ - null - ], - "description": "Select the longitude column" - }, - "latitude": { - "type": "SelectControl", - "label": "Latitude", - "default": 1, - "validators": [ - null - ], - "description": "Select the latitude 
column" - }, - "filter_nulls": { - "type": "CheckboxControl", - "label": "Ignore null locations", - "default": true, - "description": "Whether to ignore locations that are null" - }, - "geojson": { - "type": "SelectControl", - "label": "GeoJson Column", - "validators": [ - null - ], - "description": "Select the geojson column" - }, - "polygon": { - "type": "SelectControl", - "label": "Polygon Column", - "validators": [ - null - ], - "description": "Select the polygon column. Each row should contain JSON.array(N) of [longitude, latitude] points" - }, - "point_radius_scale": { - "type": "SelectControl", - "freeForm": true, - "label": "Point Radius Scale", - "validators": [ - null - ], - "default": null, - "choices": [ - [ - 0, - "0" - ], - [ - 100, - "100" - ], - [ - 200, - "200" - ], - [ - 300, - "300" - ], - [ - 500, - "500" - ] - ] - }, - "stroke_width": { - "type": "SelectControl", - "freeForm": true, - "label": "Stroke Width", - "validators": [ - null - ], - "default": null, - "renderTrigger": true, - "choices": [ - [ - 1, - "1" - ], - [ - 2, - "2" - ], - [ - 3, - "3" - ], - [ - 4, - "4" - ], - [ - 5, - "5" - ] - ] - }, - "all_columns_x": { - "type": "SelectControl", - "label": "X", - "default": null, - "description": "Columns to display" - }, - "all_columns_y": { - "type": "SelectControl", - "label": "Y", - "default": null, - "description": "Columns to display" - }, - "druid_time_origin": { - "type": "SelectControl", - "freeForm": true, - "label": "Origin", - "choices": [ - [ - "", - "default" - ], - [ - "now", - "now" - ] - ], - "default": null, - "description": "Defines the origin where time buckets start, accepts natural dates as in `now`, `sunday` or `1970-01-01`" - }, - "bottom_margin": { - "type": "SelectControl", - "clearable": false, - "freeForm": true, - "label": "Bottom Margin", - "choices": [ - [ - "auto", - "auto" - ], - [ - 50, - "50" - ], - [ - 75, - "75" - ], - [ - 100, - "100" - ], - [ - 125, - "125" - ], - [ - 150, - "150" - ], - [ - 200, - 
"200" - ] - ], - "default": "auto", - "renderTrigger": true, - "description": "Bottom margin, in pixels, allowing for more room for axis labels" - }, - "x_ticks_layout": { - "type": "SelectControl", - "label": "X Tick Layout", - "choices": [ - [ - "auto", - "auto" - ], - [ - "flat", - "flat" - ], - [ - "45°", - "45°" - ], - [ - "staggered", - "staggered" - ] - ], - "default": "auto", - "clearable": false, - "renderTrigger": true, - "description": "The way the ticks are laid out on the X-axis" - }, - "left_margin": { - "type": "SelectControl", - "freeForm": true, - "clearable": false, - "label": "Left Margin", - "choices": [ - [ - "auto", - "auto" - ], - [ - 50, - "50" - ], - [ - 75, - "75" - ], - [ - 100, - "100" - ], - [ - 125, - "125" - ], - [ - 150, - "150" - ], - [ - 200, - "200" - ] - ], - "default": "auto", - "renderTrigger": true, - "description": "Left margin, in pixels, allowing for more room for axis labels" - }, - "granularity": { - "type": "SelectControl", - "freeForm": true, - "label": "Time Granularity", - "default": "one day", - "choices": [ - [ - null, - "all" - ], - [ - "PT5S", - "5 seconds" - ], - [ - "PT30S", - "30 seconds" - ], - [ - "PT1M", - "1 minute" - ], - [ - "PT5M", - "5 minutes" - ], - [ - "PT30M", - "30 minutes" - ], - [ - "PT1H", - "1 hour" - ], - [ - "PT6H", - "6 hour" - ], - [ - "P1D", - "1 day" - ], - [ - "P7D", - "7 days" - ], - [ - "P1W", - "week" - ], - [ - "week_starting_sunday", - "week starting Sunday" - ], - [ - "week_ending_saturday", - "week ending Saturday" - ], - [ - "P1M", - "month" - ], - [ - "P3M", - "quarter" - ], - [ - "P1Y", - "year" - ] - ], - "description": "The time granularity for the visualization. 
Note that you can type and use simple natural language as in `10 seconds`, `1 day` or `56 weeks`" - }, - "domain_granularity": { - "type": "SelectControl", - "label": "Domain", - "default": "month", - "choices": [ - [ - "hour", - "hour" - ], - [ - "day", - "day" - ], - [ - "week", - "week" - ], - [ - "month", - "month" - ], - [ - "year", - "year" - ] - ], - "description": "The time unit used for the grouping of blocks" - }, - "subdomain_granularity": { - "type": "SelectControl", - "label": "Subdomain", - "default": "day", - "choices": [ - [ - "min", - "min" - ], - [ - "hour", - "hour" - ], - [ - "day", - "day" - ], - [ - "week", - "week" - ], - [ - "month", - "month" - ] - ], - "description": "The time unit for each block. Should be a smaller unit than domain_granularity. Should be larger or equal to Time Grain" - }, - "link_length": { - "type": "SelectControl", - "renderTrigger": true, - "freeForm": true, - "label": "Link Length", - "default": "200", - "choices": [ - [ - "10", - "10" - ], - [ - "25", - "25" - ], - [ - "50", - "50" - ], - [ - "75", - "75" - ], - [ - "100", - "100" - ], - [ - "150", - "150" - ], - [ - "200", - "200" - ], - [ - "250", - "250" - ] - ], - "description": "Link length in the force layout" - }, - "charge": { - "type": "SelectControl", - "renderTrigger": true, - "freeForm": true, - "label": "Charge", - "default": "-500", - "choices": [ - [ - "-50", - "-50" - ], - [ - "-75", - "-75" - ], - [ - "-100", - "-100" - ], - [ - "-150", - "-150" - ], - [ - "-200", - "-200" - ], - [ - "-250", - "-250" - ], - [ - "-500", - "-500" - ], - [ - "-1000", - "-1000" - ], - [ - "-2500", - "-2500" - ], - [ - "-5000", - "-5000" - ] - ], - "description": "Charge in the force layout" - }, - "granularity_sqla": { - "type": "SelectControl", - "label": "Time Column", - "description": "The time column for the visualization. Note that you can define arbitrary expression that return a DATETIME column in the table. 
Also note that the filter below is applied against this column or expression", - "clearable": false, - "valueKey": "column_name" - }, - "time_grain_sqla": { - "type": "SelectControl", - "label": "Time Grain", - "default": "P1D", - "description": "The time granularity for the visualization. This applies a date transformation to alter your time column and defines a new time granularity. The options here are defined on a per database engine basis in the Superset source code." - }, - "resample_rule": { - "type": "SelectControl", - "freeForm": true, - "label": "Rule", - "default": null, - "choices": [ - [ - "", - "" - ], - [ - "1T", - "1T" - ], - [ - "1H", - "1H" - ], - [ - "1D", - "1D" - ], - [ - "7D", - "7D" - ], - [ - "1M", - "1M" - ], - [ - "1AS", - "1AS" - ] - ], - "description": "Pandas resample rule" - }, - "resample_how": { - "type": "SelectControl", - "freeForm": true, - "label": "How", - "default": null, - "choices": [ - [ - "", - "" - ], - [ - "mean", - "mean" - ], - [ - "sum", - "sum" - ], - [ - "median", - "median" - ] - ], - "description": "Pandas resample how" - }, - "resample_fillmethod": { - "type": "SelectControl", - "freeForm": true, - "label": "Fill Method", - "default": null, - "choices": [ - [ - "", - "" - ], - [ - "ffill", - "ffill" - ], - [ - "bfill", - "bfill" - ] - ], - "description": "Pandas resample fill method" - }, - "time_range": { - "type": "DateFilterControl", - "freeForm": true, - "label": "Time range", - "default": "Last week" - }, - "max_bubble_size": { - "type": "SelectControl", - "freeForm": true, - "label": "Max Bubble Size", - "default": "25", - "choices": [ - [ - "5", - "5" - ], - [ - "10", - "10" - ], - [ - "15", - "15" - ], - [ - "25", - "25" - ], - [ - "50", - "50" - ], - [ - "75", - "75" - ], - [ - "100", - "100" - ] - ] - }, - "whisker_options": { - "type": "SelectControl", - "freeForm": true, - "label": "Whisker/outlier options", - "default": "Tukey", - "description": "Determines how whiskers and outliers are calculated.", 
- "choices": [ - [ - "Tukey", - "Tukey" - ], - [ - "Min/max (no outliers)", - "Min/max (no outliers)" - ], - [ - "2/98 percentiles", - "2/98 percentiles" - ], - [ - "9/91 percentiles", - "9/91 percentiles" - ] - ] - }, - "treemap_ratio": { - "type": "TextControl", - "label": "Ratio", - "renderTrigger": true, - "isFloat": true, - "default": 1.618033988749895, - "description": "Target aspect ratio for treemap tiles." - }, - "number_format": { - "type": "SelectControl", - "freeForm": true, - "label": "Number format", - "renderTrigger": true, - "default": "SMART_NUMBER", - "choices": [ - [ - ".1s", - ".1s (12345.432 => 10k)" - ], - [ - ".3s", - ".3s (12345.432 => 12.3k)" - ], - [ - ",.1%", - ",.1% (12345.432 => 1,234,543.2%)" - ], - [ - ".3%", - ".3% (12345.432 => 1234543.200%)" - ], - [ - ".4r", - ".4r (12345.432 => 12350)" - ], - [ - ",.3f", - ",.3f (12345.432 => 12,345.432)" - ], - [ - "+,", - "+, (12345.432 => +12,345.432)" - ], - [ - "$,.2f", - "$,.2f (12345.432 => $12,345.43)" - ] - ], - "description": "D3 format syntax: https://github.com/d3/d3-format" - }, - "row_limit": { - "type": "SelectControl", - "freeForm": true, - "label": "Row limit", - "validators": [ - null - ], - "default": 10000, - "choices": [ - [ - 10, - "10" - ], - [ - 50, - "50" - ], - [ - 100, - "100" - ], - [ - 250, - "250" - ], - [ - 500, - "500" - ], - [ - 1000, - "1000" - ], - [ - 5000, - "5000" - ], - [ - 10000, - "10000" - ], - [ - 50000, - "50000" - ] - ] - }, - "limit": { - "type": "SelectControl", - "freeForm": true, - "label": "Series limit", - "validators": [ - null - ], - "choices": [ - [ - 0, - "0" - ], - [ - 5, - "5" - ], - [ - 10, - "10" - ], - [ - 25, - "25" - ], - [ - 50, - "50" - ], - [ - 100, - "100" - ], - [ - 500, - "500" - ] - ], - "description": "Limits the number of time series that get displayed. A sub query (or an extra phase where sub queries are not supported) is applied to limit the number of time series that get fetched and displayed. 
This feature is useful when grouping by high cardinality dimension(s)." - }, - "timeseries_limit_metric": { - "type": "MetricsControl", - "label": "Sort By", - "default": null, - "description": "Metric used to define the top series" - }, - "order_desc": { - "type": "CheckboxControl", - "label": "Sort Descending", - "default": true, - "description": "Whether to sort descending or ascending" - }, - "rolling_type": { - "type": "SelectControl", - "label": "Rolling", - "default": "None", - "choices": [ - [ - "None", - "None" - ], - [ - "mean", - "mean" - ], - [ - "sum", - "sum" - ], - [ - "std", - "std" - ], - [ - "cumsum", - "cumsum" - ] - ], - "description": "Defines a rolling window function to apply, works along with the [Periods] text box" - }, - "multiplier": { - "type": "TextControl", - "label": "Multiplier", - "isFloat": true, - "renderTrigger": true, - "default": 1, - "description": "Factor to multiply the metric by" - }, - "rolling_periods": { - "type": "TextControl", - "label": "Periods", - "isInt": true, - "description": "Defines the size of the rolling window function, relative to the time granularity selected" - }, - "cell_size": { - "type": "TextControl", - "isInt": true, - "default": 10, - "validators": [ - null - ], - "renderTrigger": true, - "label": "Cell Size", - "description": "The size of the square cell, in pixels" - }, - "cell_padding": { - "type": "TextControl", - "isInt": true, - "validators": [ - null - ], - "renderTrigger": true, - "default": 2, - "label": "Cell Padding", - "description": "The distance between cells, in pixels" - }, - "cell_radius": { - "type": "TextControl", - "isInt": true, - "validators": [ - null - ], - "renderTrigger": true, - "default": 0, - "label": "Cell Radius", - "description": "The pixel radius" - }, - "steps": { - "type": "TextControl", - "isInt": true, - "validators": [ - null - ], - "renderTrigger": true, - "default": 10, - "label": "Color Steps", - "description": "The number color \"steps\"" - }, - "grid_size": 
{ - "type": "TextControl", - "label": "Grid Size", - "renderTrigger": true, - "default": 20, - "isInt": true, - "description": "Defines the grid size in pixels" - }, - "min_periods": { - "type": "TextControl", - "label": "Min Periods", - "isInt": true, - "description": "The minimum number of rolling periods required to show a value. For instance if you do a cumulative sum on 7 days you may want your \"Min Period\" to be 7, so that all data points shown are the total of 7 periods. This will hide the \"ramp up\" taking place over the first 7 periods" - }, - "series": { - "type": "SelectControl", - "multi": false, - "label": "Series", - "default": null, - "includeTime": false, - "description": "Defines the grouping of entities. Each series is shown as a specific color on the chart and has a legend toggle", - "valueKey": "column_name" - }, - "entity": { - "type": "SelectControl", - "multi": false, - "label": "Entity", - "default": null, - "includeTime": false, - "description": "This defines the element to be plotted on the chart", - "valueKey": "column_name", - "validators": [ - null - ] - }, - "x": { - "type": "MetricsControl", - "multi": false, - "label": "X Axis", - "validators": [ - null - ], - "default": null, - "description": "Metric assigned to the [X] axis" - }, - "y": { - "type": "MetricsControl", - "multi": false, - "label": "Y Axis", - "validators": [ - null - ], - "default": null, - "description": "Metric assigned to the [Y] axis" - }, - "size": { - "type": "MetricsControl", - "multi": false, - "label": "Bubble Size", - "validators": [ - null - ], - "default": null, - "description": "One or many metrics to display" - }, - "url": { - "type": "TextControl", - "label": "URL", - "description": "The URL, this control is templated, so you can integrate {{ width }} and/or {{ height }} in your URL string.", - "default": "https://www.youtube.com/embed/AdSZJzb-aX8" - }, - "x_axis_label": { - "type": "TextControl", - "label": "X Axis Label", - "renderTrigger": true, - 
"default": "" - }, - "y_axis_label": { - "type": "TextControl", - "label": "Y Axis Label", - "renderTrigger": true, - "default": "" - }, - "compare_lag": { - "type": "TextControl", - "label": "Comparison Period Lag", - "isInt": true, - "description": "Based on granularity, number of time periods to compare against" - }, - "compare_suffix": { - "type": "TextControl", - "label": "Comparison suffix", - "description": "Suffix to apply after the percentage display" - }, - "table_timestamp_format": { - "type": "SelectControl", - "freeForm": true, - "label": "Table Timestamp Format", - "default": "%Y-%m-%d %H:%M:%S", - "renderTrigger": true, - "validators": [ - null - ], - "clearable": false, - "choices": [ - [ - "smart_date", - "Adaptative formating" - ], - [ - "%d/%m/%Y", - "%d/%m/%Y | 14/01/2019" - ], - [ - "%m/%d/%Y", - "%m/%d/%Y | 01/14/2019" - ], - [ - "%Y-%m-%d", - "%Y-%m-%d | 2019-01-14" - ], - [ - "%Y-%m-%d %H:%M:%S", - "%Y-%m-%d %H:%M:%S | 2019-01-14 01:32:10" - ], - [ - "%d-%m-%Y %H:%M:%S", - "%Y-%m-%d %H:%M:%S | 14-01-2019 01:32:10" - ], - [ - "%H:%M:%S", - "%H:%M:%S | 01:32:10" - ] - ], - "description": "Timestamp Format" - }, - "series_height": { - "type": "SelectControl", - "renderTrigger": true, - "freeForm": true, - "label": "Series Height", - "default": "25", - "choices": [ - [ - "10", - "10" - ], - [ - "25", - "25" - ], - [ - "40", - "40" - ], - [ - "50", - "50" - ], - [ - "75", - "75" - ], - [ - "100", - "100" - ], - [ - "150", - "150" - ], - [ - "200", - "200" - ] - ], - "description": "Pixel height of each series" - }, - "page_length": { - "type": "SelectControl", - "freeForm": true, - "renderTrigger": true, - "label": "Page Length", - "default": 0, - "choices": [ - [ - 0, - "0" - ], - [ - 10, - "10" - ], - [ - 25, - "25" - ], - [ - 40, - "40" - ], - [ - 50, - "50" - ], - [ - 75, - "75" - ], - [ - 100, - "100" - ], - [ - 150, - "150" - ], - [ - 200, - "200" - ] - ], - "description": "Rows per page, 0 means no pagination" - }, - "x_axis_format": { - 
"type": "SelectControl", - "freeForm": true, - "label": "X Axis Format", - "renderTrigger": true, - "default": "SMART_NUMBER", - "choices": [ - [ - ".1s", - ".1s (12345.432 => 10k)" - ], - [ - ".3s", - ".3s (12345.432 => 12.3k)" - ], - [ - ",.1%", - ",.1% (12345.432 => 1,234,543.2%)" - ], - [ - ".3%", - ".3% (12345.432 => 1234543.200%)" - ], - [ - ".4r", - ".4r (12345.432 => 12350)" - ], - [ - ",.3f", - ",.3f (12345.432 => 12,345.432)" - ], - [ - "+,", - "+, (12345.432 => +12,345.432)" - ], - [ - "$,.2f", - "$,.2f (12345.432 => $12,345.43)" - ] - ], - "description": "D3 format syntax: https://github.com/d3/d3-format" - }, - "x_axis_time_format": { - "type": "SelectControl", - "freeForm": true, - "label": "X Axis Format", - "renderTrigger": true, - "default": "smart_date", - "choices": [ - [ - "smart_date", - "Adaptative formating" - ], - [ - "%d/%m/%Y", - "%d/%m/%Y | 14/01/2019" - ], - [ - "%m/%d/%Y", - "%m/%d/%Y | 01/14/2019" - ], - [ - "%Y-%m-%d", - "%Y-%m-%d | 2019-01-14" - ], - [ - "%Y-%m-%d %H:%M:%S", - "%Y-%m-%d %H:%M:%S | 2019-01-14 01:32:10" - ], - [ - "%d-%m-%Y %H:%M:%S", - "%Y-%m-%d %H:%M:%S | 14-01-2019 01:32:10" - ], - [ - "%H:%M:%S", - "%H:%M:%S | 01:32:10" - ] - ], - "description": "D3 format syntax: https://github.com/d3/d3-format" - }, - "y_axis_format": { - "type": "SelectControl", - "freeForm": true, - "label": "Y Axis Format", - "renderTrigger": true, - "default": "SMART_NUMBER", - "choices": [ - [ - ".1s", - ".1s (12345.432 => 10k)" - ], - [ - ".3s", - ".3s (12345.432 => 12.3k)" - ], - [ - ",.1%", - ",.1% (12345.432 => 1,234,543.2%)" - ], - [ - ".3%", - ".3% (12345.432 => 1234543.200%)" - ], - [ - ".4r", - ".4r (12345.432 => 12350)" - ], - [ - ",.3f", - ",.3f (12345.432 => 12,345.432)" - ], - [ - "+,", - "+, (12345.432 => +12,345.432)" - ], - [ - "$,.2f", - "$,.2f (12345.432 => $12,345.43)" - ] - ], - "description": "D3 format syntax: https://github.com/d3/d3-format" - }, - "y_axis_2_format": { - "type": "SelectControl", - "freeForm": true, - 
"label": "Right Axis Format", - "default": "SMART_NUMBER", - "choices": [ - [ - ".1s", - ".1s (12345.432 => 10k)" - ], - [ - ".3s", - ".3s (12345.432 => 12.3k)" - ], - [ - ",.1%", - ",.1% (12345.432 => 1,234,543.2%)" - ], - [ - ".3%", - ".3% (12345.432 => 1234543.200%)" - ], - [ - ".4r", - ".4r (12345.432 => 12350)" - ], - [ - ",.3f", - ",.3f (12345.432 => 12,345.432)" - ], - [ - "+,", - "+, (12345.432 => +12,345.432)" - ], - [ - "$,.2f", - "$,.2f (12345.432 => $12,345.43)" - ] - ], - "description": "D3 format syntax: https://github.com/d3/d3-format" - }, - "date_time_format": { - "type": "SelectControl", - "freeForm": true, - "label": "Date Time Format", - "renderTrigger": true, - "default": "smart_date", - "choices": [ - [ - "smart_date", - "Adaptative formating" - ], - [ - "%d/%m/%Y", - "%d/%m/%Y | 14/01/2019" - ], - [ - "%m/%d/%Y", - "%m/%d/%Y | 01/14/2019" - ], - [ - "%Y-%m-%d", - "%Y-%m-%d | 2019-01-14" - ], - [ - "%Y-%m-%d %H:%M:%S", - "%Y-%m-%d %H:%M:%S | 2019-01-14 01:32:10" - ], - [ - "%d-%m-%Y %H:%M:%S", - "%Y-%m-%d %H:%M:%S | 14-01-2019 01:32:10" - ], - [ - "%H:%M:%S", - "%H:%M:%S | 01:32:10" - ] - ], - "description": "D3 format syntax: https://github.com/d3/d3-format" - }, - "markup_type": { - "type": "SelectControl", - "label": "Markup Type", - "clearable": false, - "choices": [ - [ - "markdown", - "markdown" - ], - [ - "html", - "html" - ] - ], - "default": "markdown", - "validators": [ - null - ], - "description": "Pick your favorite markup language" - }, - "rotation": { - "type": "SelectControl", - "label": "Rotation", - "choices": [ - [ - "random", - "random" - ], - [ - "flat", - "flat" - ], - [ - "square", - "square" - ] - ], - "renderTrigger": true, - "default": "flat", - "description": "Rotation to apply to words in the cloud" - }, - "line_interpolation": { - "type": "SelectControl", - "label": "Line Style", - "renderTrigger": true, - "choices": [ - [ - "linear", - "linear" - ], - [ - "basis", - "basis" - ], - [ - "cardinal", - "cardinal" - ], 
- [ - "monotone", - "monotone" - ], - [ - "step-before", - "step-before" - ], - [ - "step-after", - "step-after" - ] - ], - "default": "linear", - "description": "Line interpolation as defined by d3.js" - }, - "pie_label_type": { - "type": "SelectControl", - "label": "Label Type", - "default": "key", - "renderTrigger": true, - "choices": [ - [ - "key", - "Category Name" - ], - [ - "value", - "Value" - ], - [ - "percent", - "Percentage" - ], - [ - "key_value", - "Category and Value" - ], - [ - "key_percent", - "Category and Percentage" - ] - ], - "description": "What should be shown on the label?" - }, - "code": { - "type": "TextAreaControl", - "label": "Code", - "description": "Put your code here", - "default": "" - }, - "pandas_aggfunc": { - "type": "SelectControl", - "label": "Aggregation function", - "clearable": false, - "choices": [ - [ - "sum", - "sum" - ], - [ - "mean", - "mean" - ], - [ - "min", - "min" - ], - [ - "max", - "max" - ], - [ - "stdev", - "stdev" - ], - [ - "var", - "var" - ] - ], - "default": "sum", - "description": "Aggregate function to apply when pivoting and computing the total rows and columns" - }, - "js_agg_function": { - "type": "SelectControl", - "label": "Dynamic Aggregation Function", - "description": "The function to use when aggregating points into groups", - "default": "sum", - "clearable": false, - "renderTrigger": true, - "choices": [ - [ - "sum", - "sum" - ], - [ - "min", - "min" - ], - [ - "max", - "max" - ], - [ - "mean", - "mean" - ], - [ - "median", - "median" - ], - [ - "count", - "count" - ], - [ - "variance", - "variance" - ], - [ - "deviation", - "deviation" - ], - [ - "p1", - "p1" - ], - [ - "p5", - "p5" - ], - [ - "p95", - "p95" - ], - [ - "p99", - "p99" - ] - ] - }, - "size_from": { - "type": "TextControl", - "isInt": true, - "label": "Font Size From", - "renderTrigger": true, - "default": "20", - "description": "Font size for the smallest value in the list" - }, - "size_to": { - "type": "TextControl", - "isInt": 
true, - "label": "Font Size To", - "renderTrigger": true, - "default": "150", - "description": "Font size for the biggest value in the list" - }, - "header_font_size": { - "type": "SelectControl", - "label": "Header Font Size", - "renderTrigger": true, - "clearable": false, - "default": 0.3, - "options": [ - { - "label": "Tiny", - "value": 0.125 - }, - { - "label": "Small", - "value": 0.2 - }, - { - "label": "Normal", - "value": 0.3 - }, - { - "label": "Large", - "value": 0.4 - }, - { - "label": "Huge", - "value": 0.5 - } - ] - }, - "subheader_font_size": { - "type": "SelectControl", - "label": "Subheader Font Size", - "renderTrigger": true, - "clearable": false, - "default": 0.125, - "options": [ - { - "label": "Tiny", - "value": 0.125 - }, - { - "label": "Small", - "value": 0.2 - }, - { - "label": "Normal", - "value": 0.3 - }, - { - "label": "Large", - "value": 0.4 - }, - { - "label": "Huge", - "value": 0.5 - } - ] - }, - "instant_filtering": { - "type": "CheckboxControl", - "label": "Instant Filtering", - "renderTrigger": true, - "default": true, - "description": "Whether to apply filters as they change, or wait forusers to hit an [Apply] button" - }, - "extruded": { - "type": "CheckboxControl", - "label": "Extruded", - "renderTrigger": true, - "default": true, - "description": "Whether to make the grid 3D" - }, - "show_brush": { - "type": "SelectControl", - "label": "Show Range Filter", - "renderTrigger": true, - "clearable": false, - "default": "auto", - "choices": [ - [ - "yes", - "Yes" - ], - [ - "no", - "No" - ], - [ - "auto", - "Auto" - ] - ], - "description": "Whether to display the time range interactive selector" - }, - "date_filter": { - "type": "CheckboxControl", - "label": "Date Filter", - "default": true, - "description": "Whether to include a time filter" - }, - "show_sqla_time_granularity": { - "type": "CheckboxControl", - "label": "Show SQL Granularity Dropdown", - "default": false, - "description": "Check to include SQL Granularity dropdown" - 
}, - "show_sqla_time_column": { - "type": "CheckboxControl", - "label": "Show SQL Time Column", - "default": false, - "description": "Check to include Time Column dropdown" - }, - "show_druid_time_granularity": { - "type": "CheckboxControl", - "label": "Show Druid Granularity Dropdown", - "default": false, - "description": "Check to include Druid Granularity dropdown" - }, - "show_druid_time_origin": { - "type": "CheckboxControl", - "label": "Show Druid Time Origin", - "default": false, - "description": "Check to include Time Origin dropdown" - }, - "show_datatable": { - "type": "CheckboxControl", - "label": "Data Table", - "default": false, - "renderTrigger": true, - "description": "Whether to display the interactive data table" - }, - "include_search": { - "type": "CheckboxControl", - "label": "Search Box", - "renderTrigger": true, - "default": false, - "description": "Whether to include a client-side search box" - }, - "table_filter": { - "type": "CheckboxControl", - "label": "Emit Filter Events", - "renderTrigger": true, - "default": false, - "description": "Whether to apply filter when items are clicked" - }, - "align_pn": { - "type": "CheckboxControl", - "label": "Align +/-", - "renderTrigger": true, - "default": false, - "description": "Whether to align the background chart for +/- values" - }, - "color_pn": { - "type": "CheckboxControl", - "label": "Color +/-", - "renderTrigger": true, - "default": true, - "description": "Whether to color +/- values" - }, - "show_bubbles": { - "type": "CheckboxControl", - "label": "Show Bubbles", - "default": false, - "renderTrigger": true, - "description": "Whether to display bubbles on top of countries" - }, - "show_legend": { - "type": "CheckboxControl", - "label": "Legend", - "renderTrigger": true, - "default": true, - "description": "Whether to display the legend (toggles)" - }, - "send_time_range": { - "type": "CheckboxControl", - "label": "Propagate", - "renderTrigger": true, - "default": false, - "description": 
"Send range filter events to other charts" - }, - "toggle_polygons": { - "type": "CheckboxControl", - "label": "Multiple filtering", - "renderTrigger": true, - "default": true, - "description": "Allow sending multiple polygons as a filter event" - }, - "num_buckets": { - "type": "SelectControl", - "multi": false, - "freeForm": true, - "label": "Number of buckets to group data", - "default": 5, - "choices": [ - [ - 2, - "2" - ], - [ - 3, - "3" - ], - [ - 5, - "5" - ], - [ - 10, - "10" - ] - ], - "description": "How many buckets should the data be grouped in.", - "renderTrigger": true - }, - "break_points": { - "type": "SelectControl", - "multi": true, - "freeForm": true, - "label": "Bucket break points", - "choices": [], - "description": "List of n+1 values for bucketing metric into n buckets.", - "renderTrigger": true - }, - "show_labels": { - "type": "CheckboxControl", - "label": "Show Labels", - "renderTrigger": true, - "default": true, - "description": "Whether to display the labels. Note that the label only displays when the the 5% threshold." - }, - "show_values": { - "type": "CheckboxControl", - "label": "Show Values", - "renderTrigger": true, - "default": false, - "description": "Whether to display the numerical values within the cells" - }, - "show_metric_name": { - "type": "CheckboxControl", - "label": "Show Metric Names", - "renderTrigger": true, - "default": true, - "description": "Whether to display the metric name as a title" - }, - "show_trend_line": { - "type": "CheckboxControl", - "label": "Show Trend Line", - "renderTrigger": true, - "default": true, - "description": "Whether to display the trend line" - }, - "start_y_axis_at_zero": { - "type": "CheckboxControl", - "label": "Start y-axis at 0", - "renderTrigger": true, - "default": true, - "description": "Start y-axis at zero. Uncheck to start y-axis at minimum value in the data." 
- }, - "x_axis_showminmax": { - "type": "CheckboxControl", - "label": "X bounds", - "renderTrigger": true, - "default": false, - "description": "Whether to display the min and max values of the X-axis" - }, - "y_axis_showminmax": { - "type": "CheckboxControl", - "label": "Y bounds", - "renderTrigger": true, - "default": false, - "description": "Whether to display the min and max values of the Y-axis" - }, - "rich_tooltip": { - "type": "CheckboxControl", - "label": "Rich Tooltip", - "renderTrigger": true, - "default": true, - "description": "The rich tooltip shows a list of all series for that point in time" - }, - "y_log_scale": { - "type": "CheckboxControl", - "label": "Y Log Scale", - "default": false, - "renderTrigger": true, - "description": "Use a log scale for the Y-axis" - }, - "x_log_scale": { - "type": "CheckboxControl", - "label": "X Log Scale", - "default": false, - "renderTrigger": true, - "description": "Use a log scale for the X-axis" - }, - "log_scale": { - "type": "CheckboxControl", - "label": "Log Scale", - "default": false, - "renderTrigger": true, - "description": "Use a log scale" - }, - "donut": { - "type": "CheckboxControl", - "label": "Donut", - "default": false, - "renderTrigger": true, - "description": "Do you want a donut or a pie?" - }, - "labels_outside": { - "type": "CheckboxControl", - "label": "Put labels outside", - "default": true, - "renderTrigger": true, - "description": "Put the labels outside the pie?" 
- }, - "contribution": { - "type": "CheckboxControl", - "label": "Contribution", - "default": false, - "description": "Compute the contribution to the total" - }, - "time_compare": { - "type": "SelectControl", - "multi": true, - "freeForm": true, - "label": "Time Shift", - "choices": [ - [ - "1 day", - "1 day" - ], - [ - "1 week", - "1 week" - ], - [ - "28 days", - "28 days" - ], - [ - "30 days", - "30 days" - ], - [ - "52 weeks", - "52 weeks" - ], - [ - "1 year", - "1 year" - ] - ], - "description": "Overlay one or more timeseries from a relative time period. Expects relative time deltas in natural language (example: 24 hours, 7 days, 56 weeks, 365 days)" - }, - "comparison_type": { - "type": "SelectControl", - "label": "Calculation type", - "default": "values", - "choices": [ - [ - "values", - "Actual Values" - ], - [ - "absolute", - "Absolute difference" - ], - [ - "percentage", - "Percentage change" - ], - [ - "ratio", - "Ratio" - ] - ], - "description": "How to display time shifts: as individual lines; as the absolute difference between the main time series and each time shift; as the percentage change; or as the ratio between series and time shifts." - }, - "subheader": { - "type": "TextControl", - "label": "Subheader", - "description": "Description text that shows up below your Big Number" - }, - "mapbox_label": { - "type": "SelectControl", - "multi": true, - "label": "label", - "default": [], - "description": "`count` is COUNT(*) if a group by is used. Numerical columns will be aggregated with the aggregator. Non-numerical columns will be used to label points. Leave empty to get a count of points in each cluster." 
- }, - "mapbox_style": { - "type": "SelectControl", - "label": "Map Style", - "clearable": false, - "renderTrigger": true, - "choices": [ - [ - "mapbox://styles/mapbox/streets-v9", - "Streets" - ], - [ - "mapbox://styles/mapbox/dark-v9", - "Dark" - ], - [ - "mapbox://styles/mapbox/light-v9", - "Light" - ], - [ - "mapbox://styles/mapbox/satellite-streets-v9", - "Satellite Streets" - ], - [ - "mapbox://styles/mapbox/satellite-v9", - "Satellite" - ], - [ - "mapbox://styles/mapbox/outdoors-v9", - "Outdoors" - ] - ], - "default": "mapbox://styles/mapbox/light-v9", - "description": "Base layer map style" - }, - "clustering_radius": { - "type": "SelectControl", - "freeForm": true, - "label": "Clustering Radius", - "default": "60", - "choices": [ - [ - "0", - "0" - ], - [ - "20", - "20" - ], - [ - "40", - "40" - ], - [ - "60", - "60" - ], - [ - "80", - "80" - ], - [ - "100", - "100" - ], - [ - "200", - "200" - ], - [ - "500", - "500" - ], - [ - "1000", - "1000" - ] - ], - "description": "The radius (in pixels) the algorithm uses to define a cluster. Choose 0 to turn off clustering, but beware that a large number of points (>1000) will cause lag." - }, - "point_radius_fixed": { - "type": "FixedOrMetricControl", - "label": "Point Size", - "default": { - "type": "fix", - "value": 1000 - }, - "description": "Fixed point radius" - }, - "point_radius": { - "type": "SelectControl", - "label": "Point Radius", - "default": "Auto", - "description": "The radius of individual points (ones that are not in a cluster). 
Either a numerical column or `Auto`, which scales the point based on the largest cluster" - }, - "point_radius_unit": { - "type": "SelectControl", - "label": "Point Radius Unit", - "default": "Pixels", - "choices": [ - [ - "Pixels", - "Pixels" - ], - [ - "Miles", - "Miles" - ], - [ - "Kilometers", - "Kilometers" - ] - ], - "description": "The unit of measure for the specified point radius" - }, - "point_unit": { - "type": "SelectControl", - "label": "Point Unit", - "default": "square_m", - "clearable": false, - "choices": [ - [ - "square_m", - "Square meters" - ], - [ - "square_km", - "Square kilometers" - ], - [ - "square_miles", - "Square miles" - ], - [ - "radius_m", - "Radius in meters" - ], - [ - "radius_km", - "Radius in kilometers" - ], - [ - "radius_miles", - "Radius in miles" - ] - ], - "description": "The unit of measure for the specified point radius" - }, - "global_opacity": { - "type": "TextControl", - "label": "Opacity", - "default": 1, - "isFloat": true, - "description": "Opacity of all clusters, points, and labels. Between 0 and 1." 
- }, - "opacity": { - "type": "SliderControl", - "label": "Opacity", - "default": 80, - "step": 1, - "min": 0, - "max": 100, - "renderTrigger": true, - "description": "Opacity, expects values between 0 and 100" - }, - "viewport": { - "type": "ViewportControl", - "label": "Viewport", - "renderTrigger": false, - "description": "Parameters related to the view and perspective on the map", - "default": { - "longitude": 6.85236157047845, - "latitude": 31.222656842808707, - "zoom": 1, - "bearing": 0, - "pitch": 0 - }, - "dontRefreshOnChange": true - }, - "viewport_zoom": { - "type": "TextControl", - "label": "Zoom", - "renderTrigger": true, - "isFloat": true, - "default": 11, - "description": "Zoom level of the map", - "places": 8, - "dontRefreshOnChange": true - }, - "viewport_latitude": { - "type": "TextControl", - "label": "Default latitude", - "renderTrigger": true, - "default": 37.772123, - "isFloat": true, - "description": "Latitude of default viewport", - "places": 8, - "dontRefreshOnChange": true - }, - "viewport_longitude": { - "type": "TextControl", - "label": "Default longitude", - "renderTrigger": true, - "default": -122.405293, - "isFloat": true, - "description": "Longitude of default viewport", - "places": 8, - "dontRefreshOnChange": true - }, - "render_while_dragging": { - "type": "CheckboxControl", - "label": "Live render", - "default": true, - "description": "Points and clusters will update as the viewport is being changed" - }, - "mapbox_color": { - "type": "SelectControl", - "freeForm": true, - "label": "RGB Color", - "default": "rgb(0, 122, 135)", - "choices": [ - [ - "rgb(0, 139, 139)", - "Dark Cyan" - ], - [ - "rgb(128, 0, 128)", - "Purple" - ], - [ - "rgb(255, 215, 0)", - "Gold" - ], - [ - "rgb(69, 69, 69)", - "Dim Gray" - ], - [ - "rgb(220, 20, 60)", - "Crimson" - ], - [ - "rgb(34, 139, 34)", - "Forest Green" - ] - ], - "description": "The color for points and clusters in RGB" - }, - "color": { - "type": "ColorPickerControl", - "label": "Color", - 
"default": { - "r": 0, - "g": 122, - "b": 135, - "a": 1 - }, - "description": "Pick a color" - }, - "ranges": { - "type": "TextControl", - "label": "Ranges", - "default": "", - "description": "Ranges to highlight with shading" - }, - "range_labels": { - "type": "TextControl", - "label": "Range labels", - "default": "", - "description": "Labels for the ranges" - }, - "markers": { - "type": "TextControl", - "label": "Markers", - "default": "", - "description": "List of values to mark with triangles" - }, - "marker_labels": { - "type": "TextControl", - "label": "Marker labels", - "default": "", - "description": "Labels for the markers" - }, - "marker_lines": { - "type": "TextControl", - "label": "Marker lines", - "default": "", - "description": "List of values to mark with lines" - }, - "marker_line_labels": { - "type": "TextControl", - "label": "Marker line labels", - "default": "", - "description": "Labels for the marker lines" - }, - "annotation_layers": { - "type": "AnnotationLayerControl", - "label": "", - "default": [], - "description": "Annotation Layers", - "renderTrigger": true, - "tabOverride": "data" - }, - "adhoc_filters": { - "type": "AdhocFilterControl", - "label": "Filters", - "default": null, - "description": "", - "provideFormDataToProps": true - }, - "filters": { - "type": "FilterPanel" - }, - "slice_id": { - "type": "HiddenControl", - "label": "Chart ID", - "hidden": true, - "description": "The id of the active chart" - }, - "cache_timeout": { - "type": "HiddenControl", - "label": "Cache Timeout (seconds)", - "hidden": true, - "description": "The number of seconds before expiring the cache" - }, - "url_params": { - "type": "HiddenControl", - "label": "URL Parameters", - "hidden": true, - "description": "Extra parameters for use in jinja templated queries" - }, - "order_by_entity": { - "type": "CheckboxControl", - "label": "Order by entity id", - "description": "Important! 
Select this if the table is not already sorted by entity id, else there is no guarantee that all events for each entity are returned.", - "default": true - }, - "min_leaf_node_event_count": { - "type": "SelectControl", - "freeForm": false, - "label": "Minimum leaf node event count", - "default": 1, - "choices": [ - [ - 1, - "1" - ], - [ - 2, - "2" - ], - [ - 3, - "3" - ], - [ - 4, - "4" - ], - [ - 5, - "5" - ], - [ - 6, - "6" - ], - [ - 7, - "7" - ], - [ - 8, - "8" - ], - [ - 9, - "9" - ], - [ - 10, - "10" - ] - ], - "description": "Leaf nodes that represent fewer than this number of events will be initially hidden in the visualization" - }, - "color_scheme": { - "type": "ColorSchemeControl", - "label": "Color Scheme", - "default": "bnbColors", - "renderTrigger": true, - "description": "The color scheme for rendering chart" - }, - "label_colors": { - "type": "ColorMapControl", - "label": "Color Map", - "default": {} - }, - "significance_level": { - "type": "TextControl", - "label": "Significance Level", - "default": 0.05, - "description": "Threshold alpha level for determining significance" - }, - "pvalue_precision": { - "type": "TextControl", - "label": "p-value precision", - "default": 6, - "description": "Number of decimal places with which to display p-values" - }, - "liftvalue_precision": { - "type": "TextControl", - "label": "Lift percent precision", - "default": 4, - "description": "Number of decimal places with which to display lift values" - }, - "column_collection": { - "type": "CollectionControl", - "label": "Time Series Columns", - "validators": [ - null - ], - "controlName": "TimeSeriesColumnControl" - }, - "rose_area_proportion": { - "type": "CheckboxControl", - "label": "Use Area Proportions", - "description": "Check if the Rose Chart should use segment area instead of segment radius for proportioning", - "default": false, - "renderTrigger": true - }, - "time_series_option": { - "type": "SelectControl", - "label": "Options", - "validators": [ - null 
- ], - "default": "not_time", - "valueKey": "value", - "options": [ - { - "label": "Not Time Series", - "value": "not_time", - "description": "Ignore time" - }, - { - "label": "Time Series", - "value": "time_series", - "description": "Standard time series" - }, - { - "label": "Aggregate Mean", - "value": "agg_mean", - "description": "Mean of values over specified period" - }, - { - "label": "Aggregate Sum", - "value": "agg_sum", - "description": "Sum of values over specified period" - }, - { - "label": "Difference", - "value": "point_diff", - "description": "Metric change in value from `since` to `until`" - }, - { - "label": "Percent Change", - "value": "point_percent", - "description": "Metric percent change in value from `since` to `until`" - }, - { - "label": "Factor", - "value": "point_factor", - "description": "Metric factor change from `since` to `until`" - }, - { - "label": "Advanced Analytics", - "value": "adv_anal", - "description": "Use the Advanced Analytics options below" - } - ], - "description": "Settings for time series" - }, - "equal_date_size": { - "type": "CheckboxControl", - "label": "Equal Date Sizes", - "default": true, - "renderTrigger": true, - "description": "Check to force date partitions to have the same height" - }, - "partition_limit": { - "type": "TextControl", - "label": "Partition Limit", - "isInt": true, - "default": "5", - "description": "The maximum number of subdivisions of each group; lower values are pruned first" - }, - "min_radius": { - "type": "TextControl", - "label": "Minimum Radius", - "isFloat": true, - "validators": [ - null - ], - "renderTrigger": true, - "default": 2, - "description": "Minimum radius size of the circle, in pixels. As the zoom level changes, this insures that the circle respects this minimum radius." 
- }, - "max_radius": { - "type": "TextControl", - "label": "Maximum Radius", - "isFloat": true, - "validators": [ - null - ], - "renderTrigger": true, - "default": 250, - "description": "Maxium radius size of the circle, in pixels. As the zoom level changes, this insures that the circle respects this maximum radius." - }, - "partition_threshold": { - "type": "TextControl", - "label": "Partition Threshold", - "isFloat": true, - "default": "0.05", - "description": "Partitions whose height to parent height proportions are below this value are pruned" - }, - "line_column": { - "type": "SelectControl", - "label": "Lines column", - "default": null, - "description": "The database columns that contains lines information", - "validators": [ - null - ] - }, - "line_type": { - "type": "SelectControl", - "label": "Lines encoding", - "clearable": false, - "default": "json", - "description": "The encoding format of the lines", - "choices": [ - [ - "polyline", - "Polyline" - ], - [ - "json", - "JSON" - ], - [ - "geohash", - "geohash (square)" - ] - ] - }, - "line_width": { - "type": "TextControl", - "label": "Line width", - "renderTrigger": true, - "isInt": true, - "default": 10, - "description": "The width of the lines" - }, - "line_charts": { - "type": "SelectAsyncControl", - "multi": true, - "label": "Line charts", - "validators": [ - null - ], - "default": [], - "description": "Pick a set of line charts to layer on top of one another", - "dataEndpoint": "/sliceasync/api/read?_flt_0_viz_type=line&_flt_7_viz_type=line_multi", - "placeholder": "Select charts", - "onAsyncErrorMessage": "Error while fetching charts" - }, - "line_charts_2": { - "type": "SelectAsyncControl", - "multi": true, - "label": "Right Axis chart(s)", - "validators": [], - "default": [], - "description": "Choose one or more charts for right axis", - "dataEndpoint": "/sliceasync/api/read?_flt_0_viz_type=line&_flt_7_viz_type=line_multi", - "placeholder": "Select charts", - "onAsyncErrorMessage": "Error while 
fetching charts" - }, - "prefix_metric_with_slice_name": { - "type": "CheckboxControl", - "label": "Prefix metric name with slice name", - "default": false, - "renderTrigger": true - }, - "reverse_long_lat": { - "type": "CheckboxControl", - "label": "Reverse Lat & Long", - "default": false - }, - "deck_slices": { - "type": "SelectAsyncControl", - "multi": true, - "label": "deck.gl charts", - "validators": [ - null - ], - "default": [], - "description": "Pick a set of deck.gl charts to layer on top of one another", - "dataEndpoint": "/sliceasync/api/read?_flt_0_viz_type=deck_&_flt_7_viz_type=deck_multi", - "placeholder": "Select charts", - "onAsyncErrorMessage": "Error while fetching charts" - }, - "js_data_mutator": { - "type": "TextAreaControl", - "language": "javascript", - "label": "Javascript data interceptor", - "description": "Define a javascript function that receives the data array used in the visualization and is expected to return a modified version of that array. This can be used to alter properties of the data, filter, or enrich the array.", - "height": 100, - "default": "", - "aboveEditorSection": { - "type": "div", - "key": null, - "ref": null, - "props": { - "children": [ - { - "type": "p", - "key": null, - "ref": null, - "props": { - "children": "Define a javascript function that receives the data array used in the visualization and is expected to return a modified version of that array. This can be used to alter properties of the data, filter, or enrich the array." 
- }, - "_owner": null, - "_store": {} - }, - { - "type": "p", - "key": null, - "ref": null, - "props": { - "children": { - "type": "div", - "key": null, - "ref": null, - "props": { - "children": [ - "For more information about objects are in context in the scope of this function, refer to the", - { - "type": "a", - "key": null, - "ref": null, - "props": { - "href": "https://github.com/apache/incubator-superset/blob/master/superset/assets/src/modules/sandbox.js", - "children": [ - " source code of Superset's sandboxed parser", - "." - ] - }, - "_owner": null, - "_store": {} - }, - "." - ] - }, - "_owner": null, - "_store": {} - } - }, - "_owner": null, - "_store": {} - }, - null - ] - }, - "_owner": null, - "_store": {} - } - }, - "js_data": { - "type": "TextAreaControl", - "language": "javascript", - "label": "Javascript data mutator", - "description": "Define a function that receives intercepts the data objects and can mutate it", - "height": 100, - "default": "", - "aboveEditorSection": { - "type": "div", - "key": null, - "ref": null, - "props": { - "children": [ - { - "type": "p", - "key": null, - "ref": null, - "props": { - "children": "Define a function that receives intercepts the data objects and can mutate it" - }, - "_owner": null, - "_store": {} - }, - { - "type": "p", - "key": null, - "ref": null, - "props": { - "children": { - "type": "div", - "key": null, - "ref": null, - "props": { - "children": [ - "For more information about objects are in context in the scope of this function, refer to the", - { - "type": "a", - "key": null, - "ref": null, - "props": { - "href": "https://github.com/apache/incubator-superset/blob/master/superset/assets/src/modules/sandbox.js", - "children": [ - " source code of Superset's sandboxed parser", - "." - ] - }, - "_owner": null, - "_store": {} - }, - "." 
- ] - }, - "_owner": null, - "_store": {} - } - }, - "_owner": null, - "_store": {} - }, - null - ] - }, - "_owner": null, - "_store": {} - } - }, - "js_tooltip": { - "type": "TextAreaControl", - "language": "javascript", - "label": "Javascript tooltip generator", - "description": "Define a function that receives the input and outputs the content for a tooltip", - "height": 100, - "default": "", - "aboveEditorSection": { - "type": "div", - "key": null, - "ref": null, - "props": { - "children": [ - { - "type": "p", - "key": null, - "ref": null, - "props": { - "children": "Define a function that receives the input and outputs the content for a tooltip" - }, - "_owner": null, - "_store": {} - }, - { - "type": "p", - "key": null, - "ref": null, - "props": { - "children": { - "type": "div", - "key": null, - "ref": null, - "props": { - "children": [ - "For more information about objects are in context in the scope of this function, refer to the", - { - "type": "a", - "key": null, - "ref": null, - "props": { - "href": "https://github.com/apache/incubator-superset/blob/master/superset/assets/src/modules/sandbox.js", - "children": [ - " source code of Superset's sandboxed parser", - "." - ] - }, - "_owner": null, - "_store": {} - }, - "." 
- ] - }, - "_owner": null, - "_store": {} - } - }, - "_owner": null, - "_store": {} - }, - null - ] - }, - "_owner": null, - "_store": {} - } - }, - "js_onclick_href": { - "type": "TextAreaControl", - "language": "javascript", - "label": "Javascript onClick href", - "description": "Define a function that returns a URL to navigate to when user clicks", - "height": 100, - "default": "", - "aboveEditorSection": { - "type": "div", - "key": null, - "ref": null, - "props": { - "children": [ - { - "type": "p", - "key": null, - "ref": null, - "props": { - "children": "Define a function that returns a URL to navigate to when user clicks" - }, - "_owner": null, - "_store": {} - }, - { - "type": "p", - "key": null, - "ref": null, - "props": { - "children": { - "type": "div", - "key": null, - "ref": null, - "props": { - "children": [ - "For more information about objects are in context in the scope of this function, refer to the", - { - "type": "a", - "key": null, - "ref": null, - "props": { - "href": "https://github.com/apache/incubator-superset/blob/master/superset/assets/src/modules/sandbox.js", - "children": [ - " source code of Superset's sandboxed parser", - "." - ] - }, - "_owner": null, - "_store": {} - }, - "." 
- ] - }, - "_owner": null, - "_store": {} - } - }, - "_owner": null, - "_store": {} - }, - null - ] - }, - "_owner": null, - "_store": {} - } - }, - "js_columns": { - "type": "SelectControl", - "multi": true, - "label": "Extra data for JS", - "default": [], - "includeTime": false, - "description": "List of extra columns made available in Javascript functions", - "valueKey": "column_name" - }, - "stroked": { - "type": "CheckboxControl", - "label": "Stroked", - "renderTrigger": true, - "description": "Whether to display the stroke", - "default": false - }, - "filled": { - "type": "CheckboxControl", - "label": "Filled", - "renderTrigger": true, - "description": "Whether to fill the objects", - "default": true - }, - "normalized": { - "type": "CheckboxControl", - "label": "Normalized", - "renderTrigger": true, - "description": "Whether to normalize the histogram", - "default": false - } - } -} - diff --git a/superset/assets/cypress/integration/dashboard/controls.js b/superset/assets/cypress/integration/dashboard/controls.js index fe5581590c83c..77974a0b8c16d 100644 --- a/superset/assets/cypress/integration/dashboard/controls.js +++ b/superset/assets/cypress/integration/dashboard/controls.js @@ -38,8 +38,8 @@ export default () => describe('top-level controls', () => { .forEach((slice) => { const sliceRequest = `getJson_${slice.slice_id}`; sliceRequests.push(`@${sliceRequest}`); - const formData = `{"slice_id":${slice.slice_id},"viz_type":"${slice.form_data.viz_type}"}`; - cy.route('GET', `/superset/explore_json/?form_data=${formData}`).as(sliceRequest); + const formData = `{"slice_id":${slice.slice_id}}`; + cy.route('POST', `/superset/explore_json/?form_data=${formData}`).as(sliceRequest); const forceRefresh = `postJson_${slice.slice_id}_force`; forceRefreshRequests.push(`@${forceRefresh}`); diff --git a/superset/assets/cypress/integration/dashboard/edit_mode.js b/superset/assets/cypress/integration/dashboard/edit_mode.js index 280b6aae41f03..79198e60af3c3 100644 --- 
a/superset/assets/cypress/integration/dashboard/edit_mode.js +++ b/superset/assets/cypress/integration/dashboard/edit_mode.js @@ -28,9 +28,9 @@ export default () => describe('edit mode', () => { const bootstrapData = JSON.parse(data[0].dataset.bootstrap); const dashboard = bootstrapData.dashboard_data; const boxplotChartId = dashboard.slices.find(slice => (slice.form_data.viz_type === 'box_plot')).slice_id; - const formData = `{"slice_id":${boxplotChartId},"viz_type":"box_plot"}`; + const formData = `{"slice_id":${boxplotChartId}}`; const boxplotRequest = `/superset/explore_json/?form_data=${formData}`; - cy.route('GET', boxplotRequest).as('boxplotRequest'); + cy.route('POST', boxplotRequest).as('boxplotRequest'); }); cy.get('.dashboard-header').contains('Edit dashboard').click(); diff --git a/superset/assets/cypress/integration/dashboard/filter.js b/superset/assets/cypress/integration/dashboard/filter.js index f37c8c849eec6..97b40a4150c3d 100644 --- a/superset/assets/cypress/integration/dashboard/filter.js +++ b/superset/assets/cypress/integration/dashboard/filter.js @@ -39,9 +39,9 @@ export default () => describe('dashboard filter', () => { it('should apply filter', () => { const aliases = []; - const formData = `{"slice_id":${filterId},"viz_type":"filter_box"}`; + const formData = `{"slice_id":${filterId}}`; const filterRoute = `/superset/explore_json/?form_data=${formData}`; - cy.route('GET', filterRoute).as('fetchFilter'); + cy.route('POST', filterRoute).as('fetchFilter'); cy.wait('@fetchFilter'); sliceIds .filter(id => (parseInt(id, 10) !== filterId)) diff --git a/superset/assets/cypress/integration/dashboard/load.js b/superset/assets/cypress/integration/dashboard/load.js index 79daa30422563..c6427c7320afa 100644 --- a/superset/assets/cypress/integration/dashboard/load.js +++ b/superset/assets/cypress/integration/dashboard/load.js @@ -34,8 +34,8 @@ export default () => describe('load', () => { // then define routes and create alias for each requests 
slices.forEach((slice) => { const alias = `getJson_${slice.slice_id}`; - const formData = `{"slice_id":${slice.slice_id},"viz_type":"${slice.form_data.viz_type}"}`; - cy.route('GET', `/superset/explore_json/?form_data=${formData}`).as(alias); + const formData = `{"slice_id":${slice.slice_id}}`; + cy.route('POST', `/superset/explore_json/?form_data=${formData}`).as(alias); aliases.push(`@${alias}`); }); }); diff --git a/superset/assets/cypress/integration/dashboard/save.js b/superset/assets/cypress/integration/dashboard/save.js index 1d26ac219a085..772862d5b87e3 100644 --- a/superset/assets/cypress/integration/dashboard/save.js +++ b/superset/assets/cypress/integration/dashboard/save.js @@ -56,9 +56,9 @@ export default () => describe('save', () => { cy.wait('@copyRequest'); // should have box_plot chart - const formData = `{"slice_id":${boxplotChartId},"viz_type":"box_plot"}`; + const formData = `{"slice_id":${boxplotChartId}}`; const boxplotRequest = `/superset/explore_json/?form_data=${formData}`; - cy.route('GET', boxplotRequest).as('boxplotRequest'); + cy.route('POST', boxplotRequest).as('boxplotRequest'); cy.wait('@boxplotRequest'); cy.get('.grid-container .box_plot').should('be.exist'); diff --git a/superset/assets/spec/javascripts/explore/components/SaveModal_spec.jsx b/superset/assets/spec/javascripts/explore/components/SaveModal_spec.jsx index e7be94db967b9..d334688a4d018 100644 --- a/superset/assets/spec/javascripts/explore/components/SaveModal_spec.jsx +++ b/superset/assets/spec/javascripts/explore/components/SaveModal_spec.jsx @@ -19,6 +19,7 @@ import React from 'react'; import configureStore from 'redux-mock-store'; import thunk from 'redux-thunk'; +import { bindActionCreators } from 'redux'; import { shallow, mount } from 'enzyme'; import { Modal, Button, Radio } from 'react-bootstrap'; @@ -52,7 +53,12 @@ describe('SaveModal', () => { const defaultProps = { onHide: () => ({}), - actions: saveModalActions, + actions: bindActionCreators(saveModalActions, 
(arg) => { + if (typeof arg === 'function') { + return arg(jest.fn); + } + return arg; + }), form_data: { datasource: '107__table' }, }; const mockEvent = { @@ -108,15 +114,15 @@ describe('SaveModal', () => { it('componentDidMount', () => { sinon.spy(SaveModal.prototype, 'componentDidMount'); - sinon.spy(saveModalActions, 'fetchDashboards'); + sinon.spy(defaultProps.actions, 'fetchDashboards'); mount(, { context: { store }, }); expect(SaveModal.prototype.componentDidMount.calledOnce).toBe(true); - expect(saveModalActions.fetchDashboards.calledOnce).toBe(true); + expect(defaultProps.actions.fetchDashboards.calledOnce).toBe(true); SaveModal.prototype.componentDidMount.restore(); - saveModalActions.fetchDashboards.restore(); + defaultProps.actions.fetchDashboards.restore(); }); it('onChange', () => { @@ -139,7 +145,7 @@ describe('SaveModal', () => { .callsFake(() => ({ url: 'mockURL', payload: defaultProps.form_data })); sinon - .stub(saveModalActions, 'saveSlice') + .stub(defaultProps.actions, 'saveSlice') .callsFake(() => Promise.resolve({ data: { dashboard: '/mock/', slice: { slice_url: '/mock/' } } }), ); @@ -147,13 +153,13 @@ describe('SaveModal', () => { afterEach(() => { exploreUtils.getExploreUrlAndPayload.restore(); - saveModalActions.saveSlice.restore(); + defaultProps.actions.saveSlice.restore(); }); it('should save slice', () => { const wrapper = getWrapper(); wrapper.instance().saveOrOverwrite(true); - const args = saveModalActions.saveSlice.getCall(0).args; + const args = defaultProps.actions.saveSlice.getCall(0).args; expect(args[0]).toEqual(defaultProps.form_data); }); @@ -167,7 +173,7 @@ describe('SaveModal', () => { wrapper.setState({ saveToDashboardId }); wrapper.instance().saveOrOverwrite(true); - const args = saveModalActions.saveSlice.getCall(0).args; + const args = defaultProps.actions.saveSlice.getCall(0).args; expect(args[1].save_to_dashboard_id).toBe(saveToDashboardId); }); @@ -181,7 +187,7 @@ describe('SaveModal', () => { wrapper.setState({ 
newDashboardName }); wrapper.instance().saveOrOverwrite(true); - const args = saveModalActions.saveSlice.getCall(0).args; + const args = defaultProps.actions.saveSlice.getCall(0).args; expect(args[1].new_dashboard_name).toBe(newDashboardName); }); }); @@ -251,13 +257,13 @@ describe('SaveModal', () => { }); it('removeAlert', () => { - sinon.spy(saveModalActions, 'removeSaveModalAlert'); + sinon.spy(defaultProps.actions, 'removeSaveModalAlert'); const wrapper = getWrapper(); wrapper.setProps({ alert: 'old alert' }); wrapper.instance().removeAlert(); - expect(saveModalActions.removeSaveModalAlert.callCount).toBe(1); + expect(defaultProps.actions.removeSaveModalAlert.callCount).toBe(1); expect(wrapper.state().alert).toBeNull(); - saveModalActions.removeSaveModalAlert.restore(); + defaultProps.actions.removeSaveModalAlert.restore(); }); }); diff --git a/superset/assets/src/chart/Chart.jsx b/superset/assets/src/chart/Chart.jsx index c0d391673fc7a..cfc6ce8a97053 100644 --- a/superset/assets/src/chart/Chart.jsx +++ b/superset/assets/src/chart/Chart.jsx @@ -20,6 +20,7 @@ import PropTypes from 'prop-types'; import React from 'react'; import { Alert } from 'react-bootstrap'; +import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags'; import { Logger, LOG_ACTIONS_RENDER_CHART_CONTAINER } from '../logger/LogUtils'; import Loading from '../components/Loading'; import RefreshChartOverlay from '../components/RefreshChartOverlay'; @@ -70,7 +71,7 @@ class Chart extends React.PureComponent { } componentDidMount() { if (this.props.triggerQuery) { - if (this.props.chartId > 0) { + if (this.props.chartId > 0 && isFeatureEnabled(FeatureFlag.CLIENT_CACHE)) { // Load saved chart with a GET request this.props.actions.getSavedChart( this.props.formData, diff --git a/superset/assets/src/chart/chartAction.js b/superset/assets/src/chart/chartAction.js index 3909dadd85967..dc9eb322d4cf1 100644 --- a/superset/assets/src/chart/chartAction.js +++ b/superset/assets/src/chart/chartAction.js @@ 
-21,6 +21,7 @@ /* eslint no-param-reassign: ["error", { "props": false }] */ import { t } from '@superset-ui/translation'; import { SupersetClient } from '@superset-ui/connection'; +import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags'; import { getExploreUrlAndPayload, getAnnotationJsonUrl } from '../explore/exploreUtils'; import { requiresQuery, ANNOTATION_SOURCE_TYPES } from '../modules/AnnotationTypes'; import { addDangerToast } from '../messageToasts/actions'; @@ -194,7 +195,9 @@ export function exploreJSON(formData, force = false, timeout = 60, key, method) }; } - const clientMethod = method === 'GET' ? SupersetClient.get : SupersetClient.post; + const clientMethod = method === 'GET' && isFeatureEnabled(FeatureFlag.CLIENT_CACHE) + ? SupersetClient.get + : SupersetClient.post; const queryPromise = clientMethod(querySettings) .then(({ json }) => { dispatch(logEvent(LOG_ACTIONS_LOAD_CHART, { diff --git a/superset/assets/src/dashboard/actions/sliceEntities.js b/superset/assets/src/dashboard/actions/sliceEntities.js index e98d28bcc457d..efee23eec3509 100644 --- a/superset/assets/src/dashboard/actions/sliceEntities.js +++ b/superset/assets/src/dashboard/actions/sliceEntities.js @@ -77,6 +77,7 @@ export function fetchAllSlices(userId) { description_markdown: slice.description_markeddown, viz_type: slice.viz_type, modified: slice.modified, + changed_on_humanized: slice.changed_on_humanized, }; } }); diff --git a/superset/assets/src/dashboard/components/Header.jsx b/superset/assets/src/dashboard/components/Header.jsx index 92d3b1df62ba2..8f3f831e3fd21 100644 --- a/superset/assets/src/dashboard/components/Header.jsx +++ b/superset/assets/src/dashboard/components/Header.jsx @@ -222,7 +222,7 @@ class Header extends React.PureComponent { colorScheme, colorNamespace, ); - const labelColors = scale.getColorMap(); + const labelColors = colorScheme ? 
scale.getColorMap() : {}; const data = { positions, expanded_slices: expandedSlices, diff --git a/superset/assets/src/dashboard/components/SaveModal.jsx b/superset/assets/src/dashboard/components/SaveModal.jsx index 1873f0c3133a3..d926bc8aa55dd 100644 --- a/superset/assets/src/dashboard/components/SaveModal.jsx +++ b/superset/assets/src/dashboard/components/SaveModal.jsx @@ -110,7 +110,7 @@ class SaveModal extends React.PureComponent { colorScheme, colorNamespace, ); - const labelColors = scale.getColorMap(); + const labelColors = colorScheme ? scale.getColorMap() : {}; const data = { positions, css, diff --git a/superset/assets/src/dashboard/components/SliceAdder.jsx b/superset/assets/src/dashboard/components/SliceAdder.jsx index 9359759ae4090..dd6ad56756847 100644 --- a/superset/assets/src/dashboard/components/SliceAdder.jsx +++ b/superset/assets/src/dashboard/components/SliceAdder.jsx @@ -170,7 +170,6 @@ class SliceAdder extends React.Component { chartId: cellData.slice_id, sliceName: cellData.slice_name, }; - return ( { + const dashboardIds = this.props.dashboards.map(dashboard => dashboard.value); + let recentDashboard = sessionStorage.getItem('save_chart_recent_dashboard'); + recentDashboard = recentDashboard && parseInt(recentDashboard, 10); + if (recentDashboard !== null && dashboardIds.indexOf(recentDashboard) !== -1) { + this.setState({ saveToDashboardId: recentDashboard, addToDash: 'existing' }); + } + }); } onChange(name, event) { switch (name) { @@ -125,6 +132,11 @@ class SaveModal extends React.Component { sliceParams.goto_dash = gotodash; this.props.actions.saveSlice(this.props.form_data, sliceParams).then(({ data }) => { + if (data.dashboard_id === null) { + sessionStorage.removeItem('save_chart_recent_dashboard'); + } else { + sessionStorage.setItem('save_chart_recent_dashboard', data.dashboard_id); + } // Go to new slice url or dashboard url if (gotodash) { window.location = supersetURL(data.dashboard); diff --git 
a/superset/assets/src/featureFlags.ts b/superset/assets/src/featureFlags.ts index 54eee94829ae6..8638a5471f7c9 100644 --- a/superset/assets/src/featureFlags.ts +++ b/superset/assets/src/featureFlags.ts @@ -21,6 +21,7 @@ export enum FeatureFlag { SCOPED_FILTER = 'SCOPED_FILTER', OMNIBAR = 'OMNIBAR', + CLIENT_CACHE = 'CLIENT_CACHE', } export type FeatureFlagMap = { diff --git a/superset/config.py b/superset/config.py index b402fec9aa2c9..df14b0f2c1e55 100644 --- a/superset/config.py +++ b/superset/config.py @@ -200,7 +200,10 @@ # For example, DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False } here # and FEATURE_FLAGS = { 'BAR': True, 'BAZ': True } in superset_config.py # will result in combined feature flags of { 'FOO': True, 'BAR': True, 'BAZ': True } -DEFAULT_FEATURE_FLAGS = {} +DEFAULT_FEATURE_FLAGS = { + # Experimental feature introducing a client (browser) cache + 'CLIENT_CACHE': False, +} # A function that receives a dict of all feature flags # (DEFAULT_FEATURE_FLAGS merged with FEATURE_FLAGS) diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py index eed92a688e88d..57db0ad4684d1 100644 --- a/superset/connectors/base/models.py +++ b/superset/connectors/base/models.py @@ -347,7 +347,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin): __tablename__ = None # {connector_name}_column id = Column(Integer, primary_key=True) - column_name = Column(String(255)) + column_name = Column(String(255), nullable=False) verbose_name = Column(String(1024)) is_active = Column(Boolean, default=True) type = Column(String(32)) @@ -411,7 +411,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin): __tablename__ = None # {connector_name}_metric id = Column(Integer, primary_key=True) - metric_name = Column(String(512)) + metric_name = Column(String(255), nullable=False) verbose_name = Column(String(1024)) metric_type = Column(String(32)) description = Column(Text) diff --git a/superset/connectors/druid/models.py 
b/superset/connectors/druid/models.py index 8440a25099a35..43a092c20c548 100644 --- a/superset/connectors/druid/models.py +++ b/superset/connectors/druid/models.py @@ -269,9 +269,7 @@ class DruidColumn(Model, BaseColumn): __tablename__ = 'columns' __table_args__ = (UniqueConstraint('column_name', 'datasource_id'),) - datasource_id = Column( - Integer, - ForeignKey('datasources.id')) + datasource_id = Column(Integer, ForeignKey('datasources.id')) # Setting enable_typechecks=False disables polymorphic inheritance. datasource = relationship( 'DruidDatasource', @@ -343,15 +341,14 @@ class DruidMetric(Model, BaseMetric): __tablename__ = 'metrics' __table_args__ = (UniqueConstraint('metric_name', 'datasource_id'),) - datasource_id = Column( - Integer, - ForeignKey('datasources.id')) + datasource_id = Column(Integer, ForeignKey('datasources.id')) + # Setting enable_typechecks=False disables polymorphic inheritance. datasource = relationship( 'DruidDatasource', backref=backref('metrics', cascade='all, delete-orphan'), enable_typechecks=False) - json = Column(Text) + json = Column(Text, nullable=False) export_fields = ( 'metric_name', 'verbose_name', 'metric_type', 'datasource_id', @@ -417,7 +414,7 @@ class DruidDatasource(Model, BaseDatasource): baselink = 'druiddatasourcemodelview' # Columns - datasource_name = Column(String(255)) + datasource_name = Column(String(255), nullable=False) is_hidden = Column(Boolean, default=False) filter_select_enabled = Column(Boolean, default=True) # override default fetch_values_from = Column(String(100)) @@ -427,7 +424,6 @@ class DruidDatasource(Model, BaseDatasource): 'DruidCluster', backref='datasources', foreign_keys=[cluster_name]) owners = relationship(owner_class, secondary=druiddatasource_user, backref='druiddatasources') - UniqueConstraint('cluster_name', 'datasource_name') export_fields = ( 'datasource_name', 'is_hidden', 'description', 'default_endpoint', diff --git a/superset/connectors/sqla/models.py 
b/superset/connectors/sqla/models.py index 64933708db6b1..f178db458bbf1 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -100,7 +100,7 @@ class TableColumn(Model, BaseColumn): backref=backref('columns', cascade='all, delete-orphan'), foreign_keys=[table_id]) is_dttm = Column(Boolean, default=False) - expression = Column(Text, default='') + expression = Column(Text) python_date_format = Column(String(255)) database_expression = Column(String(255)) @@ -209,7 +209,7 @@ class SqlMetric(Model, BaseMetric): 'SqlaTable', backref=backref('metrics', cascade='all, delete-orphan'), foreign_keys=[table_id]) - expression = Column(Text) + expression = Column(Text, nullable=False) export_fields = ( 'metric_name', 'verbose_name', 'metric_type', 'table_id', 'expression', diff --git a/superset/migrations/versions/7f2635b51f5d_update_base_columns.py b/superset/migrations/versions/7f2635b51f5d_update_base_columns.py new file mode 100644 index 0000000000000..7087c6a5ab19e --- /dev/null +++ b/superset/migrations/versions/7f2635b51f5d_update_base_columns.py @@ -0,0 +1,137 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+"""update base columns + +Note that the columns table was previously partially modifed by revision +f231d82b9b26. + +Revision ID: 7f2635b51f5d +Revises: 937d04c16b64 +Create Date: 2018-07-20 15:31:05.058050 + +""" + +# revision identifiers, used by Alembic. +revision = '7f2635b51f5d' +down_revision = '937d04c16b64' + +from alembic import op +from sqlalchemy import Column, engine, Integer, String +from sqlalchemy.ext.declarative import declarative_base + +from superset import db +from superset.utils.core import generic_find_uq_constraint_name + +Base = declarative_base() + +conv = { + 'uq': 'uq_%(table_name)s_%(column_0_name)s', +} + + +class BaseColumnMixin: + id = Column(Integer, primary_key=True) + + +class DruidColumn(BaseColumnMixin, Base): + __tablename__ = 'columns' + + datasource_id = Column(Integer) + + +class TableColumn(BaseColumnMixin, Base): + __tablename__ = 'table_columns' + + table_id = Column(Integer) + + +def upgrade(): + bind = op.get_bind() + session = db.Session(bind=bind) + + # Delete the orphaned columns records. + for record in session.query(DruidColumn).all(): + if record.datasource_id is None: + session.delete(record) + + session.commit() + + # Enforce that the columns.column_name column be non-nullable. + with op.batch_alter_table('columns') as batch_op: + batch_op.alter_column( + 'column_name', + existing_type=String(255), + nullable=False, + ) + + # Delete the orphaned table_columns records. + for record in session.query(TableColumn).all(): + if record.table_id is None: + session.delete(record) + + session.commit() + + # Reduce the size of the table_columns.column_name column for constraint + # viability and enforce that it be non-nullable. + with op.batch_alter_table('table_columns') as batch_op: + batch_op.alter_column( + 'column_name', + existing_type=String(256), + nullable=False, + type_=String(255), + ) + + # Add the missing uniqueness constraint to the table_columns table. 
+ with op.batch_alter_table('table_columns', naming_convention=conv) as batch_op: + batch_op.create_unique_constraint( + 'uq_table_columns_column_name', + ['column_name', 'table_id'], + ) + + +def downgrade(): + bind = op.get_bind() + insp = engine.reflection.Inspector.from_engine(bind) + + # Remove the missing uniqueness constraint from the table_columns table. + with op.batch_alter_table('table_columns', naming_convention=conv) as batch_op: + batch_op.drop_constraint( + generic_find_uq_constraint_name( + 'table_columns', + {'column_name', 'table_id'}, + insp, + ) or 'uq_table_columns_column_name', + type_='unique', + ) + + # Restore the size of the table_columns.column_name column and forego that + # it be non-nullable. + with op.batch_alter_table('table_columns') as batch_op: + batch_op.alter_column( + 'column_name', + existing_type=String(255), + nullable=True, + type_=String(256), + ) + + # Forego that the columns.column_name be non-nullable. + with op.batch_alter_table('columns') as batch_op: + batch_op.alter_column( + 'column_name', + existing_type=String(255), + nullable=True, + ) diff --git a/superset/migrations/versions/937d04c16b64_update_datasources.py b/superset/migrations/versions/937d04c16b64_update_datasources.py new file mode 100644 index 0000000000000..a233a442eb4e8 --- /dev/null +++ b/superset/migrations/versions/937d04c16b64_update_datasources.py @@ -0,0 +1,52 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""update datasources + +Revision ID: 937d04c16b64 +Revises: d94d33dbe938 +Create Date: 2018-07-20 16:08:10.195843 + +""" + +# revision identifiers, used by Alembic. +revision = '937d04c16b64' +down_revision = 'd94d33dbe938' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + + # Enforce that the datasource_name column be non-nullable. + with op.batch_alter_table('datasources') as batch_op: + batch_op.alter_column( + 'datasource_name', + existing_type=sa.String(255), + nullable=False, + ) + + +def downgrade(): + + # Forego that the datasource_name column be non-nullable. + with op.batch_alter_table('datasources') as batch_op: + batch_op.alter_column( + 'datasource_name', + existing_type=sa.String(255), + nullable=True, + ) diff --git a/superset/migrations/versions/d94d33dbe938_form_strip.py b/superset/migrations/versions/d94d33dbe938_form_strip.py new file mode 100644 index 0000000000000..b34bbaacb85ca --- /dev/null +++ b/superset/migrations/versions/d94d33dbe938_form_strip.py @@ -0,0 +1,193 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""form strip + +Revision ID: d94d33dbe938 +Revises: 80aa3f04bc82 +Create Date: 2019-03-21 10:22:01.610217 + +""" + +# revision identifiers, used by Alembic. +revision = 'd94d33dbe938' +down_revision = '80aa3f04bc82' + +from alembic import op +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import Column, Integer, String, Text + +from superset import db +from superset.utils.core import MediumText + +Base = declarative_base() + + +class BaseColumnMixin(object): + id = Column(Integer, primary_key=True) + column_name = Column(String(255)) + description = Column(Text) + type = Column(String(32)) + verbose_name = Column(String(1024)) + + +class BaseDatasourceMixin(object): + id = Column(Integer, primary_key=True) + description = Column(Text) + + +class BaseMetricMixin(object): + id = Column(Integer, primary_key=True) + d3format = Column(String(128)) + description = Column(Text) + metric_name = Column(String(512)) + metric_type = Column(String(32)) + verbose_name = Column(String(1024)) + warning_text = Column(Text) + + +class Annotation(Base): + __tablename__ = 'annotation' + + id = Column(Integer, primary_key=True) + long_descr = Column(Text) + json_metadata = Column(Text) + short_descr = Column(String(500)) + + +class Dashboard(Base): + __tablename__ = 'dashboards' + + id = Column(Integer, primary_key=True) + css = Column(Text) + dashboard_title = Column(String(500)) + description = Column(Text) + json_metadata = Column(Text) + position_json = Column(MediumText()) + slug = Column(String(255)) + + +class Database(Base): + __tablename__ 
= 'dbs' + + id = Column(Integer, primary_key=True) + database_name = Column(String(250)) + extra = Column(Text) + force_ctas_schema = Column(String(250)) + sqlalchemy_uri = Column(String(1024)) + verbose_name = Column(String(250)) + + +class DruidCluster(Base): + __tablename__ = 'clusters' + + id = Column(Integer, primary_key=True) + broker_host = Column(String(255)) + broker_endpoint = Column(String(255)) + cluster_name = Column(String(250)) + verbose_name = Column(String(250)) + + +class DruidColumn(BaseColumnMixin, Base): + __tablename__ = 'columns' + + dimension_spec_json = Column(Text) + + +class DruidDatasource(BaseDatasourceMixin, Base): + __tablename__ = 'datasources' + + datasource_name = Column(String(255)) + default_endpoint = Column(Text) + fetch_values_from = Column(String(100)) + + +class DruidMetric(BaseMetricMixin, Base): + __tablename__ = 'metrics' + + json = Column(Text) + + +class Slice(Base): + __tablename__ = 'slices' + + id = Column(Integer, primary_key=True) + description = Column(Text) + params = Column(Text) + slice_name = Column(String(250)) + viz_type = Column(String(250)) + + +class SqlaTable(BaseDatasourceMixin, Base): + __tablename__ = 'tables' + + default_endpoint = Column(MediumText()) + fetch_values_predicate = Column(String(1000)) + main_dttm_col = Column(String(250)) + schema = Column(String(255)) + sql = Column(Text) + table_name = Column(String(250)) + template_params = Column(Text) + + +class SqlMetric(BaseMetricMixin, Base): + __tablename__ = 'sql_metrics' + + expression = Column(Text) + + +class TableColumn(BaseColumnMixin, Base): + __tablename__ = 'table_columns' + + database_expression = Column(String(255)) + expression = Column(Text) + python_date_format = Column(String(255)) + + +def upgrade(): + bind = op.get_bind() + session = db.Session(bind=bind) + + tables = [ + Annotation, + Dashboard, + Database, + DruidCluster, + DruidColumn, + DruidDatasource, + DruidMetric, + Slice, + SqlaTable, + SqlMetric, + TableColumn, + ] + 
+ for table in tables: + for record in session.query(table).all(): + for col in record.__table__.columns.values(): + if not col.primary_key: + value = getattr(record, col.name) + + if value is not None and value.strip() == '': + setattr(record, col.name, None) + + session.commit() + + session.close() + + +def downgrade(): + pass diff --git a/superset/migrations/versions/e9df189e5c7e_update_base_metrics.py b/superset/migrations/versions/e9df189e5c7e_update_base_metrics.py new file mode 100644 index 0000000000000..07a6c4d571767 --- /dev/null +++ b/superset/migrations/versions/e9df189e5c7e_update_base_metrics.py @@ -0,0 +1,169 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""update base metrics + +Note that the metrics table was previously partially modifed by revision +f231d82b9b26. + +Revision ID: e9df189e5c7e +Revises: 7f2635b51f5d +Create Date: 2018-07-20 15:57:48.118304 + +""" + +# revision identifiers, used by Alembic. 
+revision = 'e9df189e5c7e' +down_revision = '7f2635b51f5d' + +from alembic import op +from sqlalchemy import Column, engine, Integer, String, Text +from sqlalchemy.ext.declarative import declarative_base + +from superset import db +from superset.utils.core import generic_find_uq_constraint_name + +Base = declarative_base() + +conv = { + 'uq': 'uq_%(table_name)s_%(column_0_name)s', +} + + +class BaseMetricMixin: + id = Column(Integer, primary_key=True) + + +class DruidMetric(BaseMetricMixin, Base): + __tablename__ = 'metrics' + + datasource_id = Column(Integer) + + +class SqlMetric(BaseMetricMixin, Base): + __tablename__ = 'sql_metrics' + + table_id = Column(Integer) + + +def upgrade(): + bind = op.get_bind() + session = db.Session(bind=bind) + + # Delete the orphaned metrics records. + for record in session.query(DruidMetric).all(): + if record.datasource_id is None: + session.delete(record) + + session.commit() + + # Enforce that metrics.metric_name column be non-nullable. + with op.batch_alter_table('metrics') as batch_op: + batch_op.alter_column( + 'metric_name', + existing_type=String(255), + nullable=False, + ) + + # Enforce that metrics.json column be non-nullable. + with op.batch_alter_table('metrics') as batch_op: + batch_op.alter_column( + 'json', + existing_type=Text, + nullable=False, + ) + + # Delete the orphaned sql_metrics records. + for record in session.query(SqlMetric).all(): + if record.table_id is None: + session.delete(record) + + session.commit() + + # Reduce the size of the sql_metrics.metric_name column for constraint + # viability and enforce that it to be non-nullable. + with op.batch_alter_table('sql_metrics') as batch_op: + batch_op.alter_column( + 'metric_name', + existing_type=String(512), + nullable=False, + type_=String(255), + ) + + # Enforce that sql_metrics.expression column be non-nullable. 
+ with op.batch_alter_table('sql_metrics') as batch_op: + batch_op.alter_column( + 'expression', + existing_type=Text, + nullable=False, + ) + + # Add the missing uniqueness constraint to the sql_metrics table. + with op.batch_alter_table('sql_metrics', naming_convention=conv) as batch_op: + batch_op.create_unique_constraint( + 'uq_sql_metrics_metric_name', + ['metric_name', 'table_id'], + ) + + +def downgrade(): + bind = op.get_bind() + insp = engine.reflection.Inspector.from_engine(bind) + + # Remove the missing uniqueness constraint from the sql_metrics table. + with op.batch_alter_table('sql_metrics', naming_convention=conv) as batch_op: + batch_op.drop_constraint( + generic_find_uq_constraint_name( + 'sql_metrics', + {'metric_name', 'table_id'}, + insp, + ) or 'uq_sql_metrics_table_id', + type_='unique', + ) + + # Restore the size of the sql_metrics.metric_name column and forego that it + # be non-nullable. + with op.batch_alter_table('sql_metrics') as batch_op: + batch_op.alter_column( + 'metric_name', + existing_type=String(255), + nullable=True, + type_=String(512), + ) + + # Forego that the sql_metrics.expression column be non-nullable. + with op.batch_alter_table('sql_metrics') as batch_op: + batch_op.alter_column( + 'expression', + existing_type=Text, + nullable=True, + ) + + # Forego that the metrics.metric_name column be non-nullable. + with op.batch_alter_table('metrics') as batch_op: + batch_op.alter_column( + 'metric_name', + existing_type=String(255), + nullable=True, + ) + + # Forego that the metrics.json column be non-nullable. 
+ with op.batch_alter_table('metrics') as batch_op: + batch_op.alter_column( + 'json', + existing_type=Text, + nullable=True, + ) diff --git a/superset/models/core.py b/superset/models/core.py index b848604a46d64..fb5850c0fa1b0 100644 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -243,6 +243,7 @@ def data(self): 'slice_name': self.slice_name, 'slice_url': self.slice_url, 'modified': self.modified(), + 'changed_on_humanized': self.changed_on_humanized, 'changed_on': self.changed_on.isoformat(), } @@ -748,6 +749,10 @@ def table_cache_enabled(self): def table_cache_timeout(self): return self.metadata_cache_timeout.get('table_cache_timeout') + @property + def default_schemas(self): + return self.get_extra().get('default_schemas', []) + @classmethod def get_password_masked_url_from_uri(cls, uri): url = make_url(uri) diff --git a/superset/models/helpers.py b/superset/models/helpers.py index a1e9b3c61aadb..78b438d9f8f17 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -288,10 +288,13 @@ def changed_by_(self): def changed_on_(self): return Markup(f'{self.changed_on}') + @property + def changed_on_humanized(self): + return humanize.naturaltime(datetime.now() - self.changed_on) + @renders('changed_on') def modified(self): - time_str = humanize.naturaltime(datetime.now() - self.changed_on) - return Markup(f'{time_str}') + return Markup(f'{self.changed_on_humanized}') class QueryResult(object): diff --git a/superset/utils/cache.py b/superset/utils/cache.py index 569e39728c2b5..4c44d421f0613 100644 --- a/superset/utils/cache.py +++ b/superset/utils/cache.py @@ -20,7 +20,7 @@ from superset import tables_cache -def view_cache_key(*unused_args, **unused_kwargs): +def view_cache_key(*unused_args, **unused_kwargs) -> str: args_hash = hash(frozenset(request.args.items())) return 'view/{}/{}'.format(request.path, args_hash) diff --git a/superset/utils/core.py b/superset/utils/core.py index be8bc08b3e7b0..122998e2fea0e 100644 --- 
a/superset/utils/core.py +++ b/superset/utils/core.py @@ -31,7 +31,8 @@ import signal import smtplib import sys -from typing import Optional, Tuple +from time import struct_time +from typing import List, Optional, Tuple import uuid import zlib @@ -39,7 +40,8 @@ import celery from dateutil.parser import parse from dateutil.relativedelta import relativedelta -from flask import flash, g, Markup, render_template +from flask import flash, Flask, g, Markup, render_template +from flask_appbuilder.security.sqla.models import User from flask_babel import gettext as __ from flask_babel import lazy_gettext as _ from flask_caching import Cache @@ -51,6 +53,7 @@ import sqlalchemy as sa from sqlalchemy import event, exc, select, Text from sqlalchemy.dialects.mysql import MEDIUMTEXT +from sqlalchemy.sql.type_api import Variant from sqlalchemy.types import TEXT, TypeDecorator from superset.exceptions import SupersetException, SupersetTimeoutException @@ -138,11 +141,11 @@ def wrapper(f): return wrapper -def js_string_to_python(item): +def js_string_to_python(item: str) -> Optional[str]: return None if item in ('null', 'undefined') else item -def string_to_num(s): +def string_to_num(s: str): """Converts a string to an int/float Returns ``None`` if it can't be converted @@ -182,7 +185,7 @@ def __init__(self, **args): } -def list_minus(l, minus): +def list_minus(l: List, minus: List) -> List: """Returns l without what is in minus >>> list_minus([1, 2, 3], [2]) @@ -230,7 +233,7 @@ def parse_human_datetime(s): return dttm -def dttm_from_timtuple(d): +def dttm_from_timtuple(d: struct_time) -> datetime: return datetime( d.tm_year, d.tm_mon, d.tm_mday, d.tm_hour, d.tm_min, d.tm_sec) @@ -284,7 +287,7 @@ def default(self, o): return json.JSONEncoder.default(self, o) -def parse_human_timedelta(s): +def parse_human_timedelta(s: str): """ Returns ``datetime.datetime`` from natural language time deltas @@ -349,7 +352,7 @@ def base_json_conv(obj): return '[bytes]' -def json_iso_dttm_ser(obj, 
pessimistic=False): +def json_iso_dttm_ser(obj, pessimistic: Optional[bool] = False): """ json serializer that deals with dates @@ -420,7 +423,7 @@ def error_msg_from_exception(e): return msg or '{}'.format(e) -def markdown(s, markup_wrap=False): +def markdown(s: str, markup_wrap: Optional[bool] = False) -> str: safe_markdown_tags = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'b', 'i', 'strong', 'em', 'tt', 'p', 'br', 'span', 'div', 'blockquote', 'code', 'hr', 'ul', 'ol', @@ -438,7 +441,7 @@ def markdown(s, markup_wrap=False): return s -def readfile(file_path): +def readfile(file_path: str) -> Optional[str]: with open(file_path) as f: content = f.read() return content @@ -677,17 +680,18 @@ def send_MIME_email(e_from, e_to, mime_msg, config, dryrun=False): logging.info(mime_msg.as_string()) -def get_email_address_list(address_string): +def get_email_address_list(address_string: str) -> List[str]: + address_string_list: List[str] = [] if isinstance(address_string, str): if ',' in address_string: - address_string = address_string.split(',') + address_string_list = address_string.split(',') elif '\n' in address_string: - address_string = address_string.split('\n') + address_string_list = address_string.split('\n') elif ';' in address_string: - address_string = address_string.split(';') + address_string_list = address_string.split(';') else: - address_string = [address_string] - return [x.strip() for x in address_string if x.strip()] + address_string_list = [address_string] + return [x.strip() for x in address_string_list if x.strip()] def choicify(values): @@ -695,11 +699,13 @@ def choicify(values): return [(v, v) for v in values] -def setup_cache(app, cache_config): +def setup_cache(app: Flask, cache_config) -> Optional[Cache]: """Setup the flask-cache on a flask app""" if cache_config and cache_config.get('CACHE_TYPE') != 'null': return Cache(app, config=cache_config) + return None + def zlib_compress(data): """ @@ -766,7 +772,7 @@ def to_adhoc(filt, 
expressionType='SIMPLE', clause='where'): return result -def merge_extra_filters(form_data): +def merge_extra_filters(form_data: dict): # extra_filters are temporary/contextual filters (using the legacy constructs) # that are external to the slice definition. We use those for dynamic # interactive filters like the ones emitted by the "Filter Box" visualization. @@ -837,7 +843,7 @@ def get_filter_key(f): del form_data['extra_filters'] -def merge_request_params(form_data, params): +def merge_request_params(form_data: dict, params: dict): url_params = {} for key, value in params.items(): if key in ('form_data', 'r'): @@ -846,12 +852,12 @@ def merge_request_params(form_data, params): form_data['url_params'] = url_params -def get_update_perms_flag(): +def get_update_perms_flag() -> bool: val = os.environ.get('SUPERSET_UPDATE_PERMS') return val.lower() not in ('0', 'false', 'no') if val else True -def user_label(user): +def user_label(user: User) -> Optional[str]: """Given a user ORM FAB object, returns a label""" if user: if user.first_name and user.last_name: @@ -859,6 +865,8 @@ def user_label(user): else: return user.username + return None + def get_or_create_main_db(): from superset import conf, db @@ -887,7 +895,7 @@ def get_main_database(session): ) -def is_adhoc_metric(metric): +def is_adhoc_metric(metric) -> bool: return ( isinstance(metric, dict) and ( @@ -913,7 +921,7 @@ def get_metric_names(metrics): return [get_metric_name(metric) for metric in metrics] -def ensure_path_exists(path): +def ensure_path_exists(path: str): try: os.makedirs(path) except OSError as exc: @@ -997,7 +1005,7 @@ def get_since_until(time_range: Optional[str] = None, return since, until # noqa: T400 -def add_ago_to_since(since): +def add_ago_to_since(since: str) -> str: """ Backwards compatibility hack. Without this slices with since: 7 days will be treated as 7 days in the future. 
@@ -1072,17 +1080,17 @@ def split_adhoc_filters_into_base_filters(fd): fd['filters'] = simple_where_filters -def get_username(): +def get_username() -> Optional[str]: """Get username if within the flask context, otherwise return noffin'""" try: return g.user.username except Exception: - pass + return None -def MediumText(): +def MediumText() -> Variant: return Text().with_variant(MEDIUMTEXT(), 'mysql') -def shortid(): +def shortid() -> str: return '{}'.format(uuid.uuid4())[-12:] diff --git a/superset/views/base.py b/superset/views/base.py index 071f2b335b658..113b9c68e3975 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -19,16 +19,20 @@ import functools import logging import traceback +from typing import Any, Dict from flask import abort, flash, g, get_flashed_messages, redirect, Response from flask_appbuilder import BaseView, ModelView from flask_appbuilder.actions import action +from flask_appbuilder.forms import DynamicForm from flask_appbuilder.models.sqla.filters import BaseFilter from flask_appbuilder.widgets import ListWidget from flask_babel import get_locale from flask_babel import gettext as __ from flask_babel import lazy_gettext as _ +from flask_wtf.form import FlaskForm import simplejson as json +from wtforms.fields.core import Field, UnboundField import yaml from superset import conf, db, get_feature_flags, security_manager @@ -368,3 +372,26 @@ def check_ownership(obj, raise_if_false=True): raise security_exception else: return False + + +def bind_field( + self, + form: DynamicForm, + unbound_field: UnboundField, + options: Dict[Any, Any], + ) -> Field: + """ + Customize how fields are bound by stripping all whitespace. 
+ + :param form: The form + :param unbound_field: The unbound field + :param options: The field options + :returns: The bound field + """ + + filters = unbound_field.kwargs.get('filters', []) + filters.append(lambda x: x.strip() if isinstance(x, str) else x) + return unbound_field.bind(form=form, filters=filters, **options) + + +FlaskForm.Meta.bind_field = bind_field diff --git a/superset/views/core.py b/superset/views/core.py index c599bfe20658d..e22acb7410b63 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -576,7 +576,8 @@ class SliceAsync(SliceModelView): # noqa route_base = '/sliceasync' list_columns = [ 'id', 'slice_link', 'viz_type', 'slice_name', - 'creator', 'modified', 'icons'] + 'creator', 'modified', 'icons', 'changed_on_humanized', + ] label_columns = { 'icons': ' ', 'slice_link': _('Chart'), @@ -592,7 +593,8 @@ class SliceAddView(SliceModelView): # noqa 'id', 'slice_name', 'slice_url', 'edit_url', 'viz_type', 'params', 'description', 'description_markeddown', 'datasource_id', 'datasource_type', 'datasource_name_text', 'datasource_link', - 'owners', 'modified', 'changed_on'] + 'owners', 'modified', 'changed_on', 'changed_on_humanized', + ] appbuilder.add_view_no_menu(SliceAddView) @@ -1487,6 +1489,7 @@ def save_or_overwrite_slice( 'can_overwrite': is_owner(slc, g.user), 'form_data': slc.form_data, 'slice': slc.data, + 'dashboard_id': dash.id if dash else None, } if request.args.get('goto_dash') == 'true': @@ -1587,6 +1590,16 @@ def tables(self, db_id, schema, substr, force_refresh='false'): table_names = [tn for tn in table_names if substr in tn] view_names = [vn for vn in view_names if substr in vn] + if not schema and database.default_schemas: + def get_schema(tbl_or_view_name): + return tbl_or_view_name.split('.')[0] if '.' 
in tbl_or_view_name else None + + user_schema = g.user.email.split('@')[0] + valid_schemas = set(database.default_schemas + [user_schema]) + + table_names = [tn for tn in table_names if get_schema(tn) in valid_schemas] + view_names = [vn for vn in view_names if get_schema(vn) in valid_schemas] + max_items = config.get('MAX_TABLE_NAMES') or len(table_names) total_items = len(table_names) + len(view_names) max_tables = len(table_names) diff --git a/superset/views/schedules.py b/superset/views/schedules.py index 4dae3cbfb7292..6fdae77265219 100644 --- a/superset/views/schedules.py +++ b/superset/views/schedules.py @@ -90,9 +90,12 @@ class EmailScheduleView(SupersetModelView, DeleteMixin): edit_form_extra_fields = add_form_extra_fields def process_form(self, form, is_created): - recipients = form.test_email_recipients.data.strip() or None + if form.test_email_recipients.data: + test_email_recipients = form.test_email_recipients.data.strip() + else: + test_email_recipients = None self._extra_data['test_email'] = form.test_email.data - self._extra_data['test_email_recipients'] = recipients + self._extra_data['test_email_recipients'] = test_email_recipients def pre_add(self, obj): try: @@ -111,7 +114,7 @@ def pre_update(self, obj): def post_add(self, obj): # Schedule a test mail if the user requested for it. 
if self._extra_data['test_email']: - recipients = self._extra_data['test_email_recipients'] + recipients = self._extra_data['test_email_recipients'] or obj.recipients args = (self.schedule_type, obj.id) kwargs = dict(recipients=recipients) schedule_email_report.apply_async(args=args, kwargs=kwargs) diff --git a/superset/viz.py b/superset/viz.py index 57386b9d6c6b9..123c361d1bdcb 100644 --- a/superset/viz.py +++ b/superset/viz.py @@ -954,11 +954,11 @@ def query_obj(self): self.series = form_data.get('series') or self.entity d['row_limit'] = form_data.get('limit') - d['metrics'] = [ + d['metrics'] = list(set([ self.z_metric, self.x_metric, self.y_metric, - ] + ])) if not all(d['metrics'] + [self.entity]): raise Exception(_('Pick a metric for x, y and size')) return d @@ -1112,16 +1112,22 @@ def to_series(self, df, classed='', title_suffix=''): series_title = series_title + (title_suffix,) values = [] + non_nan_cnt = 0 for ds in df.index: if ds in ys: d = { 'x': ds, 'y': ys[ds], } + if not np.isnan(ys[ds]): + non_nan_cnt += 1 else: d = {} values.append(d) + if non_nan_cnt == 0: + continue + d = { 'key': series_title, 'values': values, @@ -1224,7 +1230,9 @@ def get_data(self, df): comparison_type = fd.get('comparison_type') or 'values' df = self.process_data(df) if comparison_type == 'values': - chart_data = self.to_series(df) + # Filter out series with all NaN + chart_data = self.to_series(df.dropna(axis=1, how='all')) + for i, (label, df2) in enumerate(self._extra_chart_data): chart_data.extend( self.to_series( diff --git a/tests/dict_import_export_tests.py b/tests/dict_import_export_tests.py index 50cb0f7cbdc8d..f1f93fa64f89e 100644 --- a/tests/dict_import_export_tests.py +++ b/tests/dict_import_export_tests.py @@ -72,7 +72,7 @@ def create_table( 'params': json.dumps(params), 'columns': [{'column_name': c} for c in cols_names], - 'metrics': [{'metric_name': c} for c in metric_names], + 'metrics': [{'metric_name': c, 'expression': ''} for c in metric_names], } table 
= SqlaTable( @@ -84,7 +84,7 @@ def create_table( for col_name in cols_names: table.columns.append(TableColumn(column_name=col_name)) for metric_name in metric_names: - table.metrics.append(SqlMetric(metric_name=metric_name)) + table.metrics.append(SqlMetric(metric_name=metric_name, expression='')) return table, dict_rep def create_druid_datasource( @@ -98,7 +98,7 @@ def create_druid_datasource( 'id': id, 'params': json.dumps(params), 'columns': [{'column_name': c} for c in cols_names], - 'metrics': [{'metric_name': c} for c in metric_names], + 'metrics': [{'metric_name': c, 'json': '{}'} for c in metric_names], } datasource = DruidDatasource( diff --git a/tests/import_export_tests.py b/tests/import_export_tests.py index ad1aa908682f0..f6d04426d73ff 100644 --- a/tests/import_export_tests.py +++ b/tests/import_export_tests.py @@ -113,7 +113,7 @@ def create_table( table.columns.append( TableColumn(column_name=col_name)) for metric_name in metric_names: - table.metrics.append(SqlMetric(metric_name=metric_name)) + table.metrics.append(SqlMetric(metric_name=metric_name, expression='')) return table def create_druid_datasource( @@ -130,7 +130,7 @@ def create_druid_datasource( DruidColumn(column_name=col_name)) for metric_name in metric_names: datasource.metrics.append(DruidMetric( - metric_name=metric_name)) + metric_name=metric_name, json='{}')) return datasource def get_slice(self, slc_id): diff --git a/tests/sqllab_tests.py b/tests/sqllab_tests.py index 5fe9ef1862189..dc86866a9960a 100644 --- a/tests/sqllab_tests.py +++ b/tests/sqllab_tests.py @@ -307,7 +307,7 @@ def test_sqllab_viz(self): 'columns': [{ 'is_date': False, 'type': 'STRING', - 'nam:qe': 'viz_type', + 'name': 'viz_type', 'is_dim': True, }, { 'is_date': False, diff --git a/tox.ini b/tox.ini index 5eb9db5df7624..dbe2baaca3aff 100644 --- a/tox.ini +++ b/tox.ini @@ -29,6 +29,8 @@ exclude = superset/templates venv ignore = + E121 + E125 FI12 FI15 FI16 From bfd685e5a2c28818d6207f4b3ae4e37d1b559117 Mon Sep 17 
00:00:00 2001 From: Kim Truong <47833996+khtruong@users.noreply.github.com> Date: Mon, 29 Apr 2019 21:57:28 -0700 Subject: [PATCH 2/9] feat: see Presto row and array data types (#7391) * feat: see Presto row and array data types * fix: address PR comments * fix: lint and build issues * fix: add types --- superset/db_engine_specs.py | 194 +++++++++++++++++- superset/models/core.py | 2 +- superset/models/sql_types/presto_sql_types.py | 88 ++++++++ tests/db_engine_specs_test.py | 61 ++++++ 4 files changed, 339 insertions(+), 6 deletions(-) create mode 100644 superset/models/sql_types/presto_sql_types.py diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index 620ac4ee971cf..56766749bcecc 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -41,8 +41,11 @@ from flask_babel import lazy_gettext as _ import pandas import sqlalchemy as sqla -from sqlalchemy import Column, select +from sqlalchemy import Column, select, types from sqlalchemy.engine import create_engine +from sqlalchemy.engine.base import Engine +from sqlalchemy.engine.reflection import Inspector +from sqlalchemy.engine.result import RowProxy from sqlalchemy.engine.url import make_url from sqlalchemy.sql import quoted_name, text from sqlalchemy.sql.expression import TextAsFrom @@ -52,6 +55,7 @@ from superset import app, conf, db, sql_parse from superset.exceptions import SupersetTemplateException +from superset.models.sql_types.presto_sql_types import type_map as presto_type_map from superset.utils import core as utils QueryStatus = utils.QueryStatus @@ -105,7 +109,7 @@ class BaseEngineSpec(object): """Abstract class for database engine specific configurations""" engine = 'base' # str as defined in sqlalchemy.engine.engine - time_grain_functions = {} + time_grain_functions: dict = {} time_groupby_inline = False limit_method = LimitMethod.FORCE_LIMIT time_secondary_columns = False @@ -113,8 +117,8 @@ class BaseEngineSpec(object): allows_subquery = True 
supports_column_aliases = True force_column_alias_quotes = False - arraysize = None - max_column_name_length = None + arraysize = 0 + max_column_name_length = 0 @classmethod def get_time_expr(cls, expr, pdf, time_grain, grain): @@ -351,6 +355,10 @@ def get_table_names(cls, inspector, schema): def get_view_names(cls, inspector, schema): return sorted(inspector.get_view_names(schema)) + @classmethod + def get_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list: + return inspector.get_columns(table_name, schema) + @classmethod def where_latest_partition( cls, table_name, schema, database, qry, columns=None): @@ -735,7 +743,7 @@ class MySQLEngineSpec(BaseEngineSpec): 'INTERVAL DAYOFWEEK(DATE_SUB({col}, INTERVAL 1 DAY)) - 1 DAY))', } - type_code_map = {} # loaded from get_datatype only if needed + type_code_map: dict = {} # loaded from get_datatype only if needed @classmethod def convert_dttm(cls, target_type, dttm): @@ -814,6 +822,178 @@ def get_view_names(cls, inspector, schema): """ return [] + @classmethod + def _create_column_info(cls, column: RowProxy, name: str, data_type: str) -> dict: + """ + Create column info object + :param column: column object + :param name: column name + :param data_type: column data type + :return: column info object + """ + return { + 'name': name, + 'type': data_type, + # newer Presto no longer includes this column + 'nullable': getattr(column, 'Null', True), + 'default': None, + } + + @classmethod + def _get_full_name(cls, names: list) -> str: + """ + Get the full column name + :param names: list of all individual column names + :return: full column name + """ + return '.'.join(row_type[0] for row_type in names if row_type[0] is not None) + + @classmethod + def _has_nested_data_types(cls, component_type: str) -> bool: + """ + Check if string contains a data type. 
We determine if there is a data type by + whitespace or multiple data types by commas + :param component_type: data type + :return: boolean + """ + comma_regex = r',(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)' + white_space_regex = r'\s(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)' + return re.search(comma_regex, component_type) is not None \ + or re.search(white_space_regex, component_type) is not None + + @classmethod + def _split_data_type(cls, data_type: str, delimiter: str) -> list: + """ + Split data type based on given delimiter. Do not split the string if the + delimiter is enclosed in quotes + :param data_type: data type + :param delimiter: string separator (i.e. open parenthesis, closed parenthesis, + comma, whitespace) + :return:list of strings after breaking it by the delimiter + """ + return re.split( + r'{}(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)'.format(delimiter), data_type) + + @classmethod + def _parse_structural_column(cls, column: RowProxy, result: list) -> None: + """ + Parse a row or array column + :param column: column + :param result: list tracking the results + """ + full_data_type = '{} {}'.format(column.Column, column.Type) + # split on open parenthesis ( to get the structural + # data type and its component types + data_types = cls._split_data_type(full_data_type, r'\(') + stack: list = [] + for data_type in data_types: + # split on closed parenthesis ) to track which component + # types belong to what structural data type + inner_types = cls._split_data_type(data_type, r'\)') + for inner_type in inner_types: + # We have finished parsing multiple structural data types + if not inner_type and len(stack) > 0: + stack.pop() + elif cls._has_nested_data_types(inner_type): + # split on comma , to get individual data types + single_fields = cls._split_data_type(inner_type, ', ') + for single_field in single_fields: + # If component type starts with a comma, the first single field + # will be an empty string. Disregard this empty string. 
+ if not single_field: + continue + # split on whitespace to get field name and data type + field_info = cls._split_data_type(single_field, r'\s') + # check if there is a structural data type within + # overall structural data type + if field_info[1] == 'array' or field_info[1] == 'row': + stack.append((field_info[0], field_info[1])) + full_parent_path = cls._get_full_name(stack) + result.append(cls._create_column_info( + column, full_parent_path, + presto_type_map[field_info[1]]())) + else: # otherwise this field is a basic data type + full_parent_path = cls._get_full_name(stack) + column_name = '{}.{}'.format(full_parent_path, field_info[0]) + result.append(cls._create_column_info( + column, column_name, presto_type_map[field_info[1]]())) + # If the component type ends with a structural data type, do not pop + # the stack. We have run across a structural data type within the + # overall structural data type. Otherwise, we have completely parsed + # through the entire structural data type and can move on. + if not (inner_type.endswith('array') or inner_type.endswith('row')): + stack.pop() + # We have an array of row objects (i.e. array(row(...))) + elif 'array' == inner_type or 'row' == inner_type: + # Push a dummy object to represent the structural data type + stack.append((None, inner_type)) + # We have an array of a basic data types(i.e. array(varchar)). + elif len(stack) > 0: + # Because it is an array of a basic data type. We have finished + # parsing the structural data type and can move on. 
+ stack.pop() + + @classmethod + def _show_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list: + """ + Show presto column names + :param inspector: object that performs database schema inspection + :param table_name: table name + :param schema: schema name + :return: list of column objects + """ + quote = inspector.engine.dialect.identifier_preparer.quote_identifier + full_table = quote(table_name) + if schema: + full_table = '{}.{}'.format(quote(schema), full_table) + columns = inspector.bind.execute('SHOW COLUMNS FROM {}'.format(full_table)) + return columns + + @classmethod + def get_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list: + """ + Get columns from a Presto data source. This includes handling row and + array data types + :param inspector: object that performs database schema inspection + :param table_name: table name + :param schema: schema name + :return: a list of results that contain column info + (i.e. column name and data type) + """ + columns = cls._show_columns(inspector, table_name, schema) + result: list = [] + for column in columns: + try: + # parse column if it is a row or array + if 'array' in column.Type or 'row' in column.Type: + cls._parse_structural_column(column, result) + continue + else: # otherwise column is a basic data type + column_type = presto_type_map[column.Type]() + except KeyError: + print('Did not recognize type {} of column {}'.format( + column.Type, column.Column)) + column_type = types.NullType + result.append(cls._create_column_info(column, column.Column, column_type)) + return result + + @classmethod + def select_star(cls, my_db, table_name: str, engine: Engine, schema: str = None, + limit: int = 100, show_cols: bool = False, indent: bool = True, + latest_partition: bool = True, cols: list = []) -> str: + """ + Temporary method until we have a function that can handle row and array columns + """ + presto_cols = cols + if show_cols: + dot_regex = 
r'\.(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)' + presto_cols = [ + col for col in presto_cols if re.search(dot_regex, col['name']) is None] + return super(PrestoEngineSpec, cls).select_star( + my_db, table_name, engine, schema, limit, + show_cols, indent, latest_partition, presto_cols, + ) + @classmethod def adjust_database_uri(cls, uri, selected_schema=None): database = uri.database @@ -1323,6 +1503,10 @@ def handle_cursor(cls, cursor, query, session): time.sleep(hive_poll_interval) polled = cursor.poll() + @classmethod + def get_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list: + return inspector.get_columns(table_name, schema) + @classmethod def where_latest_partition( cls, table_name, schema, database, qry, columns=None): diff --git a/superset/models/core.py b/superset/models/core.py index fb5850c0fa1b0..e16a234bfd723 100644 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -1075,7 +1075,7 @@ def get_table(self, table_name, schema=None): autoload_with=self.get_sqla_engine()) def get_columns(self, table_name, schema=None): - return self.inspector.get_columns(table_name, schema) + return self.db_engine_spec.get_columns(self.inspector, table_name, schema) def get_indexes(self, table_name, schema=None): return self.inspector.get_indexes(table_name, schema) diff --git a/superset/models/sql_types/presto_sql_types.py b/superset/models/sql_types/presto_sql_types.py new file mode 100644 index 0000000000000..021c15cffa51e --- /dev/null +++ b/superset/models/sql_types/presto_sql_types.py @@ -0,0 +1,88 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from sqlalchemy import types +from sqlalchemy.sql.sqltypes import Integer +from sqlalchemy.sql.type_api import TypeEngine + + +# _compiler_dispatch is defined to help with type compilation + +class TinyInteger(Integer): + """ + A type for tiny ``int`` integers. + """ + def _compiler_dispatch(self, visitor, **kw): + return 'TINYINT' + + +class Interval(TypeEngine): + """ + A type for intervals. + """ + def _compiler_dispatch(self, visitor, **kw): + return 'INTERVAL' + + +class Array(TypeEngine): + + """ + A type for arrays. + """ + def _compiler_dispatch(self, visitor, **kw): + return 'ARRAY' + + +class Map(TypeEngine): + + """ + A type for maps. + """ + def _compiler_dispatch(self, visitor, **kw): + return 'MAP' + + +class Row(TypeEngine): + + """ + A type for rows. 
+ """ + def _compiler_dispatch(self, visitor, **kw): + return 'ROW' + + +type_map = { + 'boolean': types.Boolean, + 'tinyint': TinyInteger, + 'smallint': types.SmallInteger, + 'integer': types.Integer, + 'bigint': types.BigInteger, + 'real': types.Float, + 'double': types.Float, + 'decimal': types.DECIMAL, + 'varchar': types.String, + 'char': types.CHAR, + 'varbinary': types.VARBINARY, + 'JSON': types.JSON, + 'date': types.DATE, + 'time': types.Time, + 'timestamp': types.TIMESTAMP, + 'interval': Interval, + 'array': Array, + 'map': Map, + 'row': Row, +} diff --git a/tests/db_engine_specs_test.py b/tests/db_engine_specs_test.py index e1286076bb623..ef9d6bc17da1a 100644 --- a/tests/db_engine_specs_test.py +++ b/tests/db_engine_specs_test.py @@ -19,6 +19,7 @@ from sqlalchemy import column, select, table from sqlalchemy.dialects.mssql import pymssql +from sqlalchemy.engine.result import RowProxy from sqlalchemy.types import String, UnicodeText from superset import db_engine_specs @@ -322,6 +323,66 @@ def test_engine_time_grain_validity(self): def test_presto_get_view_names_return_empty_list(self): self.assertEquals([], PrestoEngineSpec.get_view_names(mock.ANY, mock.ANY)) + def verify_presto_column(self, column, expected_results): + inspector = mock.Mock() + inspector.engine.dialect.identifier_preparer.quote_identifier = mock.Mock() + keymap = {'Column': (None, None, 0), + 'Type': (None, None, 1), + 'Null': (None, None, 2)} + row = RowProxy(mock.Mock(), column, [None, None, None, None], keymap) + inspector.bind.execute = mock.Mock(return_value=[row]) + results = PrestoEngineSpec.get_columns(inspector, '', '') + self.assertEqual(len(expected_results), len(results)) + for expected_result, result in zip(expected_results, results): + self.assertEqual(expected_result[0], result['name']) + self.assertEqual(expected_result[1], str(result['type'])) + + def test_presto_get_column(self): + presto_column = ('column_name', 'boolean', '') + expected_results = [('column_name', 
'BOOLEAN')] + self.verify_presto_column(presto_column, expected_results) + + def test_presto_get_simple_row_column(self): + presto_column = ('column_name', 'row(nested_obj double)', '') + expected_results = [ + ('column_name', 'ROW'), + ('column_name.nested_obj', 'FLOAT')] + self.verify_presto_column(presto_column, expected_results) + + def test_presto_get_simple_row_column_with_tricky_name(self): + presto_column = ('column_name', 'row("Field Name(Tricky, Name)" double)', '') + expected_results = [ + ('column_name', 'ROW'), + ('column_name."Field Name(Tricky, Name)"', 'FLOAT')] + self.verify_presto_column(presto_column, expected_results) + + def test_presto_get_simple_array_column(self): + presto_column = ('column_name', 'array(double)', '') + expected_results = [('column_name', 'ARRAY')] + self.verify_presto_column(presto_column, expected_results) + + def test_presto_get_row_within_array_within_row_column(self): + presto_column = ( + 'column_name', + 'row(nested_array array(row(nested_row double)), nested_obj double)', '') + expected_results = [ + ('column_name', 'ROW'), + ('column_name.nested_array', 'ARRAY'), + ('column_name.nested_array.nested_row', 'FLOAT'), + ('column_name.nested_obj', 'FLOAT'), + ] + self.verify_presto_column(presto_column, expected_results) + + def test_presto_get_array_within_row_within_array_column(self): + presto_column = ( + 'column_name', + 'array(row(nested_array array(double), nested_obj double))', '') + expected_results = [ + ('column_name', 'ARRAY'), + ('column_name.nested_array', 'ARRAY'), + ('column_name.nested_obj', 'FLOAT')] + self.verify_presto_column(presto_column, expected_results) + def test_hive_get_view_names_return_empty_list(self): self.assertEquals([], HiveEngineSpec.get_view_names(mock.ANY, mock.ANY)) From 51c7f92c23e081f661de2d2ecdcd793b5785b58c Mon Sep 17 00:00:00 2001 From: Dave Smith Date: Wed, 1 May 2019 08:17:59 -0700 Subject: [PATCH 3/9] Incorporate feedback from initial PR (prematurely merged to 
lyft-release-sp8) (#7415) * add stronger type hints where possible * fix: lint issues and add select_star func in Hive * add missing pkg init * fix: build issues * fix: pylint issues * fix: use logging instead of print --- superset/db_engine_specs.py | 40 ++++++++++++------- superset/models/sql_types/__init__.py | 16 ++++++++ superset/models/sql_types/presto_sql_types.py | 33 +++++++++++---- 3 files changed, 67 insertions(+), 22 deletions(-) create mode 100644 superset/models/sql_types/__init__.py diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index 56766749bcecc..32fc1aa79c6b7 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -36,6 +36,7 @@ import re import textwrap import time +from typing import List, Tuple from flask import g from flask_babel import lazy_gettext as _ @@ -840,13 +841,13 @@ def _create_column_info(cls, column: RowProxy, name: str, data_type: str) -> dic } @classmethod - def _get_full_name(cls, names: list) -> str: + def _get_full_name(cls, names: List[Tuple[str, str]]) -> str: """ Get the full column name :param names: list of all individual column names :return: full column name """ - return '.'.join(row_type[0] for row_type in names if row_type[0] is not None) + return '.'.join(column[0] for column in names if column[0]) @classmethod def _has_nested_data_types(cls, component_type: str) -> bool: @@ -862,20 +863,20 @@ def _has_nested_data_types(cls, component_type: str) -> bool: or re.search(white_space_regex, component_type) is not None @classmethod - def _split_data_type(cls, data_type: str, delimiter: str) -> list: + def _split_data_type(cls, data_type: str, delimiter: str) -> List[str]: """ Split data type based on given delimiter. Do not split the string if the delimiter is enclosed in quotes :param data_type: data type :param delimiter: string separator (i.e. 
open parenthesis, closed parenthesis, comma, whitespace) - :return:list of strings after breaking it by the delimiter + :return: list of strings after breaking it by the delimiter """ return re.split( r'{}(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)'.format(delimiter), data_type) @classmethod - def _parse_structural_column(cls, column: RowProxy, result: list) -> None: + def _parse_structural_column(cls, column: RowProxy, result: List[dict]) -> None: """ Parse a row or array column :param column: column @@ -885,7 +886,7 @@ def _parse_structural_column(cls, column: RowProxy, result: list) -> None: # split on open parenthesis ( to get the structural # data type and its component types data_types = cls._split_data_type(full_data_type, r'\(') - stack: list = [] + stack: List[Tuple[str, str]] = [] for data_type in data_types: # split on closed parenthesis ) to track which component # types belong to what structural data type @@ -926,7 +927,7 @@ def _parse_structural_column(cls, column: RowProxy, result: list) -> None: # We have an array of row objects (i.e. array(row(...))) elif 'array' == inner_type or 'row' == inner_type: # Push a dummy object to represent the structural data type - stack.append((None, inner_type)) + stack.append(('', inner_type)) # We have an array of a basic data types(i.e. array(varchar)). elif len(stack) > 0: # Because it is an array of a basic data type. 
We have finished @@ -934,7 +935,8 @@ def _parse_structural_column(cls, column: RowProxy, result: list) -> None: stack.pop() @classmethod - def _show_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list: + def _show_columns( + cls, inspector: Inspector, table_name: str, schema: str) -> List[RowProxy]: """ Show presto column names :param inspector: object that performs database schema inspection @@ -950,7 +952,8 @@ def _show_columns(cls, inspector: Inspector, table_name: str, schema: str) -> li return columns @classmethod - def get_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list: + def get_columns( + cls, inspector: Inspector, table_name: str, schema: str) -> List[dict]: """ Get columns from a Presto data source. This includes handling row and array data types @@ -961,7 +964,7 @@ def get_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list (i.e. column name and data type) """ columns = cls._show_columns(inspector, table_name, schema) - result: list = [] + result: List[dict] = [] for column in columns: try: # parse column if it is a row or array @@ -971,7 +974,7 @@ def get_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list else: # otherwise column is a basic data type column_type = presto_type_map[column.Type]() except KeyError: - print('Did not recognize type {} of column {}'.format( + logging.info('Did not recognize type {} of column {}'.format( column.Type, column.Column)) column_type = types.NullType result.append(cls._create_column_info(column, column.Column, column_type)) @@ -980,7 +983,7 @@ def get_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list @classmethod def select_star(cls, my_db, table_name: str, engine: Engine, schema: str = None, limit: int = 100, show_cols: bool = False, indent: bool = True, - latest_partition: bool = True, cols: list = []) -> str: + latest_partition: bool = True, cols: List[dict] = []) -> str: """ Temporary method until we 
have a function that can handle row and array columns """ @@ -989,7 +992,7 @@ def select_star(cls, my_db, table_name: str, engine: Engine, schema: str = None, dot_regex = r'\.(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)' presto_cols = [ col for col in presto_cols if re.search(dot_regex, col['name']) is None] - return super(PrestoEngineSpec, cls).select_star( + return BaseEngineSpec.select_star( my_db, table_name, engine, schema, limit, show_cols, indent, latest_partition, presto_cols, ) @@ -1504,7 +1507,8 @@ def handle_cursor(cls, cursor, query, session): polled = cursor.poll() @classmethod - def get_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list: + def get_columns( + cls, inspector: Inspector, table_name: str, schema: str) -> List[dict]: return inspector.get_columns(table_name, schema) @classmethod @@ -1538,6 +1542,14 @@ def _partition_query( cls, table_name, limit=0, order_by=None, filters=None): return f'SHOW PARTITIONS {table_name}' + @classmethod + def select_star(cls, my_db, table_name: str, engine: Engine, schema: str = None, + limit: int = 100, show_cols: bool = False, indent: bool = True, + latest_partition: bool = True, cols: List[dict] = []) -> str: + return BaseEngineSpec.select_star( + my_db, table_name, engine, schema, limit, + show_cols, indent, latest_partition, cols) + @classmethod def modify_url_for_impersonation(cls, url, impersonate_user, username): """ diff --git a/superset/models/sql_types/__init__.py b/superset/models/sql_types/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/superset/models/sql_types/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/models/sql_types/presto_sql_types.py b/superset/models/sql_types/presto_sql_types.py index 021c15cffa51e..5dfe1479e170a 100644 --- a/superset/models/sql_types/presto_sql_types.py +++ b/superset/models/sql_types/presto_sql_types.py @@ -26,7 +26,11 @@ class TinyInteger(Integer): """ A type for tiny ``int`` integers. """ - def _compiler_dispatch(self, visitor, **kw): + def python_type(self): + return int + + @classmethod + def _compiler_dispatch(cls, _visitor, **_kw): return 'TINYINT' @@ -34,34 +38,47 @@ class Interval(TypeEngine): """ A type for intervals. """ - def _compiler_dispatch(self, visitor, **kw): + def python_type(self): + return None + + @classmethod + def _compiler_dispatch(cls, _visitor, **_kw): return 'INTERVAL' class Array(TypeEngine): - """ A type for arrays. """ - def _compiler_dispatch(self, visitor, **kw): + def python_type(self): + return list + + @classmethod + def _compiler_dispatch(cls, _visitor, **_kw): return 'ARRAY' class Map(TypeEngine): - """ A type for maps. """ - def _compiler_dispatch(self, visitor, **kw): + def python_type(self): + return dict + + @classmethod + def _compiler_dispatch(cls, _visitor, **_kw): return 'MAP' class Row(TypeEngine): - """ A type for rows. 
""" - def _compiler_dispatch(self, visitor, **kw): + def python_type(self): + return None + + @classmethod + def _compiler_dispatch(cls, _visitor, **_kw): return 'ROW' From bbb0b7544be043bef7fcdb4f64d727fdf786e7aa Mon Sep 17 00:00:00 2001 From: khtruong Date: Thu, 2 May 2019 14:18:47 -0700 Subject: [PATCH 4/9] feat: view presto row objects in data grid --- .../FilterableTable/FilterableTable.jsx | 2 +- .../FilterableTable/FilterableTableStyles.css | 7 ++ superset/db_engine_specs.py | 78 +++++++++++++++++-- tests/db_engine_specs_test.py | 23 ++++++ 4 files changed, 104 insertions(+), 6 deletions(-) diff --git a/superset/assets/src/components/FilterableTable/FilterableTable.jsx b/superset/assets/src/components/FilterableTable/FilterableTable.jsx index 72a22c7ba20c3..3ef03ba8ae9a0 100644 --- a/superset/assets/src/components/FilterableTable/FilterableTable.jsx +++ b/superset/assets/src/components/FilterableTable/FilterableTable.jsx @@ -138,7 +138,7 @@ export default class FilterableTable extends PureComponent { headerRenderer({ dataKey, label, sortBy, sortDirection }) { return ( -
+
{label} {sortBy === dataKey && diff --git a/superset/assets/src/components/FilterableTable/FilterableTableStyles.css b/superset/assets/src/components/FilterableTable/FilterableTableStyles.css index 5be4a369499d2..d058c3db97cbb 100644 --- a/superset/assets/src/components/FilterableTable/FilterableTableStyles.css +++ b/superset/assets/src/components/FilterableTable/FilterableTableStyles.css @@ -72,3 +72,10 @@ } .even-row { background: #f2f2f2; } .odd-row { background: #ffffff; } +.header-style { + direction: rtl; + overflow: hidden; + text-align: left; + text-overflow: ellipsis; + white-space: nowrap; +} \ No newline at end of file diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index 32fc1aa79c6b7..582399cc66203 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -49,6 +49,7 @@ from sqlalchemy.engine.result import RowProxy from sqlalchemy.engine.url import make_url from sqlalchemy.sql import quoted_name, text +from sqlalchemy.sql.expression import ColumnClause from sqlalchemy.sql.expression import TextAsFrom from sqlalchemy.types import String, UnicodeText import sqlparse @@ -980,19 +981,82 @@ def get_columns( result.append(cls._create_column_info(column, column.Column, column_type)) return result + @classmethod + def _is_column_name_quoted(cls, column_name: str) -> bool: + """ + Check if column name is in quotes + :param column_name: column name + :return: boolean + """ + return column_name.startswith('"') and column_name.endswith('"') + + @classmethod + def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]: + """ + Format column clauses where names are in quotes and labels are specified + :param cols: columns + :return: column clauses + """ + column_clauses = [] + dot_regex = r'\.(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)' + for col in cols: + # get individual column names + col_names = re.split(dot_regex, col['name']) + # quote each column name if it is not already quoted + for index, col_name in enumerate(col_names): 
+ if not cls._is_column_name_quoted(col_name): + col_names[index] = '"{}"'.format(col_name) + quoted_col_name = '.'.join(col_names) + # create column clause in the format "name"."name" AS "name.name" + column_clause = sqla.literal_column(quoted_col_name).label(col['name']) + column_clauses.append(column_clause) + return column_clauses + + @classmethod + def _filter_presto_cols(cls, cols: List[dict]) -> List[dict]: + """ + We want to filter out columns that correspond to array content because you cannot + select array content without an index, which will lead to a large and complicated + query. We know which columns to skip because cols is a list provided to us in a + specific order where a structural column is positioned right before its content. + + Example: Column Name: ColA, Column Data Type: array(row(nest_obj int)) + cols = [ ..., ColA, ColA.nest_obj, ... ] + + When we run across an array, check if subsequent column names start with the + array name and skip them. + :param cols: columns + :return: filtered list of columns + """ + filtered_cols = [] + curr_array_col_name = '' + for col in cols: + # col corresponds to an array's content and should be skipped + if curr_array_col_name and col['name'].startswith(curr_array_col_name): + continue + # col is an array so we need to check if subsequent + # columns correspond to the array's contents + elif str(col['type']) == 'ARRAY': + curr_array_col_name = col['name'] + filtered_cols.append(col) + else: + curr_array_col_name = '' + filtered_cols.append(col) + return filtered_cols + @classmethod def select_star(cls, my_db, table_name: str, engine: Engine, schema: str = None, limit: int = 100, show_cols: bool = False, indent: bool = True, latest_partition: bool = True, cols: List[dict] = []) -> str: """ - Temporary method until we have a function that can handle row and array columns + Include selecting properties of row objects. 
We cannot easily break arrays into + rows, so render the whole array in its own row and skip columns that correspond + to an array's contents. """ presto_cols = cols if show_cols: - dot_regex = r'\.(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)' - presto_cols = [ - col for col in presto_cols if re.search(dot_regex, col['name']) is None] - return BaseEngineSpec.select_star( + presto_cols = cls._filter_presto_cols(cols) + return super(PrestoEngineSpec, cls).select_star( my_db, table_name, engine, schema, limit, show_cols, indent, latest_partition, presto_cols, ) @@ -1526,6 +1590,10 @@ def where_latest_partition( return qry.where(Column(col_name) == value) return False + @classmethod + def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]: + return BaseEngineSpec._get_fields(cols) + @classmethod def latest_sub_partition(cls, table_name, schema, database, **kwargs): # TODO(bogdan): implement` diff --git a/tests/db_engine_specs_test.py b/tests/db_engine_specs_test.py index ef9d6bc17da1a..2a22c590ed69c 100644 --- a/tests/db_engine_specs_test.py +++ b/tests/db_engine_specs_test.py @@ -383,6 +383,29 @@ def test_presto_get_array_within_row_within_array_column(self): ('column_name.nested_obj', 'FLOAT')] self.verify_presto_column(presto_column, expected_results) + def test_presto_get_fields(self): + cols = [ + {'name': 'column'}, + {'name': 'column.nested_obj'}, + {'name': 'column."quoted.nested obj"'}] + actual_results = PrestoEngineSpec._get_fields(cols) + expected_results = [ + {'name': '"column"', 'label': 'column'}, + {'name': '"column"."nested_obj"', 'label': 'column.nested_obj'}, + {'name': '"column"."quoted.nested obj"', + 'label': 'column."quoted.nested obj"'}] + for actual_result, expected_result in zip(actual_results, expected_results): + self.assertEqual(actual_result.element.name, expected_result['name']) + self.assertEqual(actual_result.name, expected_result['label']) + + def test_presto_filter_presto_cols(self): + cols = [ + {'name': 'column', 'type': 'ARRAY'}, + 
{'name': 'column.nested_obj', 'type': 'FLOAT'}] + actual_results = PrestoEngineSpec._filter_presto_cols(cols) + expected_results = [cols[0]] + self.assertEqual(actual_results, expected_results) + def test_hive_get_view_names_return_empty_list(self): self.assertEquals([], HiveEngineSpec.get_view_names(mock.ANY, mock.ANY)) From ff6c208d7219b2c0348fa4d030458868980ca3ce Mon Sep 17 00:00:00 2001 From: khtruong Date: Fri, 3 May 2019 13:42:45 -0700 Subject: [PATCH 5/9] fix: address feedback --- .../FilterableTable/FilterableTableStyles.css | 2 -- superset/db_engine_specs.py | 28 +++++++++++++++---- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/superset/assets/src/components/FilterableTable/FilterableTableStyles.css b/superset/assets/src/components/FilterableTable/FilterableTableStyles.css index d058c3db97cbb..7a0d3ba0ea7d3 100644 --- a/superset/assets/src/components/FilterableTable/FilterableTableStyles.css +++ b/superset/assets/src/components/FilterableTable/FilterableTableStyles.css @@ -73,9 +73,7 @@ .even-row { background: #f2f2f2; } .odd-row { background: #ffffff; } .header-style { - direction: rtl; overflow: hidden; - text-align: left; text-overflow: ellipsis; white-space: nowrap; } \ No newline at end of file diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index 582399cc66203..6923a11665ec5 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -998,7 +998,15 @@ def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]: :return: column clauses """ column_clauses = [] - dot_regex = r'\.(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)' + # Column names are separated by periods. This regex will find periods in a string + # if they are not enclosed in quotes because if a period is enclosed in quotes, + # then that period is part of a column name. + dot_pattern = r"""\. 
# split on period + (?= # look ahead + (?: # create non-capture group + [^\"]*\"[^\"]*\" # two quotes + )*[^\"]*$) # end regex""" + dot_regex = re.compile(dot_pattern, re.VERBOSE) for col in cols: # get individual column names col_names = re.split(dot_regex, col['name']) @@ -1006,7 +1014,9 @@ def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]: for index, col_name in enumerate(col_names): if not cls._is_column_name_quoted(col_name): col_names[index] = '"{}"'.format(col_name) - quoted_col_name = '.'.join(col_names) + quoted_col_name = '.'.join( + col_name if cls._is_column_name_quoted(col_name) else f'"{col_name}"' + for col_name in col_names) # create column clause in the format "name"."name" AS "name.name" column_clause = sqla.literal_column(quoted_col_name).label(col['name']) column_clauses.append(column_clause) @@ -1015,10 +1025,16 @@ def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]: @classmethod def _filter_presto_cols(cls, cols: List[dict]) -> List[dict]: """ - We want to filter out columns that correspond to array content because you cannot - select array content without an index, which will lead to a large and complicated - query. We know which columns to skip because cols is a list provided to us in a - specific order where a structural column is positioned right before its content. + We want to filter out columns that correspond to array content because expanding + arrays would require us to use unnest and join. This can lead to a large, + complicated, and slow query. + + Example: select array_content + from TABLE + cross join UNNEST(array_column) as t(array_content); + + We know which columns to skip because cols is a list provided to us in a specific + order where a structural column is positioned right before its content. Example: Column Name: ColA, Column Data Type: array(row(nest_obj int)) cols = [ ..., ColA, ColA.nest_obj, ... 
] From b1aface05f9a3315719d56da092a9eed366d85af Mon Sep 17 00:00:00 2001 From: khtruong Date: Fri, 3 May 2019 13:44:31 -0700 Subject: [PATCH 6/9] fix: spacing --- superset/db_engine_specs.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index 6923a11665ec5..5fed48049107f 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -1002,10 +1002,10 @@ def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]: # if they are not enclosed in quotes because if a period is enclosed in quotes, # then that period is part of a column name. dot_pattern = r"""\. # split on period - (?= # look ahead - (?: # create non-capture group - [^\"]*\"[^\"]*\" # two quotes - )*[^\"]*$) # end regex""" + (?= # look ahead + (?: # create non-capture group + [^\"]*\"[^\"]*\" # two quotes + )*[^\"]*$) # end regex""" dot_regex = re.compile(dot_pattern, re.VERBOSE) for col in cols: # get individual column names From 59a81e105f8cdcecdc342936cf8b5babfce34697 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Fri, 3 May 2019 14:25:36 -0700 Subject: [PATCH 7/9] Workaround for no results returned (#7442) --- .../assets/src/SqlLab/components/QueryAutoRefresh.jsx | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/superset/assets/src/SqlLab/components/QueryAutoRefresh.jsx b/superset/assets/src/SqlLab/components/QueryAutoRefresh.jsx index 541ce2d65d6ba..13834cb9d919a 100644 --- a/superset/assets/src/SqlLab/components/QueryAutoRefresh.jsx +++ b/superset/assets/src/SqlLab/components/QueryAutoRefresh.jsx @@ -41,10 +41,18 @@ class QueryAutoRefresh extends React.PureComponent { const { queries, queriesLastUpdate } = this.props; const now = new Date().getTime(); + // due to a race condition, queries can be marked as successful before the + // results key is set; this is a workaround until we fix the underlying + // problem + const isQueryRunning = q => ( + ['running', 'started', 
'pending', 'fetching'].indexOf(q.state) >= 0 || + (q.state === 'success' && q.resultsKey === null) + ); + return ( queriesLastUpdate > 0 && Object.values(queries).some( - q => ['running', 'started', 'pending', 'fetching'].indexOf(q.state) >= 0 && + q => isQueryRunning(q) && now - q.startDttm < MAX_QUERY_AGE_TO_POLL, ) ); From 2a003c1cd97b330d056982dd2f2ea163324e7950 Mon Sep 17 00:00:00 2001 From: Kim Truong <47833996+khtruong@users.noreply.github.com> Date: Fri, 3 May 2019 14:59:42 -0700 Subject: [PATCH 8/9] feat: view presto row objects in data grid (#7436) * feat: view presto row objects in data grid * fix: address feedback * fix: spacing --- .../FilterableTable/FilterableTable.jsx | 2 +- .../FilterableTable/FilterableTableStyles.css | 5 + superset/db_engine_specs.py | 94 ++++++++++++++++++- tests/db_engine_specs_test.py | 23 +++++ 4 files changed, 118 insertions(+), 6 deletions(-) diff --git a/superset/assets/src/components/FilterableTable/FilterableTable.jsx b/superset/assets/src/components/FilterableTable/FilterableTable.jsx index 72a22c7ba20c3..3ef03ba8ae9a0 100644 --- a/superset/assets/src/components/FilterableTable/FilterableTable.jsx +++ b/superset/assets/src/components/FilterableTable/FilterableTable.jsx @@ -138,7 +138,7 @@ export default class FilterableTable extends PureComponent { headerRenderer({ dataKey, label, sortBy, sortDirection }) { return ( -
+
{label} {sortBy === dataKey && diff --git a/superset/assets/src/components/FilterableTable/FilterableTableStyles.css b/superset/assets/src/components/FilterableTable/FilterableTableStyles.css index 5be4a369499d2..7a0d3ba0ea7d3 100644 --- a/superset/assets/src/components/FilterableTable/FilterableTableStyles.css +++ b/superset/assets/src/components/FilterableTable/FilterableTableStyles.css @@ -72,3 +72,8 @@ } .even-row { background: #f2f2f2; } .odd-row { background: #ffffff; } +.header-style { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} \ No newline at end of file diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index 32fc1aa79c6b7..5fed48049107f 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -49,6 +49,7 @@ from sqlalchemy.engine.result import RowProxy from sqlalchemy.engine.url import make_url from sqlalchemy.sql import quoted_name, text +from sqlalchemy.sql.expression import ColumnClause from sqlalchemy.sql.expression import TextAsFrom from sqlalchemy.types import String, UnicodeText import sqlparse @@ -980,19 +981,98 @@ def get_columns( result.append(cls._create_column_info(column, column.Column, column_type)) return result + @classmethod + def _is_column_name_quoted(cls, column_name: str) -> bool: + """ + Check if column name is in quotes + :param column_name: column name + :return: boolean + """ + return column_name.startswith('"') and column_name.endswith('"') + + @classmethod + def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]: + """ + Format column clauses where names are in quotes and labels are specified + :param cols: columns + :return: column clauses + """ + column_clauses = [] + # Column names are separated by periods. This regex will find periods in a string + # if they are not enclosed in quotes because if a period is enclosed in quotes, + # then that period is part of a column name. + dot_pattern = r"""\. 
# split on period + (?= # look ahead + (?: # create non-capture group + [^\"]*\"[^\"]*\" # two quotes + )*[^\"]*$) # end regex""" + dot_regex = re.compile(dot_pattern, re.VERBOSE) + for col in cols: + # get individual column names + col_names = re.split(dot_regex, col['name']) + # quote each column name if it is not already quoted + for index, col_name in enumerate(col_names): + if not cls._is_column_name_quoted(col_name): + col_names[index] = '"{}"'.format(col_name) + quoted_col_name = '.'.join( + col_name if cls._is_column_name_quoted(col_name) else f'"{col_name}"' + for col_name in col_names) + # create column clause in the format "name"."name" AS "name.name" + column_clause = sqla.literal_column(quoted_col_name).label(col['name']) + column_clauses.append(column_clause) + return column_clauses + + @classmethod + def _filter_presto_cols(cls, cols: List[dict]) -> List[dict]: + """ + We want to filter out columns that correspond to array content because expanding + arrays would require us to use unnest and join. This can lead to a large, + complicated, and slow query. + + Example: select array_content + from TABLE + cross join UNNEST(array_column) as t(array_content); + + We know which columns to skip because cols is a list provided to us in a specific + order where a structural column is positioned right before its content. + + Example: Column Name: ColA, Column Data Type: array(row(nest_obj int)) + cols = [ ..., ColA, ColA.nest_obj, ... ] + + When we run across an array, check if subsequent column names start with the + array name and skip them. 
+ :param cols: columns + :return: filtered list of columns + """ + filtered_cols = [] + curr_array_col_name = '' + for col in cols: + # col corresponds to an array's content and should be skipped + if curr_array_col_name and col['name'].startswith(curr_array_col_name): + continue + # col is an array so we need to check if subsequent + # columns correspond to the array's contents + elif str(col['type']) == 'ARRAY': + curr_array_col_name = col['name'] + filtered_cols.append(col) + else: + curr_array_col_name = '' + filtered_cols.append(col) + return filtered_cols + @classmethod def select_star(cls, my_db, table_name: str, engine: Engine, schema: str = None, limit: int = 100, show_cols: bool = False, indent: bool = True, latest_partition: bool = True, cols: List[dict] = []) -> str: """ - Temporary method until we have a function that can handle row and array columns + Include selecting properties of row objects. We cannot easily break arrays into + rows, so render the whole array in its own row and skip columns that correspond + to an array's contents. 
""" presto_cols = cols if show_cols: - dot_regex = r'\.(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)' - presto_cols = [ - col for col in presto_cols if re.search(dot_regex, col['name']) is None] - return BaseEngineSpec.select_star( + presto_cols = cls._filter_presto_cols(cols) + return super(PrestoEngineSpec, cls).select_star( my_db, table_name, engine, schema, limit, show_cols, indent, latest_partition, presto_cols, ) @@ -1526,6 +1606,10 @@ def where_latest_partition( return qry.where(Column(col_name) == value) return False + @classmethod + def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]: + return BaseEngineSpec._get_fields(cols) + @classmethod def latest_sub_partition(cls, table_name, schema, database, **kwargs): # TODO(bogdan): implement` diff --git a/tests/db_engine_specs_test.py b/tests/db_engine_specs_test.py index ef9d6bc17da1a..2a22c590ed69c 100644 --- a/tests/db_engine_specs_test.py +++ b/tests/db_engine_specs_test.py @@ -383,6 +383,29 @@ def test_presto_get_array_within_row_within_array_column(self): ('column_name.nested_obj', 'FLOAT')] self.verify_presto_column(presto_column, expected_results) + def test_presto_get_fields(self): + cols = [ + {'name': 'column'}, + {'name': 'column.nested_obj'}, + {'name': 'column."quoted.nested obj"'}] + actual_results = PrestoEngineSpec._get_fields(cols) + expected_results = [ + {'name': '"column"', 'label': 'column'}, + {'name': '"column"."nested_obj"', 'label': 'column.nested_obj'}, + {'name': '"column"."quoted.nested obj"', + 'label': 'column."quoted.nested obj"'}] + for actual_result, expected_result in zip(actual_results, expected_results): + self.assertEqual(actual_result.element.name, expected_result['name']) + self.assertEqual(actual_result.name, expected_result['label']) + + def test_presto_filter_presto_cols(self): + cols = [ + {'name': 'column', 'type': 'ARRAY'}, + {'name': 'column.nested_obj', 'type': 'FLOAT'}] + actual_results = PrestoEngineSpec._filter_presto_cols(cols) + expected_results = [cols[0]] + 
self.assertEqual(actual_results, expected_results) + def test_hive_get_view_names_return_empty_list(self): self.assertEquals([], HiveEngineSpec.get_view_names(mock.ANY, mock.ANY)) From 90eef519f3bd7e04bd5cea8201893d42c8757f5f Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Fri, 3 May 2019 15:10:43 -0700 Subject: [PATCH 9/9] feat: Scheduling queries from SQL Lab (#7416) * Lightweight pipelines POC * Add docs * Minor fixes * Remove Lyft URL * Use enum * Minor fix * Fix unit tests * Mark props as required --- docs/installation.rst | 78 +++++++++++++ superset/assets/package-lock.json | 46 +++++++- superset/assets/package.json | 1 + .../SqlLab/components/ScheduleQueryButton.jsx | 109 ++++++++++++++++++ .../src/SqlLab/components/SqlEditor.jsx | 14 +++ superset/assets/src/featureFlags.ts | 3 +- superset/views/sql_lab.py | 7 +- 7 files changed, 250 insertions(+), 8 deletions(-) create mode 100644 superset/assets/src/SqlLab/components/ScheduleQueryButton.jsx diff --git a/docs/installation.rst b/docs/installation.rst index a06da876dc729..0f372bdbed1d3 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -816,6 +816,84 @@ in this dictionary are made available for users to use in their SQL. 'my_crazy_macro': lambda x: x*2, } +**Scheduling queries** + +You can optionally allow your users to schedule queries directly in SQL Lab. +This is done by addding extra metadata to saved queries, which are then picked +up by an external scheduled (like [Apache Airflow](https://airflow.apache.org/)). + +To allow scheduled queries, add the following to your `config.py`: + +.. code-block:: python + + FEATURE_FLAGS = { + # Configuration for scheduling queries from SQL Lab. This information is + # collected when the user clicks "Schedule query", and saved into the `extra` + # field of saved queries. 
+ # See: https://github.com/mozilla-services/react-jsonschema-form + 'SCHEDULED_QUERIES': { + 'JSONSCHEMA': { + 'title': 'Schedule', + 'description': ( + 'In order to schedule a query, you need to specify when it ' + 'should start running, when it should stop running, and how ' + 'often it should run. You can also optionally specify ' + 'dependencies that should be met before the query is ' + 'executed. Please read the documentation for best practices ' + 'and more information on how to specify dependencies.' + ), + 'type': 'object', + 'properties': { + 'output_table': { + 'type': 'string', + 'title': 'Output table name', + }, + 'start_date': { + 'type': 'string', + 'format': 'date-time', + 'title': 'Start date', + }, + 'end_date': { + 'type': 'string', + 'format': 'date-time', + 'title': 'End date', + }, + 'schedule_interval': { + 'type': 'string', + 'title': 'Schedule interval', + }, + 'dependencies': { + 'type': 'array', + 'title': 'Dependencies', + 'items': { + 'type': 'string', + }, + }, + }, + }, + 'UISCHEMA': { + 'schedule_interval': { + 'ui:placeholder': '@daily, @weekly, etc.', + }, + 'dependencies': { + 'ui:help': ( + 'Check the documentation for the correct format when ' + 'defining dependencies.' + ), + }, + }, + }, + } + +This feature flag is based on [react-jsonschema-form](https://github.com/mozilla-services/react-jsonschema-form), +and will add a button called "Schedule Query" to SQL Lab. When the button is +clicked, a modal will show up where the user can add the metadata required for +scheduling the query. + +This information can then be retrieved from the endpoint `/savedqueryviewapi/api/read` +and used to schedule the queries that have `scheduled_queries` in their JSON +metadata. For schedulers other than Airflow, additional fields can be easily +added to the configuration file above. 
Celery Flower ------------- diff --git a/superset/assets/package-lock.json b/superset/assets/package-lock.json index 704e6f3afd8aa..e5075d9699665 100644 --- a/superset/assets/package-lock.json +++ b/superset/assets/package-lock.json @@ -5791,8 +5791,7 @@ "co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", - "dev": true + "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=" }, "coa": { "version": "2.0.2", @@ -6071,8 +6070,7 @@ "core-js": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.0.tgz", - "integrity": "sha512-kLRC6ncVpuEW/1kwrOXYX6KQASCVtrh1gQr/UiaVgFlf9WE5Vp+lNe5+h3LuMr5PAucWnnEXwH0nQHRH/gpGtw==", - "dev": true + "integrity": "sha512-kLRC6ncVpuEW/1kwrOXYX6KQASCVtrh1gQr/UiaVgFlf9WE5Vp+lNe5+h3LuMr5PAucWnnEXwH0nQHRH/gpGtw==" }, "core-util-is": { "version": "1.0.2", @@ -13233,6 +13231,11 @@ "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", "dev": true }, + "lodash.topath": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/lodash.topath/-/lodash.topath-4.5.2.tgz", + "integrity": "sha1-NhY1Hzu6YZlKCTGYlmC9AyVP0Ak=" + }, "lodash.uniq": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", @@ -17458,6 +17461,41 @@ "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.6.3.tgz", "integrity": "sha512-u7FDWtthB4rWibG/+mFbVd5FvdI20yde86qKGx4lVUTWmPlSWQ4QxbBIrrs+HnXGbxOUlUzTAP/VDmvCwaP2yA==" }, + "react-jsonschema-form": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/react-jsonschema-form/-/react-jsonschema-form-1.2.0.tgz", + "integrity": "sha512-rR77qoFiQ5TxDYwsJz8UWmDner4jQ4xMnDqeV6Nvg7GtoEyOUoTVkI/SBMEzfXuF/piWZXYjquP96Hy/2L7C+Q==", + "requires": { + "ajv": "^5.2.3", + "babel-runtime": "^6.26.0", + "core-js": "^2.5.7", + "lodash.topath": "^4.5.2", + "prop-types": "^15.5.8" + }, + "dependencies": { + "ajv": { + "version": "5.5.2", + "resolved": 
"https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", + "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", + "requires": { + "co": "^4.6.0", + "fast-deep-equal": "^1.0.0", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.3.0" + } + }, + "fast-deep-equal": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", + "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=" + }, + "json-schema-traverse": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", + "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=" + } + } + }, "react-lifecycles-compat": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", diff --git a/superset/assets/package.json b/superset/assets/package.json index 6914c736490b9..6edb971331dc9 100644 --- a/superset/assets/package.json +++ b/superset/assets/package.json @@ -117,6 +117,7 @@ "react-dom": "^16.4.1", "react-gravatar": "^2.6.1", "react-hot-loader": "^4.3.6", + "react-jsonschema-form": "^1.2.0", "react-map-gl": "^4.0.10", "react-markdown": "^3.3.0", "react-redux": "^5.0.2", diff --git a/superset/assets/src/SqlLab/components/ScheduleQueryButton.jsx b/superset/assets/src/SqlLab/components/ScheduleQueryButton.jsx new file mode 100644 index 0000000000000..2e7e16e3167af --- /dev/null +++ b/superset/assets/src/SqlLab/components/ScheduleQueryButton.jsx @@ -0,0 +1,109 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React from 'react'; +import PropTypes from 'prop-types'; +import Form from 'react-jsonschema-form'; +import { t } from '@superset-ui/translation'; + +import Button from '../../components/Button'; +import ModalTrigger from '../../components/ModalTrigger'; + +const propTypes = { + defaultLabel: PropTypes.string, + sql: PropTypes.string.isRequired, + schema: PropTypes.string.isRequired, + dbId: PropTypes.number.isRequired, + animation: PropTypes.bool, + onSchedule: PropTypes.func, +}; +const defaultProps = { + defaultLabel: t('Undefined'), + animation: true, + onSchedule: () => {}, +}; + +class ScheduleQueryButton extends React.PureComponent { + constructor(props) { + super(props); + this.state = { + description: '', + label: props.defaultLabel, + showSchedule: false, + }; + this.toggleSchedule = this.toggleSchedule.bind(this); + this.onSchedule = this.onSchedule.bind(this); + this.onCancel = this.onCancel.bind(this); + this.onLabelChange = this.onLabelChange.bind(this); + this.onDescriptionChange = this.onDescriptionChange.bind(this); + } + onSchedule({ formData }) { + const query = { + label: this.state.label, + description: this.state.description, + db_id: this.props.dbId, + schema: this.props.schema, + sql: this.props.sql, + extra_json: JSON.stringify({ schedule_info: formData }), + }; + this.props.onSchedule(query); + this.saveModal.close(); + } + onCancel() { + this.saveModal.close(); + } + onLabelChange(e) { + this.setState({ label: e.target.value }); + } + onDescriptionChange(e) { + this.setState({ description: e.target.value }); 
+ } + toggleSchedule(e) { + this.setState({ target: e.target, showSchedule: !this.state.showSchedule }); + } + renderModalBody() { + return ( +
+ ); + } + render() { + return ( + + { this.saveModal = ref; }} + modalTitle={t('Schedule Query')} + modalBody={this.renderModalBody()} + triggerNode={ + + } + bsSize="medium" + /> + + ); + } +} +ScheduleQueryButton.propTypes = propTypes; +ScheduleQueryButton.defaultProps = defaultProps; + +export default ScheduleQueryButton; diff --git a/superset/assets/src/SqlLab/components/SqlEditor.jsx b/superset/assets/src/SqlLab/components/SqlEditor.jsx index 960a4af15ef50..f4495af2a4dc6 100644 --- a/superset/assets/src/SqlLab/components/SqlEditor.jsx +++ b/superset/assets/src/SqlLab/components/SqlEditor.jsx @@ -36,6 +36,7 @@ import LimitControl from './LimitControl'; import TemplateParamsEditor from './TemplateParamsEditor'; import SouthPane from './SouthPane'; import SaveQuery from './SaveQuery'; +import ScheduleQueryButton from './ScheduleQueryButton'; import ShareSqlLabQuery from './ShareSqlLabQuery'; import Timer from '../../components/Timer'; import Hotkeys from '../../components/Hotkeys'; @@ -43,6 +44,7 @@ import SqlEditorLeftBar from './SqlEditorLeftBar'; import AceEditorWrapper from './AceEditorWrapper'; import { STATE_BSSTYLE_MAP } from '../constants'; import RunQueryActionButton from './RunQueryActionButton'; +import { FeatureFlag, isFeatureEnabled } from '../../featureFlags'; const SQL_EDITOR_PADDING = 10; const SQL_TOOLBAR_HEIGHT = 51; @@ -313,6 +315,18 @@ class SqlEditor extends React.PureComponent { sql={this.state.sql} /> + {isFeatureEnabled(FeatureFlag.SCHEDULED_QUERIES) && + + + + }