
Commit

Merge branch 'master' of https://github.com/apache/superset into feat/applitools-cypress
geido committed May 5, 2022
2 parents bb7c9d2 + 902ac05 commit fabd5b6
Showing 40 changed files with 195 additions and 6,020 deletions.
2 changes: 1 addition & 1 deletion RESOURCES/INTHEWILD.md
@@ -126,7 +126,7 @@ Join our growing community!

### Energy
- [Airboxlab](https://foobot.io) [@antoine-galataud]
- [DouroECI](http://douroeci.com/en/) [@nunohelibeires]
- [DouroECI](https://www.douroeci.com/) [@nunohelibeires]
- [Safaricom](https://www.safaricom.co.ke/) [@mmutiso]
- [Scoot](https://scoot.co/) [@haaspt]

1 change: 1 addition & 0 deletions UPDATING.md
@@ -30,6 +30,7 @@ assists people when migrating to a new version.

### Breaking Changes

- [19770](https://github.com/apache/superset/pull/19770): As per SIPs 11 and 68, the native NoSQL Druid connector is deprecated and has been removed. Druid is still supported through SQLAlchemy via pydruid. The config keys `DRUID_IS_ACTIVE` and `DRUID_METADATA_LINKS_ENABLED` have also been removed.
- [19274](https://github.com/apache/superset/pull/19274): The `PUBLIC_ROLE_LIKE_GAMMA` config key has been removed; set `PUBLIC_ROLE_LIKE = "Gamma"` to retain the same functionality.
- [19273](https://github.com/apache/superset/pull/19273): The `SUPERSET_CELERY_WORKERS` and `SUPERSET_WORKERS` config keys have been removed. Configure Celery directly using `CELERY_CONFIG` on Superset.
- [19262](https://github.com/apache/superset/pull/19262): Per [SIP-11](https://github.com/apache/superset/issues/6032) and [SIP-68](https://github.com/apache/superset/issues/14909) the native NoSQL Druid connector is deprecated and will no longer be supported. Druid SQL is still [supported](https://superset.apache.org/docs/databases/druid).
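
For convenience, the entries above can be read as a small `superset_config.py` migration. The sketch below is illustrative only: the key names and `PUBLIC_ROLE_LIKE = "Gamma"` come from the entries themselves, while the Celery broker URLs and the Druid SQLAlchemy URI are assumed values, not part of this commit.

```python
# superset_config.py -- illustrative migration sketch, not part of this commit.

# PR 19274: PUBLIC_ROLE_LIKE_GAMMA is gone; use PUBLIC_ROLE_LIKE instead.
PUBLIC_ROLE_LIKE = "Gamma"

# PR 19273: SUPERSET_CELERY_WORKERS / SUPERSET_WORKERS are gone; configure
# Celery directly through CELERY_CONFIG (broker/backend URLs assumed).
class CeleryConfig:
    broker_url = "redis://localhost:6379/0"
    result_backend = "redis://localhost:6379/1"

CELERY_CONFIG = CeleryConfig

# PRs 19770 / 19262: DRUID_IS_ACTIVE and DRUID_METADATA_LINKS_ENABLED are gone;
# simply delete them. Druid itself remains usable through SQLAlchemy/pydruid,
# e.g. a database registered with a URI such as (host/port assumed):
#   druid://broker-host:8082/druid/v2/sql/
```
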
34 changes: 17 additions & 17 deletions superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx
@@ -33,6 +33,7 @@ import {
ensureIsArray,
GenericDataType,
getTimeFormatterForGranularity,
styled,
t,
tn,
} from '@superset-ui/core';
@@ -317,7 +318,6 @@ export default function TableChart<D extends DataRecord = DataRecord>(
const getColumnConfigs = useCallback(
(column: DataColumnMeta, i: number): ColumnWithLooseAccessor<D> => {
const { key, label, isNumeric, dataType, isMetric, config = {} } = column;
const isFilter = !isNumeric && emitFilter;
const columnWidth = Number.isNaN(Number(config.columnWidth))
? config.columnWidth
: Number(config.columnWidth);
@@ -348,7 +348,7 @@ export default function TableChart<D extends DataRecord = DataRecord>(
getValueRange(key, alignPositiveNegative);

let className = '';
if (isFilter) {
if (emitFilter) {
className += ' dt-is-filter';
}

@@ -376,6 +376,19 @@ export default function TableChart<D extends DataRecord = DataRecord>(
});
}

const StyledCell = styled.td`
text-align: ${sharedStyle.textAlign};
background: ${backgroundColor ||
(valueRange
? cellBar({
value: value as number,
valueRange,
alignPositiveNegative,
colorPositiveNegative,
})
: undefined)};
`;

const cellProps = {
// show raw number in title in case of numeric values
title: typeof value === 'number' ? String(value) : undefined,
@@ -388,27 +401,14 @@ export default function TableChart<D extends DataRecord = DataRecord>(
value == null ? 'dt-is-null' : '',
isActiveFilterValue(key, value) ? ' dt-is-active-filter' : '',
].join(' '),
style: {
...sharedStyle,
background:
backgroundColor ||
(valueRange
? cellBar({
value: value as number,
valueRange,
alignPositiveNegative,
colorPositiveNegative,
})
: undefined),
},
};
if (html) {
// eslint-disable-next-line react/no-danger
return <td {...cellProps} dangerouslySetInnerHTML={html} />;
return <StyledCell {...cellProps} dangerouslySetInnerHTML={html} />;
}
// If cellProps renders textContent already, then we don't have to
// render `Cell`. This saves some time for large tables.
return <td {...cellProps}>{text}</td>;
return <StyledCell {...cellProps}>{text}</StyledCell>;
},
Header: ({ column: col, onClick, style }) => (
<th
2 changes: 1 addition & 1 deletion superset-frontend/src/SqlLab/actions/sqlLab.js
@@ -1045,7 +1045,7 @@ function getTableMetadata(table, query, dispatch) {
function getTableExtendedMetadata(table, query, dispatch) {
return SupersetClient.get({
endpoint: encodeURI(
`/superset/extra_table_metadata/${query.dbId}/` +
`/api/v1/database/${query.dbId}/table_extra/` +
`${encodeURIComponent(table.name)}/${encodeURIComponent(
table.schema,
)}/`,
4 changes: 2 additions & 2 deletions superset-frontend/src/SqlLab/actions/sqlLab.test.js
@@ -415,10 +415,10 @@ describe('async actions', () => {
fetchMock.delete(updateTableSchemaEndpoint, {});
fetchMock.post(updateTableSchemaEndpoint, JSON.stringify({ id: 1 }));

const getTableMetadataEndpoint = 'glob:*/api/v1/database/*';
const getTableMetadataEndpoint = 'glob:**/api/v1/database/*/table/*/*/';
fetchMock.get(getTableMetadataEndpoint, {});
const getExtraTableMetadataEndpoint =
'glob:*/superset/extra_table_metadata/*';
'glob:**/api/v1/database/*/table_extra/*/*/';
fetchMock.get(getExtraTableMetadataEndpoint, {});

let isFeatureEnabledMock;
4 changes: 2 additions & 2 deletions superset-frontend/src/dashboard/stylesheets/resizable.less
@@ -92,12 +92,12 @@

.dragdroppable-column .resizable-container-handle--right {
/* override the default because the inner column's handle's mouse target is very small */
right: -10px !important;
right: 0 !important;
}

.dragdroppable-column .dragdroppable-column .resizable-container-handle--right {
/* override the default because the inner column's handle's mouse target is very small */
right: 0px !important;
right: 0 !important;
}

.resizable-container-handle--bottom {
@@ -147,7 +147,7 @@ export const DndFilterSelect = (props: DndFilterSelectProps) => {

if (!isSqllabView && dbId && name && schema) {
SupersetClient.get({
endpoint: `/superset/extra_table_metadata/${dbId}/${name}/${schema}/`,
endpoint: `/api/v1/database/${dbId}/table_extra/${name}/${schema}/`,
})
.then(({ json }: { json: Record<string, any> }) => {
if (json && json.partitions) {
@@ -137,7 +137,7 @@ class AdhocFilterControl extends React.Component {

if (!isSqllabView && dbId && name && schema) {
SupersetClient.get({
endpoint: `/superset/extra_table_metadata/${dbId}/${name}/${schema}/`,
endpoint: `/api/v1/database/${dbId}/table_extra/${name}/${schema}/`,
})
.then(({ json }) => {
if (json && json.partitions) {
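
The three hunks above all replace the legacy `/superset/extra_table_metadata/...` endpoint with the REST route `/api/v1/database/<dbId>/table_extra/<table>/<schema>/`. The sketch below shows a direct call to the new route; the path and the `partitions` response key come from the hunks above, while the host, credentials, and login flow are assumptions for illustration.

```python
# Illustrative only: call the new table_extra REST route from outside the UI.
# Host, credentials, and database/table/schema names are assumptions.
import requests

BASE = "http://localhost:8088"

# Assumed auth flow: obtain a JWT from Superset's security API.
token = requests.post(
    f"{BASE}/api/v1/security/login",
    json={"username": "admin", "password": "admin", "provider": "db", "refresh": True},
).json()["access_token"]

resp = requests.get(
    f"{BASE}/api/v1/database/1/table_extra/my_table/public/",  # dbId/table/schema
    headers={"Authorization": f"Bearer {token}"},
)
print(resp.json().get("partitions"))  # the same key the frontend checks above
```
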
59 changes: 0 additions & 59 deletions superset-frontend/src/visualizations/FilterBox/controlPanel.jsx
@@ -20,36 +20,6 @@ import React from 'react';
import { t } from '@superset-ui/core';
import { sections } from '@superset-ui/chart-controls';

const appContainer = document.getElementById('app');
const bootstrapData = JSON.parse(appContainer.getAttribute('data-bootstrap'));
const druidIsActive = !!bootstrapData?.common?.conf?.DRUID_IS_ACTIVE;
const druidSection = druidIsActive
? [
[
{
name: 'show_druid_time_granularity',
config: {
type: 'CheckboxControl',
label: t('Show Druid granularity dropdown'),
default: false,
description: t('Check to include Druid granularity dropdown'),
},
},
],
[
{
name: 'show_druid_time_origin',
config: {
type: 'CheckboxControl',
label: t('Show Druid time origin'),
default: false,
description: t('Check to include time origin dropdown'),
},
},
],
]
: [];

export default {
controlPanelSections: [
sections.legacyTimeseriesTime,
@@ -96,35 +66,6 @@ export default {
},
},
],
[
{
name: 'show_sqla_time_granularity',
config: {
type: 'CheckboxControl',
label: druidIsActive
? t('Show SQL time grain dropdown')
: t('Show time grain dropdown'),
default: false,
description: druidIsActive
? t('Check to include SQL time grain dropdown')
: t('Check to include time grain dropdown'),
},
},
],
[
{
name: 'show_sqla_time_column',
config: {
type: 'CheckboxControl',
label: druidIsActive
? t('Show SQL time column')
: t('Show time column'),
default: false,
description: t('Check to include time column dropdown'),
},
},
],
...druidSection,
['adhoc_filters'],
],
},
34 changes: 0 additions & 34 deletions superset/cli/update.py
@@ -18,7 +18,6 @@
import logging
import os
import sys
from datetime import datetime
from typing import Optional

import click
@@ -53,39 +52,6 @@ def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
database_utils.get_or_create_db(database_name, uri, not skip_create)


@click.command()
@with_appcontext
@click.option(
"--datasource",
"-d",
help="Specify which datasource name to load, if "
"omitted, all datasources will be refreshed",
)
@click.option(
"--merge",
"-m",
is_flag=True,
default=False,
help="Specify using 'merge' property during operation. " "Default value is False.",
)
def refresh_druid(datasource: str, merge: bool) -> None:
"""Refresh druid datasources"""
# pylint: disable=import-outside-toplevel
from superset.connectors.druid.models import DruidCluster

session = db.session()

for cluster in session.query(DruidCluster).all():
try:
cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
except Exception as ex: # pylint: disable=broad-except
print("Error while processing cluster '{}'\n{}".format(cluster, str(ex)))
logger.exception(ex)
cluster.metadata_last_refreshed = datetime.now()
print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
session.commit()


@click.command()
@with_appcontext
def update_datasources_cache() -> None:
22 changes: 5 additions & 17 deletions superset/config.py
@@ -258,16 +258,6 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:
DRUID_TZ = tz.tzutc()
DRUID_ANALYSIS_TYPES = ["cardinality"]

# Legacy Druid NoSQL (native) connector
# Druid supports a SQL interface in its newer versions.
# Setting this flag to True enables the deprecated, API-based Druid
# connector. This feature may be removed at a future date.
DRUID_IS_ACTIVE = False

# If Druid is active whether to include the links to scan/refresh Druid datasources.
# This should be disabled if you are trying to wean yourself off of the Druid NoSQL
# connector.
DRUID_METADATA_LINKS_ENABLED = True

# ----------------------------------------------------
# AUTHENTICATION CONFIG
@@ -646,19 +636,12 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:

VIZ_TYPE_DENYLIST: List[str] = []

# ---------------------------------------------------
# List of data sources not to be refreshed in druid cluster
# ---------------------------------------------------

DRUID_DATA_SOURCE_DENYLIST: List[str] = []

# --------------------------------------------------
# Modules, datasources and middleware to be registered
# --------------------------------------------------
DEFAULT_MODULE_DS_MAP = OrderedDict(
[
("superset.connectors.sqla.models", ["SqlaTable"]),
("superset.connectors.druid.models", ["DruidDatasource"]),
]
)
ADDITIONAL_MODULE_DS_MAP: Dict[str, List[str]] = {}
@@ -984,8 +967,11 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC( # pylint: disable=invalid-name
# Provide a callable that receives a tracking_url and returns another
# URL. This is used to translate internal Hadoop job tracker URL
# into a proxied one


TRACKING_URL_TRANSFORMER = lambda x: x


# Interval between consecutive polls when using Hive Engine
HIVE_POLL_INTERVAL = int(timedelta(seconds=5).total_seconds())

@@ -1203,8 +1189,10 @@ def SQL_QUERY_MUTATOR( # pylint: disable=invalid-name,unused-argument
# to allow mutating the object with this callback.
# This can be used to set any properties of the object based on naming
# conventions and such. You can find examples in the tests.

SQLA_TABLE_MUTATOR = lambda table: table


# Global async query config options.
# Requires GLOBAL_ASYNC_QUERIES feature flag to be enabled.
GLOBAL_ASYNC_QUERIES_REDIS_CONFIG = {
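
The config.py hunks above only adjust whitespace around `TRACKING_URL_TRANSFORMER` and `SQLA_TABLE_MUTATOR`, but the surrounding comments describe them as overridable callables. A minimal sketch of overriding both in `superset_config.py` follows; the proxy host and the naming convention are made up for illustration.

```python
# superset_config.py -- illustrative overrides, not part of this commit.

# Per the comment in config.py: receive a tracking_url and return another URL,
# e.g. to route an internal Hadoop job-tracker URL through a proxy.
# "proxy.example.com" is a hypothetical host.
TRACKING_URL_TRANSFORMER = lambda url: url.replace(
    "internal-jobtracker", "proxy.example.com"
)


# Per the comment in config.py: mutate the SQLAlchemy table object after it is
# fetched from the metastore, e.g. based on naming conventions.
def SQLA_TABLE_MUTATOR(table):
    if table.name.endswith("_tmp"):
        table.info["temporary"] = True  # assumed convention, for illustration
    return table
```
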
16 changes: 0 additions & 16 deletions superset/connectors/druid/__init__.py

This file was deleted.
