
Commit 8aac63e

[flake8] Fixing additional flake8 issue w/ the presence of ignore (#4474)

john-bodley authored and Grace Guo committed Feb 23, 2018
1 parent cacf53c commit 8aac63e

Showing 7 changed files with 51 additions and 44 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -26,6 +26,7 @@ app.db
*.sqllite
.vscode
.python-version
+.tox

# Node.js, webpack artifacts
*.entry.js
16 changes: 8 additions & 8 deletions superset/connectors/druid/models.py
@@ -29,7 +29,7 @@
from superset import conf, db, import_util, sm, utils
from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
from superset.models.helpers import (
-    AuditMixinNullable, ImportMixin, QueryResult, set_perm,
+    AuditMixinNullable, ImportMixin, QueryResult, set_perm,
)
from superset.utils import (
DimSelector, DTTM_ALIAS, flasher, MetricPermException,
@@ -582,11 +582,11 @@ def int_or_0(v):
v1nums = (v1nums + [0, 0, 0])[:3]
v2nums = (v2nums + [0, 0, 0])[:3]
return (
-            v1nums[0] > v2nums[0] or
-            (v1nums[0] == v2nums[0] and v1nums[1] > v2nums[1]) or
-            (v1nums[0] == v2nums[0] and v1nums[1] == v2nums[1] and
-                v1nums[2] > v2nums[2])
-        )
+            v1nums[0] > v2nums[0] or
+            (v1nums[0] == v2nums[0] and v1nums[1] > v2nums[1]) or
+            (v1nums[0] == v2nums[0] and v1nums[1] == v2nums[1] and
+                v1nums[2] > v2nums[2])
+        )

def latest_metadata(self):
"""Returns segment metadata from the latest segment"""
@@ -869,8 +869,8 @@ def recursive_get_fields(_conf):
def resolve_postagg(postagg, post_aggs, agg_names, visited_postaggs, metrics_dict):
mconf = postagg.json_obj
required_fields = set(
-            DruidDatasource.recursive_get_fields(mconf)
-            + mconf.get('fieldNames', []))
+            DruidDatasource.recursive_get_fields(mconf) +
+            mconf.get('fieldNames', []))
# Check if the fields are already in aggs
# or is a previous postagg
required_fields = set([
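Two kinds of fixes appear in this file: continuation lines are re-indented, and in the last hunk the + operator moves from the start of a line to the end of the previous one, satisfying W503 (line break before binary operator) once tox.ini stops ignoring it. The version comparison re-indented in the middle hunk hand-rolls a lexicographic compare; for reference, Python's sequence ordering gives the same answer. A standalone sketch, not part of this commit, reusing the diff's own int_or_0 and zero-padding idiom:

def version_higher(v1, v2):
    """True when dotted version string v1 is strictly higher than v2."""
    def int_or_0(v):
        try:
            return int(v)
        except (TypeError, ValueError):
            return 0

    # Pad to three numeric parts, then let sequence comparison do the
    # major/minor/patch cascade the hand-written booleans spell out.
    nums1 = ([int_or_0(n) for n in v1.split('.')] + [0, 0, 0])[:3]
    nums2 = ([int_or_0(n) for n in v2.split('.')] + [0, 0, 0])[:3]
    return nums1 > nums2

assert version_higher('0.10.1', '0.9.2')    # 10 > 9 as integers
assert not version_higher('0.9', '0.9.0')   # equal after zero-padding
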
4 changes: 2 additions & 2 deletions superset/forms.py
@@ -35,8 +35,8 @@ def all_db_items():
validators=[
FileRequired(), FileAllowed(['csv'], _('CSV Files Only!'))])
con = QuerySelectField(
-        query_factory=all_db_items,
-        get_pk=lambda a: a.id, get_label=lambda a: a.database_name)
+        query_factory=all_db_items,
+        get_pk=lambda a: a.id, get_label=lambda a: a.database_name)
sep = StringField(
_('Delimiter'),
description=_('Delimiter used by CSV file (for whitespace use \s+).'),
28 changes: 16 additions & 12 deletions superset/models/helpers.py
@@ -61,8 +61,9 @@ def export_schema(cls, recursive=True, include_parent_ref=False):
if parent_ref:
parent_excludes = {c.name for c in parent_ref.local_columns}

-        def formatter(c): return ('{0} Default ({1})'.format(
-            str(c.type), c.default.arg) if c.default else str(c.type))
+        def formatter(c):
+            return ('{0} Default ({1})'.format(
+                str(c.type), c.default.arg) if c.default else str(c.type))

schema = {c.name: formatter(c) for c in cls.__table__.columns
if (c.name in cls.export_fields and
@@ -96,7 +96,7 @@ def import_from_dict(cls, session, dict_rep, parent=None,
for p in parent_refs.keys():
if p not in dict_rep:
raise RuntimeError(
-                    '{0}: Missing field {1}'.format(cls.__name__, p))
+                    '{0}: Missing field {1}'.format(cls.__name__, p))
else:
# Set foreign keys to parent obj
for k, v in parent_refs.items():
@@ -176,19 +176,22 @@ def export_to_dict(self, recursive=True, include_parent_ref=False,
if (c.name in self.export_fields and
c.name not in parent_excludes and
(include_defaults or (
-                    getattr(self, c.name) is not None and
-                    (not c.default or
-                    getattr(self, c.name) != c.default.arg))))
+                    getattr(self, c.name) is not None and
+                    (not c.default or
+                    getattr(self, c.name) != c.default.arg))))
}
if recursive:
for c in self.export_children:
# sorting to make lists of children stable
-            dict_rep[c] = sorted([child.export_to_dict(
-                recursive=recursive,
-                include_parent_ref=include_parent_ref,
-                include_defaults=include_defaults)
-                for child in getattr(self, c)],
-                key=lambda k: sorted(k.items()))
+            dict_rep[c] = sorted(
+                [
+                    child.export_to_dict(
+                        recursive=recursive,
+                        include_parent_ref=include_parent_ref,
+                        include_defaults=include_defaults,
+                    ) for child in getattr(self, c)
+                ],
+                key=lambda k: sorted(k.items()))

return dict_rep

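The export_to_dict rewrite above is purely structural; the behavior it preserves is worth noting: child lists are sorted by each child's sorted (key, value) pairs, so exports come out in a deterministic order regardless of how the database returns rows. A standalone illustration of that sort key, with made-up data:

children = [
    {'name': 'b', 'id': 2},
    {'name': 'a', 'id': 1},
]
# sorted(k.items()) turns each dict into a comparable list of
# (key, value) tuples, so lists of dicts sort stably.
stable = sorted(children, key=lambda k: sorted(k.items()))
assert [child['id'] for child in stable] == [1, 2]
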
16 changes: 8 additions & 8 deletions superset/views/core.py
@@ -350,7 +350,7 @@ def form_post(self, form):
except OSError:
pass
message = u'Table name {} already exists. Please pick another'.format(
-            form.name.data) if isinstance(e, IntegrityError) else text_type(e)
+            form.name.data) if isinstance(e, IntegrityError) else text_type(e)
flash(
message,
'danger')
@@ -993,10 +993,10 @@ def get_viz(
def slice(self, slice_id):
viz_obj = self.get_viz(slice_id)
endpoint = '/superset/explore/{}/{}?form_data={}'.format(
-            viz_obj.datasource.type,
-            viz_obj.datasource.id,
-            parse.quote(json.dumps(viz_obj.form_data)),
-        )
+            viz_obj.datasource.type,
+            viz_obj.datasource.id,
+            parse.quote(json.dumps(viz_obj.form_data)),
+        )
if request.args.get('standalone') == 'true':
endpoint += '&standalone=true'
return redirect(endpoint)
@@ -1098,9 +1098,9 @@ def annotation_json(self, layer_id):
'val': layer_id}]
datasource = AnnotationDatasource()
viz_obj = viz.viz_types['table'](
-            datasource,
-            form_data=form_data,
-            force=False,
+            datasource,
+            form_data=form_data,
+            force=False,
)
try:
payload = viz_obj.get_payload()
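All three hunks in this file are indentation-only. For context, the slice() code being re-indented builds an explore URL by JSON-encoding the chart's form_data into the query string; a standalone sketch with made-up values (the 'table' type and id 7 are hypothetical, standing in for the real viz_obj.datasource attributes):

import json
from urllib import parse

form_data = {'slice_id': 42}
endpoint = '/superset/explore/{}/{}?form_data={}'.format(
    'table',  # viz_obj.datasource.type in the real view
    7,        # viz_obj.datasource.id
    parse.quote(json.dumps(form_data)),
)
print(endpoint)
# /superset/explore/table/7?form_data=%7B%22slice_id%22%3A%2042%7D
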
29 changes: 15 additions & 14 deletions tests/dict_import_export_tests.py
@@ -11,7 +11,8 @@

from superset import db
from superset.connectors.druid.models import (
-    DruidColumn, DruidDatasource, DruidMetric)
+    DruidColumn, DruidDatasource, DruidMetric,
+)
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
from .base_tests import SupersetTestCase

@@ -81,12 +81,12 @@ def create_druid_datasource(
cluster_name = 'druid_test'
params = {DBREF: id, 'database_name': cluster_name}
dict_rep = {
-            'cluster_name': cluster_name,
-            'datasource_name': name,
-            'id': id,
-            'params': json.dumps(params),
-            'columns': [{'column_name': c} for c in cols_names],
-            'metrics': [{'metric_name': c} for c in metric_names],
+            'cluster_name': cluster_name,
+            'datasource_name': name,
+            'id': id,
+            'params': json.dumps(params),
+            'columns': [{'column_name': c} for c in cols_names],
+            'metrics': [{'metric_name': c} for c in metric_names],
}

datasource = DruidDatasource(
@@ -180,12 +180,12 @@ def test_import_table_override_append(self):
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
db.session.commit()
table_over, dict_table_over = self.create_table(
-            'table_override', id=ID_PREFIX + 3,
-            cols_names=['new_col1', 'col2', 'col3'],
-            metric_names=['new_metric1'])
+            'table_override', id=ID_PREFIX + 3,
+            cols_names=['new_col1', 'col2', 'col3'],
+            metric_names=['new_metric1'])
imported_over_table = SqlaTable.import_from_dict(
-            db.session,
-            dict_table_over)
+            db.session,
+            dict_table_over)
db.session.commit()

imported_over = self.get_table(imported_over_table.id)
@@ -289,8 +289,8 @@ def test_import_druid_override_append(self):
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_over_cluster = DruidDatasource.import_from_dict(
-            db.session,
-            table_over_dict)
+            db.session,
+            table_over_dict)
db.session.commit()
imported_over = self.get_datasource(imported_over_cluster.id)
self.assertEquals(imported_cluster.id, imported_over.id)
1 change: 1 addition & 0 deletions tox.ini
@@ -16,6 +16,7 @@ exclude =
superset/data
superset/migrations
superset/templates
+ignore =
import-order-style = google
max-line-length = 90

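This one-line addition is what the commit title refers to: an ignore key with no value overrides flake8's built-in default ignore list, so checks that ship disabled start firing across the codebase. Assuming the pycodestyle defaults of the time (E121, E123, E126, E226, E24, E704, W503), the newly active codes are exactly what the hunks above address. A minimal file that trips two of them:

def formatter(c): return str(c)  # E704: statement on same line as def

total = (1
         + 2)  # W503: line break before binary operator

print(formatter(total))
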
