pylint errors will now break the build (#2543)
* Linting pylint errors

* Backing off of an unnecessary change
mistercrunch authored Apr 4, 2017
1 parent c31210b commit db6b2f3
Showing 18 changed files with 99 additions and 69 deletions.
2 changes: 1 addition & 1 deletion .pylintrc
@@ -277,7 +277,7 @@ ignore-mixin-members=yes
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata

# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
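
The modules added to ignored-modules build much of their API at runtime (C extensions, metaprogramming, Alembic's proxied op/context objects, Flask-AppBuilder metadata), so pylint's static analysis cannot resolve their members and would report spurious no-member (E1101) errors. A minimal sketch of the kind of code affected, using only calls that exist in numpy and pandas:

    # Older pylint releases could flag these attribute accesses as E1101
    # (no-member) even though they are perfectly valid at runtime.
    import numpy as np
    import pandas as pd

    df = pd.DataFrame({'x': [1, 2, 3]})
    mean_x = df['x'].mean()          # DataFrame/Series members are dynamic
    as_array = np.asarray(df['x'])   # numpy members live in C extensions
    print(mean_x, as_array)
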
1 change: 1 addition & 0 deletions .travis.yml
@@ -17,6 +17,7 @@ env:
- TRAVIS_NODE_VERSION="5.11"
matrix:
- TOX_ENV=javascript
- TOX_ENV=pylint
- TOX_ENV=py34-postgres
- TOX_ENV=py34-sqlite
- TOX_ENV=py27-mysql
3 changes: 3 additions & 0 deletions dev-reqs.txt
@@ -1,10 +1,13 @@
codeclimate-test-reporter
coveralls
flake8
flask_cors
mock
mysqlclient
nose
psycopg2
pylint
pythrifthiveapi
pyyaml
# Also install everything we need to build Sphinx docs
-r dev-reqs-for-docs.txt
2 changes: 2 additions & 0 deletions pylint-errors.sh
@@ -0,0 +1,2 @@
#!/bin/bash
pylint superset --errors-only
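
The new helper script mirrors what the build now enforces: with --errors-only, only error-class messages (E and F codes) are reported, so likely bugs break the build while style and convention warnings are ignored. A hypothetical module illustrating the distinction:

    # pylint --errors-only would report the undefined name below (E0602)
    # but stay silent about convention-level findings such as missing
    # docstrings (C codes).
    def total_value(rows):
        total = 0
        for row in rows:
            total += row
        return totall  # E0602: undefined variable 'totall'
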
8 changes: 4 additions & 4 deletions superset/cli.py
@@ -133,8 +133,8 @@ def load_examples(load_test_data):
def refresh_druid(datasource, merge):
"""Refresh druid datasources"""
session = db.session()
from superset import models
for cluster in session.query(models.DruidCluster).all():
from superset.connectors.druid.models import DruidCluster
for cluster in session.query(DruidCluster).all():
try:
cluster.refresh_datasources(datasource_name=datasource,
merge_flag=merge)
@@ -153,8 +153,8 @@ def refresh_druid(datasource, merge):
@manager.command
def update_datasources_cache():
"""Refresh sqllab datasources cache"""
from superset import models
for database in db.session.query(models.Database).all():
from superset.models.core import Database
for database in db.session.query(Database).all():
print('Fetching {} datasources ...'.format(database.name))
try:
database.all_table_names(force=True)
29 changes: 22 additions & 7 deletions superset/connectors/base.py
@@ -11,14 +11,23 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):

"""A common interface to objects that are queryable (tables and datasources)"""

# ---------------------------------------------------------------
# class attributes to define when deriving BaseDatasource
# ---------------------------------------------------------------
__tablename__ = None # {connector_name}_datasource
type = None  # datasource type, str to be defined when deriving this class
baselink = None # url portion pointing to ModelView endpoint

column_class = None # link to derivative of BaseColumn
metric_class = None # link to derivative of BaseMetric

# Used to do code highlighting when displaying the query in the UI
query_language = None

name = None # can be a Column or a property pointing to one

# ---------------------------------------------------------------

# Columns
id = Column(Integer, primary_key=True)
description = Column(Text)
@@ -30,6 +39,11 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
params = Column(String(1000))
perm = Column(String(1000))

# placeholder for a relationship to a derivative of BaseColumn
columns = []
# placeholder for a relationship to a derivative of BaseMetric
metrics = []

@property
def column_names(self):
return sorted([c.column_name for c in self.columns])
@@ -69,6 +83,14 @@ def column_formats(self):
if m.d3format
}

@property
def metrics_combo(self):
return sorted(
[
(m.metric_name, m.verbose_name or m.metric_name)
for m in self.metrics],
key=lambda x: x[1])

@property
def data(self):
"""Data representation of the datasource sent to the frontend"""
@@ -91,13 +113,6 @@ def data(self):
'type': self.type,
}

# TODO move this block to SqlaTable.data
if self.type == 'table':
grains = self.database.grains() or []
if grains:
grains = [(g.name, g.name) for g in grains]
d['granularity_sqla'] = utils.choicify(self.dttm_cols)
d['time_grain_sqla'] = grains
return d


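
The annotated placeholders spell out the contract for connector authors: a derivative supplies its own table name, type string, base link, column/metric classes, and the columns/metrics relationships. A hypothetical connector sketch (not part of this commit), assuming that concrete connectors also mix in flask_appbuilder's Model, as the SQLA and Druid ones do; all other names below are invented for illustration:

    from flask_appbuilder import Model
    from sqlalchemy import Column, String

    from superset.connectors.base import BaseDatasource


    class ExampleColumn(object):
        """Stand-in for a derivative of BaseColumn."""


    class ExampleMetric(object):
        """Stand-in for a derivative of BaseMetric."""


    class ExampleDatasource(Model, BaseDatasource):
        __tablename__ = 'example_datasource'      # {connector_name}_datasource
        type = 'example'                          # datasource type string
        baselink = 'exampledatasourcemodelview'   # ModelView endpoint
        column_class = ExampleColumn              # derivative of BaseColumn
        metric_class = ExampleMetric              # derivative of BaseMetric
        query_language = 'sql'                    # drives UI code highlighting

        name = Column(String(255))                # or a property returning one
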
20 changes: 2 additions & 18 deletions superset/connectors/druid/models.py
@@ -268,16 +268,6 @@ class DruidMetric(Model, BaseMetric):
enable_typechecks=False)
json = Column(Text)

def refresh_datasources(self, datasource_name=None, merge_flag=False):
"""Refresh metadata of all datasources in the cluster
If ``datasource_name`` is specified, only that datasource is updated
"""
self.druid_version = self.get_druid_version()
for datasource in self.get_datasources():
if datasource not in conf.get('DRUID_DATA_SOURCE_BLACKLIST'):
if not datasource_name or datasource_name == datasource:
DruidDatasource.sync_to_db(datasource, self, merge_flag)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
'json', 'description', 'is_restricted', 'd3format'
@@ -341,12 +331,6 @@ class DruidDatasource(Model, BaseDatasource):
'cluster_name', 'offset', 'cache_timeout', 'params'
)

@property
def metrics_combo(self):
return sorted(
[(m.metric_name, m.verbose_name) for m in self.metrics],
key=lambda x: x[1])

@property
def database(self):
return self.cluster
@@ -784,15 +768,15 @@ def recursive_get_fields(_conf):
mconf.get('probabilities', ''),
)
elif mconf.get('type') == 'fieldAccess':
post_aggs[metric_name] = Field(mconf.get('name'), '')
post_aggs[metric_name] = Field(mconf.get('name'))
elif mconf.get('type') == 'constant':
post_aggs[metric_name] = Const(
mconf.get('value'),
output_name=mconf.get('name', '')
)
elif mconf.get('type') == 'hyperUniqueCardinality':
post_aggs[metric_name] = HyperUniqueCardinality(
mconf.get('name'), ''
mconf.get('name')
)
else:
post_aggs[metric_name] = Postaggregator(
7 changes: 4 additions & 3 deletions superset/connectors/druid/views.py
@@ -100,11 +100,12 @@ class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
}

def post_add(self, metric):
utils.init_metrics_perm(superset, [metric])
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())

def post_update(self, metric):
utils.init_metrics_perm(superset, [metric])

if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())

appbuilder.add_view_no_menu(DruidMetricInlineView)

19 changes: 11 additions & 8 deletions superset/connectors/sqla/models.py
@@ -251,14 +251,6 @@ def html(self):
"dataframe table table-striped table-bordered "
"table-condensed"))

@property
def metrics_combo(self):
return sorted(
[
(m.metric_name, m.verbose_name or m.metric_name)
for m in self.metrics],
key=lambda x: x[1])

@property
def sql_url(self):
return self.database.sql_url + "?table_name=" + str(self.table_name)
@@ -276,6 +268,17 @@ def get_col(self, col_name):
if col_name == col.column_name:
return col

@property
def data(self):
d = super(SqlaTable, self).data
if self.type == 'table':
grains = self.database.grains() or []
if grains:
grains = [(g.name, g.name) for g in grains]
d['granularity_sqla'] = utils.choicify(self.dttm_cols)
d['time_grain_sqla'] = grains
return d

def values_for_column(self, column_name, limit=10000):
"""Runs query against sqla to retrieve some
sample values for the given column.
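
The block removed from BaseDatasource.data reappears here as an override: SqlaTable.data starts from the generic payload built by the parent class and layers the SQL-only keys (granularity_sqla, time_grain_sqla) on top. A minimal self-contained sketch of that pattern, with invented class names:

    # Toy version of the override pattern used by SqlaTable.data above.
    class BaseThing(object):
        type = None

        @property
        def data(self):
            return {'name': 'example', 'type': self.type}


    class SqlBackedThing(BaseThing):
        type = 'table'

        @property
        def data(self):
            d = super(SqlBackedThing, self).data       # generic payload first
            if self.type == 'table':
                d['time_grain_sqla'] = [('PT1M', 'PT1M')]  # SQL-only extras
            return d


    print(SqlBackedThing().data)
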
4 changes: 2 additions & 2 deletions superset/data/__init__.py
@@ -818,7 +818,7 @@ def load_unicode_test_data():
# generate date/numeric data
df['date'] = datetime.datetime.now().date()
df['value'] = [random.randint(1, 100) for _ in range(len(df))]
df.to_sql(
df.to_sql( # pylint: disable=no-member
'unicode_test',
db.engine,
if_exists='replace',
@@ -953,7 +953,7 @@ def load_long_lat_data():
pdf['date'] = datetime.datetime.now().date()
pdf['occupancy'] = [random.randint(1, 6) for _ in range(len(pdf))]
pdf['radius_miles'] = [random.uniform(1, 3) for _ in range(len(pdf))]
pdf.to_sql(
pdf.to_sql( # pylint: disable=no-member
'long_lat',
db.engine,
if_exists='replace',
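
The trailing comment is an inline pragma: it disables the no-member check for that one line, a narrower alternative to the module-wide ignored-modules entry in .pylintrc. A self-contained sketch of the same pragma against an in-memory SQLite database (table name invented):

    # Only the to_sql line has the check disabled; the rest of the module
    # is still linted as usual.
    import sqlite3

    import pandas as pd

    conn = sqlite3.connect(':memory:')
    df = pd.DataFrame({'value': [1, 2, 3]})
    df.to_sql(  # pylint: disable=no-member
        'example_table',
        conn,
        if_exists='replace',
        index=False,
    )
    print(pd.read_sql('SELECT * FROM example_table', conn))
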
2 changes: 2 additions & 0 deletions superset/db_engines/presto.py
@@ -11,6 +11,8 @@ def cancel(self):
return

response = presto.requests.delete(self._nextUri)

# pylint: disable=no-member
if response.status_code != presto.requests.codes.no_content:
fmt = "Unexpected status code after cancel {}\n{}"
raise presto.OperationalError(
3 changes: 2 additions & 1 deletion superset/migrations/env.py
@@ -20,9 +20,10 @@
# for 'autogenerate' support
# from myapp import mymodel
from flask import current_app

config.set_main_option('sqlalchemy.url',
current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = Base.metadata
target_metadata = Base.metadata # pylint: disable=no-member

# other values from the config, defined by the needs of env.py,
# can be acquired:
37 changes: 22 additions & 15 deletions superset/models/core.py
@@ -44,6 +44,7 @@
from urllib import parse # noqa

config = app.config
metadata = Model.metadata # pylint: disable=no-member


def set_related_perm(mapper, connection, target): # noqa
Expand Down Expand Up @@ -80,7 +81,7 @@ class CssTemplate(Model, AuditMixinNullable):
css = Column(Text, default='')


slice_user = Table('slice_user', Model.metadata,
slice_user = Table('slice_user', metadata,
Column('id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey('ab_user.id')),
Column('slice_id', Integer, ForeignKey('slices.id'))
@@ -121,26 +122,30 @@ def datasource(self):
@datasource.getter
@utils.memoized
def get_datasource(self):
ds = db.session.query(
self.cls_model).filter_by(
id=self.datasource_id).first()
return ds
return (
db.session.query(self.cls_model)
.filter_by(id=self.datasource_id)
.first()
)

@renders('datasource_name')
def datasource_link(self):
# pylint: disable=no-member
datasource = self.datasource
if datasource:
return self.datasource.link
return datasource.link if datasource else None

@property
def datasource_edit_url(self):
self.datasource.url
# pylint: disable=no-member
datasource = self.datasource
return datasource.url if datasource else None

@property
@utils.memoized
def viz(self):
d = json.loads(self.params)
viz_class = viz_types[self.viz_type]
# pylint: disable=no-member
return viz_class(self.datasource, form_data=d)

@property
@@ -269,14 +274,14 @@ def import_obj(cls, slc_to_import, import_time=None):


dashboard_slices = Table(
'dashboard_slices', Model.metadata,
'dashboard_slices', metadata,
Column('id', Integer, primary_key=True),
Column('dashboard_id', Integer, ForeignKey('dashboards.id')),
Column('slice_id', Integer, ForeignKey('slices.id')),
)

dashboard_user = Table(
'dashboard_user', Model.metadata,
'dashboard_user', metadata,
Column('id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey('ab_user.id')),
Column('dashboard_id', Integer, ForeignKey('dashboards.id'))
@@ -307,6 +312,7 @@ def __repr__(self):

@property
def table_names(self):
# pylint: disable=no-member
return ", ".join(
{"{}".format(s.datasource.full_name) for s in self.slices})

@@ -320,6 +326,7 @@ def datasources(self):

@property
def sqla_metadata(self):
# pylint: disable=no-member
metadata = MetaData(bind=self.get_sqla_engine())
return metadata.reflect()

@@ -816,7 +823,6 @@ class Query(Model):
# Could be configured in the superset config.
limit = Column(Integer)
limit_used = Column(Boolean, default=False)
limit_reached = Column(Boolean, default=False)
select_as_cta = Column(Boolean)
select_as_cta_used = Column(Boolean, default=False)

@@ -915,19 +921,20 @@ def datasource(self):
@datasource.getter
@utils.memoized
def get_datasource(self):
# pylint: disable=no-member
ds = db.session.query(self.cls_model).filter_by(
id=self.datasource_id).first()
return ds

@property
def datasource_link(self):
return self.datasource.link
return self.datasource.link # pylint: disable=no-member

@property
def roles_with_datasource(self):
action_list = ''
pv = sm.find_permission_view_menu(
'datasource_access', self.datasource.perm)
perm = self.datasource.perm # pylint: disable=no-member
pv = sm.find_permission_view_menu('datasource_access', perm)
for r in pv.role:
if r.name in self.ROLES_BLACKLIST:
continue
Expand All @@ -944,7 +951,7 @@ def roles_with_datasource(self):
@property
def user_roles(self):
action_list = ''
for r in self.created_by.roles:
for r in self.created_by.roles: # pylint: disable=no-member
url = (
'/superset/approve?datasource_type={self.datasource_type}&'
'datasource_id={self.datasource_id}&'
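
Earlier in this file, models/core.py now assigns metadata = Model.metadata once at module level (behind a single pylint pragma), and the slice_user, dashboard_slices, and dashboard_user association tables all reference that alias instead of repeating the attribute access. A standalone sketch of the pattern, with a bare SQLAlchemy MetaData object standing in for Model.metadata and an invented table:

    # Illustration only: in superset the shared object is Model.metadata
    # from flask_appbuilder; a plain MetaData() stands in for it here.
    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()

    example_user = Table(
        'example_user', metadata,
        Column('id', Integer, primary_key=True),
        Column('user_id', Integer, ForeignKey('ab_user.id')),
        Column('example_id', Integer, ForeignKey('examples.id')),
    )
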