Fix pylint
EugeneTorap committed Jul 15, 2023
1 parent 2956ec8 commit a0eb3b2
Showing 46 changed files with 97 additions and 125 deletions.
2 changes: 1 addition & 1 deletion requirements/development.txt
@@ -1,4 +1,4 @@
-# SHA1:4c0ce3a84b01a5a3fe6c72cbf2fc96e5eada2dbe
+# SHA1:6efbd7091f929419b62a027517953f527b237726
#
# This file is autogenerated by pip-compile-multi
# To update, run:
2 changes: 1 addition & 1 deletion superset/charts/commands/delete.py
@@ -64,7 +64,7 @@ def validate(self) -> None:
if reports := ReportScheduleDAO.find_by_chart_ids(self._model_ids):
report_names = [report.name for report in reports]
raise ChartDeleteFailedReportsExistError(
_("There are associated alerts or reports: %s" % ",".join(report_names))
_(f"There are associated alerts or reports: {','.join(report_names)}")
)
# Check ownership
for model in self._models:
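A note on this pattern, which recurs in several delete commands in this commit: `_()` here is flask-babel's gettext, and an f-string interpolates before the translation lookup, so the catalog is queried with the already-formatted text rather than a static msgid. A minimal self-contained sketch of the two orders of operations, using a plain dict as a stand-in catalog (all names and the toy `_` are illustrative, not Superset's API):

```python
# Stand-in translation catalog; a real one is compiled from .po files.
catalog = {
    "There are associated alerts or reports: %(names)s": (
        "Es gibt zugehörige Alarme oder Berichte: %(names)s"
    ),
}

def _(msgid: str, **variables: str) -> str:
    """Toy gettext: look up the msgid, then interpolate named parameters."""
    text = catalog.get(msgid, msgid)
    return text % variables if variables else text

report_names = ["weekly-sales", "daily-errors"]

# f-string first: the formatted string is not a key in the catalog,
# so the lookup falls through to the untranslated text.
print(_(f"There are associated alerts or reports: {','.join(report_names)}"))

# Placeholder first: the static msgid matches, then names are substituted.
print(_("There are associated alerts or reports: %(names)s",
        names=",".join(report_names)))
```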
1 change: 0 additions & 1 deletion superset/charts/commands/warm_up_cache.py
@@ -62,7 +62,6 @@ def run(self) -> dict[str, Any]:
force=True,
)

-            # pylint: disable=assigning-non-slot
g.form_data = form_data
payload = obj.get_payload()
delattr(g, "form_data")
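The `assigning-non-slot` disables dropped here (and in `superset/cli/importexport.py` below) guarded what appears to be a known pylint false positive: `flask.g` is a werkzeug `LocalProxy`, which defines `__slots__`, so older pylint inferred the attribute assignment as landing on the slotted proxy rather than on the per-context namespace it forwards to. A minimal sketch of the runtime behavior (assumes Flask is installed):

```python
from flask import Flask, g

app = Flask(__name__)

with app.app_context():
    # Older pylint flagged this as assigning-non-slot because g is a
    # slotted LocalProxy; at runtime the proxy forwards the assignment
    # to the per-context globals object, so it works as intended.
    g.form_data = {"slice_id": 42}
    print(g.form_data)
    delattr(g, "form_data")
```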
4 changes: 2 additions & 2 deletions superset/charts/data/api.py
@@ -143,7 +143,7 @@ def get_data(self, pk: int) -> Response:
query_context = self._create_query_context_from_form(json_body)
command = ChartDataCommand(query_context)
command.validate()
-        except DatasourceNotFound as error:
+        except DatasourceNotFound:
return self.response_404()
except QueryObjectValidationError as error:
return self.response_400(message=error.message)
@@ -233,7 +233,7 @@ def data(self) -> Response:
query_context = self._create_query_context_from_form(json_body)
command = ChartDataCommand(query_context)
command.validate()
-        except DatasourceNotFound as error:
+        except DatasourceNotFound:
return self.response_404()
except QueryObjectValidationError as error:
return self.response_400(message=error.message)
3 changes: 2 additions & 1 deletion superset/charts/filters.py
@@ -160,7 +160,8 @@ def apply(self, query: Query, value: Any) -> Query:
Slice.id == FavStar.obj_id,
),
isouter=True,
-        ).filter( # pylint: disable=comparison-with-callable
+        ).filter(
+            # pylint: disable=comparison-with-callable
or_(
Slice.id.in_(owner_ids_query),
Slice.created_by_fk == get_user_id(),
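The disable kept here (only moved inside the `.filter()` call so it scopes over the whole expression) suppresses pylint's `comparison-with-callable`, which can fire spuriously on SQLAlchemy column expressions such as `Slice.created_by_fk == get_user_id()`. The genuine mistake the check targets is comparing against a callable object instead of its result; a minimal sketch with illustrative names:

```python
def get_user_id() -> int:
    return 7

user_id = 7

if user_id == get_user_id:    # W0143: compares against the function object
    print("never reached")

if user_id == get_user_id():  # intended: compare against the call's result
    print("matches")
```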
4 changes: 0 additions & 4 deletions superset/cli/importexport.py
@@ -74,7 +74,6 @@ def export_dashboards(dashboard_file: Optional[str] = None) -> None:
from superset.dashboards.commands.export import ExportDashboardsCommand
from superset.models.dashboard import Dashboard

-    # pylint: disable=assigning-non-slot
g.user = security_manager.find_user(username="admin")

dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
@@ -109,7 +108,6 @@ def export_datasources(datasource_file: Optional[str] = None) -> None:
from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.export import ExportDatasetsCommand

-    # pylint: disable=assigning-non-slot
g.user = security_manager.find_user(username="admin")

dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
@@ -151,7 +149,6 @@ def import_dashboards(path: str, username: Optional[str]) -> None:
)

if username is not None:
-        # pylint: disable=assigning-non-slot
g.user = security_manager.find_user(username=username)
if is_zipfile(path):
with ZipFile(path) as bundle:
@@ -317,7 +314,6 @@ def import_dashboards(path: str, recursive: bool, username: str) -> None:
elif path_object.exists() and recursive:
files.extend(path_object.rglob("*.json"))
if username is not None:
-        # pylint: disable=assigning-non-slot
g.user = security_manager.find_user(username=username)
contents = {}
for path_ in files:
7 changes: 1 addition & 6 deletions superset/cli/main.py
@@ -71,12 +71,7 @@ def init() -> None:
def version(verbose: bool) -> None:
"""Prints the current version number"""
print(Fore.BLUE + "-=" * 15)
-    print(
-        Fore.YELLOW
-        + "Superset "
-        + Fore.CYAN
-        + "{version}".format(version=app.config["VERSION_STRING"])
-    )
+    print(Fore.YELLOW + "Superset " + Fore.CYAN + f"{app.config['VERSION_STRING']}")
print(Fore.BLUE + "-=" * 15)
if verbose:
print("[DB] : " + f"{db.engine}")
2 changes: 1 addition & 1 deletion superset/commands/exceptions.py
@@ -44,7 +44,7 @@ def __init__(
super().__init__(
_(
self.message_format.format(
object_type, '"%s" ' % object_id if object_id else ""
object_type, f'"{object_id}" ' if object_id else ""
)
),
exception,
2 changes: 1 addition & 1 deletion superset/commands/importers/v1/examples.py
@@ -91,7 +91,7 @@ def _get_uuids(cls) -> set[str]:
)

@staticmethod
-    def _import( # pylint: disable=arguments-differ, too-many-locals, too-many-branches
+    def _import( # pylint: disable=too-many-locals, too-many-branches
session: Session,
configs: dict[str, Any],
overwrite: bool = False,
2 changes: 1 addition & 1 deletion superset/connectors/sqla/models.py
@@ -300,7 +300,7 @@ def type_generic(self) -> utils.GenericDataType | None:
return GenericDataType.TEMPORAL

return (
-            column_spec.generic_type # pylint: disable=used-before-assignment
+            column_spec.generic_type
if (
column_spec := self.db_engine_spec.get_column_spec(
self.type,
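The disable dropped above guarded a pylint false positive on the walrus operator: in a conditional expression the condition is evaluated before the `if`-true branch, so `column_spec` is bound by `:=` before `column_spec.generic_type` is ever read. A small sketch of the same evaluation order (names hypothetical):

```python
def generic_type(type_str: str, registry: dict[str, str]) -> str:
    # The condition binds spec first; only then is spec.upper() evaluated,
    # so there is no use-before-assignment despite the textual order.
    return (
        spec.upper()
        if (spec := registry.get(type_str)) is not None
        else "UNKNOWN"
    )

print(generic_type("varchar", {"varchar": "string"}))  # STRING
print(generic_type("geo", {"varchar": "string"}))      # UNKNOWN
```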
2 changes: 1 addition & 1 deletion superset/connectors/sqla/views.py
@@ -410,7 +410,7 @@ class TableModelView( # pylint: disable=too-many-ancestors
)
}

-    def post_add( # pylint: disable=arguments-differ
+    def post_add(
self,
item: "TableModelView",
flash_message: bool = True,
2 changes: 1 addition & 1 deletion superset/daos/base.py
@@ -33,7 +33,7 @@
from superset.extensions import db
from superset.utils.core import get_iterable

-T = TypeVar("T", bound=Model) # pylint: disable=invalid-name
+T = TypeVar("T", bound=Model)


class BaseDAO(Generic[T]):
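The `invalid-name` disable comes off because recent pylint ships a dedicated `typevar` naming style, so a module-level `T = TypeVar(...)` no longer trips the UPPER_CASE constant rule. A self-contained sketch (the `Model` stand-in replaces flask-appbuilder's base class):

```python
from typing import Generic, TypeVar

class Model:
    """Stand-in for flask_appbuilder.Model."""

T = TypeVar("T", bound=Model)  # accepted by pylint's typevar naming rules

class BaseDAO(Generic[T]):
    """Base DAO parameterized by the model type it manages."""
```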
13 changes: 6 additions & 7 deletions superset/dashboards/api.py
@@ -284,10 +284,9 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]:

def __repr__(self) -> str:
"""Deterministic string representation of the API instance for etag_cache."""
return "Superset.dashboards.api.DashboardRestApi@v{}{}".format(
self.appbuilder.app.config["VERSION_STRING"],
self.appbuilder.app.config["VERSION_SHA"],
)
version_str = self.appbuilder.app.config["VERSION_STRING"]
version_sha = self.appbuilder.app.config["VERSION_SHA"]
return f"Superset.dashboards.api.DashboardRestApi@v{version_str}{version_sha}"

@expose("/<id_or_slug>", methods=("GET",))
@protect()
@@ -305,7 +304,7 @@ def __repr__(self) -> str:
@statsd_metrics
@with_dashboard
@event_logger.log_this_with_extra_payload
-    # pylint: disable=arguments-differ
+    # pylint: disable=arguments-differ,arguments-renamed
def get(
self,
dash: Dashboard,
@@ -756,8 +755,8 @@ def bulk_delete(self, **kwargs: Any) -> Response:
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.export",
log_to_statsd=False,
-    ) # pylint: disable=too-many-locals
-    def export(self, **kwargs: Any) -> Response:
+    )
+    def export(self, **kwargs: Any) -> Response: # pylint: disable=too-many-locals
"""Export dashboards
---
get:
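Hoisting the config lookups into locals in `__repr__` above is not just cosmetic: before Python 3.12, a replacement field inside an f-string cannot reuse the f-string's own quote character, so a double-quoted subscript inside a double-quoted f-string is a syntax error. A quick sketch of the options (config values are illustrative):

```python
config = {"VERSION_STRING": "3.0.0", "VERSION_SHA": "abc123"}  # illustrative

# f"v{config["VERSION_STRING"]}"  # SyntaxError before Python 3.12

# Option 1 (used in the diff): hoist the lookups into locals.
version_str = config["VERSION_STRING"]
version_sha = config["VERSION_SHA"]
print(f"Superset.dashboards.api.DashboardRestApi@v{version_str}{version_sha}")

# Option 2: switch quote styles inside the replacement field.
print(f"...@v{config['VERSION_STRING']}{config['VERSION_SHA']}")
```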
2 changes: 1 addition & 1 deletion superset/dashboards/commands/delete.py
@@ -60,7 +60,7 @@ def validate(self) -> None:
if reports := ReportScheduleDAO.find_by_dashboard_ids(self._model_ids):
report_names = [report.name for report in reports]
raise DashboardDeleteFailedReportsExistError(
_("There are associated alerts or reports: %s" % ",".join(report_names))
_(f"There are associated alerts or reports: {','.join(report_names)}")
)
# Check ownership
for model in self._models:
2 changes: 1 addition & 1 deletion superset/dashboards/filter_sets/api.py
@@ -181,7 +181,7 @@ def get_list(self, dashboard_id: int, **kwargs: Any) -> Response:
$ref: '#/components/responses/404'
"""
if not DashboardDAO.find_by_id(cast(int, dashboard_id)):
-            return self.response(404, message="dashboard '%s' not found" % dashboard_id)
+            return self.response(404, message=f"dashboard '{dashboard_id}' not found")
rison_data = kwargs.setdefault("rison", {})
rison_data.setdefault("filters", [])
rison_data["filters"].append(
3 changes: 1 addition & 2 deletions superset/dashboards/filter_sets/commands/delete.py
@@ -49,7 +49,6 @@ def validate(self) -> None:
except FilterSetNotFoundError as err:
if FilterSetDAO.find_by_id(self._filter_set_id): # type: ignore
raise FilterSetForbiddenError(
-                    'the filter-set does not related to dashboard "%s"'
-                    % str(self._dashboard_id)
+                    f"the filter-set does not related to dashboard {self._dashboard_id}"
) from err
raise err
3 changes: 1 addition & 2 deletions superset/dashboards/filter_sets/schemas.py
@@ -46,7 +46,6 @@ def _validate_json_meta_data(self, json_meta_data: str) -> None:

class FilterSetPostSchema(FilterSetSchema):
json_metadata_schema: JsonMetadataSchema = JsonMetadataSchema()
-    # pylint: disable=W0613
name = fields.String(
required=True,
allow_none=False,
@@ -83,7 +82,7 @@ class FilterSetPutSchema(FilterSetSchema):
)

@post_load
-    def validate( # pylint: disable=unused-argument
+    def validate(
self, data: Mapping[Any, Any], *, many: Any, partial: Any
) -> dict[str, Any]:
if JSON_METADATA_FIELD in data:
8 changes: 4 additions & 4 deletions superset/databases/commands/create.py
@@ -142,10 +142,10 @@ def validate(self) -> None:
if exceptions:
exception = DatabaseInvalidError()
exception.extend(exceptions)

+            ex_cls_name = exception.__class__.__name__
+            ex_cls_names = ".".join(exception.get_list_classnames())
            event_logger.log_with_context(
-                action="db_connection_failed.{}.{}".format(
-                    exception.__class__.__name__,
-                    ".".join(exception.get_list_classnames()),
-                )
+                action=f"db_connection_failed.{ex_cls_name}.{ex_cls_names}"
)
raise exception
2 changes: 1 addition & 1 deletion superset/databases/commands/delete.py
@@ -59,7 +59,7 @@ def validate(self) -> None:
if reports := ReportScheduleDAO.find_by_database_id(self._model_id):
report_names = [report.name for report in reports]
raise DatabaseDeleteFailedReportsExistError(
_("There are associated alerts or reports: %s" % ",".join(report_names))
_(f"There are associated alerts or reports: {','.join(report_names)}")
)
# Check if there are datasets for this database
if self._model.tables:
6 changes: 2 additions & 4 deletions superset/databases/commands/validate_sql.py
@@ -108,10 +108,8 @@ def validate(self) -> None:
raise NoValidatorFoundError(
SupersetError(
message=__(
"No validator named {} found "
"(configured for the {} engine)".format(
validator_name, spec.engine
)
f"No validator named {validator_name} found "
f"(configured for the {spec.engine} engine)"
),
error_type=SupersetErrorType.GENERIC_DB_ENGINE_ERROR,
level=ErrorLevel.ERROR,
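The rewrite above leans on implicit concatenation of adjacent string literals, which works per fragment: each piece needs its own `f` prefix to interpolate. A minimal sketch with hypothetical values:

```python
validator_name = "PrestoDBSQLValidator"  # hypothetical
engine = "presto"                        # hypothetical

# Adjacent literals are joined at compile time; each f-prefix applies
# only to its own fragment.
message = (
    f"No validator named {validator_name} found "
    f"(configured for the {engine} engine)"
)
print(message)
```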
8 changes: 4 additions & 4 deletions superset/databases/ssh_tunnel/commands/create.py
@@ -83,10 +83,10 @@ def validate(self) -> None:
if exceptions:
exception = SSHTunnelInvalidError()
exception.extend(exceptions)

+            ex_cls_name = exception.__class__.__name__
+            ex_cls_names = ".".join(exception.get_list_classnames())
            event_logger.log_with_context(
-                action="ssh_tunnel_creation_failed.{}.{}".format(
-                    exception.__class__.__name__,
-                    ".".join(exception.get_list_classnames()),
-                )
+                action=f"ssh_tunnel_creation_failed.{ex_cls_name}.{ex_cls_names}"
)
raise exception
4 changes: 2 additions & 2 deletions superset/datasets/api.py
@@ -474,8 +474,8 @@ def delete(self, pk: int) -> Response:
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.export",
log_to_statsd=False,
-    ) # pylint: disable=too-many-locals
-    def export(self, **kwargs: Any) -> Response:
+    )
+    def export(self, **kwargs: Any) -> Response: # pylint: disable=too-many-locals
"""Export datasets
---
get:
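Moving `# pylint: disable=too-many-locals` from the decorator's closing parenthesis onto the `def` line (here and in `superset/dashboards/api.py` above) matters for scoping: a trailing disable suppresses messages reported on that statement, and `too-many-locals` is reported on the function definition, so the comment on the decorator covered only the decorator call. A sketch with a stand-in decorator:

```python
import functools

def log_this(func):
    """Stand-in for event_logger.log_this_with_context."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

@log_this
def export(**kwargs):  # pylint: disable=too-many-locals
    # The disable on the def line scopes over this function, so a body
    # with many locals no longer triggers the check.
    return kwargs
```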
7 changes: 4 additions & 3 deletions superset/db_engine_specs/hive.py
Original file line number Diff line number Diff line change
@@ -312,9 +312,10 @@ def progress(cls, log_lines: list[str]) -> int:
reduce_progress = int(match.groupdict()["reduce_progress"])
stages[stage_number] = (map_progress + reduce_progress) / 2
logger.info(
"Progress detail: {}, " # pylint: disable=logging-format-interpolation
"current job {}, "
"total jobs: {}".format(stages, current_job, total_jobs)
"Progress detail: %s, current job %s, total jobs: %s",
stages,
current_job,
total_jobs,
)

stage_progress = sum(stages.values()) / len(stages.values()) if stages else 0
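The hive.py change is the classic fix for `logging-format-interpolation`: pass %-style placeholders plus arguments so the logging machinery formats the message only when the record is actually emitted. A runnable sketch:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

stages, current_job, total_jobs = {1: 50.0}, 1, 2

# Flagged form: str.format runs eagerly even if INFO is filtered out.
# logger.info("Progress detail: {}, current job {}, total jobs: {}"
#             .format(stages, current_job, total_jobs))

# Preferred form: interpolation is deferred to the logging framework.
logger.info(
    "Progress detail: %s, current job %s, total jobs: %s",
    stages,
    current_job,
    total_jobs,
)
```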
2 changes: 1 addition & 1 deletion superset/db_engine_specs/ocient.py
@@ -238,7 +238,7 @@ class OcientEngineSpec(BaseEngineSpec):
# Store mapping of superset Query id -> Ocient ID
# These are inserted into the cache when executing the query
# They are then removed, either upon cancellation or query completion
-    query_id_mapping: dict[str, str] = dict()
+    query_id_mapping: dict[str, str] = {}
query_id_mapping_lock = threading.Lock()

custom_errors: dict[Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]] = {
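Swapping `dict()` for `{}` satisfies pylint's `use-dict-literal` refactoring check; the literal is the idiomatic spelling and also skips a global name lookup and a call. A quick comparison:

```python
import timeit

# The literal avoids looking up the name `dict` and calling it.
print(timeit.timeit("dict()", number=1_000_000))
print(timeit.timeit("{}", number=1_000_000))

query_id_mapping: dict[str, str] = {}  # preferred over dict()
```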
26 changes: 13 additions & 13 deletions superset/db_engine_specs/presto.py
@@ -619,12 +619,13 @@ def latest_sub_partition(
raise SupersetTemplateException(msg)
if len(kwargs.keys()) != len(part_fields) - 1:
msg = (
"A filter needs to be specified for {} out of the " "{} fields."
).format(len(part_fields) - 1, len(part_fields))
f"A filter needs to be specified for {len(part_fields) - 1} out of the "
f"{len(part_fields)} fields."
)
raise SupersetTemplateException(msg)

for field in part_fields:
-            if field not in kwargs.keys():
+            if field not in kwargs:
field_to_return = field

sql = cls._partition_query(
@@ -931,9 +932,7 @@ def _parse_structural_column( # pylint: disable=too-many-locals
)
else: # otherwise this field is a basic data type
full_parent_path = cls._get_full_name(stack)
column_name = "{}.{}".format(
full_parent_path, field_info[0]
)
column_name = f"{full_parent_path}.{field_info[0]}"
result.append(
cls._create_column_info(column_name, column_type)
)
@@ -1319,8 +1318,10 @@ def handle_cursor(cls, cursor: Cursor, query: Query, session: Session) -> None:
if total_splits and completed_splits:
progress = 100 * (completed_splits / total_splits)
logger.info(
"Query {} progress: {} / {} " # pylint: disable=logging-format-interpolation
"splits".format(query_id, completed_splits, total_splits)
"Query %s progress: %s / %s splits",
query_id,
completed_splits,
total_splits,
)
if progress > query.progress:
query.progress = progress
@@ -1337,11 +1338,10 @@ def _extract_error_message(cls, ex: Exception) -> str:
and isinstance(ex.orig[0], dict)
):
error_dict = ex.orig[0]
return "{} at {}: {}".format(
error_dict.get("errorName"),
error_dict.get("errorLocation"),
error_dict.get("message"),
)
error_name = error_dict.get("errorName")
error_location = error_dict.get("errorLocation")
message = error_dict.get("message")
return f"{error_name} at {error_location}: {message}"
if type(ex).__name__ == "DatabaseError" and hasattr(ex, "args") and ex.args:
error_dict = ex.args[0]
return error_dict.get("message", _("Unknown Presto Error"))
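Among the presto.py changes, `if field not in kwargs:` reflects that membership tests on a dict already consult its keys; spelling it `kwargs.keys()` builds a keys view just to do the same test, which pylint flags as `consider-iterating-dictionary`. A sketch mirroring the partition lookup (values hypothetical):

```python
part_fields = ["ds", "hour"]   # hypothetical partition columns
kwargs = {"ds": "2023-07-15"}  # one filter supplied by the caller

field_to_return = None
for field in part_fields:
    if field not in kwargs:    # same result as `not in kwargs.keys()`
        field_to_return = field
print(field_to_return)         # hour
```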
6 changes: 3 additions & 3 deletions superset/legacy.py
@@ -21,6 +21,6 @@
def update_time_range(form_data: dict[str, Any]) -> None:
"""Move since and until to time_range."""
if "since" in form_data or "until" in form_data:
form_data["time_range"] = "{} : {}".format(
form_data.pop("since", "") or "", form_data.pop("until", "") or ""
)
form_data[
"time_range"
] = f'{form_data.pop("since", "") or ""} : {form_data.pop("until", "") or ""}'
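For context, the rewritten helper behaves identically to the `.format()` version; a self-contained run:

```python
from typing import Any

def update_time_range(form_data: dict[str, Any]) -> None:
    """Move the legacy since/until keys into a single time_range string."""
    if "since" in form_data or "until" in form_data:
        form_data[
            "time_range"
        ] = f'{form_data.pop("since", "") or ""} : {form_data.pop("until", "") or ""}'

form_data = {"since": "2023-01-01", "until": None, "slice_id": 1}
update_time_range(form_data)
print(form_data)  # {'slice_id': 1, 'time_range': '2023-01-01 : '}
```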
@@ -74,7 +74,7 @@ def created_by_fk(cls):
def created_by(cls):
return relationship(
"User",
primaryjoin="%s.created_by_fk == User.id" % cls.__name__,
primaryjoin=f"{cls.__name__}.created_by_fk == User.id",
enable_typechecks=False,
)

@@ -106,7 +106,7 @@ def format_seconds(value):
else:
period = "second"

return "{} {}{}".format(value, period, "s" if value > 1 else "")
return f"{value} {period}{'s' if value > 1 else ''}"


def compute_time_compare(granularity, periods):
@@ -120,7 +120,7 @@ def compute_time_compare(granularity, periods):
obj = isodate.parse_duration(granularity) * periods
except isodate.isoerror.ISO8601Error:
# if parse_human_timedelta can parse it, return it directly
delta = "{} {}{}".format(periods, granularity, "s" if periods > 1 else "")
delta = f"{periods} {granularity}{'s' if periods > 1 else ''}"
obj = parse_human_timedelta(delta)
if obj:
return delta