From c6f48c50e3819c3ec1aee2ad786372127fcd2224 Mon Sep 17 00:00:00 2001
From: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Date: Fri, 3 Apr 2020 11:59:44 +0300
Subject: [PATCH 1/3] lint: accept 2 letter names by default

---
 .pylintrc                                    | 14 +++++++-------
 superset/common/query_context.py             |  6 ++----
 superset/connectors/base/models.py           |  6 +++---
 superset/connectors/sqla/models.py           |  2 +-
 superset/datasets/api.py                     |  2 +-
 superset/datasets/schemas.py                 |  4 ++--
 superset/db_engine_specs/base.py             |  4 +---
 superset/db_engine_specs/presto.py           |  4 +---
 superset/models/annotations.py               |  4 ++--
 superset/models/core.py                      | 12 ++++++------
 superset/models/dashboard.py                 |  2 +-
 superset/models/datasource_access_request.py |  2 +-
 superset/models/helpers.py                   |  2 +-
 superset/models/schedules.py                 |  2 +-
 superset/models/slice.py                     |  2 +-
 superset/models/sql_lab.py                   | 12 ++++--------
 superset/models/tags.py                      |  4 ++--
 superset/models/user_attributes.py           |  2 +-
 superset/views/base_api.py                   |  2 +-
 superset/views/database/decorators.py        |  4 +---
 20 files changed, 40 insertions(+), 52 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index abe341a8c72c8..0ecf516b12e12 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -132,10 +132,10 @@ include-naming-hint=no
 property-classes=abc.abstractproperty
 
 # Regular expression matching correct argument names
-argument-rgx=[a-z_][a-z0-9_]{2,30}$
+argument-rgx=[a-z_][a-z0-9_]{1,30}$
 
 # Naming hint for argument names
-argument-name-hint=[a-z_][a-z0-9_]{2,30}$
+argument-name-hint=[a-z_][a-z0-9_]{1,30}$
 
 # Regular expression matching correct method names
 method-rgx=[a-z_][a-z0-9_]{2,30}$
@@ -147,7 +147,7 @@ method-name-hint=[a-z_][a-z0-9_]{2,30}$
 variable-rgx=[a-z_][a-z0-9_]{1,30}$
 
 # Naming hint for variable names
-variable-name-hint=[a-z_][a-z0-9_]{2,30}$
+variable-name-hint=[a-z_][a-z0-9_]{1,30}$
 
 # Regular expression matching correct inline iteration names
 inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
@@ -168,10 +168,10 @@ class-rgx=[A-Z_][a-zA-Z0-9]+$
 class-name-hint=[A-Z_][a-zA-Z0-9]+$
 
 # Regular expression matching correct class attribute names
-class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{1,30}|(__.*__))$
 
 # Naming hint for class attribute names
-class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{1,30}|(__.*__))$
 
 # Regular expression matching correct module names
 module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
@@ -180,10 +180,10 @@ module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
 module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
 
 # Regular expression matching correct attribute names
-attr-rgx=[a-z_][a-z0-9_]{2,30}$
+attr-rgx=[a-z_][a-z0-9_]{1,30}$
 
 # Naming hint for attribute names
-attr-name-hint=[a-z_][a-z0-9_]{2,30}$
+attr-name-hint=[a-z_][a-z0-9_]{1,30}$
 
 # Regular expression matching correct function names
 function-rgx=[a-z_][a-z0-9_]{2,30}$
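Note: the hunks above lower the minimum identifier length. `[a-z_][a-z0-9_]{2,30}$` requires one leading character plus at least two more (three characters total), so two-letter names such as `df` or `pk` tripped invalid-name unless disabled inline; `{1,30}` drops the minimum to two characters. A minimal sketch of the kind of code this unblocks — the function and names are illustrative, not part of the patch:

    import pandas as pd


    def to_records(df: pd.DataFrame) -> list:
        # "df" fails argument-rgx=[a-z_][a-z0-9_]{2,30}$ (three-char minimum)
        # but passes argument-rgx=[a-z_][a-z0-9_]{1,30}$ (two-char minimum)
        return df.to_dict(orient="records")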
diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index 525d8292ff0df..a0c040ccfaf54 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -113,7 +113,7 @@ def get_query_result(self, query_object: QueryObject) -> Dict[str, Any]:
         }
 
     @staticmethod
-    def df_metrics_to_num(  # pylint: disable=invalid-name,no-self-use
+    def df_metrics_to_num(  # pylint: disable=no-self-use
         df: pd.DataFrame, query_object: QueryObject
     ) -> None:
         """Converting metrics to numeric when pandas.read_sql cannot"""
@@ -122,9 +122,7 @@ def df_metrics_to_num(  # pylint: disable=no-self-use
                 df[col] = pd.to_numeric(df[col], errors="coerce")
 
     @staticmethod
-    def get_data(  # pylint: disable=invalid-name,no-self-use
-        df: pd.DataFrame,
-    ) -> List[Dict]:
+    def get_data(df: pd.DataFrame,) -> List[Dict]:  # pylint: disable=no-self-use
         return df.to_dict(orient="records")
 
     def get_single_payload(self, query_obj: QueryObject) -> Dict[str, Any]:
diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py
index dfcafbfddd19b..8e1acc74adecd 100644
--- a/superset/connectors/base/models.py
+++ b/superset/connectors/base/models.py
@@ -76,7 +76,7 @@ class BaseDatasource(
 
     # ---------------------------------------------------------------
     # Columns
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     description = Column(Text)
     default_endpoint = Column(Text)
     is_featured = Column(Boolean, default=False)  # TODO deprecating
@@ -453,7 +453,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
 
     __tablename__: Optional[str] = None  # {connector_name}_column
 
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     column_name = Column(String(255), nullable=False)
     verbose_name = Column(String(1024))
     is_active = Column(Boolean, default=True)
@@ -526,7 +526,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin):
 
     __tablename__: Optional[str] = None  # {connector_name}_metric
 
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     metric_name = Column(String(255), nullable=False)
     verbose_name = Column(String(1024))
     metric_type = Column(String(32))
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 947551cf5bf21..44205e7c54ebe 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -1254,7 +1254,7 @@ class RowLevelSecurityFilter(Model, AuditMixinNullable):
     """
 
     __tablename__ = "row_level_security_filters"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     roles = relationship(
         security_manager.role_model,
         secondary=RLSFilterRoles,
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index 8c1877796cff7..03c77b4c739e1 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -345,7 +345,7 @@ def export(self, **kwargs: Any) -> Response:
     @expose("/<pk>/refresh", methods=["PUT"])
     @protect()
     @safe
-    def refresh(self, pk: int) -> Response:  # pylint: disable=invalid-name
+    def refresh(self, pk: int) -> Response:
         """Refresh a Dataset
         ---
         put:
diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py
index b27df9328c308..7fac3592de491 100644
--- a/superset/datasets/schemas.py
+++ b/superset/datasets/schemas.py
@@ -39,7 +39,7 @@ def validate_python_date_format(value: str) -> None:
 
 
 class DatasetColumnsPutSchema(Schema):
-    id = fields.Integer()  # pylint: disable=invalid-name
+    id = fields.Integer()
     column_name = fields.String(required=True, validate=Length(1, 255))
     type = fields.String(validate=Length(1, 32))
     verbose_name = fields.String(allow_none=True, Length=(1, 1024))
@@ -55,7 +55,7 @@ class DatasetColumnsPutSchema(Schema):
 
 
 class DatasetMetricsPutSchema(Schema):
-    id = fields.Integer()  # pylint: disable=invalid-name
+    id = fields.Integer()
     expression = fields.String(required=True)
     description = fields.String(allow_none=True)
     metric_name = fields.String(required=True, validate=Length(1, 255))
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index 478cca27303aa..eaf5405b27c19 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -441,9 +441,7 @@ def csv_to_df(**kwargs: Any) -> pd.DataFrame:
         return df
 
     @classmethod
-    def df_to_sql(  # pylint: disable=invalid-name
-        cls, df: pd.DataFrame, **kwargs: Any
-    ) -> None:
+    def df_to_sql(cls, df: pd.DataFrame, **kwargs: Any) -> None:
         """ Upload data from a Pandas DataFrame to a database. For
         regular engines this calls the DataFrame.to_sql() method. Can be
         overridden for engines that don't work well with to_sql(), e.g.
diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index 038b25b8cace9..04089ba4eb0fc 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -863,9 +863,7 @@ def where_latest_partition(  # pylint: disable=too-many-arguments
         return query
 
     @classmethod
-    def _latest_partition_from_df(  # pylint: disable=invalid-name
-        cls, df: pd.DataFrame
-    ) -> Optional[List[str]]:
+    def _latest_partition_from_df(cls, df: pd.DataFrame) -> Optional[List[str]]:
         if not df.empty:
             return df.to_records(index=False)[0].item()
         return None
diff --git a/superset/models/annotations.py b/superset/models/annotations.py
index 33197ddd5c30d..07e23517486a8 100644
--- a/superset/models/annotations.py
+++ b/superset/models/annotations.py
@@ -27,7 +27,7 @@ class AnnotationLayer(Model, AuditMixinNullable):
     """A logical namespace for a set of annotations"""
 
     __tablename__ = "annotation_layer"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     name = Column(String(250))
     descr = Column(Text)
 
@@ -40,7 +40,7 @@ class Annotation(Model, AuditMixinNullable):
     """Time-related annotation"""
 
     __tablename__ = "annotation"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     start_dttm = Column(DateTime)
     end_dttm = Column(DateTime)
     layer_id = Column(Integer, ForeignKey("annotation_layer.id"), nullable=False)
diff --git a/superset/models/core.py b/superset/models/core.py
index 1f56019ff3700..0861fc13f709e 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -73,7 +73,7 @@ class Url(Model, AuditMixinNullable):
     """Used for the short url feature"""
 
     __tablename__ = "url"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     url = Column(Text)
 
 
@@ -82,7 +82,7 @@ class KeyValue(Model):  # pylint: disable=too-few-public-methods
     """Used for any type of key-value store"""
 
     __tablename__ = "keyvalue"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     value = Column(Text, nullable=False)
 
 
@@ -91,7 +91,7 @@ class CssTemplate(Model, AuditMixinNullable):
    """CSS templates for dashboards"""
 
     __tablename__ = "css_templates"
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     template_name = Column(String(250))
     css = Column(Text, default="")
 
@@ -106,7 +106,7 @@ class Database(
     type = "table"
     __table_args__ = (UniqueConstraint("database_name"),)
 
-    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
+    id = Column(Integer, primary_key=True)
     verbose_name = Column(String(250), unique=True)
     # short unique name, used in permissions
     database_name = Column(String(250), unique=True, nullable=False)
"logs" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) action = Column(String(512)) user_id = Column(Integer, ForeignKey("ab_user.id")) dashboard_id = Column(Integer) @@ -662,7 +662,7 @@ class Log(Model): # pylint: disable=too-few-public-methods class FavStar(Model): # pylint: disable=too-few-public-methods __tablename__ = "favstar" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey("ab_user.id")) class_name = Column(String(50)) obj_id = Column(Integer) diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index 8779bb7a5f6ba..cd3b0f95fd741 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -119,7 +119,7 @@ class Dashboard( # pylint: disable=too-many-instance-attributes """The dashboard object!""" __tablename__ = "dashboards" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) dashboard_title = Column(String(500)) position_json = Column(utils.MediumText()) description = Column(Text) diff --git a/superset/models/datasource_access_request.py b/superset/models/datasource_access_request.py index 803a91115f9a4..8940611b5eceb 100644 --- a/superset/models/datasource_access_request.py +++ b/superset/models/datasource_access_request.py @@ -37,7 +37,7 @@ class DatasourceAccessRequest(Model, AuditMixinNullable): """ORM model for the access requests for datasources and dbs.""" __tablename__ = "access_request" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) datasource_id = Column(Integer) datasource_type = Column(String(200)) diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 7a1433453571e..77c1742feebb1 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -376,7 +376,7 @@ class QueryResult: # pylint: disable=too-few-public-methods def __init__( # pylint: disable=too-many-arguments self, df, query, duration, status=QueryStatus.SUCCESS, error_message=None ): - self.df: pd.DataFrame = df # pylint: disable=invalid-name + self.df: pd.DataFrame = df self.query: str = query self.duration: int = duration self.status: str = status diff --git a/superset/models/schedules.py b/superset/models/schedules.py index 815697ecdaf6c..5d10b567658bc 100644 --- a/superset/models/schedules.py +++ b/superset/models/schedules.py @@ -50,7 +50,7 @@ class EmailSchedule: __tablename__ = "email_schedules" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) active = Column(Boolean, default=True, index=True) crontab = Column(String(50)) diff --git a/superset/models/slice.py b/superset/models/slice.py index 71371faa1368a..d867431a2ff4f 100644 --- a/superset/models/slice.py +++ b/superset/models/slice.py @@ -55,7 +55,7 @@ class Slice( """A slice is essentially a report or a view on data""" __tablename__ = "slices" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) slice_name = Column(String(250)) datasource_id = Column(Integer) datasource_type = Column(String(200)) diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py index 3dad0da31c401..8f0254df538c9 100644 --- a/superset/models/sql_lab.py +++ b/superset/models/sql_lab.py @@ -48,7 +48,7 @@ class Query(Model, ExtraJSONMixin): table may represent multiple SQL statements 
executed sequentially""" __tablename__ = "query" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) client_id = Column(String(11), unique=True, nullable=False) database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False) @@ -150,7 +150,7 @@ class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin): """ORM model for SQL query""" __tablename__ = "saved_query" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey("ab_user.id"), nullable=True) db_id = Column(Integer, ForeignKey("dbs.id"), nullable=True) schema = Column(String(128)) @@ -195,9 +195,7 @@ class TabState(Model, AuditMixinNullable, ExtraJSONMixin): __tablename__ = "tab_state" # basic info - id = Column( # pylint: disable=invalid-name - Integer, primary_key=True, autoincrement=True - ) + id = Column(Integer, primary_key=True, autoincrement=True) user_id = Column(Integer, ForeignKey("ab_user.id")) label = Column(String(256)) active = Column(Boolean, default=False) @@ -248,9 +246,7 @@ class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin): __tablename__ = "table_schema" - id = Column( # pylint: disable=invalid-name - Integer, primary_key=True, autoincrement=True - ) + id = Column(Integer, primary_key=True, autoincrement=True) tab_state_id = Column(Integer, ForeignKey("tab_state.id", ondelete="CASCADE")) database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False) diff --git a/superset/models/tags.py b/superset/models/tags.py index 779113bec711c..0cb00cc4d0958 100644 --- a/superset/models/tags.py +++ b/superset/models/tags.py @@ -62,7 +62,7 @@ class Tag(Model, AuditMixinNullable): """A tag attached to an object (query, chart or dashboard).""" __tablename__ = "tag" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) name = Column(String(250), unique=True) type = Column(Enum(TagTypes)) @@ -72,7 +72,7 @@ class TaggedObject(Model, AuditMixinNullable): """An association between an object and a tag.""" __tablename__ = "tagged_object" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) tag_id = Column(Integer, ForeignKey("tag.id")) object_id = Column(Integer) object_type = Column(Enum(ObjectTypes)) diff --git a/superset/models/user_attributes.py b/superset/models/user_attributes.py index 2c69feb971acb..648e5307c603b 100644 --- a/superset/models/user_attributes.py +++ b/superset/models/user_attributes.py @@ -34,7 +34,7 @@ class UserAttribute(Model, AuditMixinNullable): """ __tablename__ = "user_attribute" - id = Column(Integer, primary_key=True) # pylint: disable=invalid-name + id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey("ab_user.id")) user = relationship( security_manager.user_model, backref="extra_attributes", foreign_keys=[user_id] diff --git a/superset/views/base_api.py b/superset/views/base_api.py index 5f49780f90f23..22d62a48a8862 100644 --- a/superset/views/base_api.py +++ b/superset/views/base_api.py @@ -44,7 +44,7 @@ def check_ownership_and_item_exists(f): A Decorator that checks if an object exists and is owned by the current user """ - def wraps(self, pk): # pylint: disable=invalid-name + def wraps(self, pk): item = self.datamodel.get( pk, self._base_filters # pylint: disable=protected-access ) diff --git a/superset/views/database/decorators.py b/superset/views/database/decorators.py index 
From 523f80e340ca32d329d8583e35690d8aa823d798 Mon Sep 17 00:00:00 2001
From: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Date: Mon, 6 Apr 2020 10:37:11 +0300
Subject: [PATCH 2/3] Address review comments

---
 .pylintrc | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index 0ecf516b12e12..4d9dd3f9170f6 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -115,10 +115,10 @@ evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / stateme
 [BASIC]
 
 # Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_,d,e,v,o,l,x,ts,f
+good-names=_,d,df,e,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x
 
 # Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata,d,fd
+bad-names=d,fd,foo,bar,baz,toto,tutu,tata
 
 # Colon-delimited sets of names that determine each other's naming style when
 # the name regexes allow several styles.
@@ -132,10 +132,10 @@ include-naming-hint=no
 property-classes=abc.abstractproperty
 
 # Regular expression matching correct argument names
-argument-rgx=[a-z_][a-z0-9_]{1,30}$
+argument-rgx=[a-z_][a-z0-9_]{2,30}$
 
 # Naming hint for argument names
-argument-name-hint=[a-z_][a-z0-9_]{1,30}$
+argument-name-hint=[a-z_][a-z0-9_]{2,30}$
 
 # Regular expression matching correct method names
 method-rgx=[a-z_][a-z0-9_]{2,30}$
@@ -147,7 +147,7 @@ method-name-hint=[a-z_][a-z0-9_]{2,30}$
 variable-rgx=[a-z_][a-z0-9_]{1,30}$
 
 # Naming hint for variable names
-variable-name-hint=[a-z_][a-z0-9_]{1,30}$
+variable-name-hint=[a-z_][a-z0-9_]{2,30}$
 
 # Regular expression matching correct inline iteration names
 inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
@@ -168,10 +168,10 @@ class-rgx=[A-Z_][a-zA-Z0-9]+$
 class-name-hint=[A-Z_][a-zA-Z0-9]+$
 
 # Regular expression matching correct class attribute names
-class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{1,30}|(__.*__))$
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
 
 # Naming hint for class attribute names
-class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{1,30}|(__.*__))$
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
 
 # Regular expression matching correct module names
 module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
@@ -180,10 +180,10 @@ module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
 module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
 
 # Regular expression matching correct attribute names
-attr-rgx=[a-z_][a-z0-9_]{1,30}$
+attr-rgx=[a-z_][a-z0-9_]{2,30}$
 
 # Naming hint for attribute names
-attr-name-hint=[a-z_][a-z0-9_]{1,30}$
+attr-name-hint=[a-z_][a-z0-9_]{2,30}$
 
 # Regular expression matching correct function names
 function-rgx=[a-z_][a-z0-9_]{2,30}$
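Note: review settled on a narrower approach — the `{1,30}` relaxations are reverted to `{2,30}`, and the specific short names the codebase legitimately uses are enumerated in good-names instead, which pylint accepts before applying any regex. Roughly, with hypothetical functions:

    def load_chart(pk: int) -> None:
        ...  # ok: "pk" is whitelisted in good-names


    def load_thing(ab: int) -> None:
        ...  # invalid-name again: "ab" is not whitelisted and fails {2,30}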
From 3a09c9a2f3fec1ee48fd9cdb114b54633b1dc91b Mon Sep 17 00:00:00 2001
From: Ville Brofeldt <ville.v.brofeldt@gmail.com>
Date: Wed, 8 Apr 2020 14:05:34 +0300
Subject: [PATCH 3/3] Remove e and d from good-names

---
 .pylintrc                                     |   4 +-
 superset/charts/api.py                        |  30 ++---
 superset/charts/commands/bulk_delete.py       |   4 +-
 superset/charts/commands/create.py            |  12 +-
 superset/charts/commands/delete.py            |   4 +-
 superset/charts/commands/update.py            |  12 +-
 superset/charts/dao.py                        |   4 +-
 superset/cli.py                               |  18 +--
 superset/common/query_context.py              |  16 +--
 superset/connectors/druid/models.py           |   8 +-
 superset/connectors/druid/views.py            |  10 +-
 superset/connectors/sqla/models.py            |  18 +--
 superset/connectors/sqla/views.py             |   4 +-
 superset/dao/base.py                          |  12 +-
 superset/dashboards/api.py                    |  30 ++---
 superset/dashboards/commands/bulk_delete.py   |   4 +-
 superset/dashboards/commands/create.py        |   8 +-
 superset/dashboards/commands/delete.py        |   4 +-
 superset/dashboards/commands/update.py        |   8 +-
 superset/dashboards/dao.py                    |   4 +-
 superset/dataframe.py                         |   8 +-
 superset/datasets/api.py                      |  32 +++---
 superset/datasets/commands/create.py          |   8 +-
 superset/datasets/commands/delete.py          |   4 +-
 superset/datasets/commands/refresh.py         |   4 +-
 superset/datasets/commands/update.py          |   8 +-
 superset/datasets/dao.py                      |   8 +-
 superset/db_engine_specs/base.py              |  14 +--
 superset/db_engine_specs/druid.py             |   6 +-
 superset/db_engine_specs/hive.py              |   4 +-
 superset/db_engine_specs/mysql.py             |   8 +-
 superset/db_engine_specs/presto.py            |  16 +--
 superset/examples/helpers.py                  |   6 +-
 ...ique_constraint_on_dashboard_slices_tbl.py |   8 +-
 .../3325d4caccc8_dashboard_scoped_filters.py  |   4 +-
 .../3b626e2a6783_sync_db_with_models.py       |  32 +++---
 superset/migrations/versions/4736ec66ce19_.py |   4 +-
 .../versions/65903709c321_allow_dml.py        |   4 +-
 ...bc82_add_parent_ids_in_dashboard_layout.py |   8 +-
 .../versions/ab8c66efdd01_resample.py         |   8 +-
 .../b46fa1b0b39e_add_params_to_tables.py      |   4 +-
 ...06ae5eb46_cal_heatmap_metric_to_metrics.py |   4 +-
 .../db0c65b146bd_update_slice_model_json.py   |   4 +-
 .../db527d8c4c78_add_db_verbose_name.py       |   6 +-
 ...2db2af7be_add_template_params_to_tables.py |   4 +-
 .../versions/fb13d49b72f9_better_filters.py   |   6 +-
 superset/models/core.py                       |  14 +--
 superset/models/helpers.py                    |  20 ++--
 superset/models/slice.py                      |  20 ++--
 superset/result_set.py                        |   8 +-
 superset/sql_lab.py                           |  26 ++---
 superset/sql_validators/presto_db.py          |   6 +-
 superset/utils/core.py                        |  16 +--
 superset/utils/decorators.py                  |   4 +-
 superset/utils/log.py                         |  18 +--
 superset/views/base.py                        |  46 ++++----
 superset/views/base_api.py                    |  22 ++--
 superset/views/core.py                        | 106 +++++++++---------
 superset/views/database/api.py                |   4 +-
 superset/views/database/mixins.py             |   8 +-
 superset/views/database/views.py              |   4 +-
 superset/viz.py                               |  16 +--
 62 files changed, 388 insertions(+), 386 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index 4d9dd3f9170f6..70ad8bc5b1c99 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -115,10 +115,10 @@ evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / stateme
 [BASIC]
 
 # Good variable names which should always be accepted, separated by a comma
-good-names=_,d,df,e,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x
+good-names=_,df,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x
 
 # Bad variable names which should always be refused, separated by a comma
-bad-names=d,fd,foo,bar,baz,toto,tutu,tata
+bad-names=fd,foo,bar,baz,toto,tutu,tata
 
 # Colon-delimited sets of names that determine each other's naming style when
 # the name regexes allow several styles.
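Note: with `e` dropped from good-names, every `except SomeError as e:` handler in the codebase would now raise invalid-name, so the remaining hunks in this patch rename the exception binding to `ex`, which stays whitelisted. The pattern, sketched with a hypothetical call:

    import logging

    logger = logging.getLogger(__name__)

    try:
        run_query()  # hypothetical function, not from the patch
    except ValueError as ex:  # "ex" is in good-names; "e" no longer is
        logger.exception(ex)
        raise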
diff --git a/superset/charts/api.py b/superset/charts/api.py
index 761de3c33e936..fb31064f9f853 100644
--- a/superset/charts/api.py
+++ b/superset/charts/api.py
@@ -170,11 +170,11 @@ def post(self) -> Response:
         try:
             new_model = CreateChartCommand(g.user, item.data).run()
             return self.response(201, id=new_model.id, result=item.data)
-        except ChartInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except ChartCreateFailedError as e:
-            logger.error(f"Error creating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except ChartInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except ChartCreateFailedError as ex:
+            logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["PUT"])
     @protect()
@@ -237,11 +237,11 @@ def put(  # pylint: disable=too-many-return-statements, arguments-differ
             return self.response_404()
         except ChartForbiddenError:
             return self.response_403()
-        except ChartInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except ChartUpdateFailedError as e:
-            logger.error(f"Error updating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except ChartInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except ChartUpdateFailedError as ex:
+            logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["DELETE"])
     @protect()
@@ -285,9 +285,9 @@ def delete(self, pk: int) -> Response:  # pylint: disable=arguments-differ
             return self.response_404()
         except ChartForbiddenError:
             return self.response_403()
-        except ChartDeleteFailedError as e:
-            logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except ChartDeleteFailedError as ex:
+            logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
 
     @expose("/", methods=["DELETE"])
     @protect()
@@ -346,5 +346,5 @@ def bulk_delete(
             return self.response_404()
         except ChartForbiddenError:
             return self.response_403()
-        except ChartBulkDeleteFailedError as e:
-            return self.response_422(message=str(e))
+        except ChartBulkDeleteFailedError as ex:
+            return self.response_422(message=str(ex))
diff --git a/superset/charts/commands/bulk_delete.py b/superset/charts/commands/bulk_delete.py
index 5e715610b879e..de1af113d0086 100644
--- a/superset/charts/commands/bulk_delete.py
+++ b/superset/charts/commands/bulk_delete.py
@@ -44,8 +44,8 @@ def run(self) -> None:
         self.validate()
         try:
             ChartDAO.bulk_delete(self._models)
-        except DeleteFailedError as e:
-            logger.exception(e.exception)
+        except DeleteFailedError as ex:
+            logger.exception(ex.exception)
             raise ChartBulkDeleteFailedError()
 
     def validate(self) -> None:
self._properties["datasource_name"] = datasource.name - except ValidationError as e: - exceptions.append(e) + except ValidationError as ex: + exceptions.append(ex) # Validate/Populate dashboards dashboards = DashboardDAO.find_by_ids(dashboard_ids) @@ -72,8 +72,8 @@ def validate(self) -> None: try: owners = populate_owners(self._actor, owner_ids) self._properties["owners"] = owners - except ValidationError as e: - exceptions.append(e) + except ValidationError as ex: + exceptions.append(ex) if exceptions: exception = ChartInvalidError() exception.add_list(exceptions) diff --git a/superset/charts/commands/delete.py b/superset/charts/commands/delete.py index c21abee32b060..3feb3dbc09ff5 100644 --- a/superset/charts/commands/delete.py +++ b/superset/charts/commands/delete.py @@ -45,8 +45,8 @@ def run(self) -> Model: self.validate() try: chart = ChartDAO.delete(self._model) - except DAODeleteFailedError as e: - logger.exception(e.exception) + except DAODeleteFailedError as ex: + logger.exception(ex.exception) raise ChartDeleteFailedError() return chart diff --git a/superset/charts/commands/update.py b/superset/charts/commands/update.py index 6a110d5f6594c..21c236ca96213 100644 --- a/superset/charts/commands/update.py +++ b/superset/charts/commands/update.py @@ -52,8 +52,8 @@ def run(self) -> Model: self.validate() try: chart = ChartDAO.update(self._model, self._properties) - except DAOUpdateFailedError as e: - logger.exception(e.exception) + except DAOUpdateFailedError as ex: + logger.exception(ex.exception) raise ChartUpdateFailedError() return chart @@ -84,8 +84,8 @@ def validate(self) -> None: try: datasource = get_datasource_by_id(datasource_id, datasource_type) self._properties["datasource_name"] = datasource.name - except ValidationError as e: - exceptions.append(e) + except ValidationError as ex: + exceptions.append(ex) # Validate/Populate dashboards dashboards = DashboardDAO.find_by_ids(dashboard_ids) @@ -97,8 +97,8 @@ def validate(self) -> None: try: owners = populate_owners(self._actor, owner_ids) self._properties["owners"] = owners - except ValidationError as e: - exceptions.append(e) + except ValidationError as ex: + exceptions.append(ex) if exceptions: exception = ChartInvalidError() exception.add_list(exceptions) diff --git a/superset/charts/dao.py b/superset/charts/dao.py index 80ea3d6f47b6d..01bfdc6cd6401 100644 --- a/superset/charts/dao.py +++ b/superset/charts/dao.py @@ -47,7 +47,7 @@ def bulk_delete(models: Optional[List[Slice]], commit: bool = True) -> None: ) if commit: db.session.commit() - except SQLAlchemyError as e: + except SQLAlchemyError as ex: if commit: db.session.rollback() - raise e + raise ex diff --git a/superset/cli.py b/superset/cli.py index c6cdc45c1158c..2da47e162dce0 100755 --- a/superset/cli.py +++ b/superset/cli.py @@ -197,9 +197,9 @@ def refresh_druid(datasource, merge): for cluster in session.query(DruidCluster).all(): try: cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge) - except Exception as e: # pylint: disable=broad-except - print("Error while processing cluster '{}'\n{}".format(cluster, str(e))) - logger.exception(e) + except Exception as ex: # pylint: disable=broad-except + print("Error while processing cluster '{}'\n{}".format(cluster, str(ex))) + logger.exception(ex) cluster.metadata_last_refreshed = datetime.now() print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]") session.commit() @@ -245,9 +245,9 @@ def import_dashboards(path, recursive, username): try: with file_.open() as data_stream: 
diff --git a/superset/cli.py b/superset/cli.py
index c6cdc45c1158c..2da47e162dce0 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -197,9 +197,9 @@ def refresh_druid(datasource, merge):
     for cluster in session.query(DruidCluster).all():
         try:
             cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
-        except Exception as e:  # pylint: disable=broad-except
-            print("Error while processing cluster '{}'\n{}".format(cluster, str(e)))
-            logger.exception(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            print("Error while processing cluster '{}'\n{}".format(cluster, str(ex)))
+            logger.exception(ex)
         cluster.metadata_last_refreshed = datetime.now()
         print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
     session.commit()
@@ -245,9 +245,9 @@ def import_dashboards(path, recursive, username):
         try:
             with file_.open() as data_stream:
                 dashboard_import_export.import_dashboards(db.session, data_stream)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as ex:  # pylint: disable=broad-except
             logger.error("Error when importing dashboard from file %s", file_)
-            logger.error(e)
+            logger.error(ex)
 
 
 @superset.command()
@@ -317,9 +317,9 @@ def import_datasources(path, sync, recursive):
                 dict_import_export.import_from_dict(
                     db.session, yaml.safe_load(data_stream), sync=sync_array
                 )
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as ex:  # pylint: disable=broad-except
             logger.error("Error when importing datasources from file %s", file_)
-            logger.error(e)
+            logger.error(ex)
 
 
 @superset.command()
@@ -397,8 +397,8 @@ def update_datasources_cache():
                 database.get_all_view_names_in_database(
                     force=True, cache=True, cache_timeout=24 * 60 * 60
                 )
-        except Exception as e:  # pylint: disable=broad-except
-            print("{}".format(str(e)))
+        except Exception as ex:  # pylint: disable=broad-except
+            print("{}".format(str(ex)))
 
 
 @superset.command()
diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index a0c040ccfaf54..6377fb19767e7 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -195,10 +195,10 @@ def get_df_payload(  # pylint: disable=too-many-locals,too-many-statements
                     status = utils.QueryStatus.SUCCESS
                     is_loaded = True
                     stats_logger.incr("loaded_from_cache")
-                except Exception as e:  # pylint: disable=broad-except
-                    logger.exception(e)
+                except Exception as ex:  # pylint: disable=broad-except
+                    logger.exception(ex)
                     logger.error(
-                        "Error reading cache: %s", utils.error_msg_from_exception(e)
+                        "Error reading cache: %s", utils.error_msg_from_exception(ex)
                     )
                 logger.info("Serving from cache")
@@ -214,10 +214,10 @@ def get_df_payload(  # pylint: disable=too-many-locals,too-many-statements
                 if not self.force:
                     stats_logger.incr("loaded_from_source_without_force")
                 is_loaded = True
-            except Exception as e:  # pylint: disable=broad-except
-                logger.exception(e)
+            except Exception as ex:  # pylint: disable=broad-except
+                logger.exception(ex)
                 if not error_message:
-                    error_message = "{}".format(e)
+                    error_message = "{}".format(ex)
                 status = utils.QueryStatus.FAILED
                 stacktrace = utils.get_stacktrace()
@@ -232,11 +232,11 @@ def get_df_payload(  # pylint: disable=too-many-locals,too-many-statements
                     stats_logger.incr("set_cache_key")
                     cache.set(cache_key, cache_binary, timeout=self.cache_timeout)
-                except Exception as e:  # pylint: disable=broad-except
+                except Exception as ex:  # pylint: disable=broad-except
                     # cache.set call can fail if the backend is down or if
                     # the key is too large or whatever other reasons
                     logger.warning("Could not cache key %s", cache_key)
-                    logger.exception(e)
+                    logger.exception(ex)
                     cache.delete(cache_key)
 
         return {
             "cache_key": cache_key,
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index a1e15c6f0c27b..73ca6d5f2cf6a 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -657,9 +657,9 @@ def latest_metadata(self):
                 merge=self.merge_flag,
                 analysisTypes=[],
             )
-        except Exception as e:
+        except Exception as ex:
             logger.warning("Failed first attempt to get latest segment")
-            logger.exception(e)
+            logger.exception(ex)
         if not segment_metadata:
             # if no segments in the past 7 days, look at all segments
             lbound = datetime(1901, 1, 1).isoformat()[:10]
@@ -674,9 +674,9 @@ def latest_metadata(self):
                     merge=self.merge_flag,
                     analysisTypes=[],
                 )
-            except Exception as e:
+            except Exception as ex:
                 logger.warning("Failed 2nd attempt to get latest segment")
-                logger.exception(e)
+                logger.exception(ex)
 
         if segment_metadata:
             return segment_metadata[-1]["columns"]
diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py
index 231616c3b1229..15e2d3df3e69e 100644
--- a/superset/connectors/druid/views.py
+++ b/superset/connectors/druid/views.py
@@ -112,8 +112,8 @@ def pre_update(self, col):
         if col.dimension_spec_json:
             try:
                 dimension_spec = json.loads(col.dimension_spec_json)
-            except ValueError as e:
-                raise ValueError("Invalid Dimension Spec JSON: " + str(e))
+            except ValueError as ex:
+                raise ValueError("Invalid Dimension Spec JSON: " + str(ex))
             if not isinstance(dimension_spec, dict):
                 raise ValueError("Dimension Spec must be a JSON object")
             if "outputName" not in dimension_spec:
@@ -374,15 +374,15 @@ def refresh_datasources(self, refresh_all=True):
             valid_cluster = True
             try:
                 cluster.refresh_datasources(refresh_all=refresh_all)
-            except Exception as e:
+            except Exception as ex:
                 valid_cluster = False
                 flash(
                     "Error while processing cluster '{}'\n{}".format(
-                        cluster_name, utils.error_msg_from_exception(e)
+                        cluster_name, utils.error_msg_from_exception(ex)
                     ),
                     "danger",
                 )
-                logger.exception(e)
+                logger.exception(ex)
                 pass
             if valid_cluster:
                 cluster.metadata_last_refreshed = datetime.now()
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 44205e7c54ebe..4d1b9f24d45fb 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -96,11 +96,11 @@ def query(self, query_obj: Dict[str, Any]) -> QueryResult:
         status = utils.QueryStatus.SUCCESS
         try:
             df = pd.read_sql_query(qry.statement, db.engine)
-        except Exception as e:
+        except Exception as ex:
             df = pd.DataFrame()
             status = utils.QueryStatus.FAILED
-            logger.exception(e)
-            error_message = utils.error_msg_from_exception(e)
+            logger.exception(ex)
+            error_message = utils.error_msg_from_exception(ex)
         return QueryResult(
             status=status, df=df, duration=0, query="", error_message=error_message
         )
@@ -1055,12 +1055,12 @@ def mutator(df: pd.DataFrame) -> None:
 
         try:
             df = self.database.get_df(sql, self.schema, mutator)
-        except Exception as e:
+        except Exception as ex:
             df = pd.DataFrame()
             status = utils.QueryStatus.FAILED
             logger.exception(f"Query {sql} on schema {self.schema} failed")
             db_engine_spec = self.database.db_engine_spec
-            error_message = db_engine_spec.extract_error_message(e)
+            error_message = db_engine_spec.extract_error_message(ex)
 
         return QueryResult(
             status=status,
@@ -1077,8 +1077,8 @@ def fetch_metadata(self, commit=True) -> None:
         """Fetches the metadata for the table and merges it in"""
         try:
             table = self.get_sqla_table_object()
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
             raise Exception(
                 _(
                     "Table [{}] doesn't seem to exist in the specified database, "
@@ -1102,10 +1102,10 @@ def fetch_metadata(self, commit=True) -> None:
                 datatype = db_engine_spec.column_datatype_to_string(
                     col.type, db_dialect
                 )
-            except Exception as e:
+            except Exception as ex:
                 datatype = "UNKNOWN"
                 logger.error("Unrecognized data type in {}.{}".format(table, col.name))
-                logger.exception(e)
+                logger.exception(ex)
             dbcol = dbcols.get(col.name, None)
             if not dbcol:
                 dbcol = TableColumn(column_name=col.name, type=datatype, table=self)
diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py
index f9aec88666ff5..b59bf3c8e75ff 100644
--- a/superset/connectors/sqla/views.py
+++ b/superset/connectors/sqla/views.py
@@ -387,7 +387,7 @@ def pre_add(self, table):
         # Fail before adding if the table can't be found
         try:
             table.get_sqla_table_object()
-        except Exception as e:
+        except Exception as ex:
             logger.exception(f"Got an error in pre_add for {table.name}")
             raise Exception(
                 _(
@@ -395,7 +395,7 @@ def pre_add(self, table):
                     "please double check your "
                     "database connection, schema, and "
                     "table name, error: {}"
-                ).format(table.name, str(e))
+                ).format(table.name, str(ex))
             )
 
     def post_add(self, table, flash_message=True):
diff --git a/superset/dao/base.py b/superset/dao/base.py
index 7158643e219e9..020feed7e6546 100644
--- a/superset/dao/base.py
+++ b/superset/dao/base.py
@@ -89,9 +89,9 @@ def create(cls, properties: Dict, commit: bool = True) -> Model:
             db.session.add(model)
             if commit:
                 db.session.commit()
-        except SQLAlchemyError as e:  # pragma: no cover
+        except SQLAlchemyError as ex:  # pragma: no cover
             db.session.rollback()
-            raise DAOCreateFailedError(exception=e)
+            raise DAOCreateFailedError(exception=ex)
         return model
 
     @classmethod
@@ -106,9 +106,9 @@ def update(cls, model: Model, properties: Dict, commit: bool = True) -> Model:
             db.session.merge(model)
             if commit:
                 db.session.commit()
-        except SQLAlchemyError as e:  # pragma: no cover
+        except SQLAlchemyError as ex:  # pragma: no cover
             db.session.rollback()
-            raise DAOUpdateFailedError(exception=e)
+            raise DAOUpdateFailedError(exception=ex)
         return model
 
     @classmethod
@@ -121,7 +121,7 @@ def delete(cls, model: Model, commit: bool = True) -> Model:
             db.session.delete(model)
             if commit:
                 db.session.commit()
-        except SQLAlchemyError as e:  # pragma: no cover
+        except SQLAlchemyError as ex:  # pragma: no cover
             db.session.rollback()
-            raise DAODeleteFailedError(exception=e)
+            raise DAODeleteFailedError(exception=ex)
         return model
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index 752470af9d57d..5039fa8d64978 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -168,11 +168,11 @@ def post(self) -> Response:
         try:
             new_model = CreateDashboardCommand(g.user, item.data).run()
             return self.response(201, id=new_model.id, result=item.data)
-        except DashboardInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except DashboardCreateFailedError as e:
-            logger.error(f"Error creating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DashboardInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except DashboardCreateFailedError as ex:
+            logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["PUT"])
     @protect()
@@ -235,11 +235,11 @@ def put(  # pylint: disable=too-many-return-statements, arguments-differ
             return self.response_404()
         except DashboardForbiddenError:
             return self.response_403()
-        except DashboardInvalidError as e:
-            return self.response_422(message=e.normalized_messages())
-        except DashboardUpdateFailedError as e:
-            logger.error(f"Error updating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DashboardInvalidError as ex:
+            return self.response_422(message=ex.normalized_messages())
+        except DashboardUpdateFailedError as ex:
+            logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["DELETE"])
     @protect()
@@ -283,9 +283,9 @@ def delete(self, pk: int) -> Response:  # pylint: disable=arguments-differ
             return self.response_404()
         except DashboardForbiddenError:
             return self.response_403()
-        except DashboardDeleteFailedError as e:
-            logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except DashboardDeleteFailedError as ex:
+            logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
 
     @expose("/", methods=["DELETE"])
     @protect()
@@ -344,8 +344,8 @@ def bulk_delete(
             return self.response_404()
         except DashboardForbiddenError:
             return self.response_403()
-        except DashboardBulkDeleteFailedError as e:
-            return self.response_422(message=str(e))
+        except DashboardBulkDeleteFailedError as ex:
+            return self.response_422(message=str(ex))
 
     @expose("/export/", methods=["GET"])
     @protect()
diff --git a/superset/dashboards/commands/bulk_delete.py b/superset/dashboards/commands/bulk_delete.py
index 654675bef0d2e..cb2bab62e9af5 100644
--- a/superset/dashboards/commands/bulk_delete.py
+++ b/superset/dashboards/commands/bulk_delete.py
@@ -45,8 +45,8 @@ def run(self) -> None:
         try:
             DashboardDAO.bulk_delete(self._models)
             return None
-        except DeleteFailedError as e:
-            logger.exception(e.exception)
+        except DeleteFailedError as ex:
+            logger.exception(ex.exception)
             raise DashboardBulkDeleteFailedError()
 
     def validate(self) -> None:
diff --git a/superset/dashboards/commands/create.py b/superset/dashboards/commands/create.py
index 38658be3b2bb2..73025cdf7a785 100644
--- a/superset/dashboards/commands/create.py
+++ b/superset/dashboards/commands/create.py
@@ -43,8 +43,8 @@ def run(self) -> Model:
         self.validate()
         try:
             dashboard = DashboardDAO.create(self._properties)
-        except DAOCreateFailedError as e:
-            logger.exception(e.exception)
+        except DAOCreateFailedError as ex:
+            logger.exception(ex.exception)
             raise DashboardCreateFailedError()
         return dashboard
@@ -60,8 +60,8 @@ def validate(self) -> None:
         try:
             owners = populate_owners(self._actor, owner_ids)
             self._properties["owners"] = owners
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         if exceptions:
             exception = DashboardInvalidError()
             exception.add_list(exceptions)
diff --git a/superset/dashboards/commands/delete.py b/superset/dashboards/commands/delete.py
index 8a695327b9bf2..08d0fcb1dd240 100644
--- a/superset/dashboards/commands/delete.py
+++ b/superset/dashboards/commands/delete.py
@@ -45,8 +45,8 @@ def run(self) -> Model:
         self.validate()
         try:
             dashboard = DashboardDAO.delete(self._model)
-        except DAODeleteFailedError as e:
-            logger.exception(e.exception)
+        except DAODeleteFailedError as ex:
+            logger.exception(ex.exception)
             raise DashboardDeleteFailedError()
         return dashboard
diff --git a/superset/dashboards/commands/update.py b/superset/dashboards/commands/update.py
index 8f23dafa0df4b..56357c9ebcc0c 100644
--- a/superset/dashboards/commands/update.py
+++ b/superset/dashboards/commands/update.py
@@ -50,8 +50,8 @@ def run(self) -> Model:
         self.validate()
         try:
             dashboard = DashboardDAO.update(self._model, self._properties)
-        except DAOUpdateFailedError as e:
-            logger.exception(e.exception)
+        except DAOUpdateFailedError as ex:
+            logger.exception(ex.exception)
             raise DashboardUpdateFailedError()
         return dashboard
@@ -80,8 +80,8 @@ def validate(self) -> None:
         try:
             owners = populate_owners(self._actor, owner_ids)
             self._properties["owners"] = owners
-        except ValidationError as e:
-            exceptions.append(e)
+        except ValidationError as ex:
+            exceptions.append(ex)
         if exceptions:
             exception = DashboardInvalidError()
             exception.add_list(exceptions)
diff --git a/superset/dashboards/dao.py b/superset/dashboards/dao.py
index 050b507b6369f..d19629082a673 100644
--- a/superset/dashboards/dao.py
+++ b/superset/dashboards/dao.py
@@ -63,7 +63,7 @@ def bulk_delete(models: Optional[List[Dashboard]], commit: bool = True) -> None:
             )
             if commit:
                 db.session.commit()
-        except SQLAlchemyError as e:
+        except SQLAlchemyError as ex:
             if commit:
                 db.session.rollback()
-            raise e
+            raise ex
diff --git a/superset/dataframe.py b/superset/dataframe.py
index 746a2a45c3911..e8cd0d13e6651 100644
--- a/superset/dataframe.py
+++ b/superset/dataframe.py
@@ -26,10 +26,10 @@ def df_to_records(dframe: pd.DataFrame) -> List[Dict[str, Any]]:
     data: List[Dict[str, Any]] = dframe.to_dict(orient="records")
     # TODO: refactor this
-    for d in data:
-        for k, v in list(d.items()):
+    for row in data:
+        for key, value in list(row.items()):
             # if an int is too big for JavaScript to handle
             # convert it to a string
-            if isinstance(v, int) and abs(v) > JS_MAX_INTEGER:
-                d[k] = str(v)
+            if isinstance(value, int) and abs(value) > JS_MAX_INTEGER:
+                row[key] = str(value)
     return data
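Note: not every fix in this patch is a rename to `ex`. Where a short name carried real meaning, the sweep prefers a descriptive name over growing the good-names whitelist — in dataframe.py above, `d`, `k`, `v` become `row`, `key`, `value`. The same idea in an illustrative snippet (data is made up):

    record = {"region": "emea", "sales": 3}
    for key, value in record.items():  # descriptive names instead of k, v
        print(key, value)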
{self.__class__.__name__}: {e}") - return self.response_422(message=str(e)) + except DatasetRefreshFailedError as ex: + logger.error(f"Error refreshing dataset {self.__class__.__name__}: {ex}") + return self.response_422(message=str(ex)) diff --git a/superset/datasets/commands/create.py b/superset/datasets/commands/create.py index 9200a1371e225..3114a4f005d7c 100644 --- a/superset/datasets/commands/create.py +++ b/superset/datasets/commands/create.py @@ -60,8 +60,8 @@ def run(self) -> Model: "schema_access", dataset.schema_perm ) db.session.commit() - except (SQLAlchemyError, DAOCreateFailedError) as e: - logger.exception(e) + except (SQLAlchemyError, DAOCreateFailedError) as ex: + logger.exception(ex) db.session.rollback() raise DatasetCreateFailedError() return dataset @@ -92,8 +92,8 @@ def validate(self) -> None: try: owners = populate_owners(self._actor, owner_ids) self._properties["owners"] = owners - except ValidationError as e: - exceptions.append(e) + except ValidationError as ex: + exceptions.append(ex) if exceptions: exception = DatasetInvalidError() exception.add_list(exceptions) diff --git a/superset/datasets/commands/delete.py b/superset/datasets/commands/delete.py index 4ee3ed76c1c2f..551d222dfbcd1 100644 --- a/superset/datasets/commands/delete.py +++ b/superset/datasets/commands/delete.py @@ -51,8 +51,8 @@ def run(self) -> Model: "datasource_access", dataset.get_perm() ) db.session.commit() - except (SQLAlchemyError, DAODeleteFailedError) as e: - logger.exception(e) + except (SQLAlchemyError, DAODeleteFailedError) as ex: + logger.exception(ex) db.session.rollback() raise DatasetDeleteFailedError() return dataset diff --git a/superset/datasets/commands/refresh.py b/superset/datasets/commands/refresh.py index aed5efc294177..22869570bd9fd 100644 --- a/superset/datasets/commands/refresh.py +++ b/superset/datasets/commands/refresh.py @@ -46,8 +46,8 @@ def run(self) -> Model: try: self._model.fetch_metadata() return self._model - except Exception as e: - logger.exception(e) + except Exception as ex: + logger.exception(ex) raise DatasetRefreshFailedError() raise DatasetRefreshFailedError() diff --git a/superset/datasets/commands/update.py b/superset/datasets/commands/update.py index 00626b45a7beb..c7f70dd16cc61 100644 --- a/superset/datasets/commands/update.py +++ b/superset/datasets/commands/update.py @@ -60,8 +60,8 @@ def run(self) -> Model: try: dataset = DatasetDAO.update(self._model, self._properties) return dataset - except DAOUpdateFailedError as e: - logger.exception(e.exception) + except DAOUpdateFailedError as ex: + logger.exception(ex.exception) raise DatasetUpdateFailedError() raise DatasetUpdateFailedError() @@ -92,8 +92,8 @@ def validate(self) -> None: try: owners = populate_owners(self._actor, owner_ids) self._properties["owners"] = owners - except ValidationError as e: - exceptions.append(e) + except ValidationError as ex: + exceptions.append(ex) # Validate columns columns = self._properties.get("columns") diff --git a/superset/datasets/dao.py b/superset/datasets/dao.py index ae5d4061c473e..5dfe4ef49e005 100644 --- a/superset/datasets/dao.py +++ b/superset/datasets/dao.py @@ -45,8 +45,8 @@ def get_owner_by_id(owner_id: int) -> Optional[object]: def get_database_by_id(database_id: int) -> Optional[Database]: try: return db.session.query(Database).filter_by(id=database_id).one_or_none() - except SQLAlchemyError as e: # pragma: no cover - logger.error(f"Could not get database by id: {e}") + except SQLAlchemyError as ex: # pragma: no cover + logger.error(f"Could not get 
database by id: {ex}") return None @staticmethod @@ -54,8 +54,8 @@ def validate_table_exists(database: Database, table_name: str, schema: str) -> b try: database.get_table(table_name, schema=schema) return True - except SQLAlchemyError as e: # pragma: no cover - logger.error(f"Got an error {e} validating table: {table_name}") + except SQLAlchemyError as ex: # pragma: no cover + logger.error(f"Got an error {ex} validating table: {table_name}") return False @staticmethod diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index eaf5405b27c19..98b5eef300f21 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -560,13 +560,13 @@ def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None: pass @classmethod - def extract_error_message(cls, e: Exception) -> str: - return f"{cls.engine} error: {cls._extract_error_message(e)}" + def extract_error_message(cls, ex: Exception) -> str: + return f"{cls.engine} error: {cls._extract_error_message(ex)}" @classmethod - def _extract_error_message(cls, e: Exception) -> Optional[str]: + def _extract_error_message(cls, ex: Exception) -> Optional[str]: """Extract error message for queries""" - return utils.error_msg_from_exception(e) + return utils.error_msg_from_exception(ex) @classmethod def adjust_database_uri(cls, uri: URL, selected_schema: Optional[str]) -> None: @@ -975,7 +975,7 @@ def get_extra_params(database: "Database") -> Dict[str, Any]: if database.extra: try: extra = json.loads(database.extra) - except json.JSONDecodeError as e: - logger.error(e) - raise e + except json.JSONDecodeError as ex: + logger.error(ex) + raise ex return extra diff --git a/superset/db_engine_specs/druid.py b/superset/db_engine_specs/druid.py index 07bf1a90b3225..ab4e36a522a77 100644 --- a/superset/db_engine_specs/druid.py +++ b/superset/db_engine_specs/druid.py @@ -64,9 +64,9 @@ def get_extra_params(database: "Database") -> Dict[str, Any]: """ try: extra = json.loads(database.extra or "{}") - except json.JSONDecodeError as e: - logger.error(e) - raise e + except json.JSONDecodeError as ex: + logger.error(ex) + raise ex if database.server_cert: engine_params = extra.get("engine_params", {}) diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py index 82d20d7cea90a..b6240df6e783f 100644 --- a/superset/db_engine_specs/hive.py +++ b/superset/db_engine_specs/hive.py @@ -203,8 +203,8 @@ def adjust_database_uri( uri.database = parse.quote(selected_schema, safe="") @classmethod - def _extract_error_message(cls, e: Exception) -> str: - msg = str(e) + def _extract_error_message(cls, ex: Exception) -> str: + msg = str(ex) match = re.search(r'errorMessage="(.*?)(?<!\\)"', msg) if match: msg = match.group(1) diff --git a/superset/db_engine_specs/mysql.py b/superset/db_engine_specs/mysql.py index b19527f7a2ee8..abd7bd015ece0 100644 --- a/superset/db_engine_specs/mysql.py +++ b/superset/db_engine_specs/mysql.py @@ -86,12 +86,12 @@ def epoch_to_dttm(cls) -> str: return "from_unixtime({col})" @classmethod - def _extract_error_message(cls, e: Exception) -> str: + def _extract_error_message(cls, ex: Exception) -> str: """Extract error message for queries""" - message = str(e) + message = str(ex) try: - if isinstance(e.args, tuple) and len(e.args) > 1: - message = e.args[1] + if isinstance(ex.args, tuple) and len(ex.args) > 1: + message = ex.args[1] except Exception: # pylint: disable=broad-except pass return message diff --git a/superset/db_engine_specs/presto.py 
diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index 04089ba4eb0fc..9bc9307ea7a56 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -762,22 +762,22 @@ def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None:
             polled = cursor.poll()
 
     @classmethod
-    def _extract_error_message(cls, e: Exception) -> Optional[str]:
+    def _extract_error_message(cls, ex: Exception) -> Optional[str]:
         if (
-            hasattr(e, "orig")
-            and type(e.orig).__name__ == "DatabaseError"  # type: ignore
-            and isinstance(e.orig[0], dict)  # type: ignore
+            hasattr(ex, "orig")
+            and type(ex.orig).__name__ == "DatabaseError"  # type: ignore
+            and isinstance(ex.orig[0], dict)  # type: ignore
         ):
-            error_dict = e.orig[0]  # type: ignore
+            error_dict = ex.orig[0]  # type: ignore
             return "{} at {}: {}".format(
                 error_dict.get("errorName"),
                 error_dict.get("errorLocation"),
                 error_dict.get("message"),
             )
-        if type(e).__name__ == "DatabaseError" and hasattr(e, "args") and e.args:
-            error_dict = e.args[0]
+        if type(ex).__name__ == "DatabaseError" and hasattr(ex, "args") and ex.args:
+            error_dict = ex.args[0]
             return error_dict.get("message")
-        return utils.error_msg_from_exception(e)
+        return utils.error_msg_from_exception(ex)
 
     @classmethod
     def _partition_query(  # pylint: disable=too-many-arguments,too-many-locals
diff --git a/superset/examples/helpers.py b/superset/examples/helpers.py
index 9c300bbeba379..58f8de293d16f 100644
--- a/superset/examples/helpers.py
+++ b/superset/examples/helpers.py
@@ -62,9 +62,9 @@ def merge_slice(slc: Slice) -> None:
 
 
 def get_slice_json(defaults: Dict[Any, Any], **kwargs: Any) -> str:
-    d = defaults.copy()
-    d.update(kwargs)
-    return json.dumps(d, indent=4, sort_keys=True)
+    defaults_copy = defaults.copy()
+    defaults_copy.update(kwargs)
+    return json.dumps(defaults_copy, indent=4, sort_keys=True)
 
 
 def get_example_data(
diff --git a/superset/migrations/versions/190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py b/superset/migrations/versions/190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py
index a91b3da65e597..3cea2e032ced9 100644
--- a/superset/migrations/versions/190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py
+++ b/superset/migrations/versions/190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py
@@ -88,13 +88,13 @@ def upgrade():
             batch_op.create_unique_constraint(
                 "uq_dashboard_slice", ["dashboard_id", "slice_id"]
             )
-    except Exception as e:
-        logging.exception(e)
+    except Exception as ex:
+        logging.exception(ex)
 
 
 def downgrade():
     try:
         with op.batch_alter_table("dashboard_slices") as batch_op:
             batch_op.drop_constraint("uq_dashboard_slice", type_="unique")
-    except Exception as e:
-        logging.exception(e)
+    except Exception as ex:
+        logging.exception(ex)
diff --git a/superset/migrations/versions/3325d4caccc8_dashboard_scoped_filters.py b/superset/migrations/versions/3325d4caccc8_dashboard_scoped_filters.py
index d3a96427cd582..5aa38fd13a48d 100644
--- a/superset/migrations/versions/3325d4caccc8_dashboard_scoped_filters.py
+++ b/superset/migrations/versions/3325d4caccc8_dashboard_scoped_filters.py
@@ -101,8 +101,8 @@ def upgrade():
                     dashboard.json_metadata = None
 
                 session.merge(dashboard)
-            except Exception as e:
-                logging.exception(f"dashboard {dashboard.id} has error: {e}")
+            except Exception as ex:
+                logging.exception(f"dashboard {dashboard.id} has error: {ex}")
 
     session.commit()
     session.close()
b/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py index dd199468e5ef4..b7e55974d3fac 100644 --- a/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py +++ b/superset/migrations/versions/3b626e2a6783_sync_db_with_models.py @@ -58,8 +58,8 @@ def upgrade(): batch_op.drop_constraint(slices_ibfk_2, type_="foreignkey") batch_op.drop_column("druid_datasource_id") batch_op.drop_column("table_id") - except Exception as e: - logging.warning(str(e)) + except Exception as ex: + logging.warning(str(ex)) # fixed issue: https://github.com/airbnb/superset/issues/466 try: @@ -67,27 +67,27 @@ def upgrade(): batch_op.create_foreign_key( None, "datasources", ["datasource_name"], ["datasource_name"] ) - except Exception as e: - logging.warning(str(e)) + except Exception as ex: + logging.warning(str(ex)) try: with op.batch_alter_table("query") as batch_op: batch_op.create_unique_constraint("client_id", ["client_id"]) - except Exception as e: - logging.warning(str(e)) + except Exception as ex: + logging.warning(str(ex)) try: with op.batch_alter_table("query") as batch_op: batch_op.drop_column("name") - except Exception as e: - logging.warning(str(e)) + except Exception as ex: + logging.warning(str(ex)) def downgrade(): try: with op.batch_alter_table("tables") as batch_op: batch_op.create_index("table_name", ["table_name"], unique=True) - except Exception as e: - logging.warning(str(e)) + except Exception as ex: + logging.warning(str(ex)) try: with op.batch_alter_table("slices") as batch_op: @@ -111,8 +111,8 @@ def downgrade(): "slices_ibfk_1", "datasources", ["druid_datasource_id"], ["id"] ) batch_op.create_foreign_key("slices_ibfk_2", "tables", ["table_id"], ["id"]) - except Exception as e: - logging.warning(str(e)) + except Exception as ex: + logging.warning(str(ex)) try: fk_columns = generic_find_constraint_name( @@ -123,12 +123,12 @@ def downgrade(): ) with op.batch_alter_table("columns") as batch_op: batch_op.drop_constraint(fk_columns, type_="foreignkey") - except Exception as e: - logging.warning(str(e)) + except Exception as ex: + logging.warning(str(ex)) op.add_column("query", sa.Column("name", sa.String(length=256), nullable=True)) try: with op.batch_alter_table("query") as batch_op: batch_op.drop_constraint("client_id", type_="unique") - except Exception as e: - logging.warning(str(e)) + except Exception as ex: + logging.warning(str(ex)) diff --git a/superset/migrations/versions/4736ec66ce19_.py b/superset/migrations/versions/4736ec66ce19_.py index f2d04f9a36710..99b3e0b5e3500 100644 --- a/superset/migrations/versions/4736ec66ce19_.py +++ b/superset/migrations/versions/4736ec66ce19_.py @@ -120,14 +120,14 @@ def upgrade(): or "uq_datasources_datasource_name", type_="unique", ) - except Exception as e: + except Exception as ex: logging.warning( "Constraint drop failed, you may want to do this " "manually on your database. For context, this is a known " "issue around undeterministic contraint names on Postgres " "and perhaps more databases through SQLAlchemy." 
         )
-    logging.exception(e)
+    logging.exception(ex)
 
 
 def downgrade():
diff --git a/superset/migrations/versions/65903709c321_allow_dml.py b/superset/migrations/versions/65903709c321_allow_dml.py
index 0c72d418df437..6836e8ee20402 100644
--- a/superset/migrations/versions/65903709c321_allow_dml.py
+++ b/superset/migrations/versions/65903709c321_allow_dml.py
@@ -39,6 +39,6 @@ def upgrade():
 def downgrade():
     try:
         op.drop_column("dbs", "allow_dml")
-    except Exception as e:
-        logging.exception(e)
+    except Exception as ex:
+        logging.exception(ex)
     pass
diff --git a/superset/migrations/versions/80aa3f04bc82_add_parent_ids_in_dashboard_layout.py b/superset/migrations/versions/80aa3f04bc82_add_parent_ids_in_dashboard_layout.py
index 1e29855ed3f57..c6361009ee4fa 100644
--- a/superset/migrations/versions/80aa3f04bc82_add_parent_ids_in_dashboard_layout.py
+++ b/superset/migrations/versions/80aa3f04bc82_add_parent_ids_in_dashboard_layout.py
@@ -81,8 +81,8 @@ def upgrade():
                     layout, indent=None, separators=(",", ":"), sort_keys=True
                 )
                 session.merge(dashboard)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
 
     session.commit()
     session.close()
@@ -111,8 +111,8 @@ def downgrade():
                     layout, indent=None, separators=(",", ":"), sort_keys=True
                 )
                 session.merge(dashboard)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
 
     session.commit()
     session.close()
diff --git a/superset/migrations/versions/ab8c66efdd01_resample.py b/superset/migrations/versions/ab8c66efdd01_resample.py
index aa7bf868f4a6f..928636040598d 100644
--- a/superset/migrations/versions/ab8c66efdd01_resample.py
+++ b/superset/migrations/versions/ab8c66efdd01_resample.py
@@ -85,8 +85,8 @@ def upgrade():
                 params.pop("resample_fillmethod", None)
                 params.pop("resample_how", None)
                 slc.params = json.dumps(params, sort_keys=True)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
 
     session.commit()
     session.close()
@@ -110,8 +110,8 @@ def downgrade():
                     del params["resample_method"]
 
                 slc.params = json.dumps(params, sort_keys=True)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
 
     session.commit()
     session.close()
diff --git a/superset/migrations/versions/b46fa1b0b39e_add_params_to_tables.py b/superset/migrations/versions/b46fa1b0b39e_add_params_to_tables.py
index 8bc309cc30f3b..97e58b1735d51 100644
--- a/superset/migrations/versions/b46fa1b0b39e_add_params_to_tables.py
+++ b/superset/migrations/versions/b46fa1b0b39e_add_params_to_tables.py
@@ -39,5 +39,5 @@ def upgrade():
 def downgrade():
     try:
         op.drop_column("tables", "params")
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))
diff --git a/superset/migrations/versions/bf706ae5eb46_cal_heatmap_metric_to_metrics.py b/superset/migrations/versions/bf706ae5eb46_cal_heatmap_metric_to_metrics.py
index 9b936c9112aee..3e2b81c17a82c 100644
--- a/superset/migrations/versions/bf706ae5eb46_cal_heatmap_metric_to_metrics.py
+++ b/superset/migrations/versions/bf706ae5eb46_cal_heatmap_metric_to_metrics.py
@@ -62,8 +62,8 @@ def upgrade():
             session.merge(slc)
             session.commit()
             print("Upgraded ({}/{}): {}".format(i, slice_len, slc.slice_name))
-        except Exception as e:
-            print(slc.slice_name + " error: " + str(e))
+        except Exception as ex:
+            print(slc.slice_name + " error: " + str(ex))
 
     session.close()
diff --git a/superset/migrations/versions/db0c65b146bd_update_slice_model_json.py b/superset/migrations/versions/db0c65b146bd_update_slice_model_json.py
index f6ed41b2e6958..56d5f887b3e0e 100644
--- a/superset/migrations/versions/db0c65b146bd_update_slice_model_json.py
+++ b/superset/migrations/versions/db0c65b146bd_update_slice_model_json.py
@@ -60,8 +60,8 @@ def upgrade():
             session.merge(slc)
             session.commit()
             print("Upgraded ({}/{}): {}".format(i, slice_len, slc.slice_name))
-        except Exception as e:
-            print(slc.slice_name + " error: " + str(e))
+        except Exception as ex:
+            print(slc.slice_name + " error: " + str(ex))
 
     session.close()
diff --git a/superset/migrations/versions/db527d8c4c78_add_db_verbose_name.py b/superset/migrations/versions/db527d8c4c78_add_db_verbose_name.py
index 30bc9817a7053..0cb9c94b5663d 100644
--- a/superset/migrations/versions/db527d8c4c78_add_db_verbose_name.py
+++ b/superset/migrations/versions/db527d8c4c78_add_db_verbose_name.py
@@ -43,7 +43,7 @@ def upgrade():
     try:
         op.create_unique_constraint(None, "dbs", ["verbose_name"])
         op.create_unique_constraint(None, "clusters", ["verbose_name"])
-    except Exception as e:
+    except Exception:
         logging.info("Constraint not created, expected when using sqlite")
 
 
@@ -51,5 +51,5 @@ def downgrade():
     try:
         op.drop_column("dbs", "verbose_name")
         op.drop_column("clusters", "verbose_name")
-    except Exception as e:
-        logging.exception(e)
+    except Exception as ex:
+        logging.exception(ex)
diff --git a/superset/migrations/versions/e502db2af7be_add_template_params_to_tables.py b/superset/migrations/versions/e502db2af7be_add_template_params_to_tables.py
index c2bb2ec0ff369..b76ea623822c8 100644
--- a/superset/migrations/versions/e502db2af7be_add_template_params_to_tables.py
+++ b/superset/migrations/versions/e502db2af7be_add_template_params_to_tables.py
@@ -37,5 +37,5 @@ def upgrade():
 def downgrade():
     try:
         op.drop_column("tables", "template_params")
-    except Exception as e:
-        logging.warning(str(e))
+    except Exception as ex:
+        logging.warning(str(ex))
diff --git a/superset/migrations/versions/fb13d49b72f9_better_filters.py b/superset/migrations/versions/fb13d49b72f9_better_filters.py
index 97564e82a13a6..2a58fdf21c017 100644
--- a/superset/migrations/versions/fb13d49b72f9_better_filters.py
+++ b/superset/migrations/versions/fb13d49b72f9_better_filters.py
@@ -78,7 +78,7 @@ def upgrade():
     for slc in filter_box_slices.all():
         try:
             upgrade_slice(slc)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
 
     session.commit()
@@ -100,8 +100,8 @@ def downgrade():
                 params["metric"] = flts[0].get("metric")
                 params["groupby"] = [o.get("column") for o in flts]
                 slc.params = json.dumps(params, sort_keys=True)
-        except Exception as e:
-            logging.exception(e)
+        except Exception as ex:
+            logging.exception(ex)
 
     session.commit()
     session.close()
diff --git a/superset/models/core.py b/superset/models/core.py
index 0861fc13f709e..10609860e9903 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -481,8 +481,8 @@ def get_all_table_names_in_schema(
             return [
                 utils.DatasourceName(table=table, schema=schema) for table in tables
             ]
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
 
     @cache_util.memoized_func(
         key=lambda *args, **kwargs: f"db:{{}}:schema:{kwargs.get('schema')}:view_list",  # type: ignore
@@ -511,8 +511,8 @@ def get_all_view_names_in_schema(
                 database=self, inspector=self.inspector, schema=schema
             )
             return [utils.DatasourceName(table=view, schema=schema) for view in views]
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
 
     @cache_util.memoized_func(
         key=lambda *args, **kwargs: "db:{}:schema_list", attribute_in_key="id"
     )
@@ -564,9 +564,9 @@ def get_encrypted_extra(self):
         if self.encrypted_extra:
             try:
                 encrypted_extra = json.loads(self.encrypted_extra)
-            except json.JSONDecodeError as e:
-                logger.error(e)
-                raise e
+            except json.JSONDecodeError as ex:
+                logger.error(ex)
+                raise ex
         return encrypted_extra
 
     def get_table(self, table_name: str, schema: Optional[str] = None) -> Table:
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 77c1742feebb1..9a8a5a7bf46b4 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -166,14 +166,14 @@ def import_from_dict(
         try:
             obj_query = session.query(cls).filter(and_(*filters))
             obj = obj_query.one_or_none()
-        except MultipleResultsFound as e:
+        except MultipleResultsFound as ex:
             logger.error(
                 "Error importing %s \n %s \n %s",
                 cls.__name__,
                 str(obj_query),
                 yaml.safe_dump(dict_rep),
             )
-            raise e
+            raise ex
 
         if not obj:
             is_new_obj = True
@@ -274,14 +274,14 @@ def copy(self):
         return new_obj
 
     def alter_params(self, **kwargs):
-        d = self.params_dict
-        d.update(kwargs)
-        self.params = json.dumps(d)
+        params = self.params_dict
+        params.update(kwargs)
+        self.params = json.dumps(params)
 
     def remove_params(self, param_to_remove: str) -> None:
-        d = self.params_dict
-        d.pop(param_to_remove, None)
-        self.params = json.dumps(d)
+        params = self.params_dict
+        params.pop(param_to_remove, None)
+        self.params = json.dumps(params)
 
     def reset_ownership(self):
         """ object will belong to the user the current user """
@@ -395,8 +395,8 @@ def extra(self):
         except Exception:  # pylint: disable=broad-except
             return {}
 
-    def set_extra_json(self, d):
-        self.extra_json = json.dumps(d)
+    def set_extra_json(self, extras):
+        self.extra_json = json.dumps(extras)
 
     def set_extra_json_key(self, key, value):
         extra = self.extra
diff --git a/superset/models/slice.py b/superset/models/slice.py
index d867431a2ff4f..6cfadab07a959 100644
--- a/superset/models/slice.py
+++ b/superset/models/slice.py
@@ -135,9 +135,9 @@ def datasource_edit_url(self) -> Optional[str]:
     @property  # type: ignore
     @utils.memoized
     def viz(self) -> BaseViz:
-        d = json.loads(self.params)
+        form_data = json.loads(self.params)
         viz_class = viz_types[self.viz_type]
-        return viz_class(datasource=self.datasource, form_data=d)
+        return viz_class(datasource=self.datasource, form_data=form_data)
 
     @property
     def description_markeddown(self) -> str:
@@ -146,14 +146,14 @@ def description_markeddown(self) -> str:
     @property
     def data(self) -> Dict[str, Any]:
        """Data used to render slice in templates"""
-        d: Dict[str, Any] = {}
+        data: Dict[str, Any] = {}
         self.token = ""
         try:
-            d = self.viz.data
-            self.token = d.get("token")  # type: ignore
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
-            d["error"] = str(e)
+            data = self.viz.data
+            self.token = data.get("token")  # type: ignore
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
+            data["error"] = str(ex)
         return {
             "cache_timeout": self.cache_timeout,
             "datasource": self.datasource_name,
@@ -178,9 +178,9 @@ def form_data(self) -> Dict[str, Any]:
         form_data: Dict[str, Any] = {}
         try:
             form_data = json.loads(self.params)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as ex:  # pylint: disable=broad-except
             logger.error("Malformed json in slice's params")
-            logger.exception(e)
+            logger.exception(ex)
         form_data.update(
             {
                 "slice_id": self.id,
diff --git a/superset/result_set.py b/superset/result_set.py
index 1f42a28d57779..4166d1a5b96e5 100644
--- a/superset/result_set.py
+++ b/superset/result_set.py
@@ -138,8 +138,8 @@ def __init__(
                     pa_data[i] = pa.Array.from_pandas(
                         series, type=pa.timestamp("ns", tz=tz)
                     )
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
 
         self.table = pa.Table.from_arrays(pa_data, names=column_names)
         self._type_dict: Dict[str, Any] = {}
@@ -150,8 +150,8 @@ def __init__(
                 for i, col in enumerate(column_names)
                 if deduped_cursor_desc
             }
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
 
     @staticmethod
     def convert_pa_dtype(pa_dtype: pa.DataType) -> Optional[str]:
diff --git a/superset/sql_lab.py b/superset/sql_lab.py
index 01dd2e5bb130c..65d61f4cd8da4 100644
--- a/superset/sql_lab.py
+++ b/superset/sql_lab.py
@@ -135,9 +135,9 @@ def session_scope(nullpool):
     try:
         yield session
         session.commit()
-    except Exception as e:
+    except Exception as ex:
         session.rollback()
-        logger.exception(e)
+        logger.exception(ex)
         raise
     finally:
         session.close()
@@ -175,12 +175,12 @@ def get_sql_results(  # pylint: disable=too-many-arguments
             expand_data=expand_data,
             log_params=log_params,
         )
-    except Exception as e:  # pylint: disable=broad-except
+    except Exception as ex:  # pylint: disable=broad-except
         logger.error("Query %d", query_id)
-        logger.debug("Query %d: %s", query_id, e)
+        logger.debug("Query %d: %s", query_id, ex)
         stats_logger.incr("error_sqllab_unhandled")
         query = get_query(query_id, session)
-        return handle_query_error(str(e), query, session)
+        return handle_query_error(str(ex), query, session)
 
 
 # pylint: disable=too-many-arguments
@@ -253,17 +253,17 @@ def execute_sql_statement(sql_statement, query, user_name, session, cursor, log_
         )
         data = db_engine_spec.fetch_data(cursor, query.limit)
-    except SoftTimeLimitExceeded as e:
+    except SoftTimeLimitExceeded as ex:
         logger.error("Query %d: Time limit exceeded", query.id)
-        logger.debug("Query %d: %s", query.id, e)
+        logger.debug("Query %d: %s", query.id, ex)
         raise SqlLabTimeoutException(
             "SQL Lab timeout. This environment's policy is to kill queries "
             "after {} seconds.".format(SQLLAB_TIMEOUT)
         )
-    except Exception as e:
-        logger.error("Query %d: %s", query.id, type(e))
-        logger.debug("Query %d: %s", query.id, e)
-        raise SqlLabException(db_engine_spec.extract_error_message(e))
+    except Exception as ex:
+        logger.error("Query %d: %s", query.id, type(ex))
+        logger.debug("Query %d: %s", query.id, ex)
+        raise SqlLabException(db_engine_spec.extract_error_message(ex))
 
     logger.debug("Query %d: Fetching cursor description", query.id)
     cursor_description = cursor.description
@@ -378,8 +378,8 @@ def execute_sql_statements(
             result_set = execute_sql_statement(
                 statement, query, user_name, session, cursor, log_params
             )
-        except Exception as e:  # pylint: disable=broad-except
-            msg = str(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            msg = str(ex)
             if statement_count > 1:
                 msg = f"[Statement {i+1} out of {statement_count}] " + msg
             payload = handle_query_error(msg, query, session, payload)
diff --git a/superset/sql_validators/presto_db.py b/superset/sql_validators/presto_db.py
index caf8dc23764da..fc5efda27c171 100644
--- a/superset/sql_validators/presto_db.py
+++ b/superset/sql_validators/presto_db.py
@@ -136,9 +136,9 @@ def validate_statement(
                 start_column=start_column,
                 end_column=end_column,
             )
-        except Exception as e:
-            logger.exception(f"Unexpected error running validation query: {e}")
-            raise e
+        except Exception as ex:
+            logger.exception(f"Unexpected error running validation query: {ex}")
+            raise ex
 
     @classmethod
     def validate(
diff --git a/superset/utils/core.py b/superset/utils/core.py
index e1ab257fcfa4a..e72c8ccabd8b6 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -263,8 +263,8 @@ def parse_human_datetime(s):
             if parsed_flags & 2 == 0:
                 parsed_dttm = parsed_dttm.replace(hour=0, minute=0, second=0)
             dttm = dttm_from_timetuple(parsed_dttm.utctimetuple())
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
             raise ValueError("Couldn't parse date string [{}]".format(s))
     return dttm
@@ -565,8 +565,8 @@ def validate_json(obj: Union[bytes, bytearray, str]) -> None:
     if obj:
         try:
             json.loads(obj)
-        except Exception as e:
-            logger.error(f"JSON is not valid {e}")
+        except Exception as ex:
+            logger.error(f"JSON is not valid {ex}")
             raise SupersetException("JSON is not valid")
@@ -597,16 +597,16 @@ def __enter__(self):
         try:
             signal.signal(signal.SIGALRM, self.handle_timeout)
             signal.alarm(self.seconds)
-        except ValueError as e:
+        except ValueError as ex:
             logger.warning("timeout can't be used in the current context")
-            logger.exception(e)
+            logger.exception(ex)
 
     def __exit__(self, type, value, traceback):
         try:
             signal.alarm(0)
-        except ValueError as e:
+        except ValueError as ex:
             logger.warning("timeout can't be used in the current context")
-            logger.exception(e)
+            logger.exception(ex)
 
 
 def pessimistic_connection_handling(some_engine):
diff --git a/superset/utils/decorators.py b/superset/utils/decorators.py
index c7d23ec5158c1..52ba61f82ff32 100644
--- a/superset/utils/decorators.py
+++ b/superset/utils/decorators.py
@@ -37,8 +37,8 @@ def stats_timing(stats_key, stats_logger):
     start_ts = now_as_float()
     try:
         yield start_ts
-    except Exception as e:
-        raise e
+    except Exception as ex:
+        raise ex
     finally:
         stats_logger.timing(stats_key, now_as_float() - start_ts)
diff --git a/superset/utils/log.py b/superset/utils/log.py
index 98e344ff5fa1f..5d8c52e873cf1 100644
--- a/superset/utils/log.py
+++ b/superset/utils/log.py
@@ -37,19 +37,19 @@ def wrapper(*args, **kwargs):
             user_id = None
             if g.user:
                 user_id = g.user.get_id()
-            d = request.form.to_dict() or {}
+            form_data = request.form.to_dict() or {}
 
             # request parameters can overwrite post body
             request_params = request.args.to_dict()
-            d.update(request_params)
-            d.update(kwargs)
+            form_data.update(request_params)
+            form_data.update(kwargs)
 
-            slice_id = d.get("slice_id")
-            dashboard_id = d.get("dashboard_id")
+            slice_id = form_data.get("slice_id")
+            dashboard_id = form_data.get("dashboard_id")
 
             try:
                 slice_id = int(
-                    slice_id or json.loads(d.get("form_data")).get("slice_id")
+                    slice_id or json.loads(form_data.get("form_data")).get("slice_id")
                 )
             except (ValueError, TypeError):
                 slice_id = 0
@@ -61,10 +61,10 @@ def wrapper(*args, **kwargs):
 
             # bulk insert
             try:
-                explode_by = d.get("explode")
-                records = json.loads(d.get(explode_by))
+                explode_by = form_data.get("explode")
+                records = json.loads(form_data.get(explode_by))
             except Exception:  # pylint: disable=broad-except
-                records = [d]
+                records = [form_data]
 
             referrer = request.referrer[:1000] if request.referrer else None
diff --git a/superset/views/base.py b/superset/views/base.py
index f169a79f59a27..2c16212d0c403 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -107,8 +107,8 @@ def api(f):
     def wraps(self, *args, **kwargs):
         try:
             return f(self, *args, **kwargs)
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
             return json_error_response(get_error_msg())
 
     return functools.update_wrapper(wraps, f)
@@ -124,22 +124,24 @@ def handle_api_exception(f):
     def wraps(self, *args, **kwargs):
         try:
             return f(self, *args, **kwargs)
-        except SupersetSecurityException as e:
-            logger.exception(e)
+        except SupersetSecurityException as ex:
+            logger.exception(ex)
             return json_error_response(
-                utils.error_msg_from_exception(e), status=e.status, link=e.link
+                utils.error_msg_from_exception(ex), status=ex.status, link=ex.link
             )
-        except SupersetException as e:
-            logger.exception(e)
+        except SupersetException as ex:
+            logger.exception(ex)
             return json_error_response(
-                utils.error_msg_from_exception(e), status=e.status
+                utils.error_msg_from_exception(ex), status=ex.status
             )
-        except HTTPException as e:
-            logger.exception(e)
-            return json_error_response(utils.error_msg_from_exception(e), status=e.code)
-        except Exception as e:  # pylint: disable=broad-except
-            logger.exception(e)
-            return json_error_response(utils.error_msg_from_exception(e))
+        except HTTPException as ex:
+            logger.exception(ex)
+            return json_error_response(
+                utils.error_msg_from_exception(ex), status=ex.code
+            )
+        except Exception as ex:  # pylint: disable=broad-except
+            logger.exception(ex)
+            return json_error_response(utils.error_msg_from_exception(ex))
 
     return functools.update_wrapper(wraps, f)
@@ -176,8 +178,8 @@ def menu_data():
             or f"/profile/{g.user.username}/"
         )
     # when user object has no username
-    except NameError as e:
-        logger.exception(e)
+    except NameError as ex:
+        logger.exception(ex)
 
     if logo_target_path.startswith("/"):
         root_path = f"/superset{logo_target_path}"
@@ -261,8 +263,8 @@ class ListWidgetWithCheckboxes(ListWidget):  # pylint: disable=too-few-public-me
 def validate_json(_form, field):
     try:
         json.loads(field.data)
-    except Exception as e:
-        logger.exception(e)
+    except Exception as ex:
+        logger.exception(ex)
         raise Exception(_("json isn't valid"))
@@ -303,8 +305,8 @@ def _delete(self, primary_key):
             abort(404)
         try:
             self.pre_delete(item)
-        except Exception as e:  # pylint: disable=broad-except
-            flash(str(e), "danger")
+        except Exception as ex:  # pylint: disable=broad-except
+            flash(str(ex), "danger")
         else:
             view_menu = security_manager.find_view_menu(item.get_perm())
             pvs = (
@@ -338,8 +340,8 @@ def muldelete(self, items):
         for item in items:
             try:
                 self.pre_delete(item)
-            except Exception as e:  # pylint: disable=broad-except
-                flash(str(e), "danger")
+            except Exception as ex:  # pylint: disable=broad-except
+                flash(str(ex), "danger")
             else:
                 self._delete(item.id)
         self.update_redirect()
diff --git a/superset/views/base_api.py b/superset/views/base_api.py
index 22d62a48a8862..4faad91fab3ed 100644
--- a/superset/views/base_api.py
+++ b/superset/views/base_api.py
@@ -52,8 +52,8 @@ def wraps(self, pk):
             return self.response_404()
         try:
             check_ownership(item)
-        except SupersetSecurityException as e:
-            return self.response(403, message=str(e))
+        except SupersetSecurityException as ex:
+            return self.response(403, message=str(ex))
         return f(self, item)
 
     return functools.update_wrapper(wraps, f)
@@ -290,9 +290,9 @@ def put(self, item):  # pylint: disable=arguments-differ
             return self.response(
                 200, result=self.edit_model_schema.dump(item.data, many=False).data
             )
-        except SQLAlchemyError as e:
-            logger.error(f"Error updating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except SQLAlchemyError as ex:
+            logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
 
     @expose("/", methods=["POST"])
     @protect()
@@ -342,9 +342,9 @@ def post(self):
                 result=self.add_model_schema.dump(item.data, many=False).data,
                 id=item.data.id,
             )
-        except SQLAlchemyError as e:
-            logger.error(f"Error creating model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except SQLAlchemyError as ex:
+            logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
 
     @expose("/<pk>", methods=["DELETE"])
     @protect()
@@ -383,6 +383,6 @@ def delete(self, item):  # pylint: disable=arguments-differ
         try:
             self.datamodel.delete(item, raise_exception=True)
             return self.response(200, message="OK")
-        except SQLAlchemyError as e:
-            logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
-            return self.response_422(message=str(e))
+        except SQLAlchemyError as ex:
+            logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+            return self.response_422(message=str(ex))
diff --git a/superset/views/core.py b/superset/views/core.py
index 02fd207486c33..f574d4141e76b 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -185,8 +185,8 @@ def check_datasource_perms(
         datasource_id, datasource_type = get_datasource_info(
             datasource_id, datasource_type, form_data
         )
-    except SupersetException as e:
-        raise SupersetSecurityException(str(e))
+    except SupersetException as ex:
+        raise SupersetSecurityException(str(ex))
 
     if datasource_type is None:
         raise SupersetSecurityException("Could not determine datasource type")
@@ -317,8 +317,8 @@ def store(self):
             obj = models.KeyValue(value=value)
             db.session.add(obj)
             db.session.commit()
-        except Exception as e:
-            return json_error_response(e)
+        except Exception as ex:
+            return json_error_response(ex)
         return Response(json.dumps({"id": obj.id}), status=200)
 
     @event_logger.log_this
@@ -329,8 +329,8 @@ def get_value(self, key_id):
             kv = db.session.query(models.KeyValue).filter_by(id=key_id).scalar()
             if not kv:
                 return Response(status=404, content_type="text/plain")
-        except Exception as e:
-            return json_error_response(e)
+        except Exception as ex:
+            return json_error_response(ex)
         return Response(kv.value, status=200, content_type="text/plain")
@@ -600,9 +600,9 @@ def get_query_string_response(self, viz_obj):
             query_obj = viz_obj.query_obj()
             if query_obj:
                 query = viz_obj.datasource.get_query_str(query_obj)
-        except Exception as e:
-            logger.exception(e)
-            return json_error_response(e)
+        except Exception as ex:
+            logger.exception(ex)
+            return json_error_response(ex)
 
         if not query:
             query = "No query."
@@ -706,8 +706,8 @@ def explore_json(self, datasource_type=None, datasource_id=None):
             datasource_id, datasource_type = get_datasource_info(
                 datasource_id, datasource_type, form_data
             )
-        except SupersetException as e:
-            return json_error_response(utils.error_msg_from_exception(e))
+        except SupersetException as ex:
+            return json_error_response(utils.error_msg_from_exception(ex))
 
         viz_obj = get_viz(
             datasource_type=datasource_type,
@@ -729,19 +729,19 @@ def import_dashboards(self):
         if request.method == "POST" and f:
             try:
                 dashboard_import_export.import_dashboards(db.session, f.stream)
-            except DatabaseNotFound as e:
-                logger.exception(e)
+            except DatabaseNotFound as ex:
+                logger.exception(ex)
                 flash(
                     _(
                         "Cannot import dashboard: %(db_error)s.\n"
                         "Make sure to create the database before "
                         "importing the dashboard.",
-                        db_error=e,
+                        db_error=ex,
                     ),
                     "danger",
                 )
-            except Exception as e:
-                logger.exception(e)
+            except Exception as ex:
+                logger.exception(ex)
                 flash(
                     _(
                         "An unknown error occurred. "
@@ -1371,11 +1371,11 @@ def testconn(self):
             with closing(engine.connect()) as conn:
                 conn.scalar(select([1]))
                 return json_success('"OK"')
-        except CertificateException as e:
-            logger.info(e.message)
-            return json_error_response(e.message)
-        except NoSuchModuleError as e:
-            logger.info("Invalid driver %s", e)
+        except CertificateException as ex:
+            logger.info(ex.message)
+            return json_error_response(ex.message)
+        except NoSuchModuleError as ex:
+            logger.info("Invalid driver %s", ex)
             driver_name = make_url(uri).drivername
             return json_error_response(
                 _(
                     "Could not load database driver: {}".format(driver_name)
                 ),
                 400,
             )
-        except ArgumentError as e:
-            logger.info("Invalid URI %s", e)
+        except ArgumentError as ex:
+            logger.info("Invalid URI %s", ex)
             return json_error_response(
                 _(
                     "Invalid connection string, a valid string usually follows:\n"
                     "'DRIVER://USER:PASSWORD@DB-HOST/DATABASE-NAME'"
                 )
             )
-        except OperationalError as e:
-            logger.warning("Connection failed %s", e)
+        except OperationalError as ex:
+            logger.warning("Connection failed %s", ex)
             return json_error_response(
                 _("Connection failed, please check your connection settings."), 400
             )
-        except DBSecurityException as e:
-            logger.warning("Stopped an unsafe database connection. %s", e)
-            return json_error_response(_(str(e)), 400)
+        except DBSecurityException as ex:
+            logger.warning("Stopped an unsafe database connection. %s", ex)
+            return json_error_response(_(str(ex)), 400)
-        except Exception as e:
-            logger.error("Unexpected error %s", e)
+        except Exception as ex:
+            logger.error("Unexpected error %s", ex)
             return json_error_response(
                 _("Unexpected error occurred, please check your logs for details"), 400
             )
@@ -1706,9 +1706,9 @@ def warm_up_cache(self):
                     force=True,
                 )
                 obj.get_json()
-            except Exception as e:
+            except Exception as ex:
                 logger.exception("Failed to warm up cache")
-                return json_error_response(utils.error_msg_from_exception(e))
+                return json_error_response(utils.error_msg_from_exception(ex))
         return json_success(
             json.dumps(
                 [{"slice_id": slc.id, "slice_name": slc.slice_name} for slc in slices]
@@ -1950,9 +1950,9 @@ def sync_druid_source(self):
             return json_error_response(err_msg)
         try:
             DruidDatasource.sync_to_db_from_config(druid_config, user, cluster)
-        except Exception as e:
-            logger.exception(utils.error_msg_from_exception(e))
-            return json_error_response(utils.error_msg_from_exception(e))
+        except Exception as ex:
+            logger.exception(utils.error_msg_from_exception(ex))
+            return json_error_response(utils.error_msg_from_exception(ex))
         return Response(status=201)
 
     @has_access
@@ -2064,11 +2064,11 @@ def estimate_query_cost(
             cost = mydb.db_engine_spec.estimate_query_cost(
                 mydb, schema, sql, utils.QuerySource.SQL_LAB
             )
-        except SupersetTimeoutException as e:
-            logger.exception(e)
+        except SupersetTimeoutException as ex:
+            logger.exception(ex)
             return json_error_response(timeout_msg)
-        except Exception as e:
-            return json_error_response(str(e))
+        except Exception as ex:
+            return json_error_response(str(ex))
 
         spec = mydb.db_engine_spec
         query_cost_formatters = get_feature_flags().get(
@@ -2226,15 +2226,15 @@ def validate_sql_json(self):
                 encoding=None,
             )
             return json_success(payload)
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
             msg = _(
                 f"{validator.name} was unable to check your query.\n"
                 "Please recheck your query.\n"
-                f"Exception: {e}"
+                f"Exception: {ex}"
             )
             # Return as a 400 if the database error message says we got a 4xx error
-            if re.search(r"([\W]|^)4\d{2}([\W]|$)", str(e)):
+            if re.search(r"([\W]|^)4\d{2}([\W]|$)", str(ex)):
                 return json_error_response(f"{msg}", status=400)
             else:
                 return json_error_response(f"{msg}")
@@ -2268,8 +2268,8 @@ def _sql_json_async(
                 expand_data=expand_data,
                 log_params=log_params,
             )
-        except Exception as e:
-            logger.exception(f"Query {query.id}: {e}")
+        except Exception as ex:
+            logger.exception(f"Query {query.id}: {ex}")
             msg = _(
                 "Failed to start remote query on a worker. "
                 "Tell your administrator to verify the availability of "
@@ -2330,8 +2330,8 @@ def _sql_json_sync(
                 ignore_nan=True,
                 encoding=None,
             )
-        except Exception as e:
-            logger.exception(f"Query {query.id}: {e}")
-            return json_error_response(f"{{e}}")
+        except Exception as ex:
+            logger.exception(f"Query {query.id}: {ex}")
+            return json_error_response(f"{ex}")
         if data.get("status") == QueryStatus.FAILED:
             return json_error_response(payload=data)
@@ -2414,8 +2414,8 @@ def sql_json_exec(
             session.flush()
             query_id = query.id
             session.commit()  # shouldn't be necessary
-        except SQLAlchemyError as e:
-            logger.error(f"Errors saving query details {e}")
+        except SQLAlchemyError as ex:
+            logger.error(f"Errors saving query details {ex}")
             session.rollback()
             raise Exception(_("Query record was not created as expected."))
         if not query_id:
@@ -2440,8 +2440,8 @@ def sql_json_exec(
             rendered_query = template_processor.process_template(
                 query.sql, **template_params
             )
-        except Exception as e:
-            error_msg = utils.error_msg_from_exception(e)
+        except Exception as ex:
+            error_msg = utils.error_msg_from_exception(ex)
             return json_error_response(
                 f"Query {query_id}: Template rendering failed: {error_msg}"
             )
@@ -2799,8 +2799,8 @@ def schemas_access_for_csv_upload(self):
                 database, schemas_allowed, False
             )
             return self.json_response(schemas_allowed_processed)
-        except Exception as e:
-            logger.exception(e)
+        except Exception as ex:
+            logger.exception(ex)
             return json_error_response(
                 "Failed to fetch schemas allowed for csv upload in this database! "
                 "Please contact your Superset Admin!"
diff --git a/superset/views/database/api.py b/superset/views/database/api.py
index d4ee3c995e23e..166abb7c2c115 100644
--- a/superset/views/database/api.py
+++ b/superset/views/database/api.py
@@ -270,9 +270,9 @@ def table_metadata(self, database: Database, table_name: str, schema_name: str):
         self.incr_stats("init", self.table_metadata.__name__)
         try:
             table_info: Dict = get_table_metadata(database, table_name, schema_name)
-        except SQLAlchemyError as e:
+        except SQLAlchemyError as ex:
             self.incr_stats("error", self.table_metadata.__name__)
-            return self.response_422(error_msg_from_exception(e))
+            return self.response_422(error_msg_from_exception(ex))
         self.incr_stats("success", self.table_metadata.__name__)
         return self.response(200, **table_info)
diff --git a/superset/views/database/mixins.py b/superset/views/database/mixins.py
index 7f115dc02d046..dae7228c89c88 100644
--- a/superset/views/database/mixins.py
+++ b/superset/views/database/mixins.py
@@ -234,9 +234,9 @@ def check_extra(self, database):  # pylint: disable=no-self-use
         # this will check whether json.loads(extra) can succeed
         try:
             extra = database.get_extra()
-        except Exception as e:
+        except Exception as ex:
             raise Exception(
-                _("Extra field cannot be decoded by JSON. %{msg}s", msg=str(e))
+                _("Extra field cannot be decoded by JSON. %{msg}s", msg=str(ex))
             )
 
         # this will check whether 'metadata_params' is configured correctly
@@ -256,7 +256,7 @@ def check_encrypted_extra(self, database):  # pylint: disable=no-self-use
         # this will check whether json.loads(secure_extra) can succeed
         try:
             database.get_encrypted_extra()
-        except Exception as e:
+        except Exception as ex:
             raise Exception(
-                _("Extra field cannot be decoded by JSON. %{msg}s", msg=str(e))
+                _("Extra field cannot be decoded by JSON. %{msg}s", msg=str(ex))
             )
diff --git a/superset/views/database/views.py b/superset/views/database/views.py
index 794b3e756dfdc..47bf1c3b87d42 100644
--- a/superset/views/database/views.py
+++ b/superset/views/database/views.py
@@ -158,7 +158,7 @@ def form_post(self, form):
             table.fetch_metadata()
             db.session.add(table)
             db.session.commit()
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as ex:  # pylint: disable=broad-except
             db.session.rollback()
             try:
                 os.remove(path)
@@ -171,7 +171,7 @@ def form_post(self, form):
                 filename=csv_filename,
                 table_name=form.name.data,
                 db_name=database.database_name,
-                error_msg=str(e),
+                error_msg=str(ex),
             )
 
             flash(message, "danger")
diff --git a/superset/viz.py b/superset/viz.py
index f17f234f4bfe8..3de5e0501d6bd 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -431,10 +431,10 @@ def get_df_payload(self, query_obj=None, **kwargs):
                     self.status = utils.QueryStatus.SUCCESS
                     is_loaded = True
                     stats_logger.incr("loaded_from_cache")
-                except Exception as e:
-                    logger.exception(e)
+                except Exception as ex:
+                    logger.exception(ex)
                     logger.error(
-                        "Error reading cache: " + utils.error_msg_from_exception(e)
+                        "Error reading cache: " + utils.error_msg_from_exception(ex)
                     )
 
                 logger.info("Serving from cache")
@@ -446,10 +446,10 @@ def get_df_payload(self, query_obj=None, **kwargs):
                 if not self.force:
                     stats_logger.incr("loaded_from_source_without_force")
                 is_loaded = True
-            except Exception as e:
-                logger.exception(e)
+            except Exception as ex:
+                logger.exception(ex)
                 if not self.error_message:
-                    self.error_message = "{}".format(e)
+                    self.error_message = "{}".format(ex)
                 self.status = utils.QueryStatus.FAILED
                 stacktrace = utils.get_stacktrace()
@@ -469,11 +469,11 @@ def get_df_payload(self, query_obj=None, **kwargs):
                     stats_logger.incr("set_cache_key")
                     cache.set(cache_key, cache_value, timeout=self.cache_timeout)
-                except Exception as e:
+                except Exception as ex:
                     # cache.set call can fail if the backend is down or if
                     # the key is too large or whatever other reasons
                     logger.warning("Could not cache key {}".format(cache_key))
-                    logger.exception(e)
+                    logger.exception(ex)
                     cache.delete(cache_key)
 
         return {
            "cache_key": self._any_cache_key,