diff --git a/superset/annotation_layers/annotations/api.py b/superset/annotation_layers/annotations/api.py
index 1c63f699d51c6..17c4ee5f6448e 100644
--- a/superset/annotation_layers/annotations/api.py
+++ b/superset/annotation_layers/annotations/api.py
@@ -311,7 +311,10 @@ def post(self, pk: int) -> Response:  # pylint: disable=arguments-differ
             return self.response_422(message=ex.normalized_messages())
         except AnnotationCreateFailedError as ex:
             logger.error(
-                "Error creating annotation %s: %s", self.__class__.__name__, str(ex)
+                "Error creating annotation %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -384,7 +387,10 @@ def put(  # pylint: disable=arguments-differ
             return self.response_422(message=ex.normalized_messages())
         except AnnotationUpdateFailedError as ex:
             logger.error(
-                "Error updating annotation %s: %s", self.__class__.__name__, str(ex)
+                "Error updating annotation %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -436,7 +442,10 @@ def delete(  # pylint: disable=arguments-differ
             return self.response_404()
         except AnnotationDeleteFailedError as ex:
             logger.error(
-                "Error deleting annotation %s: %s", self.__class__.__name__, str(ex)
+                "Error deleting annotation %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

diff --git a/superset/annotation_layers/api.py b/superset/annotation_layers/api.py
index b5f81d4609c09..f43b385544222 100644
--- a/superset/annotation_layers/api.py
+++ b/superset/annotation_layers/api.py
@@ -158,6 +158,7 @@ def delete(self, pk: int) -> Response:
                 "Error deleting annotation layer %s: %s",
                 self.__class__.__name__,
                 str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -220,7 +221,10 @@ def post(self) -> Response:
             return self.response_422(message=ex.normalized_messages())
         except AnnotationLayerCreateFailedError as ex:
             logger.error(
-                "Error creating annotation %s: %s", self.__class__.__name__, str(ex)
+                "Error creating annotation %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -290,7 +294,10 @@ def put(self, pk: int) -> Response:
             return self.response_422(message=ex.normalized_messages())
         except AnnotationLayerUpdateFailedError as ex:
             logger.error(
-                "Error updating annotation %s: %s", self.__class__.__name__, str(ex)
+                "Error updating annotation %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py
index 8548898b4c455..ff19f3a0fe834 100644
--- a/superset/cachekeys/api.py
+++ b/superset/cachekeys/api.py
@@ -122,7 +122,7 @@ def invalidate(self) -> Response:
                 len(datasource_uids),
             )
         except SQLAlchemyError as ex:  # pragma: no cover
-            logger.error(ex)
+            logger.error(ex, exc_info=True)
             db.session.rollback()
             return self.response_500(str(ex))
         db.session.commit()
diff --git a/superset/charts/api.py b/superset/charts/api.py
index aba386c8358c3..61be4aa9ae956 100644
--- a/superset/charts/api.py
+++ b/superset/charts/api.py
@@ -277,7 +277,10 @@ def post(self) -> Response:
             return self.response_422(message=ex.normalized_messages())
         except ChartCreateFailedError as ex:
             logger.error(
-                "Error creating model %s: %s", self.__class__.__name__, str(ex)
+                "Error creating model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -352,7 +355,10 @@ def put(self, pk: int) -> Response:
             response = self.response_422(message=ex.normalized_messages())
         except ChartUpdateFailedError as ex:
             logger.error(
-                "Error updating model %s: %s", self.__class__.__name__, str(ex)
+                "Error updating model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             response = self.response_422(message=str(ex))

@@ -407,7 +413,10 @@ def delete(self, pk: int) -> Response:
             return self.response_403()
         except ChartDeleteFailedError as ex:
             logger.error(
-                "Error deleting model %s: %s", self.__class__.__name__, str(ex)
+                "Error deleting model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

diff --git a/superset/common/query_context.py b/superset/common/query_context.py
index 16520add7783c..98e02f4e825e3 100644
--- a/superset/common/query_context.py
+++ b/superset/common/query_context.py
@@ -332,7 +332,9 @@ def get_df_payload(  # pylint: disable=too-many-statements,too-many-locals
                 except KeyError as ex:
                     logger.exception(ex)
                     logger.error(
-                        "Error reading cache: %s", error_msg_from_exception(ex)
+                        "Error reading cache: %s",
+                        error_msg_from_exception(ex),
+                        exc_info=True,
                     )
                 logger.info("Serving from cache")

diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index 47fdd3dbd23f1..f485905849072 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -459,7 +459,10 @@ def post(self) -> Response:
             return self.response_422(message=ex.normalized_messages())
         except DashboardCreateFailedError as ex:
             logger.error(
-                "Error creating model %s: %s", self.__class__.__name__, str(ex)
+                "Error creating model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -532,7 +535,10 @@ def put(self, pk: int) -> Response:
             return self.response_422(message=ex.normalized_messages())
         except DashboardUpdateFailedError as ex:
             logger.error(
-                "Error updating model %s: %s", self.__class__.__name__, str(ex)
+                "Error updating model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             response = self.response_422(message=str(ex))
         return response
@@ -586,7 +592,10 @@ def delete(self, pk: int) -> Response:
             return self.response_403()
         except DashboardDeleteFailedError as ex:
             logger.error(
-                "Error deleting model %s: %s", self.__class__.__name__, str(ex)
+                "Error deleting model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

diff --git a/superset/databases/api.py b/superset/databases/api.py
index 390d34657c9b2..5b964e68c21de 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -243,7 +243,10 @@ def post(self) -> Response:
             return self.response_422(message=str(ex))
         except DatabaseCreateFailedError as ex:
             logger.error(
-                "Error creating model %s: %s", self.__class__.__name__, str(ex)
+                "Error creating model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -320,7 +323,10 @@ def put(  # pylint: disable=too-many-return-statements, arguments-differ
             return self.response_422(message=str(ex))
         except DatabaseUpdateFailedError as ex:
             logger.error(
-                "Error updating model %s: %s", self.__class__.__name__, str(ex)
+                "Error updating model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -373,7 +379,10 @@ def delete(self, pk: int) -> Response:  # pylint: disable=arguments-differ
             return self.response_422(message=str(ex))
         except DatabaseDeleteFailedError as ex:
             logger.error(
-                "Error deleting model %s: %s", self.__class__.__name__, str(ex)
+                "Error deleting model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -800,7 +809,7 @@ def import_(self) -> Response:
             logger.warning("Import database failed")
             return self.response_422(message=exc.normalized_messages())
         except DatabaseImportError as exc:
-            logger.error("Import database failed")
+            logger.error("Import database failed", exc_info=True)
             return self.response_500(message=str(exc))

     @expose("/<int:pk>/function_names/", methods=["GET"])
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index 25a2c33cf2e46..b70dd45b7c6b8 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -239,7 +239,10 @@ def post(self) -> Response:
             return self.response_422(message=ex.normalized_messages())
         except DatasetCreateFailedError as ex:
             logger.error(
-                "Error creating model %s: %s", self.__class__.__name__, str(ex)
+                "Error creating model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -325,7 +328,10 @@ def put(self, pk: int) -> Response:
             response = self.response_422(message=ex.normalized_messages())
         except DatasetUpdateFailedError as ex:
             logger.error(
-                "Error updating model %s: %s", self.__class__.__name__, str(ex)
+                "Error updating model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             response = self.response_422(message=str(ex))
         return response
@@ -379,7 +385,10 @@ def delete(self, pk: int) -> Response:
             return self.response_403()
         except DatasetDeleteFailedError as ex:
             logger.error(
-                "Error deleting model %s: %s", self.__class__.__name__, str(ex)
+                "Error deleting model %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -512,7 +521,10 @@ def refresh(self, pk: int) -> Response:
             return self.response_403()
         except DatasetRefreshFailedError as ex:
             logger.error(
-                "Error refreshing dataset %s: %s", self.__class__.__name__, str(ex)
+                "Error refreshing dataset %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -716,5 +728,5 @@ def import_(self) -> Response:
             logger.warning("Import dataset failed")
             return self.response_422(message=exc.normalized_messages())
         except DatasetImportError as exc:
-            logger.error("Import dataset failed")
+            logger.error("Import dataset failed", exc_info=True)
             return self.response_500(message=str(exc))
diff --git a/superset/datasets/columns/api.py b/superset/datasets/columns/api.py
index a459806db60ce..d04827d42f7e4 100644
--- a/superset/datasets/columns/api.py
+++ b/superset/datasets/columns/api.py
@@ -99,6 +99,9 @@ def delete(  # pylint: disable=arguments-differ
             return self.response_403()
         except DatasetColumnDeleteFailedError as ex:
             logger.error(
-                "Error deleting dataset column %s: %s", self.__class__.__name__, str(ex)
+                "Error deleting dataset column %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))
diff --git a/superset/datasets/commands/bulk_delete.py b/superset/datasets/commands/bulk_delete.py
index 49ad5e40c8ab7..0ba6b4e767f1b 100644
--- a/superset/datasets/commands/bulk_delete.py
+++ b/superset/datasets/commands/bulk_delete.py
@@ -66,7 +66,8 @@ def run(self) -> None:
                 else:
                     if not view_menu:
                         logger.error(
-                            "Could not find the data access permission for the dataset"
+                            "Could not find the data access permission for the dataset",
+                            exc_info=True,
                         )

             db.session.commit()
diff --git a/superset/datasets/commands/delete.py b/superset/datasets/commands/delete.py
index f538415768115..028df423e44ae 100644
--- a/superset/datasets/commands/delete.py
+++ b/superset/datasets/commands/delete.py
@@ -68,7 +68,8 @@ def run(self) -> Model:
                 else:
                     if not view_menu:
                         logger.error(
-                            "Could not find the data access permission for the dataset"
+                            "Could not find the data access permission for the dataset",
+                            exc_info=True,
                         )
             db.session.commit()
         except (SQLAlchemyError, DAODeleteFailedError) as ex:
diff --git a/superset/datasets/dao.py b/superset/datasets/dao.py
index f2f57d2a22fab..68315be3b1cd1 100644
--- a/superset/datasets/dao.py
+++ b/superset/datasets/dao.py
@@ -48,7 +48,7 @@ def get_database_by_id(database_id: int) -> Optional[Database]:
         try:
             return db.session.query(Database).filter_by(id=database_id).one_or_none()
         except SQLAlchemyError as ex:  # pragma: no cover
-            logger.error("Could not get database by id: %s", str(ex))
+            logger.error("Could not get database by id: %s", str(ex), exc_info=True)
             return None

     @staticmethod
diff --git a/superset/datasets/metrics/api.py b/superset/datasets/metrics/api.py
index 948ede0ed7faf..b55ab9dbb4895 100644
--- a/superset/datasets/metrics/api.py
+++ b/superset/datasets/metrics/api.py
@@ -99,6 +99,9 @@ def delete(  # pylint: disable=arguments-differ
             return self.response_403()
         except DatasetMetricDeleteFailedError as ex:
             logger.error(
-                "Error deleting dataset column %s: %s", self.__class__.__name__, str(ex)
+                "Error deleting dataset column %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index 3f91d58dda242..807b2a4548a2a 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -836,7 +836,7 @@ def get_table_comment(
             # It's expected that some dialects don't implement the comment method
             pass
         except Exception as ex:  # pylint: disable=broad-except
-            logger.error("Unexpected error while fetching table comment")
+            logger.error("Unexpected error while fetching table comment", exc_info=True)
             logger.exception(ex)
         return comment

@@ -1218,7 +1218,7 @@ def get_extra_params(database: "Database") -> Dict[str, Any]:
             try:
                 extra = json.loads(database.extra)
             except json.JSONDecodeError as ex:
-                logger.error(ex)
+                logger.error(ex, exc_info=True)
                 raise ex
         return extra

diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py
index 66c68b3c27a19..7fec8d3d05575 100644
--- a/superset/db_engine_specs/hive.py
+++ b/superset/db_engine_specs/hive.py
@@ -523,6 +523,7 @@ def get_function_names(cls, database: "Database") -> List[str]:
             "Expected column `%s`, found: %s.",
             cls._show_functions_column,
             ", ".join(columns),
+            exc_info=True,
         )
         # if the results have a single column, use that
         if len(columns) == 1:
diff --git a/superset/models/core.py b/superset/models/core.py
index 5dadca2b3b0d0..a917b92cc3641 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -176,7 +176,9 @@ def function_names(self) -> List[str]:
             # function_names property is used in bulk APIs and should not hard crash
             # more info in: https://github.com/apache/superset/issues/9678
             logger.error(
-                "Failed to fetch database function names with error: %s", str(ex)
+                "Failed to fetch database function names with error: %s",
+                str(ex),
+                exc_info=True,
             )
         return []

@@ -594,7 +596,7 @@ def get_encrypted_extra(self) -> Dict[str, Any]:
             try:
                 encrypted_extra = json.loads(self.encrypted_extra)
             except json.JSONDecodeError as ex:
-                logger.error(ex)
+                logger.error(ex, exc_info=True)
                 raise ex
         return encrypted_extra

diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 9237ee3e1c0ef..fe02fb9bfa253 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -213,6 +213,7 @@ def import_from_dict(
                 cls.__name__,
                 str(obj_query),
                 yaml.safe_dump(dict_rep),
+                exc_info=True,
             )
             raise ex

diff --git a/superset/models/slice.py b/superset/models/slice.py
index 08421ca54442d..9efd07d8e151d 100644
--- a/superset/models/slice.py
+++ b/superset/models/slice.py
@@ -223,7 +223,7 @@ def form_data(self) -> Dict[str, Any]:
         try:
             form_data = json.loads(self.params)
         except Exception as ex:  # pylint: disable=broad-except
-            logger.error("Malformed json in slice's params")
+            logger.error("Malformed json in slice's params", exc_info=True)
             logger.exception(ex)
         form_data.update(
             {
diff --git a/superset/reports/api.py b/superset/reports/api.py
index 0b495ef864738..6a91608e2d4fe 100644
--- a/superset/reports/api.py
+++ b/superset/reports/api.py
@@ -239,6 +239,7 @@ def delete(self, pk: int) -> Response:
                 "Error deleting report schedule %s: %s",
                 self.__class__.__name__,
                 str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -300,6 +301,7 @@ def post(self) -> Response:
                 "Error creating report schedule %s: %s",
                 self.__class__.__name__,
                 str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

@@ -368,7 +370,10 @@ def put(self, pk: int) -> Response:  # pylint: disable=too-many-return-statement
             return self.response_403()
         except ReportScheduleUpdateFailedError as ex:
             logger.error(
-                "Error updating report %s: %s", self.__class__.__name__, str(ex)
+                "Error updating report %s: %s",
+                self.__class__.__name__,
+                str(ex),
+                exc_info=True,
             )
             return self.response_422(message=str(ex))

diff --git a/superset/sql_lab.py b/superset/sql_lab.py
index 234b1dd110e62..a3c882eef6399 100644
--- a/superset/sql_lab.py
+++ b/superset/sql_lab.py
@@ -102,9 +102,13 @@ def handle_query_error(

 def get_query_backoff_handler(details: Dict[Any, Any]) -> None:
     query_id = details["kwargs"]["query_id"]
-    logger.error("Query with id `%s` could not be retrieved", str(query_id))
+    logger.error(
+        "Query with id `%s` could not be retrieved", str(query_id), exc_info=True
+    )
     stats_logger.incr("error_attempting_orm_query_{}".format(details["tries"] - 1))
-    logger.error("Query %s: Sleeping for a sec before retrying...", str(query_id))
+    logger.error(
+        "Query %s: Sleeping for a sec before retrying...", str(query_id), exc_info=True
+    )


 def get_query_giveup_handler(_: Any) -> None:
@@ -247,7 +251,7 @@ def execute_sql_statement(
         )
         data = db_engine_spec.fetch_data(cursor, query.limit)
     except Exception as ex:
-        logger.error("Query %d: %s", query.id, type(ex))
+        logger.error("Query %d: %s", query.id, type(ex), exc_info=True)
         logger.debug("Query %d: %s", query.id, ex)
         raise SqlLabException(db_engine_spec.extract_error_message(ex))

diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py
index e32467d0c30f4..546eaebdb0565 100644
--- a/superset/tasks/cache.py
+++ b/superset/tasks/cache.py
@@ -272,7 +272,7 @@ def cache_warmup(
             break
     else:
         message = f"No strategy {strategy_name} found!"
-        logger.error(message)
+        logger.error(message, exc_info=True)
         return message

     logger.info("Loading %s", class_.__name__)
diff --git a/superset/tasks/scheduler.py b/superset/tasks/scheduler.py
index 86bc9ca180271..67e8379d790b5 100644
--- a/superset/tasks/scheduler.py
+++ b/superset/tasks/scheduler.py
@@ -83,7 +83,9 @@ def execute(report_schedule_id: int, scheduled_dttm: str) -> None:
             task_id, report_schedule_id, scheduled_dttm_,
         ).run()
     except ReportScheduleUnexpectedError as ex:
-        logger.error("An unexpected occurred while executing the report: %s", ex)
+        logger.error(
+            "An unexpected occurred while executing the report: %s", ex, exc_info=True
+        )
     except CommandException as ex:
         logger.info("Report state: %s", ex)

@@ -95,4 +97,8 @@ def prune_log() -> None:
     except SoftTimeLimitExceeded as ex:
         logger.warning("A timeout occurred while pruning report schedule logs: %s", ex)
     except CommandException as ex:
-        logger.error("An exception occurred while pruning report schedule logs: %s", ex)
+        logger.error(
+            "An exception occurred while pruning report schedule logs: %s",
+            ex,
+            exc_info=True,
+        )
diff --git a/superset/utils/core.py b/superset/utils/core.py
index 797bac5b47c19..bd664cf83c495 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -360,7 +360,7 @@ def flasher(msg: str, severity: str = "message") -> None:
         flash(msg, severity)
     except RuntimeError:
         if severity == "danger":
-            logger.error(msg)
+            logger.error(msg, exc_info=True)
         else:
             logger.info(msg)

@@ -753,7 +753,7 @@ def validate_json(obj: Union[bytes, bytearray, str]) -> None:
         try:
             json.loads(obj)
         except Exception as ex:
-            logger.error("JSON is not valid %s", str(ex))
+            logger.error("JSON is not valid %s", str(ex), exc_info=True)
             raise SupersetException("JSON is not valid")


@@ -769,7 +769,7 @@ def __init__(self, seconds: int = 1, error_message: str = "Timeout") -> None:
     def handle_timeout(  # pylint: disable=unused-argument
         self, signum: int, frame: Any
     ) -> None:
-        logger.error("Process timed out")
+        logger.error("Process timed out", exc_info=True)
         raise SupersetTimeoutException(
             error_type=SupersetErrorType.BACKEND_TIMEOUT_ERROR,
             message=self.error_message,
diff --git a/superset/utils/webdriver.py b/superset/utils/webdriver.py
index cdce5cd2d9744..e7155ff12932b 100644
--- a/superset/utils/webdriver.py
+++ b/superset/utils/webdriver.py
@@ -119,9 +119,9 @@ def get_screenshot(
             logger.info("Taking a PNG screenshot or url %s", url)
             img = element.screenshot_as_png
         except TimeoutException:
-            logger.error("Selenium timed out requesting url %s", url)
+            logger.error("Selenium timed out requesting url %s", url, exc_info=True)
         except WebDriverException as ex:
-            logger.error(ex)
+            logger.error(ex, exc_info=True)
             # Some webdrivers do not support screenshots for elements.
             # In such cases, take a screenshot of the entire page.
             img = driver.screenshot()  # pylint: disable=no-member
diff --git a/superset/views/core.py b/superset/views/core.py
index cff9e482d7fad..06099669ece75 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -1914,7 +1914,7 @@ def sync_druid_source(self) -> FlaskResponse:  # pylint: disable=no-self-use
                 "Can't find User '%(name)s', please ask your admin " "to create one.",
                 name=user_name,
             )
-            logger.error(err_msg)
+            logger.error(err_msg, exc_info=True)
             return json_error_response(err_msg)
         cluster = (
             db.session.query(DruidCluster)
@@ -1926,7 +1926,7 @@ def sync_druid_source(self) -> FlaskResponse:  # pylint: disable=no-self-use
                 "Can't find DruidCluster with cluster_name = " "'%(name)s'",
                 name=cluster_name,
             )
-            logger.error(err_msg)
+            logger.error(err_msg, exc_info=True)
             return json_error_response(err_msg)
         try:
             DruidDatasource.sync_to_db_from_config(druid_config, user, cluster)
@@ -2492,7 +2492,7 @@ def sql_json_exec(  # pylint: disable=too-many-statements,too-many-locals
             query_id = query.id
             session.commit()  # shouldn't be necessary
         except SQLAlchemyError as ex:
-            logger.error("Errors saving query details %s", str(ex))
+            logger.error("Errors saving query details %s", str(ex), exc_info=True)
             session.rollback()
             raise Exception(_("Query record was not created as expected."))
         if not query_id:
diff --git a/superset/viz.py b/superset/viz.py
index 1847cc498cc31..4419c15f79f5d 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -509,7 +509,8 @@ def get_df_payload(
                 except Exception as ex:
                     logger.exception(ex)
                     logger.error(
-                        "Error reading cache: " + utils.error_msg_from_exception(ex)
+                        "Error reading cache: " + utils.error_msg_from_exception(ex),
+                        exc_info=True,
                     )
                 logger.info("Serving from cache")

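
For context, not part of the patch: passing exc_info=True to a standard-library logging call attaches the active exception's traceback to the log record, so handlers emit the full stack trace instead of only the formatted message. A minimal sketch of that behavior, using a hypothetical failing lookup:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

try:
    {}["missing"]  # hypothetical failing operation
except KeyError as ex:
    # Plain call: only the one-line formatted message is emitted.
    logger.error("Error reading cache: %s", str(ex))
    # With exc_info=True the KeyError traceback is appended to the record,
    # equivalent to what logger.exception() produces at ERROR level.
    logger.error("Error reading cache: %s", str(ex), exc_info=True)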