From 3bd69ea7667d8fb449724c526025d45ec1d8212b Mon Sep 17 00:00:00 2001 From: Kelsey Thomas <101993653+Kelsey-Ethyca@users.noreply.github.com> Date: Mon, 12 Dec 2022 09:44:52 -0800 Subject: [PATCH 1/8] Updated CHANGELOG.md for release 2.2.2 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a1f9ea9b33..17fe394fd53 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ The types of changes are: ## [Unreleased](https://github.com/ethyca/fides/compare/2.2.2...main) + ## [2.2.2](https://github.com/ethyca/fides/compare/2.2.1...2.2.2) ### Docs From 8bcd5fc66482ae682f9df3c89bdbe1749f7fdc1c Mon Sep 17 00:00:00 2001 From: Adam Sachs Date: Mon, 12 Dec 2022 22:21:35 -0500 Subject: [PATCH 2/8] Update all logging references to use loguru Switch to use {} loguru formatting Update Pii class to work with loguru lazy formatting Include a new, proper log_pii config var --- .fides/fides.toml | 1 + src/fides/api/ctl/routes/health.py | 10 +- src/fides/api/main.py | 48 ++++----- .../ops/api/v1/endpoints/config_endpoints.py | 4 +- .../api/v1/endpoints/connection_endpoints.py | 26 +++-- .../v1/endpoints/connection_type_endpoints.py | 3 - .../v1/endpoints/consent_request_endpoints.py | 7 +- .../ops/api/v1/endpoints/dataset_endpoints.py | 21 ++-- .../api/ops/api/v1/endpoints/drp_endpoints.py | 17 ++-- .../api/v1/endpoints/encryption_endpoints.py | 4 +- .../identity_verification_endpoints.py | 2 - .../v1/endpoints/manual_webhook_endpoints.py | 11 +-- .../ops/api/v1/endpoints/masking_endpoints.py | 8 +- .../api/v1/endpoints/messaging_endpoints.py | 15 ++- .../ops/api/v1/endpoints/oauth_endpoints.py | 13 ++- .../ops/api/v1/endpoints/policy_endpoints.py | 46 +++++---- .../v1/endpoints/policy_webhook_endpoints.py | 22 ++--- .../v1/endpoints/privacy_request_endpoints.py | 45 +++++---- .../v1/endpoints/registration_endpoints.py | 9 +- .../api/v1/endpoints/saas_config_endpoints.py | 17 ++-- .../ops/api/v1/endpoints/storage_endpoints.py | 29 
+++--- .../ops/api/v1/endpoints/user_endpoints.py | 7 +- .../v1/endpoints/user_permission_endpoints.py | 4 +- .../ops/email_templates/get_email_template.py | 5 +- src/fides/api/ops/graph/data_type.py | 5 +- src/fides/api/ops/graph/graph.py | 7 +- src/fides/api/ops/graph/traversal.py | 16 ++- src/fides/api/ops/models/datasetconfig.py | 12 +-- src/fides/api/ops/models/messaging.py | 7 +- src/fides/api/ops/models/privacy_request.py | 9 +- src/fides/api/ops/models/registration.py | 3 - src/fides/api/ops/models/storage.py | 5 +- src/fides/api/ops/schemas/storage/storage.py | 3 - .../authentication_strategy_factory.py | 3 - ...tion_strategy_oauth2_authorization_code.py | 2 - .../authentication_strategy_oauth2_base.py | 12 +-- ...tion_strategy_oauth2_client_credentials.py | 4 - .../ops/service/connectors/base_connector.py | 2 - .../ops/service/connectors/email_connector.py | 18 ++-- .../service/connectors/fides/fides_client.py | 97 ++++++++++++------- .../ops/service/connectors/http_connector.py | 4 +- .../connectors/limiter/rate_limiter.py | 9 +- .../service/connectors/manual_connector.py | 3 - .../connectors/manual_webhook_connector.py | 3 - .../service/connectors/mongodb_connector.py | 12 +-- .../ops/service/connectors/query_config.py | 23 +++-- .../connectors/saas/authenticated_client.py | 15 ++- .../saas/connector_registry_service.py | 10 +- .../ops/service/connectors/saas_connector.py | 22 ++--- .../service/connectors/saas_query_config.py | 15 ++- .../ops/service/connectors/sql_connector.py | 10 +- .../ops/service/drp/drp_fidesops_mapper.py | 7 +- .../strategy/masking_strategy_factory.py | 8 +- .../messaging/message_dispatch_service.py | 23 +++-- .../messaging/messaging_crud_service.py | 9 +- .../pagination/pagination_strategy_factory.py | 3 - .../pagination/pagination_strategy_link.py | 6 +- .../pagination/pagination_strategy_offset.py | 4 +- .../privacy_request/request_runner_service.py | 54 +++++------ .../privacy_request/request_service.py | 7 +- 
.../post_processor_strategy_factory.py | 3 - .../post_processor_strategy_filter.py | 8 +- .../post_processor_strategy_unwrap.py | 8 +- .../authentication_strategy_adobe_campaign.py | 8 +- .../domo_request_overrides.py | 2 - .../firebase_auth_request_overrides.py | 3 - .../twilio_request_overrides.py | 2 - .../saas_request_override_factory.py | 8 +- .../storage/storage_authenticator_service.py | 3 - .../storage/storage_uploader_service.py | 6 +- src/fides/api/ops/service/strategy.py | 2 - .../api/ops/task/filter_element_match.py | 6 +- src/fides/api/ops/task/filter_results.py | 9 +- src/fides/api/ops/task/graph_task.py | 32 +++--- src/fides/api/ops/task/refine_target_path.py | 4 +- src/fides/api/ops/task/task_resources.py | 6 +- src/fides/api/ops/tasks/storage.py | 10 +- .../api/ops/util/encryption/secrets_util.py | 6 +- .../api/ops/util/identity_verification.py | 8 +- src/fides/api/ops/util/logger.py | 63 +++--------- src/fides/api/ops/util/saas_util.py | 3 - .../api/ops/util/storage_authenticator.py | 4 +- .../ctl/core/config/database_settings.py | 3 - src/fides/ctl/core/config/logging_settings.py | 1 + .../ctl/core/config/notification_settings.py | 3 - src/fides/ctl/core/utils.py | 4 +- tests/ops/util/test_logger.py | 32 ++++-- 87 files changed, 461 insertions(+), 612 deletions(-) diff --git a/.fides/fides.toml b/.fides/fides.toml index 86a8e59a333..a81bfa78e5d 100644 --- a/.fides/fides.toml +++ b/.fides/fides.toml @@ -11,6 +11,7 @@ app_postgres = {connection_string="postgresql+psycopg2://postgres:fides@fides-db [logging] level = "INFO" +log_pii = true [cli] server_host = "localhost" diff --git a/src/fides/api/ctl/routes/health.py b/src/fides/api/ctl/routes/health.py index f217291e371..17d484b0fba 100644 --- a/src/fides/api/ctl/routes/health.py +++ b/src/fides/api/ctl/routes/health.py @@ -1,7 +1,7 @@ -import logging from typing import Any, Dict, List, Union from fastapi import Depends, HTTPException, status +from loguru import logger from redis.exceptions import 
ResponseError from sqlalchemy.orm import Session @@ -19,21 +19,17 @@ router = APIRouter(tags=["Health"]) -logger = logging.getLogger(__name__) -# stops polluting logs with sqlalchemy / alembic info-level logs -logging.getLogger("sqlalchemy.engine").setLevel(logging.ERROR) -logging.getLogger("alembic").setLevel(logging.WARNING) - def get_cache_health() -> str: """Checks if the cache is reachable""" + if not CONFIG.redis.enabled: return "no cache configured" try: get_cache() return "healthy" except (RedisConnectionError, ResponseError) as e: - logger.error("Unable to reach cache: %s", Pii(str(e))) + logger.error("Unable to reach cache: {}", Pii(str(e))) return "unhealthy" diff --git a/src/fides/api/main.py b/src/fides/api/main.py index f9a1779ac16..80d12b145a7 100644 --- a/src/fides/api/main.py +++ b/src/fides/api/main.py @@ -1,10 +1,8 @@ """ Contains the code that sets up the API. """ -import logging from datetime import datetime, timezone -from logging import WARNING -from os import getenv +from logging import DEBUG, WARNING from typing import Callable, Optional from fastapi import FastAPI, HTTPException, Request, Response, status @@ -14,7 +12,7 @@ from fideslib.oauth.api.deps import verify_oauth_client as lib_verify_oauth_client from fideslib.oauth.api.routes.user_endpoints import router as user_router from fideslog.sdk.python.event import AnalyticsEvent -from loguru import logger as log +from loguru import logger from redis.exceptions import RedisError, ResponseError from slowapi.errors import RateLimitExceeded # type: ignore from slowapi.extension import Limiter, _rate_limit_exceeded_handler # type: ignore @@ -59,7 +57,6 @@ ) from fides.api.ops.tasks.scheduled.scheduler import scheduler from fides.api.ops.util.cache import get_cache -from fides.api.ops.util.logger import get_fides_log_record_factory from fides.api.ops.util.oauth_util import verify_oauth_client from fides.ctl.core.config import FidesConfig from fides.ctl.core.config import get_config as 
get_ctl_config @@ -67,10 +64,6 @@ CONFIG: FidesConfig = get_ctl_config() -logging.basicConfig(level=CONFIG.logging.level) -logging.setLogRecordFactory(get_fides_log_record_factory()) -logger = logging.getLogger(__name__) - app = FastAPI(title="fides") app.state.limiter = Limiter( default_limits=[CONFIG.security.request_rate_limit], @@ -199,16 +192,15 @@ def configure_routes() -> None: async def setup_server() -> None: "Run all of the required setup steps for the webserver." - log.warning( + logger.warning( f"Startup configuration: reloading = {CONFIG.hot_reloading}, dev_mode = {CONFIG.dev_mode}", ) - log_pii = getenv("FIDES__LOG_PII", "").lower() == "true" - log.warning( - f"Startup configuration: pii logging = {log_pii}", + logger.warning( + f"Startup configuration: pii logging = {CONFIG.logging.log_pii}", ) - if logger.getEffectiveLevel() == logging.DEBUG: - log.warning( + if CONFIG.logging.level == DEBUG: + logger.warning( "WARNING: log level is DEBUG, so sensitive or personal data may be logged. " "Set FIDES__LOGGING__LEVEL to INFO or higher in production." 
) @@ -222,36 +214,36 @@ async def setup_server() -> None: try: create_or_update_parent_user() except Exception as e: - log.error(f"Error creating parent user: {str(e)}") + logger.error(f"Error creating parent user: {str(e)}") raise FidesError(f"Error creating parent user: {str(e)}") - log.info("Validating SaaS connector templates...") + logger.info("Validating SaaS connector templates...") try: registry = load_registry(registry_file) db = get_api_session() update_saas_configs(registry, db) except Exception as e: - log.error( + logger.error( f"Error occurred during SaaS connector template validation: {str(e)}", ) return finally: db.close() - log.info("Running Cache connection test...") + logger.info("Running Cache connection test...") try: get_cache() except (RedisConnectionError, RedisError, ResponseError) as e: - log.error(f"Connection to cache failed: {str(e)}") + logger.error(f"Connection to cache failed: {str(e)}") return else: - log.debug("Connection to cache succeeded") + logger.debug("Connection to cache succeeded") if not scheduler.running: scheduler.start() - log.debug("Sending startup analytics events...") + logger.debug("Sending startup analytics events...") await send_analytics_event( AnalyticsEvent( docker=in_docker_container(), @@ -266,7 +258,7 @@ async def setup_server() -> None: desination=CONFIG.logging.destination, ) - log.bind(api_config=CONFIG.logging.json()).debug("Configuration options in use") + logger.bind(api_config=CONFIG.logging.json()).debug("Configuration options in use") @app.middleware("http") @@ -275,7 +267,7 @@ async def log_request(request: Request, call_next: Callable) -> Response: start = datetime.now() response = await call_next(request) handler_time = datetime.now() - start - log.bind( + logger.bind( method=request.method, status_code=response.status_code, handler_time=f"{handler_time.microseconds * 0.001}ms", @@ -315,14 +307,14 @@ def read_other_paths(request: Request) -> Response: # If any of those worked, serve the file. 
if ui_file and ui_file.is_file(): - log.debug( + logger.debug( f"catchall request path '{path}' matched static admin UI file: {ui_file}" ) return FileResponse(ui_file) # raise 404 for anything that should be backend endpoint but we can't find it if path.startswith(API_PREFIX[1:]): - log.debug( + logger.debug( f"catchall request path '{path}' matched an invalid API route, return 404" ) raise HTTPException( @@ -330,7 +322,7 @@ def read_other_paths(request: Request) -> Response: ) # otherwise return the index - log.debug( + logger.debug( f"catchall request path '{path}' did not match any admin UI routes, return generic admin UI index" ) return get_admin_index_as_response() @@ -341,7 +333,7 @@ def start_webserver(port: int = 8080) -> None: check_required_webserver_config_values() server = Server(Config(app, host="0.0.0.0", port=port, log_level=WARNING)) - log.info( + logger.info( f"Starting webserver - Host: {server.config.host}, Port: {server.config.port}, Log Level: {server.config.log_level}" ) server.run() diff --git a/src/fides/api/ops/api/v1/endpoints/config_endpoints.py b/src/fides/api/ops/api/v1/endpoints/config_endpoints.py index 3e6575dab89..a644127ea6e 100644 --- a/src/fides/api/ops/api/v1/endpoints/config_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/config_endpoints.py @@ -1,7 +1,7 @@ -import logging from typing import Any, Dict from fastapi.params import Security +from loguru import logger from fides.api.ops.api.v1 import scope_registry as scopes from fides.api.ops.api.v1 import urn_registry as urls @@ -12,8 +12,6 @@ router = APIRouter(tags=["Config"], prefix=urls.V1_URL_PREFIX) -logger = logging.getLogger(__name__) - @router.get( urls.CONFIG, diff --git a/src/fides/api/ops/api/v1/endpoints/connection_endpoints.py b/src/fides/api/ops/api/v1/endpoints/connection_endpoints.py index 9ac5d668129..77a415c9abf 100644 --- a/src/fides/api/ops/api/v1/endpoints/connection_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/connection_endpoints.py @@ -1,6 
+1,5 @@ from __future__ import annotations -import logging from typing import List, Optional from fastapi import Depends, HTTPException @@ -9,6 +8,7 @@ from fastapi_pagination.bases import AbstractPage from fastapi_pagination.ext.sqlalchemy import paginate from fideslib.exceptions import KeyOrNameAlreadyExists +from loguru import logger from pydantic import ValidationError, conlist from sqlalchemy import or_ from sqlalchemy.orm import Session @@ -76,15 +76,13 @@ router = APIRouter(tags=["Connections"], prefix=V1_URL_PREFIX) -logger = logging.getLogger(__name__) - def get_connection_config_or_error( db: Session, connection_key: FidesOpsKey ) -> ConnectionConfig: """Helper to load the ConnectionConfig object or throw a 404""" connection_config = ConnectionConfig.get_by(db, field="key", value=connection_key) - logger.info("Finding connection configuration with key '%s'", connection_key) + logger.info("Finding connection configuration with key '{}'", connection_key) if not connection_config: raise HTTPException( status_code=HTTP_404_NOT_FOUND, @@ -119,7 +117,7 @@ def get_connections( SaaS connector types. 
""" logger.info( - "Finding connection configurations with pagination params %s and search query: '%s'.", + "Finding connection configurations with pagination params {} and search query: '{}'.", params, search if search else "", ) @@ -217,7 +215,7 @@ def patch_connections( """ created_or_updated: List[ConnectionConfig] = [] failed: List[BulkUpdateFailed] = [] - logger.info("Starting bulk upsert for %s connection configuration(s)", len(configs)) + logger.info("Starting bulk upsert for {} connection configuration(s)", len(configs)) for config in configs: orig_data = config.dict().copy() @@ -228,7 +226,7 @@ def patch_connections( created_or_updated.append(connection_config) except KeyOrNameAlreadyExists as exc: logger.warning( - "Create/update failed for connection config with key '%s': %s", + "Create/update failed for connection config with key '{}': {}", config.key, exc, ) @@ -240,7 +238,7 @@ def patch_connections( ) except Exception: logger.warning( - "Create/update failed for connection config with key '%s'.", config.key + "Create/update failed for connection config with key '{}'.", config.key ) failed.append( BulkUpdateFailed( @@ -269,7 +267,7 @@ def delete_connection( """Removes the connection configuration with matching key.""" connection_config = get_connection_config_or_error(db, connection_key) connection_type = connection_config.connection_type - logger.info("Deleting connection config with key '%s'.", connection_key) + logger.info("Deleting connection config with key '{}'.", connection_key) connection_config.delete(db) # Access Manual Webhooks are cascade deleted if their ConnectionConfig is deleted, @@ -297,7 +295,7 @@ def validate_secrets( try: schema = get_connection_secrets_schema(connection_type.value, saas_config) # type: ignore logger.info( - "Validating secrets on connection config with key '%s'", + "Validating secrets on connection config with key '{}'", connection_config.key, ) connection_secrets = schema.parse_obj(request_body) @@ -329,7 +327,7 
@@ def connection_status( except (ConnectionException, ClientUnsuccessfulException) as exc: logger.warning( - "Connection test failed on %s: %s", + "Connection test failed on {}: {}", connection_config.key, Pii(str(exc)), ) @@ -342,7 +340,7 @@ def connection_status( failure_reason=str(exc), ) - logger.info("Connection test %s on %s", status.value, connection_config.key) # type: ignore + logger.info("Connection test {} on {}", status.value, connection_config.key) # type: ignore connection_config.update_test_status(test_status=status, db=db) # type: ignore return TestStatusMessage( @@ -376,7 +374,7 @@ def put_connection_config_secrets( db, unvalidated_secrets, connection_config ).dict() # Save validated secrets, regardless of whether they've been verified. - logger.info("Updating connection config secrets for '%s'", connection_key) + logger.info("Updating connection config secrets for '{}'", connection_key) connection_config.save(db=db) msg = f"Secrets updated for ConnectionConfig with key: {connection_key}." 
@@ -419,7 +417,7 @@ def requeue_requires_input_requests(db: Session) -> None: conditions=(PrivacyRequest.status == PrivacyRequestStatus.requires_input), ): logger.info( - "Queuing privacy request '%s with '%s' status now that manual inputs are no longer required.", + "Queuing privacy request '{} with '{}' status now that manual inputs are no longer required.", pr.id, pr.status.value, ) diff --git a/src/fides/api/ops/api/v1/endpoints/connection_type_endpoints.py b/src/fides/api/ops/api/v1/endpoints/connection_type_endpoints.py index 4d3f7e3ea66..5faef6ef6ff 100644 --- a/src/fides/api/ops/api/v1/endpoints/connection_type_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/connection_type_endpoints.py @@ -1,4 +1,3 @@ -import logging from typing import Any, Dict, List, Optional from fastapi import APIRouter, Depends, HTTPException @@ -33,8 +32,6 @@ router = APIRouter(tags=["Connection Types"], prefix=V1_URL_PREFIX) -logger = logging.getLogger(__name__) - def get_connection_types( search: Optional[str] = None, system_type: Optional[SystemType] = None diff --git a/src/fides/api/ops/api/v1/endpoints/consent_request_endpoints.py b/src/fides/api/ops/api/v1/endpoints/consent_request_endpoints.py index ccf5a2a8a7a..ff2f214afdc 100644 --- a/src/fides/api/ops/api/v1/endpoints/consent_request_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/consent_request_endpoints.py @@ -1,9 +1,9 @@ from __future__ import annotations -import logging from typing import Optional from fastapi import Depends, HTTPException, Security +from loguru import logger from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session from starlette.status import ( @@ -51,7 +51,6 @@ router = APIRouter(tags=["Consent"], prefix=V1_URL_PREFIX) CONFIG = get_config() -logger = logging.getLogger(__name__) @router.post( @@ -100,7 +99,7 @@ def create_consent_request( try: send_verification_code_to_user(db, consent_request, data) except MessageDispatchException as exc: - logger.error("Error sending 
the verification code message: %s", str(exc)) + logger.error("Error sending the verification code message: {}", str(exc)) raise HTTPException( status_code=HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error sending the verification code message: {str(exc)}", @@ -291,7 +290,7 @@ def _get_consent_request_and_provided_identity( raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=exc.message) except PermissionError as exc: logger.info( - "Invalid verification code provided for %s.", consent_request.id + "Invalid verification code provided for {}.", consent_request.id ) raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail=exc.args[0]) diff --git a/src/fides/api/ops/api/v1/endpoints/dataset_endpoints.py b/src/fides/api/ops/api/v1/endpoints/dataset_endpoints.py index 2166a4dbda1..8fcb5e4c8ff 100644 --- a/src/fides/api/ops/api/v1/endpoints/dataset_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/dataset_endpoints.py @@ -1,4 +1,3 @@ -import logging from typing import List import yaml @@ -7,6 +6,7 @@ from fastapi_pagination import Page, Params from fastapi_pagination.bases import AbstractPage from fastapi_pagination.ext.sqlalchemy import paginate +from loguru import logger from pydantic import conlist from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session @@ -58,7 +58,6 @@ X_YAML = "application/x-yaml" -logger = logging.getLogger(__name__) router = APIRouter(tags=["Datasets"], prefix=V1_URL_PREFIX) @@ -66,7 +65,7 @@ def _get_connection_config( connection_key: FidesOpsKey, db: Session = Depends(deps.get_db) ) -> ConnectionConfig: - logger.info("Finding connection config with key '%s'", connection_key) + logger.info("Finding connection config with key '{}'", connection_key) connection_config = ConnectionConfig.get_by(db, field="key", value=connection_key) if not connection_config: raise HTTPException( @@ -119,7 +118,7 @@ def validate_dataset( Traversal(complete_graph, {k: None for k in unique_identities}) except (TraversalError, 
ValidationError) as err: logger.warning( - "Traversal validation failed for dataset '%s': %s", dataset.fides_key, err + "Traversal validation failed for dataset '{}': {}", dataset.fides_key, err ) return ValidateDatasetResponse( dataset=dataset, @@ -129,7 +128,7 @@ def validate_dataset( ), ) - logger.info("Validation successful for dataset '%s'!", dataset.fides_key) + logger.info("Validation successful for dataset '{}'!", dataset.fides_key) return ValidateDatasetResponse( dataset=dataset, traversal_details=DatasetTraversalDetails( @@ -162,7 +161,7 @@ def patch_datasets( created_or_updated: List[FidesopsDataset] = [] failed: List[BulkUpdateFailed] = [] - logger.info("Starting bulk upsert for %s datasets", len(datasets)) + logger.info("Starting bulk upsert for {} datasets", len(datasets)) # warn if there are duplicate fides_keys within the datasets # valid datasets with the same fides_key will override each other @@ -274,7 +273,7 @@ def create_or_update_dataset( ) ) except Exception: - logger.warning("Create/update failed for dataset '%s'.", data["fides_key"]) + logger.warning("Create/update failed for dataset '{}'.", data["fides_key"]) failed.append( BulkUpdateFailed( message="Dataset create/update failed.", @@ -323,7 +322,7 @@ def get_datasets( """Returns all datasets in the database.""" logger.info( - "Finding all datasets for connection '%s' with pagination params %s", + "Finding all datasets for connection '{}' with pagination params {}", connection_config.key, params, ) @@ -355,7 +354,7 @@ def get_dataset( """Returns a single dataset based on the given key.""" logger.info( - "Finding dataset '%s' for connection '%s'", fides_key, connection_config.key + "Finding dataset '{}' for connection '{}'", fides_key, connection_config.key ) dataset_config = DatasetConfig.filter( db=db, @@ -386,7 +385,7 @@ def delete_dataset( """Removes the dataset based on the given key.""" logger.info( - "Finding dataset '%s' for connection '%s'", fides_key, connection_config.key + 
"Finding dataset '{}' for connection '{}'", fides_key, connection_config.key ) dataset_config = DatasetConfig.filter( db=db, @@ -402,6 +401,6 @@ def delete_dataset( ) logger.info( - "Deleting dataset '%s' for connection '%s'", fides_key, connection_config.key + "Deleting dataset '{}' for connection '{}'", fides_key, connection_config.key ) dataset_config.delete(db) diff --git a/src/fides/api/ops/api/v1/endpoints/drp_endpoints.py b/src/fides/api/ops/api/v1/endpoints/drp_endpoints.py index a8331c6e769..c162e40b1b1 100644 --- a/src/fides/api/ops/api/v1/endpoints/drp_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/drp_endpoints.py @@ -1,8 +1,8 @@ -import logging from typing import Any, Dict, List, Optional import jwt from fastapi import Depends, HTTPException, Security +from loguru import logger from sqlalchemy.orm import Session from starlette.status import ( HTTP_200_OK, @@ -48,7 +48,6 @@ from fides.api.ops.util.oauth_util import verify_oauth_client from fides.ctl.core.config import get_config -logger = logging.getLogger(__name__) router = APIRouter(tags=["DRP"], prefix=urls.V1_URL_PREFIX) CONFIG = get_config() @@ -78,7 +77,7 @@ async def create_drp_privacy_request( detail="JWT key must be provided", ) - logger.info("Finding policy with drp action '%s'", data.exercise[0]) + logger.info("Finding policy with drp action '{}'", data.exercise[0]) policy: Optional[Policy] = Policy.get_by( db=db, field="drp_action", @@ -116,7 +115,7 @@ async def create_drp_privacy_request( check_and_dispatch_error_notifications(db=db) logger.info( - "Decrypting identity for DRP privacy request %s", privacy_request.id + "Decrypting identity for DRP privacy request {}", privacy_request.id ) cache_data(privacy_request, policy, mapped_identity, None, data) @@ -130,14 +129,14 @@ async def create_drp_privacy_request( ) except common_exceptions.RedisConnectionError as exc: - logger.error("RedisConnectionError: %s", Pii(str(exc))) + logger.error("RedisConnectionError: {}", Pii(str(exc))) # 
Thrown when cache.ping() fails on cache connection retrieval raise HTTPException( status_code=HTTP_424_FAILED_DEPENDENCY, detail=exc.args[0], ) except Exception as exc: - logger.error("Exception: %s", Pii(str(exc))) + logger.error("Exception: {}", Pii(str(exc))) raise HTTPException( status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail="DRP privacy request could not be exercised", @@ -157,7 +156,7 @@ def get_request_status_drp( a policy that implements a Data Rights Protocol action. """ - logger.info("Finding request for DRP with ID: %s", request_id) + logger.info("Finding request for DRP with ID: {}", request_id) request = PrivacyRequest.get( db=db, object_id=request_id, @@ -170,7 +169,7 @@ def get_request_status_drp( detail=f"Privacy request with ID {request_id} does not exist, or is not associated with a data rights protocol action.", ) - logger.info("Privacy request with ID: %s found for DRP status.", request_id) + logger.info("Privacy request with ID: {} found for DRP status.", request_id) return PrivacyRequestDRPStatusResponse( request_id=request.id, received_at=request.requested_at, @@ -220,7 +219,7 @@ def revoke_request( detail=f"Invalid revoke request. Can only revoke `pending` requests. 
Privacy request '{privacy_request.id}' status = {privacy_request.status.value}.", # type: ignore ) - logger.info("Canceling privacy request '%s'", privacy_request.id) + logger.info("Canceling privacy request '{}'", privacy_request.id) privacy_request.cancel_processing(db, cancel_reason=data.reason) return PrivacyRequestDRPStatusResponse( diff --git a/src/fides/api/ops/api/v1/endpoints/encryption_endpoints.py b/src/fides/api/ops/api/v1/endpoints/encryption_endpoints.py index b5e0932ad49..ae3572e265d 100644 --- a/src/fides/api/ops/api/v1/endpoints/encryption_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/encryption_endpoints.py @@ -1,9 +1,9 @@ -import logging import secrets from fastapi import Security from fideslib.cryptography import cryptographic_util from fideslib.cryptography.cryptographic_util import b64_str_to_bytes, bytes_to_b64_str +from loguru import logger from fides.api.ops.api.v1.scope_registry import ENCRYPTION_EXEC from fides.api.ops.api.v1.urn_registry import ( @@ -29,7 +29,7 @@ from fides.ctl.core.config import get_config router = APIRouter(tags=["Encryption"], prefix=V1_URL_PREFIX) -logger = logging.getLogger(__name__) + CONFIG = get_config() diff --git a/src/fides/api/ops/api/v1/endpoints/identity_verification_endpoints.py b/src/fides/api/ops/api/v1/endpoints/identity_verification_endpoints.py index be6aa16d5fb..2d62ff3c0d4 100644 --- a/src/fides/api/ops/api/v1/endpoints/identity_verification_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/identity_verification_endpoints.py @@ -1,4 +1,3 @@ -import logging from typing import Optional from fastapi import Depends @@ -13,7 +12,6 @@ from fides.api.ops.util.api_router import APIRouter from fides.ctl.core.config import get_config -logger = logging.getLogger(__name__) router = APIRouter(tags=["Identity Verification"], prefix=urls.V1_URL_PREFIX) diff --git a/src/fides/api/ops/api/v1/endpoints/manual_webhook_endpoints.py b/src/fides/api/ops/api/v1/endpoints/manual_webhook_endpoints.py index 
3610a64a741..8cb7129e5fb 100644 --- a/src/fides/api/ops/api/v1/endpoints/manual_webhook_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/manual_webhook_endpoints.py @@ -1,8 +1,8 @@ -import logging from typing import Optional, Sequence from fastapi import Depends, Security from fastapi.encoders import jsonable_encoder +from loguru import logger from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session from starlette.exceptions import HTTPException @@ -36,7 +36,6 @@ from fides.api.ops.util.logger import Pii from fides.api.ops.util.oauth_util import verify_oauth_client -logger = logging.getLogger(__name__) router = APIRouter(tags=["Manual Webhooks"], prefix=V1_URL_PREFIX) @@ -88,7 +87,7 @@ def create_access_manual_webhook( ) logger.info( - "Creating access manual webhook for connection config '%s'", + "Creating access manual webhook for connection config '{}'", connection_config.key, ) @@ -132,7 +131,7 @@ def patch_access_manual_webhook( raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=Pii(str(exc))) logger.info( - "Updated access manual webhook for connection config '%s'", + "Updated access manual webhook for connection config '{}'", connection_config.key, ) return access_manual_webhook @@ -154,7 +153,7 @@ def get_access_manual_webhook( connection_config ) logger.info( - "Retrieved access manual webhook for connection config '%s'", + "Retrieved access manual webhook for connection config '{}'", connection_config.key, ) return access_manual_webhook @@ -179,7 +178,7 @@ def delete_access_manual_webhook( access_manual_webhook.delete(db) logger.info( - "Deleted access manual webhook for connection config '%s'", + "Deleted access manual webhook for connection config '{}'", connection_config.key, ) diff --git a/src/fides/api/ops/api/v1/endpoints/masking_endpoints.py b/src/fides/api/ops/api/v1/endpoints/masking_endpoints.py index bdae878dc54..8f0834c0b49 100644 --- a/src/fides/api/ops/api/v1/endpoints/masking_endpoints.py +++ 
b/src/fides/api/ops/api/v1/endpoints/masking_endpoints.py @@ -1,7 +1,7 @@ -import logging from typing import Any, List from fastapi import HTTPException +from loguru import logger from starlette.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND from fides.api.ops.api.v1.urn_registry import MASKING, MASKING_STRATEGY, V1_URL_PREFIX @@ -19,8 +19,6 @@ router = APIRouter(tags=["Masking"], prefix=V1_URL_PREFIX) -logger = logging.getLogger(__name__) - @router.put(MASKING, response_model=MaskingAPIResponse) def mask_value(request: MaskingAPIRequest) -> MaskingAPIResponse: @@ -34,7 +32,7 @@ def mask_value(request: MaskingAPIRequest) -> MaskingAPIResponse: if num_strat > 1: logger.info( - "%s masking strategies requested; running in order.", + "{} masking strategies requested; running in order.", num_strat, ) @@ -43,7 +41,7 @@ def mask_value(request: MaskingAPIRequest) -> MaskingAPIResponse: strategy.strategy, strategy.configuration ) logger.info( - "Starting masking of %s value(s) with strategy %s", + "Starting masking of {} value(s) with strategy {}", len(values), strategy.strategy, ) diff --git a/src/fides/api/ops/api/v1/endpoints/messaging_endpoints.py b/src/fides/api/ops/api/v1/endpoints/messaging_endpoints.py index 009b8a5dfd3..a4f54e56406 100644 --- a/src/fides/api/ops/api/v1/endpoints/messaging_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/messaging_endpoints.py @@ -1,10 +1,10 @@ -import logging from typing import Optional from fastapi import Depends, Security from fastapi_pagination import Page, Params from fastapi_pagination.bases import AbstractPage from fastapi_pagination.ext.sqlalchemy import paginate +from loguru import logger from sqlalchemy.orm import Session from starlette.exceptions import HTTPException from starlette.status import ( @@ -50,7 +50,6 @@ from fides.api.ops.util.oauth_util import verify_oauth_client router = APIRouter(tags=["messaging"], prefix=V1_URL_PREFIX) -logger = logging.getLogger(__name__) @router.post( @@ -109,14 +108,14 @@ 
def patch_config_by_key( try: return update_messaging_config(db=db, key=config_key, config=messaging_config) except MessagingConfigNotFoundException: - logger.warning("No messaging config found with key %s", config_key) + logger.warning("No messaging config found with key {}", config_key) raise HTTPException( status_code=HTTP_404_NOT_FOUND, detail=f"No messaging config found with key {config_key}", ) except Exception as exc: logger.warning( - "Patch failed for messaging config %s: %s", + "Patch failed for messaging config {}: {}", messaging_config.key, Pii(str(exc)), ) @@ -141,7 +140,7 @@ def put_config_secrets( """ Add or update secrets for messaging config. """ - logger.info("Finding messaging config with key '%s'", config_key) + logger.info("Finding messaging config with key '{}'", config_key) messaging_config = MessagingConfig.get_by(db=db, field="key", value=config_key) if not messaging_config: raise HTTPException( @@ -166,7 +165,7 @@ def put_config_secrets( ) logger.info( - "Updating messaging config secrets for config with key '%s'", config_key + "Updating messaging config secrets for config with key '{}'", config_key ) try: messaging_config.set_secrets(db=db, messaging_secrets=secrets_schema.dict()) @@ -192,7 +191,7 @@ def get_configs( Retrieves configs for messaging. """ logger.info( - "Finding all messaging configurations with pagination params %s", params + "Finding all messaging configurations with pagination params {}", params ) return paginate( MessagingConfig.query(db=db).order_by(MessagingConfig.created_at.desc()), @@ -211,7 +210,7 @@ def get_config_by_key( """ Retrieves configs for messaging service by key. 
""" - logger.info("Finding messaging config with key '%s'", config_key) + logger.info("Finding messaging config with key '{}'", config_key) try: return get_messaging_config_by_key(db=db, key=config_key) diff --git a/src/fides/api/ops/api/v1/endpoints/oauth_endpoints.py b/src/fides/api/ops/api/v1/endpoints/oauth_endpoints.py index 685056d00bc..f6cfed777af 100644 --- a/src/fides/api/ops/api/v1/endpoints/oauth_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/oauth_endpoints.py @@ -1,10 +1,10 @@ -import logging from typing import List from fastapi import Body, Depends, HTTPException, Request, Security from fastapi.security import HTTPBasic from fideslib.models.client import ClientDetail from fideslib.oauth.schemas.oauth import AccessToken, OAuth2ClientCredentialsRequestForm +from loguru import logger from sqlalchemy.orm import Session from starlette.status import ( HTTP_400_BAD_REQUEST, @@ -53,7 +53,6 @@ router = APIRouter(tags=["OAuth"], prefix=V1_URL_PREFIX) -logger = logging.getLogger(__name__) CONFIG = get_config() @@ -111,7 +110,7 @@ def create_client( scopes: List[str] = Body([]), ) -> ClientCreatedResponse: """Creates a new client and returns the credentials""" - logging.info("Creating new client") + logger.info("Creating new client") if not all(scope in SCOPE_REGISTRY for scope in scopes): raise HTTPException( status_code=HTTP_422_UNPROCESSABLE_ENTITY, @@ -136,7 +135,7 @@ def delete_client(client_id: str, db: Session = Depends(get_db)) -> None: client = ClientDetail.get(db, object_id=client_id, config=CONFIG) if not client: return - logging.info("Deleting client") + logger.info("Deleting client") client.delete(db) @@ -151,7 +150,7 @@ def get_client_scopes(client_id: str, db: Session = Depends(get_db)) -> List[str if not client: return [] - logging.info("Getting client scopes") + logger.info("Getting client scopes") return client.scopes @@ -176,7 +175,7 @@ def set_client_scopes( detail=f"Invalid Scope. 
Scopes must be one of {SCOPE_REGISTRY}.", ) - logging.info("Updating client scopes") + logger.info("Updating client scopes") client.update(db, data={"scopes": scopes}) @@ -187,7 +186,7 @@ def set_client_scopes( ) def read_scopes() -> List[str]: """Returns a list of all scopes available for assignment in the system""" - logging.info("Getting all available scopes") + logger.info("Getting all available scopes") return SCOPE_REGISTRY diff --git a/src/fides/api/ops/api/v1/endpoints/policy_endpoints.py b/src/fides/api/ops/api/v1/endpoints/policy_endpoints.py index cd763ebe158..004ebb7f831 100644 --- a/src/fides/api/ops/api/v1/endpoints/policy_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/policy_endpoints.py @@ -1,4 +1,3 @@ -import logging from typing import Any, Dict, List from fastapi import Body, Depends, Security @@ -7,6 +6,7 @@ from fastapi_pagination.ext.sqlalchemy import paginate from fideslib.exceptions import KeyOrNameAlreadyExists from fideslib.models.client import ClientDetail +from loguru import logger from pydantic import conlist from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session @@ -34,8 +34,6 @@ router = APIRouter(tags=["DSR Policy"], prefix=urls.V1_URL_PREFIX) -logger = logging.getLogger(__name__) - @router.get( urls.POLICY_LIST, @@ -51,14 +49,14 @@ def get_policy_list( """ Return a paginated list of all Policy records in this system """ - logger.info("Finding all policies with pagination params '%s'", params) + logger.info("Finding all policies with pagination params '{}'", params) policies = Policy.query(db=db).order_by(Policy.created_at.desc()) return paginate(policies, params=params) def get_policy_or_error(db: Session, policy_key: FidesOpsKey) -> Policy: """Helper method to load Policy or throw a 404""" - logger.info("Finding policy with key '%s'", policy_key) + logger.info("Finding policy with key '{}'", policy_key) policy = Policy.get_by(db=db, field="key", value=policy_key) if not policy: raise HTTPException( @@ 
-106,7 +104,7 @@ def create_or_update_policies( """ created_or_updated: List[Policy] = [] failed: List[BulkUpdateFailed] = [] - logger.info("Starting bulk upsert for %s policies", len(data)) + logger.info("Starting bulk upsert for {} policies", len(data)) for policy_schema in data: policy_data: Dict[str, Any] = dict(policy_schema) @@ -126,7 +124,7 @@ def create_or_update_policies( DrpActionValidationError, IntegrityError, ) as exc: - logger.warning("Create/update failed for policy: %s", Pii(str(exc))) + logger.warning("Create/update failed for policy: {}", Pii(str(exc))) failure = { "message": exc.args[0], "data": policy_data, @@ -134,7 +132,7 @@ def create_or_update_policies( failed.append(BulkUpdateFailed(**failure)) continue except PolicyValidationError as exc: - logger.warning("Create/update failed for policy: %s", Pii(str(exc))) + logger.warning("Create/update failed for policy: {}", Pii(str(exc))) failure = { "message": "This record could not be added because the data provided was invalid.", "data": policy_data, @@ -169,7 +167,7 @@ def create_or_update_rules( Given a list of Rule data elements, create or update corresponding Rule objects or report failure """ - logger.info("Finding policy with key '%s'", policy_key) + logger.info("Finding policy with key '{}'", policy_key) policy = get_policy_or_error(db, policy_key) @@ -177,7 +175,7 @@ def create_or_update_rules( failed: List[BulkUpdateFailed] = [] logger.info( - "Starting bulk upsert for %s rules on policy %s", len(input_data), policy_key + "Starting bulk upsert for {} rules on policy {}", len(input_data), policy_key ) for schema in input_data: @@ -193,7 +191,7 @@ def create_or_update_rules( ) if not associated_storage_config: logger.warning( - "No storage config found with key %s", storage_destination_key + "No storage config found with key {}", storage_destination_key ) failure = { "message": f"A StorageConfig with key {storage_destination_key} does not exist", @@ -225,7 +223,7 @@ def 
create_or_update_rules( ) except KeyOrNameAlreadyExists as exc: logger.warning( - "Create/update failed for rule '%s' on policy %s: %s", + "Create/update failed for rule '{}' on policy {}: {}", schema.key, policy_key, exc, @@ -238,7 +236,7 @@ def create_or_update_rules( continue except RuleValidationError as exc: logger.warning( - "Create/update failed for rule '%s' on policy %s: %s", + "Create/update failed for rule '{}' on policy {}: {}", schema.key, policy_key, Pii(str(exc)), @@ -251,7 +249,7 @@ def create_or_update_rules( continue except ValueError as exc: logger.warning( - "Create/update failed for rule '%s' on policy %s: %s", + "Create/update failed for rule '{}' on policy {}: {}", schema.key, policy_key, Pii(str(exc)), @@ -284,7 +282,7 @@ def delete_rule( """ policy = get_policy_or_error(db, policy_key) - logger.info("Finding rule with key '%s'", rule_key) + logger.info("Finding rule with key '{}'", rule_key) rule = Rule.filter( db=db, conditions=(Rule.key == rule_key and Rule.policy_id == policy.id) @@ -295,7 +293,7 @@ def delete_rule( detail=f"No Rule found for key {rule_key} on Policy {policy_key}.", ) - logger.info("Deleting rule with key '%s'", rule_key) + logger.info("Deleting rule with key '{}'", rule_key) rule.delete(db=db) @@ -320,7 +318,7 @@ def create_or_update_rule_targets( """ policy = get_policy_or_error(db, policy_key) - logger.info("Finding rule with key '%s'", rule_key) + logger.info("Finding rule with key '{}'", rule_key) rule = Rule.filter( db=db, conditions=(Rule.key == rule_key and Rule.policy_id == policy.id) ).first() @@ -333,7 +331,7 @@ def create_or_update_rule_targets( created_or_updated = [] failed = [] logger.info( - "Starting bulk upsert for %s rule targets on rule %s", len(input_data), rule_key + "Starting bulk upsert for {} rule targets on rule {}", len(input_data), rule_key ) for schema in input_data: try: @@ -349,7 +347,7 @@ def create_or_update_rule_targets( ) except KeyOrNameAlreadyExists as exc: logger.warning( - 
"Create/update failed for rule target %s on rule %s: %s", + "Create/update failed for rule target {} on rule {}: {}", schema.key, rule_key, exc, @@ -366,7 +364,7 @@ def create_or_update_rule_targets( RuleTargetValidationError, ) as exc: logger.warning( - "Create/update failed for rule target %s on rule %s: %s", + "Create/update failed for rule target {} on rule {}: {}", schema.key, rule_key, Pii(str(exc)), @@ -379,7 +377,7 @@ def create_or_update_rule_targets( continue except IntegrityError as exc: logger.warning( - "Create/update failed for rule target %s on rule %s: %s", + "Create/update failed for rule target {} on rule {}: {}", schema.key, rule_key, Pii(str(exc)), @@ -415,7 +413,7 @@ def delete_rule_target( """ policy = get_policy_or_error(db, policy_key) - logger.info("Finding rule with key '%s'", rule_key) + logger.info("Finding rule with key '{}'", rule_key) rule = Rule.filter( db=db, conditions=(Rule.key == rule_key and Rule.policy_id == policy.id) ).first() @@ -425,7 +423,7 @@ def delete_rule_target( detail=f"No Rule found for key {rule_key} on Policy {policy_key}.", ) - logger.info("Finding rule target with key '%s'", rule_target_key) + logger.info("Finding rule target with key '{}'", rule_target_key) target = RuleTarget.filter( db=db, conditions=( @@ -438,6 +436,6 @@ def delete_rule_target( detail=f"No RuleTarget found for key {rule_target_key} at Rule {rule_key} on Policy {policy_key}.", ) - logger.info("Deleting rule target with key '%s'", rule_target_key) + logger.info("Deleting rule target with key '{}'", rule_target_key) target.delete(db=db) diff --git a/src/fides/api/ops/api/v1/endpoints/policy_webhook_endpoints.py b/src/fides/api/ops/api/v1/endpoints/policy_webhook_endpoints.py index 0f10b1ecbd9..0143e87e1cd 100644 --- a/src/fides/api/ops/api/v1/endpoints/policy_webhook_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/policy_webhook_endpoints.py @@ -1,4 +1,3 @@ -import logging from typing import List from fastapi import Body, Depends, 
Security @@ -7,6 +6,7 @@ from fastapi_pagination.ext.sqlalchemy import paginate from fideslib.db.base_class import get_key_from_data from fideslib.exceptions import KeyOrNameAlreadyExists +from loguru import logger from pydantic import conlist from sqlalchemy.orm import Session from starlette.exceptions import HTTPException @@ -34,8 +34,6 @@ router = APIRouter(tags=["DSR Policy Webhooks"], prefix=urls.V1_URL_PREFIX) -logger = logging.getLogger(__name__) - @router.get( urls.POLICY_WEBHOOKS_PRE, @@ -56,7 +54,7 @@ def get_policy_pre_execution_webhooks( policy = get_policy_or_error(db, policy_key) logger.info( - "Finding all Pre-Execution Webhooks for Policy '%s' with pagination params '%s'", + "Finding all Pre-Execution Webhooks for Policy '{}' with pagination params '{}'", policy.key, params, ) @@ -82,7 +80,7 @@ def get_policy_post_execution_webhooks( policy = get_policy_or_error(db, policy_key) logger.info( - "Finding all Post-Execution Webhooks for Policy '%s' with pagination params '%s'", + "Finding all Post-Execution Webhooks for Policy '{}' with pagination params '{}'", policy.key, params, ) @@ -150,7 +148,7 @@ def put_webhooks( if webhooks_to_remove.count(): logger.info( - "Removing %s-Execution Webhooks from Policy '%s' that were not included in request: %s", + "Removing {}-Execution Webhooks from Policy '{}' that were not included in request: {}", webhook_cls.prefix.capitalize(), policy.key, [webhook.key for webhook in webhooks_to_remove], @@ -158,7 +156,7 @@ def put_webhooks( webhooks_to_remove.delete() logger.info( - "Creating/updating Policy Pre-Execution Webhooks: %s", staged_webhook_keys + "Creating/updating Policy Pre-Execution Webhooks: {}", staged_webhook_keys ) # Committing to database now, as a last step, once we've verified that all the webhooks # in the request are free of issues. @@ -223,7 +221,7 @@ def get_policy_webhook_or_error( Also verifies that the webhook belongs to the given Policy. 
""" logger.info( - "Finding %s-Execution Webhook with key '%s' for Policy '%s'", + "Finding {}-Execution Webhook with key '{}' for Policy '{}'", webhook_cls.prefix.capitalize(), webhook_key, policy.key, @@ -307,7 +305,7 @@ def _patch_webhook( try: logger.info( - "Updating %s-Execution Webhook with key '%s' on Policy '%s' ", + "Updating {}-Execution Webhook with key '{}' on Policy '{}' ", webhook_cls.prefix.capitalize(), webhook_key, policy_key, @@ -321,7 +319,7 @@ def _patch_webhook( if index is not None and index != loaded_webhook.order: logger.info( - "Reordering %s-Execution Webhooks for Policy '%s'", + "Reordering {}-Execution Webhooks for Policy '{}'", webhook_cls.prefix.capitalize(), policy_key, ) @@ -422,14 +420,14 @@ def delete_webhook( if reordering: # Move the webhook to the end and shuffle other webhooks logger.info( - "Reordering %s-Execution Webhooks for Policy '%s'", + "Reordering {}-Execution Webhooks for Policy '{}'", webhook_cls.prefix.capitalize(), policy_key, ) loaded_webhook.reorder_related_webhooks(db=db, new_index=total_webhook_count) logger.info( - "Deleting %s-Execution Webhook with key '%s' off of Policy '%s'", + "Deleting {}-Execution Webhook with key '{}' off of Policy '{}'", webhook_cls.prefix.capitalize(), webhook_key, policy_key, diff --git a/src/fides/api/ops/api/v1/endpoints/privacy_request_endpoints.py b/src/fides/api/ops/api/v1/endpoints/privacy_request_endpoints.py index a6fa18cb180..4c6bbcde3ac 100644 --- a/src/fides/api/ops/api/v1/endpoints/privacy_request_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/privacy_request_endpoints.py @@ -2,7 +2,6 @@ import csv import io -import logging from collections import defaultdict from datetime import datetime from typing import Any, Callable, DefaultDict, Dict, List, Optional, Set, Union @@ -15,6 +14,7 @@ from fastapi_pagination.ext.sqlalchemy import paginate from fideslib.models.audit_log import AuditLog, AuditLogAction from fideslib.models.client import ClientDetail +from loguru 
import logger from pydantic import ValidationError as PydanticValidationError from pydantic import conlist from sqlalchemy import cast, column, null @@ -150,7 +150,6 @@ from fides.api.ops.util.oauth_util import verify_callback_oauth, verify_oauth_client from fides.ctl.core.config import get_config -logger = logging.getLogger(__name__) router = APIRouter(tags=["Privacy Requests"], prefix=V1_URL_PREFIX) CONFIG = get_config() EMBEDDED_EXECUTION_LOG_LIMIT = 50 @@ -160,7 +159,7 @@ def get_privacy_request_or_error( db: Session, privacy_request_id: str ) -> PrivacyRequest: """Load the privacy request or throw a 404""" - logger.info("Finding privacy request with id '%s'", privacy_request_id) + logger.info("Finding privacy request with id '{}'", privacy_request_id) privacy_request = PrivacyRequest.get(db, object_id=privacy_request_id) @@ -557,7 +556,7 @@ def get_request_status( To fetch a single privacy request, use the request_id query param `?request_id=`. To see individual execution logs, use the verbose query param `?verbose=True`. 
""" - logger.info("Finding all request statuses with pagination params %s", params) + logger.info("Finding all request statuses with pagination params {}", params) query = db.query(PrivacyRequest) query = _filter_privacy_request_queryset( @@ -578,7 +577,7 @@ def get_request_status( ) logger.info( - "Sorting requests by field: %s and direction: %s", sort_field, sort_direction + "Sorting requests by field: {} and direction: {}", sort_field, sort_direction ) query = _sort_privacy_request_queryset(query, sort_field, sort_direction) @@ -626,7 +625,7 @@ def get_request_status_logs( get_privacy_request_or_error(db, privacy_request_id) logger.info( - "Finding all execution logs for privacy request %s with params '%s'", + "Finding all execution logs for privacy request {} with params '{}'", privacy_request_id, params, ) @@ -782,7 +781,7 @@ def get_request_preview_queries( for key, value in queries.items() ] except TraversalError as err: - logger.info("Dry run failed: %s", err) + logger.info("Dry run failed: {}", err) raise HTTPException( status_code=HTTP_400_BAD_REQUEST, detail="Dry run failed", @@ -816,7 +815,7 @@ async def resume_privacy_request( ) logger.info( - "Resuming privacy request '%s' from webhook '%s'", + "Resuming privacy request '{}' from webhook '{}'", privacy_request_id, webhook.key, ) @@ -911,7 +910,7 @@ async def resume_privacy_request_with_manual_input( if paused_step == CurrentStep.access: validate_manual_input(manual_rows, paused_collection, dataset_graph) logger.info( - "Caching manual input for privacy request '%s', collection: '%s'", + "Caching manual input for privacy request '{}', collection: '{}'", privacy_request_id, paused_collection, ) @@ -919,14 +918,14 @@ async def resume_privacy_request_with_manual_input( elif paused_step == CurrentStep.erasure: logger.info( - "Caching manually erased row count for privacy request '%s', collection: '%s'", + "Caching manually erased row count for privacy request '{}', collection: '{}'", privacy_request_id, 
paused_collection, ) privacy_request.cache_manual_erasure_count(paused_collection, manual_count) # type: ignore logger.info( - "Resuming privacy request '%s', %s step, from collection '%s'", + "Resuming privacy request '{}', {} step, from collection '{}'", privacy_request_id, paused_step.value, paused_collection.value, @@ -1207,10 +1206,10 @@ async def verify_identification_code( except IdentityVerificationException as exc: raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=exc.message) except PermissionError as exc: - logger.info("Invalid verification code provided for %s.", privacy_request.id) + logger.info("Invalid verification code provided for {}.", privacy_request.id) raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail=exc.args[0]) - logger.info("Identity verified for %s.", privacy_request.id) + logger.info("Identity verified for {}.", privacy_request.id) if not CONFIG.execution.require_manual_request_approval: AuditLog.create( @@ -1364,7 +1363,7 @@ def upload_manual_webhook_data( ) logger.info( - "Input saved for access manual webhook '%s' for privacy_request '%s'.", + "Input saved for access manual webhook '{}' for privacy_request '{}'.", access_manual_webhook, privacy_request, ) @@ -1476,7 +1475,7 @@ def view_uploaded_manual_webhook_data( try: logger.info( - "Retrieving input data for access manual webhook '%s' for privacy request '%s'.", + "Retrieving input data for access manual webhook '{}' for privacy request '{}'.", connection_config.key, privacy_request.id, ) @@ -1539,7 +1538,7 @@ async def resume_privacy_request_from_requires_input( ) logger.info( - "Resuming privacy request '%s' after manual inputs verified", + "Resuming privacy request '{}' after manual inputs verified", privacy_request_id, ) @@ -1571,7 +1570,7 @@ def _create_privacy_request( # Optional fields to validate here are those that are both nullable in the DB, and exist # on the Pydantic schema - logger.info("Starting creation for %s privacy requests", len(data)) + 
logger.info("Starting creation for {} privacy requests", len(data)) optional_fields = ["external_id", "started_processing_at", "finished_processing_at"] for privacy_request_data in data: @@ -1586,7 +1585,7 @@ def _create_privacy_request( failed.append(failure) continue - logger.info("Finding policy with key '%s'", privacy_request_data.policy_key) + logger.info("Finding policy with key '{}'", privacy_request_data.policy_key) policy: Optional[Policy] = Policy.get_by( db=db, field="key", @@ -1594,7 +1593,7 @@ def _create_privacy_request( ) if policy is None: logger.warning( - "Create failed for privacy request with invalid policy key %s'", + "Create failed for privacy request with invalid policy key {}'", privacy_request_data.policy_key, ) @@ -1658,21 +1657,21 @@ def _create_privacy_request( queue_privacy_request(privacy_request.id) except MessageDispatchException as exc: kwargs["privacy_request_id"] = privacy_request.id - logger.error("MessageDispatchException: %s", exc) + logger.error("MessageDispatchException: {}", exc) failure = { "message": "Verification message could not be sent.", "data": kwargs, } failed.append(failure) except common_exceptions.RedisConnectionError as exc: - logger.error("RedisConnectionError: %s", Pii(str(exc))) + logger.error("RedisConnectionError: {}", Pii(str(exc))) # Thrown when cache.ping() fails on cache connection retrieval raise HTTPException( status_code=HTTP_424_FAILED_DEPENDENCY, detail=exc.args[0], ) except Exception as exc: - logger.error("Exception: %s", Pii(str(exc))) + logger.error("Exception: {}", Pii(str(exc))) failure = { "message": "This record could not be added", "data": kwargs, @@ -1695,7 +1694,7 @@ def _process_privacy_request_restart( ) -> PrivacyRequestResponse: logger.info( - "Restarting failed privacy request '%s' from '%s step, 'collection '%s'", + "Restarting failed privacy request '{}' from '{} step, 'collection '{}'", privacy_request.id, failed_step, failed_collection, diff --git 
a/src/fides/api/ops/api/v1/endpoints/registration_endpoints.py b/src/fides/api/ops/api/v1/endpoints/registration_endpoints.py index 7f2ba2df9d6..83a3c17169a 100644 --- a/src/fides/api/ops/api/v1/endpoints/registration_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/registration_endpoints.py @@ -1,6 +1,5 @@ -import logging - from fastapi import Depends, status +from loguru import logger from sqlalchemy.orm import Session from starlette.exceptions import HTTPException @@ -16,8 +15,6 @@ prefix=urls.V1_URL_PREFIX, ) -logger = logging.getLogger(__name__) - @router.get( urls.REGISTRATION, @@ -60,7 +57,7 @@ async def update_registration_status( registration = registrations[0] if registration.analytics_id != data.analytics_id: logger.debug( - "Error registering Fides with analytics_id: %s to opt_in: %s. Fides with analytics_id: %s already registered.", + "Error registering Fides with analytics_id: {} to opt_in: {}. Fides with analytics_id: {} already registered.", data.analytics_id, data.opt_in, registration.analytics_id, @@ -74,7 +71,7 @@ async def update_registration_status( send_to_fideslog = data.opt_in logger.debug( - "Registering Fides with analytics_id: %s to opt_in: %s", + "Registering Fides with analytics_id: {} to opt_in: {}", data.analytics_id, data.opt_in, ) diff --git a/src/fides/api/ops/api/v1/endpoints/saas_config_endpoints.py b/src/fides/api/ops/api/v1/endpoints/saas_config_endpoints.py index fdd54d9b122..589852c7869 100644 --- a/src/fides/api/ops/api/v1/endpoints/saas_config_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/saas_config_endpoints.py @@ -1,9 +1,9 @@ -import logging from typing import Optional from fastapi import Depends, HTTPException from fastapi.params import Security from fideslib.exceptions import KeyOrNameAlreadyExists +from loguru import logger from sqlalchemy.orm import Session from starlette.status import ( HTTP_200_OK, @@ -62,13 +62,12 @@ from fides.api.ops.util.oauth_util import verify_oauth_client router = 
APIRouter(tags=["SaaS Configs"], prefix=V1_URL_PREFIX) -logger = logging.getLogger(__name__) # Helper method to inject the parent ConnectionConfig into these child routes def _get_saas_connection_config( connection_key: FidesOpsKey, db: Session = Depends(deps.get_db) ) -> ConnectionConfig: - logger.info("Finding connection config with key '%s'", connection_key) + logger.info("Finding connection config with key '{}'", connection_key) connection_config = ConnectionConfig.get_by(db, field="key", value=connection_key) if not connection_config: raise HTTPException( @@ -138,7 +137,7 @@ def validate_saas_config( - each connector_param only has one of references or identity, not both """ - logger.info("Validation successful for SaaS config '%s'", saas_config.fides_key) + logger.info("Validation successful for SaaS config '{}'", saas_config.fides_key) return ValidateSaaSConfigResponse( saas_config=saas_config, validation_details=SaaSConfigValidationDetails( @@ -163,7 +162,7 @@ def patch_saas_config( or report failure """ logger.info( - "Updating SaaS config '%s' on connection config '%s'", + "Updating SaaS config '{}' on connection config '{}'", saas_config.fides_key, connection_config.key, ) @@ -181,7 +180,7 @@ def get_saas_config( ) -> SaaSConfig: """Returns the SaaS config for the given connection config.""" - logger.info("Finding SaaS config for connection '%s'", connection_config.key) + logger.info("Finding SaaS config for connection '{}'", connection_config.key) saas_config = connection_config.saas_config if not saas_config: raise HTTPException( @@ -203,7 +202,7 @@ def delete_saas_config( """Removes the SaaS config for the given connection config. 
The corresponding dataset and secrets must be deleted before deleting the SaaS config""" - logger.info("Finding SaaS config for connection '%s'", connection_config.key) + logger.info("Finding SaaS config for connection '{}'", connection_config.key) saas_config = connection_config.saas_config if not saas_config: raise HTTPException( @@ -240,7 +239,7 @@ def delete_saas_config( if warnings: raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=" ".join(warnings)) - logger.info("Deleting SaaS config for connection '%s'", connection_config.key) + logger.info("Deleting SaaS config for connection '{}'", connection_config.key) connection_config.update(db, data={"saas_config": None}) @@ -331,7 +330,7 @@ def instantiate_connection_from_template( detail=f"SaaS Connector could not be created from the '{saas_connector_type}' template at this time.", ) logger.info( - "SaaS Connector and Dataset %s successfully created from '%s' template.", + "SaaS Connector and Dataset {} successfully created from '{}' template.", template_values.instance_key, saas_connector_type, ) diff --git a/src/fides/api/ops/api/v1/endpoints/storage_endpoints.py b/src/fides/api/ops/api/v1/endpoints/storage_endpoints.py index 2e8f1b4eeee..fe45d16bdbb 100644 --- a/src/fides/api/ops/api/v1/endpoints/storage_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/storage_endpoints.py @@ -1,4 +1,3 @@ -import logging from typing import Dict, List, Optional from fastapi import Body, Depends, Security @@ -6,6 +5,7 @@ from fastapi_pagination.bases import AbstractPage from fastapi_pagination.ext.sqlalchemy import paginate from fideslib.exceptions import KeyOrNameAlreadyExists +from loguru import logger from pydantic import conlist from requests import RequestException from sqlalchemy.orm import Session @@ -59,7 +59,6 @@ from fides.api.ops.util.oauth_util import verify_oauth_client router = APIRouter(tags=["Storage"], prefix=V1_URL_PREFIX) -logger = logging.getLogger(__name__) @router.post( @@ -79,7 +78,7 @@ def 
upload_data( Uploads data from an access request to specified storage destination. Returns location of data. """ - logger.info("Finding privacy request with id '%s'", request_id) + logger.info("Finding privacy request with id '{}'", request_id) privacy_request = PrivacyRequest.get(db, object_id=request_id) if not privacy_request: @@ -88,7 +87,7 @@ def upload_data( detail=f"No privacy with id {request_id}.", ) - logger.info("Starting storage upload for request id: %s", request_id) + logger.info("Starting storage upload for request id: {}", request_id) try: data_location: str = upload( db, request_id=request_id, data=data, storage_key=storage_key @@ -118,7 +117,7 @@ def patch_config( created_or_updated: List[StorageConfig] = [] failed: List[BulkUpdateFailed] = [] - logger.info("Starting bulk upsert for %s storage configs", len(storage_configs)) + logger.info("Starting bulk upsert for {} storage configs", len(storage_configs)) for destination in storage_configs: try: storage_config = StorageConfig.create_or_update( @@ -126,7 +125,7 @@ def patch_config( ) except KeyOrNameAlreadyExists as exc: logger.warning( - "Create/update failed for storage config %s: %s", + "Create/update failed for storage config {}: {}", destination.key, exc, ) @@ -138,7 +137,7 @@ def patch_config( continue except Exception as exc: logger.warning( - "Create/update failed for storage config %s: %s", + "Create/update failed for storage config {}: {}", destination.key, Pii(str(exc)), ) @@ -172,7 +171,7 @@ def put_config_secrets( """ Add or update secrets for storage config. 
""" - logger.info("Finding storage config with key '%s'", config_key) + logger.info("Finding storage config with key '{}'", config_key) storage_config = StorageConfig.get_by(db=db, field="key", value=config_key) if not storage_config: raise HTTPException( @@ -196,7 +195,7 @@ def put_config_secrets( detail=exc.args[0], ) - logger.info("Updating storage config secrets for config with key '%s'", config_key) + logger.info("Updating storage config secrets for config with key '{}'", config_key) try: storage_config.set_secrets(db=db, storage_secrets=secrets_schema.dict()) except ValueError as exc: @@ -210,11 +209,11 @@ def put_config_secrets( status = secrets_are_valid(secrets_schema, storage_config.type) if status: logger.info( - "Storage secrets are valid for config with key '%s'", config_key + "Storage secrets are valid for config with key '{}'", config_key ) else: logger.warning( - "Storage secrets are invalid for config with key '%s'", config_key + "Storage secrets are invalid for config with key '{}'", config_key ) return TestStatusMessage( @@ -238,7 +237,7 @@ def get_configs( """ Retrieves configs for storage. """ - logger.info("Finding all storage configurations with pagination params %s", params) + logger.info("Finding all storage configurations with pagination params {}", params) return paginate( StorageConfig.query(db).order_by(StorageConfig.created_at.desc()), params=params ) @@ -255,7 +254,7 @@ def get_config_by_key( """ Retrieves configs for storage by key. """ - logger.info("Finding storage config with key '%s'", config_key) + logger.info("Finding storage config with key '{}'", config_key) storage_config = StorageConfig.get_by(db, field="key", value=config_key) if not storage_config: @@ -277,7 +276,7 @@ def delete_config_by_key( """ Deletes configs by key. 
""" - logger.info("Finding storage config with key '%s'", config_key) + logger.info("Finding storage config with key '{}'", config_key) storage_config = StorageConfig.get_by(db, field="key", value=config_key) if not storage_config: @@ -286,5 +285,5 @@ def delete_config_by_key( detail=f"No configuration with key {config_key}.", ) - logger.info("Deleting storage config with key '%s'", config_key) + logger.info("Deleting storage config with key '{}'", config_key) storage_config.delete(db) diff --git a/src/fides/api/ops/api/v1/endpoints/user_endpoints.py b/src/fides/api/ops/api/v1/endpoints/user_endpoints.py index bb5069dd93d..ad3e868b6f5 100644 --- a/src/fides/api/ops/api/v1/endpoints/user_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/user_endpoints.py @@ -1,5 +1,4 @@ import json -import logging from typing import Optional import jose.exceptions @@ -11,6 +10,7 @@ from fideslib.models.fides_user import FidesUser from fideslib.oauth.oauth_util import extract_payload from fideslib.oauth.schemas.user import UserPasswordReset, UserResponse, UserUpdate +from loguru import logger from sqlalchemy.orm import Session from starlette.status import ( HTTP_200_OK, @@ -37,7 +37,6 @@ from fides.ctl.core.config import get_config CONFIG = get_config() -logger = logging.getLogger(__name__) router = APIRouter(tags=["Users"], prefix=V1_URL_PREFIX) @@ -78,7 +77,7 @@ def update_user( ) user.update(db=db, data=data.dict()) - logger.info("Updated user with id: '%s'.", user.id) + logger.info("Updated user with id: '{}'.", user.id) return user @@ -110,7 +109,7 @@ def update_user_password( current_user.update_password(db=db, new_password=b64_str_to_str(data.new_password)) - logger.info("Updated user with id: '%s'.", current_user.id) + logger.info("Updated user with id: '{}'.", current_user.id) return current_user diff --git a/src/fides/api/ops/api/v1/endpoints/user_permission_endpoints.py b/src/fides/api/ops/api/v1/endpoints/user_permission_endpoints.py index e26afe0b80d..a88b707af33 
100644 --- a/src/fides/api/ops/api/v1/endpoints/user_permission_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/user_permission_endpoints.py @@ -1,9 +1,8 @@ -import logging - from fastapi import Depends, HTTPException, Security from fastapi.security import SecurityScopes from fideslib.models.fides_user import FidesUser from fideslib.models.fides_user_permissions import FidesUserPermissions +from loguru import logger from sqlalchemy.orm import Session from starlette.status import HTTP_201_CREATED, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND @@ -30,7 +29,6 @@ from fides.ctl.core.config import get_config CONFIG = get_config() -logger = logging.getLogger(__name__) router = APIRouter(tags=["User Permissions"], prefix=V1_URL_PREFIX) diff --git a/src/fides/api/ops/email_templates/get_email_template.py b/src/fides/api/ops/email_templates/get_email_template.py index f3552df2635..bdf426fc0f7 100644 --- a/src/fides/api/ops/email_templates/get_email_template.py +++ b/src/fides/api/ops/email_templates/get_email_template.py @@ -1,7 +1,7 @@ -import logging import pathlib from jinja2 import Environment, FileSystemLoader, Template, select_autoescape +from loguru import logger from fides.api.ops.common_exceptions import EmailTemplateUnhandledActionType from fides.api.ops.email_templates.template_names import ( @@ -18,7 +18,6 @@ from fides.api.ops.schemas.messaging.messaging import MessagingActionType pathlib.Path(__file__).parent.resolve() -logger = logging.getLogger(__name__) abs_path_to_current_file_dir = pathlib.Path(__file__).parent.resolve() template_env = Environment( @@ -49,7 +48,7 @@ def get_email_template( # pylint: disable=too-many-return-statements if action_type == MessagingActionType.PRIVACY_REQUEST_REVIEW_APPROVE: return template_env.get_template(PRIVACY_REQUEST_REVIEW_APPROVE_TEMPLATE) - logger.error("No corresponding template linked to the %s", action_type) + logger.error("No corresponding template linked to the {}", action_type) raise 
EmailTemplateUnhandledActionType( f"No corresponding template linked to the {action_type}" ) diff --git a/src/fides/api/ops/graph/data_type.py b/src/fides/api/ops/graph/data_type.py index 5622e205362..8ae0952a86d 100644 --- a/src/fides/api/ops/graph/data_type.py +++ b/src/fides/api/ops/graph/data_type.py @@ -1,12 +1,11 @@ -import logging from abc import ABC, abstractmethod from enum import Enum from typing import Any, Dict, Generic, Optional, Set, Tuple, TypeVar from bson.errors import InvalidId from bson.objectid import ObjectId +from loguru import logger -logger = logging.getLogger(__name__) T = TypeVar("T") @@ -30,7 +29,7 @@ def empty_value(self) -> T: def truncate(self, length: int, val: T) -> T: """Truncates value to given length""" logger.warning( - "%s does not support length truncation. Using original masked value instead for update query.", + "{} does not support length truncation. Using original masked value instead for update query.", self.name, ) return val diff --git a/src/fides/api/ops/graph/graph.py b/src/fides/api/ops/graph/graph.py index 09e2e6fdd37..f34f16d44af 100644 --- a/src/fides/api/ops/graph/graph.py +++ b/src/fides/api/ops/graph/graph.py @@ -1,9 +1,10 @@ from __future__ import annotations -import logging from collections import defaultdict from typing import Callable, Dict, List, Optional, Set, Tuple +from loguru import logger + from fides.api.ops.common_exceptions import ValidationError from fides.api.ops.graph.config import ( Collection, @@ -17,8 +18,6 @@ ) from fides.api.ops.schemas.shared_schemas import FidesOpsKey -logger = logging.getLogger(__name__) - class Node: """A traversal_node represents a single collection as a graph traversal_node. 
@@ -206,7 +205,7 @@ def __init__(self, *datasets: Dataset) -> None: for (dest_field_address, direction) in ref_list: if dest_field_address.collection_address() not in self.nodes: logger.warning( - "Referenced object %s does not exist", dest_field_address + "Referenced object {} does not exist", dest_field_address ) raise ValidationError( f"Referred to object {dest_field_address} does not exist" diff --git a/src/fides/api/ops/graph/traversal.py b/src/fides/api/ops/graph/traversal.py index baa732f218d..5813ca1f3fc 100644 --- a/src/fides/api/ops/graph/traversal.py +++ b/src/fides/api/ops/graph/traversal.py @@ -1,9 +1,9 @@ from __future__ import annotations -import logging from typing import Any, Callable, Dict, List, Set, Tuple, cast import pydash.collections +from loguru import logger from fides.api.ops.common_exceptions import TraversalError from fides.api.ops.graph.config import ( @@ -19,10 +19,6 @@ from fides.api.ops.util.collection_util import Row, append from fides.api.ops.util.matching_queue import MatchingQueue -logger = logging.getLogger(__name__) - - -"""A type expressing a single row of data from (any) collection""" Datastore = Dict[CollectionAddress, List[Row]] """A type expressing retrieved rows of data from a specified collection""" @@ -220,7 +216,7 @@ def __verify_traversal(self) -> None: and raises an error on any traversal failure conditions.""" self.traverse( {self.root_node.address: [self.seed_data]}, - lambda n, m: logger.info("Traverse %s", n.address), + lambda n, m: logger.info("Traverse {}", n.address), ) def traversal_map( @@ -342,7 +338,7 @@ def traverse( # pylint: disable=R0914 else: # traversal traversal_node dict diff finished nodes logger.error( - "Node could not be reached given specified ordering [%s]", + "Node could not be reached given specified ordering [{}]", ",".join([str(tn.address) for tn in running_node_queue.data]), ) raise TraversalError( @@ -353,7 +349,7 @@ def traverse( # pylint: disable=R0914 # error if there are nodes that 
have not been visited if remaining_node_keys: logger.error( - "Some nodes were not reachable: %s", + "Some nodes were not reachable: {}", ",".join([str(x) for x in remaining_node_keys]), ) raise TraversalError( @@ -362,7 +358,7 @@ def traverse( # pylint: disable=R0914 # error if there are edges that have not been visited if remaining_edges: logger.error( - "Some edges were not reachable: %s", + "Some edges were not reachable: {}", ",".join([str(x) for x in remaining_edges]), ) raise TraversalError( @@ -373,5 +369,5 @@ def traverse( # pylint: disable=R0914 tn.address for tn in finished_nodes.values() if tn.is_terminal_node ] if environment: - logger.debug("Found %s end nodes: %s", len(end_nodes), end_nodes) + logger.debug("Found {} end nodes: {}", len(end_nodes), end_nodes) return end_nodes diff --git a/src/fides/api/ops/models/datasetconfig.py b/src/fides/api/ops/models/datasetconfig.py index 7f06e1a4ad8..8d69944834c 100644 --- a/src/fides/api/ops/models/datasetconfig.py +++ b/src/fides/api/ops/models/datasetconfig.py @@ -1,7 +1,7 @@ -import logging from typing import Any, Dict, Optional, Set from fideslib.db.base_class import Base +from loguru import logger from sqlalchemy import Column, ForeignKey, String from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.ext.mutable import MutableDict @@ -27,8 +27,6 @@ from fides.api.ops.schemas.shared_schemas import FidesOpsKey from fides.api.ops.util.saas_util import merge_datasets -logger = logging.getLogger(__name__) - class DatasetConfig(Base): """ @@ -95,7 +93,7 @@ def get_graph(self) -> Dataset: ) else: logger.debug( - "Connection config with key %s is not a saas config, skipping merge dataset", + "Connection config with key {} is not a saas config, skipping merge dataset", self.connection_config.key, ) return dataset_graph @@ -193,12 +191,12 @@ def convert_dataset_to_graph( after = set() if dataset.fidesops_meta and dataset.fidesops_meta.after: after = set(dataset.fidesops_meta.after) - 
logger.debug("Parsing dataset '%s' into graph representation", dataset_name) + logger.debug("Parsing dataset '{}' into graph representation", dataset_name) graph_collections = [] for collection in dataset.collections: graph_fields = [to_graph_field(field) for field in collection.fields] logger.debug( - "Parsing dataset %s: parsed collection %s with %s fields", + "Parsing dataset {}: parsed collection {} with {} fields", dataset_name, collection.name, len(graph_fields), @@ -214,7 +212,7 @@ def convert_dataset_to_graph( ) graph_collections.append(graph_collection) logger.debug( - "Finished parsing dataset %s with %s collections", + "Finished parsing dataset {} with {} collections", dataset_name, len(graph_collections), ) diff --git a/src/fides/api/ops/models/messaging.py b/src/fides/api/ops/models/messaging.py index a1f5232b043..976d5baab71 100644 --- a/src/fides/api/ops/models/messaging.py +++ b/src/fides/api/ops/models/messaging.py @@ -1,7 +1,7 @@ -import logging from typing import Optional from fideslib.db.base import Base +from loguru import logger from pydantic import ValidationError from sqlalchemy import Column, Enum, String from sqlalchemy.dialects.postgresql import JSONB @@ -31,7 +31,6 @@ from fides.ctl.core.config import get_config CONFIG = get_config() -logger = logging.getLogger(__name__) def get_messaging_method( @@ -109,7 +108,7 @@ def get_configuration(cls, db: Session, service_type: str) -> Base: ) if not instance.secrets: logger.warning( - "Messaging secrets not found for config with key: %s", instance.key + "Messaging secrets not found for config with key: {}", instance.key ) raise MessageDispatchException( f"Messaging secrets not found for config with key: {instance.key}" @@ -139,7 +138,7 @@ def set_secrets( KeyError, ValidationError, ) as exc: - logger.error("Error: %s", Pii(str(exc))) + logger.error("Error: {}", Pii(str(exc))) # We don't want to handle these explicitly here, only in the API view raise diff --git 
a/src/fides/api/ops/models/privacy_request.py b/src/fides/api/ops/models/privacy_request.py index aeb0875891d..1aef77f6291 100644 --- a/src/fides/api/ops/models/privacy_request.py +++ b/src/fides/api/ops/models/privacy_request.py @@ -3,7 +3,6 @@ from __future__ import annotations import json -import logging from datetime import datetime, timedelta from enum import Enum as EnumType from typing import Any, Dict, List, Optional, Union @@ -16,6 +15,7 @@ from fideslib.models.client import ClientDetail from fideslib.models.fides_user import FidesUser from fideslib.oauth.jwt import generate_jwe +from loguru import logger from sqlalchemy import Boolean, Column, DateTime from sqlalchemy import Enum as EnumColumn from sqlalchemy import ForeignKey, Integer, String, UniqueConstraint @@ -71,7 +71,6 @@ from fides.api.ops.util.identity_verification import IdentityVerificationMixin from fides.ctl.core.config import get_config -logger = logging.getLogger(__name__) CONFIG = get_config() @@ -642,7 +641,7 @@ def trigger_policy_webhook(self, webhook: WebhookTypes) -> None: } logger.info( - "Calling webhook '%s' for privacy_request '%s'", webhook.key, self.id + "Calling webhook '{}' for privacy_request '{}'", webhook.key, self.id ) response: Optional[SecondPartyResponseFormat] = https_connector.execute( # type: ignore request_body.dict(), @@ -659,7 +658,7 @@ def trigger_policy_webhook(self, webhook: WebhookTypes) -> None: [response_body.derived_identity.dict().values()] ): logger.info( - "Updating known identities on privacy request '%s' from webhook '%s'.", + "Updating known identities on privacy request '{}' from webhook '{}'.", self.id, webhook.key, ) @@ -703,7 +702,7 @@ def cancel_processing(self, db: Session, cancel_reason: Optional[str]) -> None: task_id = self.get_cached_task_id() if task_id: - logger.info("Revoking task %s for request %s", task_id, self.id) + logger.info("Revoking task {} for request {}", task_id, self.id) # Only revokes if execution is not already in progress 
celery_app.control.revoke(task_id, terminate=False) diff --git a/src/fides/api/ops/models/registration.py b/src/fides/api/ops/models/registration.py index 1034000712e..c29a50cb55c 100644 --- a/src/fides/api/ops/models/registration.py +++ b/src/fides/api/ops/models/registration.py @@ -1,4 +1,3 @@ -import logging from typing import Any, Dict, Optional, Tuple from fideslib.db.base_class import Base, FidesBase @@ -12,8 +11,6 @@ CONFIG = get_config() -logger = logging.getLogger(__name__) - class UserRegistration(Base): """ diff --git a/src/fides/api/ops/models/storage.py b/src/fides/api/ops/models/storage.py index 25d2c481335..ca73b4a476d 100644 --- a/src/fides/api/ops/models/storage.py +++ b/src/fides/api/ops/models/storage.py @@ -1,6 +1,5 @@ -import logging - from fideslib.db.base import Base +from loguru import logger from pydantic import ValidationError from sqlalchemy import Column, Enum, String from sqlalchemy.dialects.postgresql import JSONB @@ -24,8 +23,6 @@ from fides.api.ops.util.logger import Pii from fides.ctl.core.config import get_config -logger = logging.getLogger(__name__) - CONFIG = get_config() diff --git a/src/fides/api/ops/schemas/storage/storage.py b/src/fides/api/ops/schemas/storage/storage.py index d7f0bd34067..4651c9c208b 100644 --- a/src/fides/api/ops/schemas/storage/storage.py +++ b/src/fides/api/ops/schemas/storage/storage.py @@ -1,4 +1,3 @@ -import logging from enum import Enum from typing import Any, Dict, List, Optional, Union @@ -8,8 +7,6 @@ from fides.api.ops.schemas.api import BulkResponse, BulkUpdateFailed from fides.api.ops.schemas.shared_schemas import FidesOpsKey -logger = logging.getLogger(__name__) - class ResponseFormat(Enum): """Response formats""" diff --git a/src/fides/api/ops/service/authentication/authentication_strategy_factory.py b/src/fides/api/ops/service/authentication/authentication_strategy_factory.py index 2a365cd3a15..20573948f91 100644 --- a/src/fides/api/ops/service/authentication/authentication_strategy_factory.py 
+++ b/src/fides/api/ops/service/authentication/authentication_strategy_factory.py @@ -1,4 +1,3 @@ -import logging from enum import Enum from typing import Any, Dict, List @@ -26,8 +25,6 @@ QueryParamAuthenticationStrategy, ) -logger = logging.getLogger(__name__) - class SupportedAuthenticationStrategies(Enum): """ diff --git a/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_authorization_code.py b/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_authorization_code.py index af1537cacc6..e2edf72e7c4 100644 --- a/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_authorization_code.py +++ b/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_authorization_code.py @@ -1,4 +1,3 @@ -import logging from typing import List, Optional from urllib.parse import urlencode from uuid import uuid4 @@ -18,7 +17,6 @@ from fides.api.ops.util.saas_util import assign_placeholders, map_param_values from fides.ctl.core.config import get_config -logger = logging.getLogger(__name__) CONFIG = get_config() diff --git a/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_base.py b/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_base.py index 16114317f11..7112fafe9e7 100644 --- a/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_base.py +++ b/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_base.py @@ -1,7 +1,7 @@ -import logging from datetime import datetime, timedelta from typing import Any, Dict, List, Literal, Optional +from loguru import logger from sqlalchemy.orm import Session from fides.api.ops.common_exceptions import FidesopsException, OAuth2TokenException @@ -17,8 +17,6 @@ from fides.api.ops.util.logger import Pii from fides.api.ops.util.saas_util import assign_placeholders, map_param_values -logger = logging.getLogger(__name__) - class OAuth2AuthenticationStrategyBase(AuthenticationStrategy): """ @@ -44,7 
+42,7 @@ def _close_to_expiration( if expires_at is None: logger.info( - "The expires_at value is not defined for %s, skipping token refresh", + "The expires_at value is not defined for {}, skipping token refresh", connection_config.key, ) return False @@ -62,7 +60,7 @@ def _call_token_request( and connection config secrets. """ - logger.info("Attempting %s token request for %s", action, connection_config.key) + logger.info("Attempting {} token request for {}", action, connection_config.key) # get the client config from the token request or default to the # protocol and host specified by the root client config (no auth) @@ -97,7 +95,7 @@ def _call_token_request( json_response = response.json() except Exception as exc: logger.error( - "Error occurred during the %s request for %s: %s", + "Error occurred during the {} request for {}: {}", action, connection_config.key, Pii(str(exc)), @@ -183,7 +181,7 @@ def _validate_and_store_response( updated_secrets = {**connection_config.secrets, **data} # type: ignore connection_config.update(db, data={"secrets": updated_secrets}) logger.info( - "Successfully updated the OAuth2 token(s) for %s", connection_config.key + "Successfully updated the OAuth2 token(s) for {}", connection_config.key ) return access_token diff --git a/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_client_credentials.py b/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_client_credentials.py index a9107028ad5..59781094cc9 100644 --- a/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_client_credentials.py +++ b/src/fides/api/ops/service/authentication/authentication_strategy_oauth2_client_credentials.py @@ -1,5 +1,3 @@ -import logging - from requests import PreparedRequest from fides.api.ops.models.connectionconfig import ConnectionConfig @@ -8,8 +6,6 @@ OAuth2AuthenticationStrategyBase, ) -logger = logging.getLogger(__name__) - class 
OAuth2ClientCredentialsAuthenticationStrategy(OAuth2AuthenticationStrategyBase): """ diff --git a/src/fides/api/ops/service/connectors/base_connector.py b/src/fides/api/ops/service/connectors/base_connector.py index daf47ab1d38..8b28fb9bd33 100644 --- a/src/fides/api/ops/service/connectors/base_connector.py +++ b/src/fides/api/ops/service/connectors/base_connector.py @@ -1,4 +1,3 @@ -import logging from abc import ABC, abstractmethod from typing import Any, Dict, Generic, List, Optional, TypeVar @@ -12,7 +11,6 @@ CONFIG = get_config() DB_CONNECTOR_TYPE = TypeVar("DB_CONNECTOR_TYPE") -logger = logging.getLogger(__name__) class BaseConnector(Generic[DB_CONNECTOR_TYPE], ABC): diff --git a/src/fides/api/ops/service/connectors/email_connector.py b/src/fides/api/ops/service/connectors/email_connector.py index 1405022a97d..b334fa97e28 100644 --- a/src/fides/api/ops/service/connectors/email_connector.py +++ b/src/fides/api/ops/service/connectors/email_connector.py @@ -1,7 +1,7 @@ -import logging from typing import Any, Dict, List, Optional from fideslib.models.audit_log import AuditLog, AuditLogAction +from loguru import logger from sqlalchemy.orm import Session from fides.api.ops.common_exceptions import ( @@ -34,8 +34,6 @@ from fides.api.ops.service.messaging.message_dispatch_service import dispatch_message from fides.api.ops.util.collection_util import Row, append -logger = logging.getLogger(__name__) - class EmailConnector(BaseConnector[None]): def query_config(self, node: TraversalNode) -> ManualQueryConfig: @@ -55,7 +53,7 @@ def test_connection(self) -> Optional[ConnectionTestStatus]: Sends an email to the "test_email" configured, just to establish that the email workflow is working. 
""" config = EmailSchema(**self.configuration.secrets or {}) - logger.info("Starting test connection to %s", self.configuration.key) + logger.info("Starting test connection to {}", self.configuration.key) db = Session.object_session(self.configuration) @@ -85,7 +83,7 @@ def test_connection(self) -> Optional[ConnectionTestStatus]: ], ) except MessageDispatchException as exc: - logger.info("Email connector test failed with exception %s", exc) + logger.info("Email connector test failed with exception {}", exc) return ConnectionTestStatus.failed return ConnectionTestStatus.succeeded @@ -98,7 +96,7 @@ def retrieve_data( # type: ignore ) -> Optional[List[Row]]: """Access requests are not supported at this time.""" logger.info( - "Access requests not supported for email connector '%s' at this time.", + "Access requests not supported for email connector '{}' at this time.", node.address.value, ) return [] @@ -119,7 +117,7 @@ def mask_data( # type: ignore node, policy, input_data ) - logger.info("Caching action needed for collection: '%s", node.address.value) + logger.info("Caching action needed for collection: '{}", node.address.value) privacy_request.cache_email_connector_template_contents( step=CurrentStep.erasure, collection=node.address, @@ -184,7 +182,7 @@ def email_connector_erasure_send(db: Session, privacy_request: PrivacyRequest) - if not template_values: logger.info( - "No email sent: no template values saved for '%s'", + "No email sent: no template values saved for '{}'", ds.dataset.get("fides_key"), ) return @@ -198,7 +196,7 @@ def email_connector_erasure_send(db: Session, privacy_request: PrivacyRequest) - ) ): logger.info( - "No email sent: no masking needed on '%s'", ds.dataset.get("fides_key") + "No email sent: no masking needed on '{}'", ds.dataset.get("fides_key") ) return @@ -213,7 +211,7 @@ def email_connector_erasure_send(db: Session, privacy_request: PrivacyRequest) - ) logger.info( - "Email send succeeded for request '%s' for dataset: '%s'", + "Email 
send succeeded for request '{}' for dataset: '{}'", privacy_request.id, ds.dataset.get("fides_key"), ) diff --git a/src/fides/api/ops/service/connectors/fides/fides_client.py b/src/fides/api/ops/service/connectors/fides/fides_client.py index 23eda8576ac..cad29d4fddf 100644 --- a/src/fides/api/ops/service/connectors/fides/fides_client.py +++ b/src/fides/api/ops/service/connectors/fides/fides_client.py @@ -4,7 +4,7 @@ import requests from fideslib.oauth.schemas.user import UserLogin -from loguru import logger as log +from loguru import logger from requests import PreparedRequest, Request, RequestException, Session from fides.api.ctl.utils.errors import FidesError @@ -51,24 +51,28 @@ def __init__( def login(self) -> None: ul: UserLogin = UserLogin(username=self.username, password=self.password) - log.info( - f"Logging in to remote fides {self.uri} with username '{self.username}'..." + logger.info( + "Logging in to remote fides {} with username '{}'...", + self.uri, + self.username, ) try: response = requests.post( f"{self.uri}{urls.V1_URL_PREFIX}{urls.LOGIN}", json=ul.dict() ) except RequestException as e: - log.error(f"Error logging in on remote Fides {self.uri}: {str(e)}") + logger.error("Error logging in on remote Fides {}: {}", self.uri, str(e)) raise e if response.ok: self.token = response.json()["token_data"]["access_token"] - log.info( - f"Successfully logged in to remote fides {self.uri} with username '{self.username}'" + logger.info( + "Successfully logged in to remote fides {} with username '{}'", + self.uri, + self.username, ) else: - log.error(f"Error logging in on remote Fides {self.uri}") + logger.error("Error logging in on remote Fides {}", self.uri) response.raise_for_status() def authenticated_request( @@ -110,8 +114,10 @@ def create_privacy_request( policy_key=policy_key, ) - log.info( - f"Creating privacy request with external_id {external_id} on remote fides {self.uri}..." 
+ logger.info( + "Creating privacy request with external_id {} on remote fides {}...", + external_id, + self.uri, ) request: PreparedRequest = self.authenticated_request( method="POST", @@ -120,7 +126,7 @@ def create_privacy_request( ) response = self.session.send(request) if not response.ok: - log.error(f"Error creating privacy request on remote Fides {self.uri}") + logger.error("Error creating privacy request on remote Fides {}", self.uri) response.raise_for_status() if response.json()["failed"]: # TODO better handle errored state here? @@ -129,8 +135,11 @@ def create_privacy_request( ) pr_id = response.json()["succeeded"][0]["id"] - log.info( - f"Successfully created privacy request with id {pr_id} and external_id {external_id} on remote fides {self.uri}" + logger.info( + "Successfully created privacy request with id {} and external_id {} on remote fides {}", + pr_id, + external_id, + self.uri, ) return pr_id @@ -151,8 +160,10 @@ async def poll_for_request_completion( f"Unable to poll for request completion. No token for Fides connector for server {self.uri}" ) - log.info( - f"Polling remote fides {self.uri} for completion of privacy request with id {privacy_request_id}..." + logger.info( + "Polling remote fides {} for completion of privacy request with id {}...", + self.uri, + privacy_request_id, ) status: PrivacyRequestResponse = await poll_server_for_completion( privacy_request_id=privacy_request_id, @@ -174,8 +185,10 @@ async def poll_for_request_completion( f"Privacy request [{privacy_request_id}] on remote Fides {self.uri} was denied. Look at the remote Fides for more information." 
) if status.status == PrivacyRequestStatus.complete: - log.info( - f"Privacy request [{privacy_request_id}] is complete on remote Fides {self.uri}!", + logger.info( + "Privacy request [{}] is complete on remote Fides {}!", + privacy_request_id, + self.uri, ) return status @@ -188,12 +201,15 @@ def request_status(self, privacy_request_id: str = None) -> List[Dict[str, Any]] Return privacy request object that tracks its status """ if privacy_request_id: - log.info( - f"Retrieving request status for privacy request {privacy_request_id if privacy_request_id else ''} on remote fides {self.uri}..." + logger.info( + "Retrieving request status for privacy request {} on remote fides {}...", + privacy_request_id, + self.uri, ) else: - log.info( - f"Retrieving request status for all privacy requests on remote fides {self.uri}..." + logger.info( + "Retrieving request status for all privacy requests on remote fides {}...", + self.uri, ) request: PreparedRequest = self.authenticated_request( @@ -205,18 +221,23 @@ def request_status(self, privacy_request_id: str = None) -> List[Dict[str, Any]] ) response = self.session.send(request, timeout=5) if not response.ok: - log.error( - f"Error retrieving status of privacy request [{privacy_request_id}] on remote Fides {self.uri}", + logger.error( + "Error retrieving status of privacy request [{}] on remote Fides {}", + privacy_request_id, + self.uri, ) response.raise_for_status() if privacy_request_id: - log.info( - f"Retrieved request status for privacy request {privacy_request_id if privacy_request_id else ''} on remote fides {self.uri}" + logger.info( + "Retrieved request status for privacy request {} on remote fides {}", + privacy_request_id, + self.uri, ) else: - log.info( - f"Retrieved request status for all privacy requests on remote fides {self.uri}" + logger.info( + "Retrieved request status for all privacy requests on remote fides {}", + self.uri, ) return response.json()["items"] @@ -231,8 +252,10 @@ def 
retrieve_request_results( Returns the filtered access results as a `Dict[str, List[Row]] """ try: - log.info( - f"Retrieving request results for privacy request {privacy_request_id} on remote fides {self.uri}..." + logger.info( + "Retrieving request results for privacy request {} on remote fides {}...", + privacy_request_id, + self.uri, ) request = self.authenticated_request( method="get", @@ -241,17 +264,23 @@ def retrieve_request_results( ) response = self.session.send(request) except requests.exceptions.HTTPError as e: - log.error( - f"Error retrieving data from child server for privacy request {privacy_request_id}: {e}" + logger.error( + "Error retrieving data from child server for privacy request {}: {}", + privacy_request_id, + e, ) if response.status_code != 200: - log.error( - f"Error retrieving data from child server for privacy request {privacy_request_id}: {response.text}" + logger.error( + "Error retrieving data from child server for privacy request {}: {}", + privacy_request_id, + response.text, ) return {} - log.info( - f"Retrieved request results for privacy request {privacy_request_id} on remote fides {self.uri}" + logger.info( + "Retrieved request results for privacy request {} on remote fides {}", + privacy_request_id, + self.uri, ) return response.json() diff --git a/src/fides/api/ops/service/connectors/http_connector.py b/src/fides/api/ops/service/connectors/http_connector.py index 72c6baf19cf..8dc72d3db01 100644 --- a/src/fides/api/ops/service/connectors/http_connector.py +++ b/src/fides/api/ops/service/connectors/http_connector.py @@ -1,8 +1,8 @@ import json -import logging from typing import Any, Dict, List, Optional import requests +from loguru import logger from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR from fides.api.ops.common_exceptions import ClientUnsuccessfulException @@ -15,8 +15,6 @@ from fides.api.ops.service.connectors.query_config import QueryConfig from fides.api.ops.util.collection_util import Row -logger = 
logging.getLogger(__name__) - class HTTPSConnector(BaseConnector[None]): """HTTP Connector - for connecting to second and third-party endpoints""" diff --git a/src/fides/api/ops/service/connectors/limiter/rate_limiter.py b/src/fides/api/ops/service/connectors/limiter/rate_limiter.py index fd1270bca29..86ead1385ea 100644 --- a/src/fides/api/ops/service/connectors/limiter/rate_limiter.py +++ b/src/fides/api/ops/service/connectors/limiter/rate_limiter.py @@ -1,13 +1,12 @@ -import logging import time from enum import Enum from typing import List +from loguru import logger + from fides.api.ops.common_exceptions import RedisConnectionError from fides.api.ops.util.cache import FidesopsRedis, get_cache -logger = logging.getLogger(__name__) - class RateLimiterPeriod(Enum): """ @@ -129,7 +128,7 @@ def limit( redis: FidesopsRedis = get_cache() except RedisConnectionError as exc: logger.warning( - "Failed to connect to redis, skipping limiter for requests %s. %s", + "Failed to connect to redis, skipping limiter for requests {}. {}", ",".join(str(r) for r in requests), exc, ) @@ -151,7 +150,7 @@ def limit( if breached_requests: logger.debug( - "Breached rate limits: %s. Decrementing usage and trying again.", + "Breached rate limits: {}. 
Decrementing usage and trying again.", ",".join(str(r) for r in breached_requests), ) self.decrement_usage( diff --git a/src/fides/api/ops/service/connectors/manual_connector.py b/src/fides/api/ops/service/connectors/manual_connector.py index 79f24734da4..c6e45da72c9 100644 --- a/src/fides/api/ops/service/connectors/manual_connector.py +++ b/src/fides/api/ops/service/connectors/manual_connector.py @@ -1,4 +1,3 @@ -import logging from typing import Any, Dict, List, Optional from fides.api.ops.common_exceptions import PrivacyRequestPaused @@ -9,8 +8,6 @@ from fides.api.ops.service.connectors.query_config import ManualQueryConfig from fides.api.ops.util.collection_util import Row -logger = logging.getLogger(__name__) - class ManualConnector(BaseConnector[None]): def query_config(self, node: TraversalNode) -> ManualQueryConfig: diff --git a/src/fides/api/ops/service/connectors/manual_webhook_connector.py b/src/fides/api/ops/service/connectors/manual_webhook_connector.py index 052881284d5..75a6e501d19 100644 --- a/src/fides/api/ops/service/connectors/manual_webhook_connector.py +++ b/src/fides/api/ops/service/connectors/manual_webhook_connector.py @@ -1,4 +1,3 @@ -import logging from typing import Any, Dict, List from fides.api.ops.graph.traversal import TraversalNode @@ -8,8 +7,6 @@ from fides.api.ops.service.connectors.base_connector import BaseConnector from fides.api.ops.util.collection_util import Row -logger = logging.getLogger(__name__) - class ManualWebhookConnector(BaseConnector[None]): def query_config(self, node: TraversalNode) -> None: # type: ignore diff --git a/src/fides/api/ops/service/connectors/mongodb_connector.py b/src/fides/api/ops/service/connectors/mongodb_connector.py index c0a6d5c00eb..2212cf176f1 100644 --- a/src/fides/api/ops/service/connectors/mongodb_connector.py +++ b/src/fides/api/ops/service/connectors/mongodb_connector.py @@ -1,6 +1,6 @@ -import logging from typing import Any, Dict, List, Optional +from loguru import logger from pymongo 
import MongoClient from pymongo.errors import OperationFailure, ServerSelectionTimeoutError @@ -17,8 +17,6 @@ from fides.api.ops.util.collection_util import Row from fides.api.ops.util.logger import Pii -logger = logging.getLogger(__name__) - class MongoDBConnector(BaseConnector[MongoClient]): """MongoDB Connector""" @@ -60,7 +58,7 @@ def test_connection(self) -> Optional[ConnectionTestStatus]: Connects to the Mongo database and makes two trivial queries to ensure connection is valid. """ config = MongoDBSchema(**self.configuration.secrets or {}) - logger.info("Starting test connection to %s", self.configuration.key) + logger.info("Starting test connection to {}", self.configuration.key) client = self.client() try: # Make a couple of trivial requests - getting server info and fetching the collection names @@ -103,10 +101,10 @@ def retrieve_data( db = client[db_name] collection = db[collection_name] rows = [] - logger.info("Starting data retrieval for %s", node.address) + logger.info("Starting data retrieval for {}", node.address) for row in collection.find(query_data, fields): rows.append(row) - logger.info("Found %s rows on %s", len(rows), node.address) + logger.info("Found {} rows on {}", len(rows), node.address) return rows def mask_data( @@ -133,7 +131,7 @@ def mask_data( update_result = collection.update_one(query, update, upsert=False) update_ct += update_result.modified_count logger.info( - "db.%s.update_one(%s, %s, upsert=False)", + "db.{}.update_one({}, {}, upsert=False)", collection_name, Pii(query), Pii(update), diff --git a/src/fides/api/ops/service/connectors/query_config.py b/src/fides/api/ops/service/connectors/query_config.py index 6bdd054fe37..c8652f0997a 100644 --- a/src/fides/api/ops/service/connectors/query_config.py +++ b/src/fides/api/ops/service/connectors/query_config.py @@ -1,9 +1,9 @@ -import logging import re from abc import ABC, abstractmethod from typing import Any, Dict, Generic, List, Optional, Tuple, TypeVar import pydash +from 
loguru import logger from sqlalchemy import MetaData, Table, text from sqlalchemy.engine import Engine from sqlalchemy.sql import Executable, Update # type: ignore @@ -31,7 +31,6 @@ from fides.api.ops.util.logger import Pii from fides.api.ops.util.querytoken import QueryToken -logger = logging.getLogger(__name__) T = TypeVar("T") @@ -160,7 +159,7 @@ def update_value_map( # pylint: disable=R0914 masking_override, null_masking, strategy ): logger.warning( - "Unable to generate a query for field %s: data_type is either not present on the field or not supported for the %s masking strategy. Received data type: %s", + "Unable to generate a query for field {}: data_type is either not present on the field or not supported for the {} masking strategy. Received data type: {}", rule_field_path.string_path, strategy_config["strategy"], masking_override.data_type_converter.name, # type: ignore @@ -212,7 +211,7 @@ def _generate_masked_value( # pylint: disable=R0913 masked_val = strategy.mask([val], request_id)[0] # type: ignore logger.debug( - "Generated the following masked val for field %s: %s", + "Generated the following masked val for field {}: {}", str_field_path, masked_val, ) @@ -223,7 +222,7 @@ def _generate_masked_value( # pylint: disable=R0913 if masking_override.length: logger.warning( - "Because a length has been specified for field %s, we will truncate length of masked value to match, regardless of masking strategy", + "Because a length has been specified for field {}, we will truncate length of masked value to match, regardless of masking strategy", str_field_path, ) # for strategies other than null masking we assume that masked data type is the same as specified data type @@ -406,7 +405,7 @@ def generate_query( return text(query_str).params(query_data) logger.warning( - "There is not enough data to generate a valid query for %s", + "There is not enough data to generate a valid query for {}", self.node.address, ) return None @@ -441,7 +440,7 @@ def 
generate_update_stmt( valid = len(pk_clauses) > 0 and len(update_clauses) > 0 if not valid: logger.warning( - "There is not enough data to generate a valid update statement for %s", + "There is not enough data to generate a valid update statement for {}", self.node.address, ) return None @@ -450,7 +449,7 @@ def generate_update_stmt( update_clauses, pk_clauses, ) - logger.info("query = %s, params = %s", Pii(query_str), Pii(update_value_map)) + logger.info("query = {}, params = {}", Pii(query_str), Pii(update_value_map)) return text(query_str).params(update_value_map) def query_to_str(self, t: TextClause, input_data: Dict[str, List[Any]]) -> str: @@ -552,7 +551,7 @@ def generate_query( # pylint: disable=R0914 return text(query_str).params(query_data) logger.warning( - "There is not enough data to generate a valid query for %s", + "There is not enough data to generate a valid query for {}", self.node.address, ) return None @@ -654,7 +653,7 @@ def generate_update( valid = len(non_empty_primary_keys) > 0 and update_value_map if not valid: logger.warning( - "There is not enough data to generate a valid update statement for %s", + "There is not enough data to generate a valid update statement for {}", self.node.address, ) return None @@ -718,7 +717,7 @@ def transform_query_pairs(pairs: Dict[str, Any]) -> Dict[str, Any]: return query_fields, return_fields logger.warning( - "There is not enough data to generate a valid query for %s", + "There is not enough data to generate a valid query for {}", self.node.address, ) return None @@ -739,7 +738,7 @@ def generate_update_stmt( valid = len(pk_clauses) > 0 and len(update_clauses) > 0 if not valid: logger.warning( - "There is not enough data to generate a valid update for %s", + "There is not enough data to generate a valid update for {}", self.node.address, ) return None diff --git a/src/fides/api/ops/service/connectors/saas/authenticated_client.py b/src/fides/api/ops/service/connectors/saas/authenticated_client.py index 
a7aa74db1f0..0ed63b2105b 100644 --- a/src/fides/api/ops/service/connectors/saas/authenticated_client.py +++ b/src/fides/api/ops/service/connectors/saas/authenticated_client.py @@ -1,13 +1,13 @@ from __future__ import annotations import email -import logging import re import time from functools import wraps from time import sleep from typing import TYPE_CHECKING, Any, Callable, List, Optional, Union +from loguru import logger from requests import PreparedRequest, Request, Response, Session from fides.api.ops.common_exceptions import ( @@ -29,7 +29,6 @@ from fides.api.ops.schemas.saas.shared_schemas import SaaSRequestParams -logger = logging.getLogger(__name__) CONFIG = get_config() @@ -135,7 +134,7 @@ def result(*args: Any, **kwargs: Any) -> Response: if attempt < retry_count: logger.warning( - "Retrying http request in %s seconds", sleep_time + "Retrying http request in {} seconds", sleep_time ) sleep(sleep_time) @@ -189,7 +188,7 @@ def send( if not response.ok: if ignore_errors: logger.info( - "Ignoring errors on response with status code %s as configured.", + "Ignoring errors on response with status code {} as configured.", response.status_code, ) return response @@ -214,10 +213,10 @@ def log_request_and_response_for_debugging( if CONFIG.dev_mode: logger.info( "\n\n-----------SAAS REQUEST-----------" - "\n%s %s" - "\nheaders: %s" - "\nbody: %s" - "\nresponse: %s", + "\n{} {}" + "\nheaders: {}" + "\nbody: {}" + "\nresponse: {}", prepared_request.method, prepared_request.url, prepared_request.headers, diff --git a/src/fides/api/ops/service/connectors/saas/connector_registry_service.py b/src/fides/api/ops/service/connectors/saas/connector_registry_service.py index 12c0ea57056..fc1438a649e 100644 --- a/src/fides/api/ops/service/connectors/saas/connector_registry_service.py +++ b/src/fides/api/ops/service/connectors/saas/connector_registry_service.py @@ -1,10 +1,10 @@ from __future__ import annotations -import logging from os.path import exists from typing import 
Dict, Iterable, List, Optional, Union from fideslib.core.config import load_toml +from loguru import logger from packaging.version import LegacyVersion, Version from packaging.version import parse as parse_version from pydantic import BaseModel, validator @@ -31,8 +31,6 @@ _registry: Optional[ConnectorRegistry] = None registry_file = "data/saas/saas_connector_registry.toml" -logger = logging.getLogger(__name__) - class ConnectorTemplate(BaseModel): """ @@ -155,7 +153,7 @@ def update_saas_configs(registry: ConnectorRegistry, db: Session) -> None: """ for connector_type in registry.connector_types(): logger.debug( - "Determining if any updates are needed for connectors of type %s based on templates...", + "Determining if any updates are needed for connectors of type {} based on templates...", connector_type, ) template: ConnectorTemplate = registry.get_connector_template( # type: ignore @@ -174,7 +172,7 @@ def update_saas_configs(registry: ConnectorRegistry, db: Session) -> None: saas_config_instance = SaaSConfig.parse_obj(connection_config.saas_config) if parse_version(saas_config_instance.version) < template_version: logger.info( - "Updating SaaS config instance '%s' of type '%s' as its version, %s, was found to be lower than the template version %s", + "Updating SaaS config instance '{}' of type '{}' as its version, {}, was found to be lower than the template version {}", saas_config_instance.fides_key, connector_type, saas_config_instance.version, @@ -189,7 +187,7 @@ def update_saas_configs(registry: ConnectorRegistry, db: Session) -> None: ) except Exception: logger.error( - "Encountered error attempting to update SaaS config instance %s", + "Encountered error attempting to update SaaS config instance {}", saas_config_instance.fides_key, exc_info=True, ) diff --git a/src/fides/api/ops/service/connectors/saas_connector.py b/src/fides/api/ops/service/connectors/saas_connector.py index 891b802e169..671e115917d 100644 --- 
a/src/fides/api/ops/service/connectors/saas_connector.py +++ b/src/fides/api/ops/service/connectors/saas_connector.py @@ -1,8 +1,8 @@ -import logging from json import JSONDecodeError from typing import Any, Callable, Dict, List, Optional, Tuple, Union, cast import pydash +from loguru import logger from requests import Response from fides.api.ops.common_exceptions import FidesopsException, PostProcessingException @@ -29,8 +29,6 @@ from fides.api.ops.util.collection_util import Row from fides.api.ops.util.saas_util import assign_placeholders, map_param_values -logger = logging.getLogger(__name__) - class SaaSConnector(BaseConnector[AuthenticatedClient]): """A connector type to integrate with third-party SaaS APIs""" @@ -135,7 +133,7 @@ def create_client(self) -> AuthenticatedClient: client_config = self.get_client_config() rate_limit_config = self.get_rate_limit_config() - logger.info("Creating client to %s", uri) + logger.info("Creating client to {}", uri) return AuthenticatedClient( uri, self.configuration, client_config, rate_limit_config ) @@ -169,7 +167,7 @@ def retrieve_data( # then we return a single empty row to still trigger the mask_data method if delete_request: logger.info( - "Skipping read for the '%s' collection, it is delete-only", + "Skipping read for the '{}' collection, it is delete-only", self.current_collection_name, ) return [{}] @@ -243,7 +241,7 @@ def _missing_dataset_reference_values( if missing_dataset_reference_values: logger.info( - "The '%s' request of %s is missing the following dataset reference values [%s], skipping traversal", + "The '{}' request of {} is missing the following dataset reference values [{}], skipping traversal", self.current_collection_name, self.saas_config.fides_key, # type: ignore ", ".join(missing_dataset_reference_values), @@ -274,7 +272,7 @@ def execute_prepared_request( ) logger.info( - "%s row(s) returned after postprocessing '%s' collection.", + "{} row(s) returned after postprocessing '{}' collection.", 
len(rows), self.current_collection_name, ) @@ -292,7 +290,7 @@ def execute_prepared_request( if next_request: logger.info( - "Using '%s' pagination strategy to get next page for '%s'.", + "Using '{}' pagination strategy to get next page for '{}'.", saas_request.pagination.strategy, # type: ignore self.current_collection_name, ) @@ -319,7 +317,7 @@ def process_response_data( postprocessor.strategy, postprocessor.configuration # type: ignore ) logger.info( - "Starting postprocessing of '%s' collection with '%s' strategy.", + "Starting postprocessing of '{}' collection with '{}' strategy.", self.current_collection_name, postprocessor.strategy, # type: ignore ) @@ -419,7 +417,7 @@ def _handle_errored_response( """ if saas_request.ignore_errors and not response.ok: logger.info( - "Ignoring and clearing errored response with status code %s.", + "Ignoring and clearing errored response with status code {}.", response.status_code, ) response = Response() @@ -471,7 +469,7 @@ def _invoke_read_request_override( ) # type: ignore except Exception: logger.error( - "Encountered error executing override access function '%s'", + "Encountered error executing override access function '{}'", override_function_name, exc_info=True, ) @@ -519,7 +517,7 @@ def _invoke_masking_request_override( ) # type: ignore except Exception: logger.error( - "Encountered error executing override mask function '%s", + "Encountered error executing override mask function '{}", override_function_name, exc_info=True, ) diff --git a/src/fides/api/ops/service/connectors/saas_query_config.py b/src/fides/api/ops/service/connectors/saas_query_config.py index db0646ad0a7..88ece2fcbf0 100644 --- a/src/fides/api/ops/service/connectors/saas_query_config.py +++ b/src/fides/api/ops/service/connectors/saas_query_config.py @@ -1,11 +1,11 @@ from __future__ import annotations import json -import logging from itertools import product from typing import Any, Dict, List, Literal, Optional, TypeVar import pydash +from loguru 
import logger from fides.api.ops.common_exceptions import FidesopsException from fides.api.ops.graph.config import ScalarField @@ -28,7 +28,6 @@ from fides.ctl.core.config import get_config CONFIG = get_config() -logger = logging.getLogger(__name__) T = TypeVar("T") @@ -62,7 +61,7 @@ def get_read_requests_by_identity(self) -> List[SaaSRequest]: try: requests = self.endpoints[collection_name].requests except KeyError: - logger.error("The '%s' endpoint is not defined", collection_name) + logger.error("The '{}' endpoint is not defined", collection_name) return [] if not requests.read: @@ -104,11 +103,11 @@ def get_erasure_request_by_action( ) if request: logger.info( - "Found matching endpoint to %s '%s' collection", action, collection_name + "Found matching endpoint to {} '{}' collection", action, collection_name ) else: logger.info( - "Unable to find matching endpoint to %s '%s' collection", + "Unable to find matching endpoint to {} '{}' collection", action, collection_name, ) @@ -145,7 +144,7 @@ def get_masking_request(self) -> Optional[SaaSRequest]: self.action = action_type logger.info( - "Selecting '%s' action to perform masking request for '%s' collection.", + "Selecting '{}' action to perform masking request for '{}' collection.", action_type, self.collection_name, ) @@ -313,7 +312,7 @@ def generate_query( self.action, self.collection_name, self.current_request, param_values # type: ignore ) - logger.info("Populated request params for %s", self.current_request.path) + logger.info("Populated request params for {}", self.current_request.path) return saas_request_params @@ -429,7 +428,7 @@ def generate_update_request_params( self.action, self.collection_name, masking_request, param_values # type: ignore ) - logger.info("Populated request params for %s", masking_request.path) + logger.info("Populated request params for {}", masking_request.path) return saas_request_params diff --git a/src/fides/api/ops/service/connectors/sql_connector.py 
b/src/fides/api/ops/service/connectors/sql_connector.py index 97a6e3a8892..9dfc08f7bc9 100644 --- a/src/fides/api/ops/service/connectors/sql_connector.py +++ b/src/fides/api/ops/service/connectors/sql_connector.py @@ -1,7 +1,7 @@ -import logging from abc import abstractmethod from typing import Any, Dict, List, Optional, Type +from loguru import logger from snowflake.sqlalchemy import URL as Snowflake_URL from sqlalchemy import Column, text from sqlalchemy.engine import ( # type: ignore @@ -47,8 +47,6 @@ ) from fides.api.ops.util.collection_util import Row -logger = logging.getLogger(__name__) - class SQLConnector(BaseConnector[Engine]): """A SQL connector represents an abstract connector to any datastore that can be @@ -97,7 +95,7 @@ def query_config(self, node: TraversalNode) -> SQLQueryConfig: def test_connection(self) -> Optional[ConnectionTestStatus]: """Connects to the SQL DB and makes a trivial query.""" - logger.info("Starting test connection to %s", self.configuration.key) + logger.info("Starting test connection to {}", self.configuration.key) try: engine = self.client() @@ -129,7 +127,7 @@ def retrieve_data( stmt: Optional[TextClause] = query_config.generate_query(input_data, policy) if stmt is None: return [] - logger.info("Starting data retrieval for %s", node.address) + logger.info("Starting data retrieval for {}", node.address) with client.connect() as connection: self.set_schema(connection) results = connection.execute(stmt) @@ -161,7 +159,7 @@ def mask_data( def close(self) -> None: """Close any held resources""" if self.db_client: - logger.debug(" disposing of %s", self.__class__) + logger.debug(" disposing of {}", self.__class__) self.db_client.dispose() def create_client(self) -> Engine: diff --git a/src/fides/api/ops/service/drp/drp_fidesops_mapper.py b/src/fides/api/ops/service/drp/drp_fidesops_mapper.py index 5f58f667df9..36203fb70d8 100644 --- a/src/fides/api/ops/service/drp/drp_fidesops_mapper.py +++ 
b/src/fides/api/ops/service/drp/drp_fidesops_mapper.py @@ -1,6 +1,7 @@ -import logging from typing import Dict +from loguru import logger + from fides.api.ops.models.privacy_request import ( PrivacyRequestStatus, ProvidedIdentityType, @@ -9,8 +10,6 @@ from fides.api.ops.schemas.privacy_request import PrivacyRequestDRPStatus from fides.api.ops.schemas.redis_cache import Identity -logger = logging.getLogger(__name__) - class DrpFidesopsMapper: """ @@ -34,7 +33,7 @@ def map_identity(drp_identity: DrpIdentity) -> Identity: for attr, val in drp_identity.__dict__.items(): if attr not in DRP_TO_FIDESOPS_SUPPORTED_IDENTITY_PROPS_MAP: logger.warning( - "Identity attribute of %s is not supported by Fidesops at this time. Continuing to use other identity props, if provided.", + "Identity attribute of {} is not supported by Fidesops at this time. Continuing to use other identity props, if provided.", attr, ) else: diff --git a/src/fides/api/ops/service/masking/strategy/masking_strategy_factory.py b/src/fides/api/ops/service/masking/strategy/masking_strategy_factory.py index d3bb7ed192a..9e036993f93 100644 --- a/src/fides/api/ops/service/masking/strategy/masking_strategy_factory.py +++ b/src/fides/api/ops/service/masking/strategy/masking_strategy_factory.py @@ -1,6 +1,6 @@ -import logging from typing import Callable, Dict, Type, Union, ValuesView +from loguru import logger from pydantic import ValidationError from fides.api.ops.common_exceptions import NoSuchStrategyException @@ -8,8 +8,6 @@ from fides.api.ops.schemas.masking.masking_configuration import FormatPreservationConfig from fides.api.ops.service.masking.strategy.masking_strategy import MaskingStrategy -logger = logging.getLogger(__name__) - class MaskingStrategyFactory: registry: Dict[str, Type[MaskingStrategy]] = {} @@ -21,14 +19,14 @@ def register( ) -> Callable[[Type[MaskingStrategy]], Type[MaskingStrategy]]: def wrapper(strategy_class: Type[MaskingStrategy]) -> Type[MaskingStrategy]: logger.debug( - "Registering 
new masking strategy '%s' under name '%s'", + "Registering new masking strategy '{}' under name '{}'", strategy_class, name, ) if name in cls.registry: logger.warning( - "Masking strategy with name '%s' already exists. It previously referred to class '%s', but will now refer to '%s'", + "Masking strategy with name '{}' already exists. It previously referred to class '{}', but will now refer to '{}'", name, cls.registry[name], strategy_class, diff --git a/src/fides/api/ops/service/messaging/message_dispatch_service.py b/src/fides/api/ops/service/messaging/message_dispatch_service.py index 0ab831f902a..6da6d63425a 100644 --- a/src/fides/api/ops/service/messaging/message_dispatch_service.py +++ b/src/fides/api/ops/service/messaging/message_dispatch_service.py @@ -1,9 +1,9 @@ from __future__ import annotations -import logging from typing import Any, Callable, Dict, List, Optional, Union import requests +from loguru import logger from sqlalchemy.orm import Session from twilio.base.exceptions import TwilioRestException from twilio.rest import Client @@ -41,7 +41,6 @@ CONFIG = get_config() -logger = logging.getLogger(__name__) EMAIL_JOIN_STRING = ", " @@ -137,7 +136,7 @@ def dispatch_message( db=db, service_type=service_type ) logger.info( - "Building appropriate message template for action type: %s", action_type + "Building appropriate message template for action type: {}", action_type ) messaging_method = get_messaging_method(service_type) message: Optional[Union[EmailForActionType, str]] = None @@ -153,7 +152,7 @@ def dispatch_message( ) else: logger.error( - "Notification service type is not valid: %s", + "Notification service type is not valid: {}", CONFIG.notifications.notification_service_type, ) raise MessageDispatchException( @@ -161,21 +160,21 @@ def dispatch_message( ) messaging_service: MessagingServiceType = messaging_config.service_type # type: ignore logger.info( - "Retrieving appropriate dispatcher for email service: %s", messaging_service + "Retrieving 
appropriate dispatcher for email service: {}", messaging_service ) dispatcher: Optional[ Callable[[MessagingConfig, Any, Optional[str]], None] ] = _get_dispatcher_from_config_type(message_service_type=messaging_service) if not dispatcher: logger.error( - "Dispatcher has not been implemented for message service type: %s", + "Dispatcher has not been implemented for message service type: {}", messaging_service, ) raise MessageDispatchException( f"Dispatcher has not been implemented for message service type: {messaging_service}" ) logger.info( - "Starting message dispatch for messaging service with action type: %s", + "Starting message dispatch for messaging service with action type: {}", action_type, ) dispatcher( @@ -227,7 +226,7 @@ def _build_sms( # pylint: disable=too-many-return-statements if body_params.rejection_reason: return f"Your privacy request has been denied for the following reason: {body_params.rejection_reason}" return "Your privacy request has been denied." - logger.error("Message action type %s is not implemented", action_type) + logger.error("Message action type {} is not implemented", action_type) raise MessageDispatchException( f"Message action type {action_type} is not implemented" ) @@ -309,7 +308,7 @@ def _build_email( # pylint: disable=too-many-return-statements {"rejection_reason": body_params.rejection_reason} ), ) - logger.error("Message action type %s is not implemented", action_type) + logger.error("Message action type {} is not implemented", action_type) raise MessageDispatchException( f"Message action type {action_type} is not implemented" ) @@ -363,13 +362,13 @@ def _mailgun_dispatcher( ) if not response.ok: logger.error( - "Email failed to send with status code: %s", response.status_code + "Email failed to send with status code: {}", response.status_code ) raise MessageDispatchException( f"Email failed to send with status code {response.status_code}" ) except Exception as e: - logger.error("Email failed to send: %s", Pii(str(e))) + 
logger.error("Email failed to send: {}", Pii(str(e))) raise MessageDispatchException(f"Email failed to send due to: {Pii(e)}") @@ -414,5 +413,5 @@ def _twilio_sms_dispatcher( "Message failed to send. Either sender phone number or messaging service sid must be provided." ) except TwilioRestException as e: - logger.error("Twilio SMS failed to send: %s", Pii(str(e))) + logger.error("Twilio SMS failed to send: {}", Pii(str(e))) raise MessageDispatchException(f"Twilio SMS failed to send due to: {Pii(e)}") diff --git a/src/fides/api/ops/service/messaging/messaging_crud_service.py b/src/fides/api/ops/service/messaging/messaging_crud_service.py index 2a8f204a415..f603b9cacea 100644 --- a/src/fides/api/ops/service/messaging/messaging_crud_service.py +++ b/src/fides/api/ops/service/messaging/messaging_crud_service.py @@ -1,6 +1,5 @@ -import logging - from fideslang.validation import FidesKey +from loguru import logger from sqlalchemy.orm import Session from fides.api.ops.common_exceptions import MessagingConfigNotFoundException @@ -10,8 +9,6 @@ MessagingConfigResponse, ) -logger = logging.getLogger(__name__) - def update_messaging_config( db: Session, key: FidesKey, config: MessagingConfigRequest @@ -49,7 +46,7 @@ def create_or_update_messaging_config( def delete_messaging_config(db: Session, key: FidesKey) -> None: - logger.info("Finding messaging config with key '%s'", key) + logger.info("Finding messaging config with key '{}'", key) messaging_config: MessagingConfig = MessagingConfig.get_by( db, field="key", value=key ) @@ -57,7 +54,7 @@ def delete_messaging_config(db: Session, key: FidesKey) -> None: raise MessagingConfigNotFoundException( f"No messaging config found with key {key}" ) - logger.info("Deleting messaging config with key '%s'", key) + logger.info("Deleting messaging config with key '{}'", key) messaging_config.delete(db) diff --git a/src/fides/api/ops/service/pagination/pagination_strategy_factory.py 
b/src/fides/api/ops/service/pagination/pagination_strategy_factory.py index eb183db3053..8c2c97b0789 100644 --- a/src/fides/api/ops/service/pagination/pagination_strategy_factory.py +++ b/src/fides/api/ops/service/pagination/pagination_strategy_factory.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging from enum import Enum from typing import TYPE_CHECKING, Any, Dict, List @@ -22,8 +21,6 @@ from fides.api.ops.schemas.saas.strategy_configuration import StrategyConfiguration from fides.api.ops.service.pagination.pagination_strategy import PaginationStrategy -logger = logging.getLogger(__name__) - class SupportedPaginationStrategies(Enum): """ diff --git a/src/fides/api/ops/service/pagination/pagination_strategy_link.py b/src/fides/api/ops/service/pagination/pagination_strategy_link.py index 72014ebffa9..29487283f0a 100644 --- a/src/fides/api/ops/service/pagination/pagination_strategy_link.py +++ b/src/fides/api/ops/service/pagination/pagination_strategy_link.py @@ -1,9 +1,9 @@ -import logging from typing import Any, Dict, Optional from urllib import parse from urllib.parse import urlsplit import pydash +from loguru import logger from requests import Response from fides.api.ops.schemas.saas.shared_schemas import SaaSRequestParams @@ -14,8 +14,6 @@ from fides.api.ops.service.pagination.pagination_strategy import PaginationStrategy from fides.api.ops.util.logger import Pii -logger = logging.getLogger(__name__) - class LinkPaginationStrategy(PaginationStrategy): @@ -58,7 +56,7 @@ def get_next_request( updated_path = urlsplit(next_link).path updated_query_params = dict(parse.parse_qsl(urlsplit(next_link).query)) logger.debug( - "Replacing path with %s and query params with %s", + "Replacing path with {} and query params with {}", updated_path, Pii(updated_query_params), ) diff --git a/src/fides/api/ops/service/pagination/pagination_strategy_offset.py b/src/fides/api/ops/service/pagination/pagination_strategy_offset.py index 7a9529e6f90..177d22ab768 
100644 --- a/src/fides/api/ops/service/pagination/pagination_strategy_offset.py +++ b/src/fides/api/ops/service/pagination/pagination_strategy_offset.py @@ -1,7 +1,7 @@ -import logging from typing import Any, Dict, Optional, Union import pydash +from loguru import logger from requests import Response from fides.api.ops.common_exceptions import FidesopsException @@ -14,8 +14,6 @@ ) from fides.api.ops.service.pagination.pagination_strategy import PaginationStrategy -logger = logging.getLogger(__name__) - class OffsetPaginationStrategy(PaginationStrategy): diff --git a/src/fides/api/ops/service/privacy_request/request_runner_service.py b/src/fides/api/ops/service/privacy_request/request_runner_service.py index 3908db44911..d2f0d5a33bc 100644 --- a/src/fides/api/ops/service/privacy_request/request_runner_service.py +++ b/src/fides/api/ops/service/privacy_request/request_runner_service.py @@ -1,13 +1,12 @@ -import logging import random from datetime import datetime, timedelta from typing import Any, Dict, List, Optional, Set, Tuple import requests -from celery.utils.log import get_task_logger from fideslib.db.session import get_db_session from fideslib.models.audit_log import AuditLog, AuditLogAction from fideslib.schemas.base_class import BaseSchema +from loguru import logger from pydantic import ValidationError from redis.exceptions import DataError from sqlalchemy.orm import Session @@ -81,7 +80,6 @@ from fides.ctl.core.config import get_config CONFIG = get_config() -logger = get_task_logger(__name__) class ManualWebhookResults(BaseSchema): @@ -150,8 +148,8 @@ def run_webhooks_and_report_status( try: privacy_request.trigger_policy_webhook(webhook) except PrivacyRequestPaused: - logging.info( - "Pausing execution of privacy request %s. Halt instruction received from webhook %s.", + logger.info( + "Pausing execution of privacy request {}. 
Halt instruction received from webhook {}.", privacy_request.id, webhook.key, ) @@ -159,8 +157,8 @@ def run_webhooks_and_report_status( initiate_paused_privacy_request_followup(privacy_request) return False except ClientUnsuccessfulException as exc: - logging.error( - "Privacy Request '%s' exited after response from webhook '%s': %s.", + logger.error( + "Privacy Request '{}' exited after response from webhook '{}': {}.", privacy_request.id, webhook.key, Pii(str(exc.args[0])), @@ -169,8 +167,8 @@ def run_webhooks_and_report_status( privacy_request.cache_failed_checkpoint_details(current_step) return False except ValidationError: - logging.error( - "Privacy Request '%s' errored due to response validation error from webhook '%s'.", + logger.error( + "Privacy Request '{}' errored due to response validation error from webhook '{}'.", privacy_request.id, webhook.key, ) @@ -193,7 +191,7 @@ def upload_access_results( # pylint: disable=R0912 """Process the data uploads after the access portion of the privacy request has completed""" download_urls: List[str] = [] if not access_result: - logging.info("No results returned for access request %s", privacy_request.id) + logger.info("No results returned for access request {}", privacy_request.id) for rule in policy.get_rules_for_action( # pylint: disable=R1702 action_type=ActionType.access @@ -217,8 +215,8 @@ def upload_access_results( # pylint: disable=R0912 manual_data ) # Add manual data directly to each upload packet - logging.info( - "Starting access request upload for rule %s for privacy request %s", + logger.info( + "Starting access request upload for rule {} for privacy request {}", rule.key, privacy_request.id, ) @@ -232,8 +230,8 @@ def upload_access_results( # pylint: disable=R0912 if download_url: download_urls.append(download_url) except common_exceptions.StorageUploadError as exc: - logging.error( - "Error uploading subject access data for rule %s on policy %s and privacy request %s : %s", + logger.error( + "Error 
uploading subject access data for rule {} on policy {} and privacy request {} : {}", rule.key, policy.key, privacy_request.id, @@ -263,7 +261,7 @@ def queue_privacy_request( ) except DataError: logger.debug( - "Error tracking task_id for request with id %s", privacy_request_id + "Error tracking task_id for request with id {}", privacy_request_id ) return task.task_id @@ -290,16 +288,16 @@ async def run_privacy_request( """ resume_step: Optional[CurrentStep] = CurrentStep(from_step) if from_step else None # type: ignore if from_step: - logger.info("Resuming privacy request from checkpoint: '%s'", from_step) + logger.info("Resuming privacy request from checkpoint: '{}'", from_step) with self.session as session: privacy_request = PrivacyRequest.get(db=session, object_id=privacy_request_id) if privacy_request.status == PrivacyRequestStatus.canceled: - logging.info( - "Terminating privacy request %s: request canceled.", privacy_request.id + logger.info( + "Terminating privacy request {}: request canceled.", privacy_request.id ) return - logging.info("Dispatching privacy request %s", privacy_request.id) + logger.info("Dispatching privacy request {}", privacy_request.id) privacy_request.start_processing(session) policy = privacy_request.policy @@ -446,7 +444,7 @@ async def run_privacy_request( }, ) privacy_request.status = PrivacyRequestStatus.complete - logging.info("Privacy request %s run completed.", privacy_request.id) + logger.info("Privacy request {} run completed.", privacy_request.id) privacy_request.save(db=session) @@ -510,14 +508,14 @@ def mark_paused_privacy_request_as_expired(privacy_request_id: str) -> None: privacy_request = PrivacyRequest.get(db=db, object_id=privacy_request_id) if not privacy_request: logger.info( - "Attempted to mark as expired. No privacy request with id '%s' found.", + "Attempted to mark as expired. 
No privacy request with id '{}' found.", privacy_request_id, ) db.close() return if privacy_request.status == PrivacyRequestStatus.paused: logger.error( - "Privacy request '%s' has expired. Please resubmit information.", + "Privacy request '{}' has expired. Please resubmit information.", privacy_request.id, ) privacy_request.error_processing(db=db) @@ -541,7 +539,7 @@ def _retrieve_child_results( # pylint: disable=R0911 connector = FidesConnector(fides_connector[1]) except Exception as e: logger.error( - "Error create client for child server %s: %s", fides_connector[0], e + "Error creating client for child server {}: {}", fides_connector[0], e ) return None @@ -551,13 +549,13 @@ def _retrieve_child_results( # pylint: disable=R0911 address = CollectionAddress.from_string(key) if address.dataset == fides_connector[0]: if not rows: - logger.info("No rows found for result entry %s", key) + logger.info("No rows found for result entry {}", key) continue privacy_request_id = rows[0]["id"] if not privacy_request_id: logger.error( - "No privacy request found for connector key %s", fides_connector[0] + "No privacy request found for connector key {}", fides_connector[0] ) continue @@ -565,7 +563,7 @@ def _retrieve_child_results( # pylint: disable=R0911 client = connector.create_client() except requests.exceptions.HTTPError as e: logger.error( - "Error logging into to child server for privacy request %s: %s", + "Error logging in to child server for privacy request {}: {}", privacy_request_id, e, ) @@ -580,7 +578,7 @@ def _retrieve_child_results( # pylint: disable=R0911 response = client.session.send(request) except requests.exceptions.HTTPError as e: logger.error( - "Error retrieving data from child server for privacy request %s: %s", + "Error retrieving data from child server for privacy request {}: {}", privacy_request_id, e, ) @@ -588,7 +586,7 @@ def _retrieve_child_results( # pylint: disable=R0911 if response.status_code != 200: logger.error( - "Error retrieving data from
child server for privacy request %s: %s", + "Error retrieving data from child server for privacy request {}: {}", privacy_request_id, response.json(), ) diff --git a/src/fides/api/ops/service/privacy_request/request_service.py b/src/fides/api/ops/service/privacy_request/request_service.py index 8de007b9085..a7e54d745ec 100644 --- a/src/fides/api/ops/service/privacy_request/request_service.py +++ b/src/fides/api/ops/service/privacy_request/request_service.py @@ -1,11 +1,11 @@ from __future__ import annotations -import logging from asyncio import sleep from datetime import datetime from typing import Any, Dict, List, Optional, Set from httpx import AsyncClient +from loguru import logger from fides.api.ops.api.v1.urn_registry import PRIVACY_REQUESTS, V1_URL_PREFIX from fides.api.ops.models.policy import ActionType, Policy @@ -18,7 +18,6 @@ from fides.ctl.core.config import get_config CONFIG = get_config() -logger = logging.getLogger(__name__) def build_required_privacy_request_kwargs( @@ -50,12 +49,12 @@ def cache_data( ) -> None: """Cache privacy request data""" # Store identity and encryption key in the cache - logger.info("Caching identity for privacy request %s", privacy_request.id) + logger.info("Caching identity for privacy request {}", privacy_request.id) privacy_request.cache_identity(identity) privacy_request.cache_encryption(encryption_key) # handles None already # Store masking secrets in the cache - logger.info("Caching masking secrets for privacy request %s", privacy_request.id) + logger.info("Caching masking secrets for privacy request {}", privacy_request.id) erasure_rules = policy.get_rules_for_action(action_type=ActionType.erasure) unique_masking_strategies_by_name: Set[str] = set() for rule in erasure_rules: diff --git a/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_factory.py b/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_factory.py index 1b9129c7248..0a58b63ae81 100644 
--- a/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_factory.py +++ b/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_factory.py @@ -1,4 +1,3 @@ -import logging from enum import Enum from typing import Any, Dict, List @@ -17,8 +16,6 @@ UnwrapPostProcessorStrategy, ) -logger = logging.getLogger(__name__) - class SupportedPostProcessorStrategies(Enum): """ diff --git a/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_filter.py b/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_filter.py index 48689d87d0e..0374fc46ed6 100644 --- a/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_filter.py +++ b/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_filter.py @@ -1,7 +1,7 @@ -import logging from typing import Any, Dict, List, Union import pydash +from loguru import logger from fides.api.ops.common_exceptions import FidesopsException from fides.api.ops.schemas.saas.shared_schemas import IdentityParamRef @@ -12,8 +12,6 @@ PostProcessorStrategy, ) -logger = logging.getLogger(__name__) - class FilterPostProcessorStrategy(PostProcessorStrategy): """ @@ -70,7 +68,7 @@ def process( if isinstance(self.value, IdentityParamRef): if identity_data is None or identity_data.get(self.value.identity) is None: logger.warning( - "Could not retrieve identity reference '%s' due to missing identity data for the following post processing strategy: %s", + "Could not retrieve identity reference '{}' due to missing identity data for the following post processing strategy: {}", self.value.identity, self.name, ) @@ -101,7 +99,7 @@ def process( ) except KeyError: logger.warning( - "%s could not be found on data for the following post processing strategy: %s", + "{} could not be found on data for the following post processing strategy: {}", self.field, self.name, ) diff --git 
a/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_unwrap.py b/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_unwrap.py index 2f7f735d08d..280c94be3e5 100644 --- a/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_unwrap.py +++ b/src/fides/api/ops/service/processors/post_processor_strategy/post_processor_strategy_unwrap.py @@ -1,7 +1,7 @@ -import logging from typing import Any, Dict, List, Union import pydash +from loguru import logger from fides.api.ops.schemas.saas.strategy_configuration import ( UnwrapPostProcessorConfiguration, @@ -10,8 +10,6 @@ PostProcessorStrategy, ) -logger = logging.getLogger(__name__) - class UnwrapPostProcessorStrategy(PostProcessorStrategy): """ @@ -56,7 +54,7 @@ def process( unwrapped = pydash.get(data, self.data_path) if unwrapped is None: logger.warning( - "%s could not be found for the following post processing strategy: %s", + "{} could not be found for the following post processing strategy: {}", self.data_path, self.name, ) @@ -67,7 +65,7 @@ def process( unwrapped = pydash.get(item, self.data_path) if unwrapped is None: logger.warning( - "%s could not be found for the following post processing strategy: %s", + "{} could not be found for the following post processing strategy: {}", self.data_path, self.name, ) diff --git a/src/fides/api/ops/service/saas_request/override_implementations/authentication_strategy_adobe_campaign.py b/src/fides/api/ops/service/saas_request/override_implementations/authentication_strategy_adobe_campaign.py index 22345c3741d..50d032fe3cc 100644 --- a/src/fides/api/ops/service/saas_request/override_implementations/authentication_strategy_adobe_campaign.py +++ b/src/fides/api/ops/service/saas_request/override_implementations/authentication_strategy_adobe_campaign.py @@ -1,10 +1,10 @@ -import logging import math import time from datetime import datetime, timedelta from typing import Dict, Optional, 
cast from jwt import encode +from loguru import logger from requests import PreparedRequest, post from sqlalchemy.orm import Session @@ -16,8 +16,6 @@ ) from fides.api.ops.util.saas_util import assign_placeholders -logger = logging.getLogger(__name__) - class AdobeCampaignAuthenticationConfiguration(StrategyConfiguration): """ @@ -100,7 +98,7 @@ def add_authentication( } connection_config.update(db, data={"secrets": updated_secrets}) logger.info( - "Successfully updated the access token for %s", + "Successfully updated the access token for {}", connection_config.key, ) else: @@ -117,7 +115,7 @@ def _close_to_expiration( if expires_at is None: logger.info( - "The expires_at value is not defined for %s, skipping token refresh", + "The expires_at value is not defined for {}, skipping token refresh", connection_config.key, ) return False diff --git a/src/fides/api/ops/service/saas_request/override_implementations/domo_request_overrides.py b/src/fides/api/ops/service/saas_request/override_implementations/domo_request_overrides.py index 21198d2d0b8..a60fba85700 100644 --- a/src/fides/api/ops/service/saas_request/override_implementations/domo_request_overrides.py +++ b/src/fides/api/ops/service/saas_request/override_implementations/domo_request_overrides.py @@ -1,4 +1,3 @@ -import logging from json import dumps from typing import Any, Dict, List @@ -18,7 +17,6 @@ from fides.ctl.core.config import get_config CONFIG = get_config() -logger = logging.getLogger(__name__) @register("domo_user_update", [SaaSRequestType.UPDATE]) diff --git a/src/fides/api/ops/service/saas_request/override_implementations/firebase_auth_request_overrides.py b/src/fides/api/ops/service/saas_request/override_implementations/firebase_auth_request_overrides.py index 11d2b69db63..bad52334e74 100644 --- a/src/fides/api/ops/service/saas_request/override_implementations/firebase_auth_request_overrides.py +++ 
b/src/fides/api/ops/service/saas_request/override_implementations/firebase_auth_request_overrides.py @@ -1,4 +1,3 @@ -import logging from typing import Any, Dict, List import firebase_admin @@ -14,8 +13,6 @@ ) from fides.api.ops.util.collection_util import Row -logger = logging.getLogger(__name__) - @register("firebase_auth_user_access", [SaaSRequestType.READ]) def firebase_auth_user_access( # pylint: disable=R0914 diff --git a/src/fides/api/ops/service/saas_request/override_implementations/twilio_request_overrides.py b/src/fides/api/ops/service/saas_request/override_implementations/twilio_request_overrides.py index ba0336eb595..88b641a61de 100644 --- a/src/fides/api/ops/service/saas_request/override_implementations/twilio_request_overrides.py +++ b/src/fides/api/ops/service/saas_request/override_implementations/twilio_request_overrides.py @@ -1,4 +1,3 @@ -import logging from typing import Any, Dict, List import requests @@ -17,7 +16,6 @@ from fides.ctl.core.config import get_config CONFIG = get_config() -logger = logging.getLogger(__name__) @register("twilio_user_update", [SaaSRequestType.UPDATE]) diff --git a/src/fides/api/ops/service/saas_request/saas_request_override_factory.py b/src/fides/api/ops/service/saas_request/saas_request_override_factory.py index ce0d4b62cfe..bb2a41c706a 100644 --- a/src/fides/api/ops/service/saas_request/saas_request_override_factory.py +++ b/src/fides/api/ops/service/saas_request/saas_request_override_factory.py @@ -1,15 +1,15 @@ -import logging from enum import Enum from inspect import Signature, signature from typing import Callable, Dict, List, Union +from loguru import logger + from fides.api.ops.common_exceptions import ( InvalidSaaSRequestOverrideException, NoSuchSaaSRequestOverrideException, ) from fides.api.ops.util.collection_util import Row -logger = logging.getLogger(__name__) # at some point this should likely be formalized more centrally... 
class SaaSRequestType(Enum): @@ -62,7 +62,7 @@ def wrapper( ) -> Callable[..., Union[List[Row], int]]: for request_type in request_types: logger.debug( - "Registering new SaaS request override function '%s' under name '%s' for SaaSRequestType %s", + "Registering new SaaS request override function '{}' under name '{}' for SaaSRequestType {}", override_function.__name__, name, request_type, @@ -84,7 +84,7 @@ def wrapper( if name in cls.registry[request_type]: logger.warning( - "SaaS request override function with name '%s' already exists for SaaSRequestType %s. It previously referred to function '%s', but will now refer to '%s'", + "SaaS request override function with name '{}' already exists for SaaSRequestType {}. It previously referred to function '{}', but will now refer to '{}'", name, request_type, cls.registry[request_type][name], diff --git a/src/fides/api/ops/service/storage/storage_authenticator_service.py b/src/fides/api/ops/service/storage/storage_authenticator_service.py index 67c5701b190..39999a2bda1 100644 --- a/src/fides/api/ops/service/storage/storage_authenticator_service.py +++ b/src/fides/api/ops/service/storage/storage_authenticator_service.py @@ -1,4 +1,3 @@ -import logging from typing import Any, Dict from botocore.exceptions import ClientError @@ -11,8 +10,6 @@ ) from fides.api.ops.util.storage_authenticator import get_s3_session -logger = logging.getLogger(__name__) - def secrets_are_valid( secrets: SUPPORTED_STORAGE_SECRETS, diff --git a/src/fides/api/ops/service/storage/storage_uploader_service.py b/src/fides/api/ops/service/storage/storage_uploader_service.py index df918e322e0..0a51f00cc51 100644 --- a/src/fides/api/ops/service/storage/storage_uploader_service.py +++ b/src/fides/api/ops/service/storage/storage_uploader_service.py @@ -1,6 +1,6 @@ -import logging from typing import Any, Dict, Optional +from loguru import logger from sqlalchemy.orm import Session from fides.api.ops.common_exceptions import StorageUploadError @@ -14,8 +14,6 @@ 
) from fides.api.ops.tasks.storage import upload_to_local, upload_to_s3 -logger = logging.getLogger(__name__) - def upload( db: Session, *, request_id: str, data: Dict, storage_key: FidesOpsKey @@ -33,7 +31,7 @@ def upload( ) if config is None: - logger.warning("Storage type not found: %s", storage_key) + logger.warning("Storage type not found: {}", storage_key) raise StorageUploadError(f"Storage type not found: {storage_key}") uploader: Any = _get_uploader_from_config_type(config.type) # type: ignore return uploader(db, config, data, request_id) diff --git a/src/fides/api/ops/service/strategy.py b/src/fides/api/ops/service/strategy.py index 43ec85cdeec..0ef4f27f158 100644 --- a/src/fides/api/ops/service/strategy.py +++ b/src/fides/api/ops/service/strategy.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging from abc import ABC from typing import Any, Dict, Generic, List, Optional, Type, TypeVar @@ -10,7 +9,6 @@ from fides.api.ops.common_exceptions import ValidationError as FidesopsValidationError from fides.api.ops.schemas.saas.strategy_configuration import StrategyConfiguration -logger = logging.getLogger(__name__) T = TypeVar("T", bound="Strategy") C = TypeVar("C", bound=StrategyConfiguration) diff --git a/src/fides/api/ops/task/filter_element_match.py b/src/fides/api/ops/task/filter_element_match.py index cdab654bdae..05dad77017b 100644 --- a/src/fides/api/ops/task/filter_element_match.py +++ b/src/fides/api/ops/task/filter_element_match.py @@ -1,9 +1,9 @@ import copy -import logging from collections import defaultdict from typing import Any, Dict, List import pydash +from loguru import logger from fides.api.ops.task.refine_target_path import ( DetailedPath, @@ -14,8 +14,6 @@ from fides.api.ops.util.collection_util import FIDESOPS_DO_NOT_MASK_INDEX, Row from fides.api.ops.util.logger import Pii -logger = logging.getLogger(__name__) - def filter_element_match( row: Row, @@ -103,7 +101,7 @@ def _remove_paths_from_row( if matched_array is None: # 
This case shouldn't happen - if this gets logged, we've done something wrong logger.info( - "_remove_paths_from_row call: Path %s in row %s not found.", + "_remove_paths_from_row call: Path {} in row {} not found.", path, Pii(row), ) diff --git a/src/fides/api/ops/task/filter_results.py b/src/fides/api/ops/task/filter_results.py index 67ec9762a85..265263e461e 100644 --- a/src/fides/api/ops/task/filter_results.py +++ b/src/fides/api/ops/task/filter_results.py @@ -1,14 +1,13 @@ import itertools -import logging from collections import defaultdict from typing import Any, Dict, List, Optional, Set, Union +from loguru import logger + from fides.api.ops.graph.config import CollectionAddress, FieldPath from fides.api.ops.schemas.shared_schemas import FidesOpsKey from fides.api.ops.util.collection_util import Row -logger = logging.getLogger(__name__) - def filter_data_categories( access_request_results: Dict[str, List[Dict[str, Optional[Any]]]], @@ -177,7 +176,7 @@ def unpack_fides_connector_results( rule_results = results[rule_key] except KeyError: logger.error( - "Did not find a result entry on Fides connector %s for rule %s", + "Did not find a result entry on Fides connector {} for rule {}", node_address, rule_key, ) @@ -196,5 +195,5 @@ def unpack_fides_connector_results( filtered_access_results[key] = value # type: ignore else: if value: - logger.info("Appending child rows to %s", key) + logger.info("Appending child rows to {}", key) filtered.extend(value) # type: ignore diff --git a/src/fides/api/ops/task/graph_task.py b/src/fides/api/ops/task/graph_task.py index 2e1ec0a2b34..21d46c8311c 100644 --- a/src/fides/api/ops/task/graph_task.py +++ b/src/fides/api/ops/task/graph_task.py @@ -1,5 +1,4 @@ import copy -import logging import traceback from abc import ABC from functools import wraps @@ -9,6 +8,7 @@ import dask from dask import delayed # type: ignore[attr-defined] from dask.threaded import get +from loguru import logger from sqlalchemy.orm import Session from 
fides.api.ops.common_exceptions import ( @@ -45,8 +45,6 @@ from fides.api.ops.util.saas_util import FIDESOPS_GROUPED_INPUTS from fides.ctl.core.config import get_config -logger = logging.getLogger(__name__) - dask.config.set(scheduler="threads") COLLECTION_FIELD_PATH_MAP = Dict[CollectionAddress, List[Tuple[FieldPath, FieldPath]]] @@ -87,7 +85,7 @@ def result(*args: Any, **kwargs: Any) -> Any: return func(*args, **kwargs) except PrivacyRequestPaused as ex: logger.warning( - "Privacy request %s paused %s", + "Privacy request {} paused {}", method_name, self.traversal_node.address, ) @@ -102,7 +100,7 @@ def result(*args: Any, **kwargs: Any) -> Any: return 0 except CollectionDisabled as exc: logger.warning( - "Skipping disabled collection %s for privacy_request: %s", + "Skipping disabled collection {} for privacy_request: {}", self.traversal_node.address, self.resources.request.id, ) @@ -111,7 +109,7 @@ def result(*args: Any, **kwargs: Any) -> Any: except BaseException as ex: # pylint: disable=W0703 func_delay *= CONFIG.execution.task_retry_backoff logger.warning( - "Retrying %s %s in %s seconds...", + "Retrying {} {} in {} seconds...", method_name, self.traversal_node.address, func_delay, @@ -268,7 +266,7 @@ def pre_process_input_data( """ if not len(data) == len(self.input_keys): logger.warning( - "%s expected %s input keys, received %s", + "{} expected {} input keys, received {}", self, len(self.input_keys), len(data), @@ -293,7 +291,7 @@ def pre_process_input_data( continue logger.info( - "Consolidating incoming data into %s from %s.", + "Consolidating incoming data into {} from {}.", self.traversal_node.node.address, collection_address, ) @@ -349,7 +347,7 @@ def update_status( def log_start(self, action_type: ActionType) -> None: """Task start activities""" logger.info( - "Starting %s, traversal_node %s", self.resources.request.id, self.key + "Starting {}, traversal_node {}", self.resources.request.id, self.key ) self.update_status( @@ -358,19 +356,19 @@ def 
log_start(self, action_type: ActionType) -> None: def log_retry(self, action_type: ActionType) -> None: """Task retry activities""" - logger.info("Retrying %s, node %s", self.resources.request.id, self.key) + logger.info("Retrying {}, node {}", self.resources.request.id, self.key) self.update_status("retrying", [], action_type, ExecutionLogStatus.retrying) def log_paused(self, action_type: ActionType, ex: Optional[BaseException]) -> None: """On paused activities""" - logger.info("Pausing %s, node %s", self.resources.request.id, self.key) + logger.info("Pausing {}, node {}", self.resources.request.id, self.key) self.update_status(str(ex), [], action_type, ExecutionLogStatus.paused) def log_skipped(self, action_type: ActionType, ex: str) -> None: """Log that a collection was skipped. For now, this is because a collection has been disabled.""" - logger.info("Skipping %s, node %s", self.resources.request.id, self.key) + logger.info("Skipping {}, node {}", self.resources.request.id, self.key) self.update_status(str(ex), [], action_type, ExecutionLogStatus.skipped) @@ -384,14 +382,14 @@ def log_end( if ex: traceback.print_exc() logger.warning( - "Ending %s, %s with failure %s", + "Ending {}, {} with failure {}", self.resources.request.id, self.key, Pii(ex), ) self.update_status(str(ex), [], action_type, ExecutionLogStatus.error) else: - logger.info("Ending %s, %s", self.resources.request.id, self.key) + logger.info("Ending {}, {}", self.resources.request.id, self.key) self.update_status( str(success_override_msg) if success_override_msg else "success", build_affected_field_logs( @@ -470,7 +468,7 @@ def access_results_post_processing( # For access request results, cache results with non-matching array elements *removed* for row in output: logger.info( - "Filtering row in %s for matching array elements.", + "Filtering row in {} for matching array elements.", self.traversal_node.node.address, ) filter_element_match(row, post_processed_node_input_data) @@ -513,7 +511,7 @@ 
def erasure_request(self, retrieved_data: List[Row], *inputs: List[Row]) -> int: # note this in the execution log and perform no erasures on this node if not self.traversal_node.node.contains_field(lambda f: f.primary_key): logger.warning( - "No erasures on %s as there is no primary_key defined.", + "No erasures on {} as there is no primary_key defined.", self.traversal_node.node.address, ) self.update_status( @@ -526,7 +524,7 @@ def erasure_request(self, retrieved_data: List[Row], *inputs: List[Row]) -> int: if not self.can_write_data(): logger.warning( - "No erasures on %s as its ConnectionConfig does not have write access.", + "No erasures on {} as its ConnectionConfig does not have write access.", self.traversal_node.node.address, ) self.update_status( diff --git a/src/fides/api/ops/task/refine_target_path.py b/src/fides/api/ops/task/refine_target_path.py index 7d0ad7b97f5..b5c1aa86dc3 100644 --- a/src/fides/api/ops/task/refine_target_path.py +++ b/src/fides/api/ops/task/refine_target_path.py @@ -1,6 +1,7 @@ -import logging from typing import Any, Dict, List, Optional, Union +from loguru import logger + from fides.api.ops.graph.config import FieldPath from fides.api.ops.util.collection_util import FIDESOPS_DO_NOT_MASK_INDEX, Row @@ -9,7 +10,6 @@ ] # A specific level along the path to a resource. Can be a dictionary key or an array index. 
DetailedPath = List[Level] # A more detailed field path, potentially containing indices FieldPathNodeInput = Dict[FieldPath, Optional[List[Any]]] -logger = logging.getLogger(__name__) def join_detailed_path(detailed_path: DetailedPath) -> str: diff --git a/src/fides/api/ops/task/task_resources.py b/src/fides/api/ops/task/task_resources.py index 6ba222609f6..f3f4cc893da 100644 --- a/src/fides/api/ops/task/task_resources.py +++ b/src/fides/api/ops/task/task_resources.py @@ -1,6 +1,6 @@ -import logging from typing import Any, Dict, List, Optional +from loguru import logger from sqlalchemy.orm import Session from fides.api.ops.common_exceptions import ConnectorNotFoundException @@ -32,8 +32,6 @@ from fides.api.ops.util.cache import get_cache from fides.api.ops.util.collection_util import Row -logger = logging.getLogger(__name__) - class Connections: """Temporary container for connections. This will be replaced.""" @@ -192,5 +190,5 @@ def get_connector(self, key: FidesOpsKey) -> Any: def close(self) -> None: """Close any held resources""" - logger.debug("Closing all task resources for %s", self.request.id) + logger.debug("Closing all task resources for {}", self.request.id) self.connections.close() diff --git a/src/fides/api/ops/tasks/storage.py b/src/fides/api/ops/tasks/storage.py index b1bd3261992..ceb3c8531bd 100644 --- a/src/fides/api/ops/tasks/storage.py +++ b/src/fides/api/ops/tasks/storage.py @@ -1,7 +1,6 @@ from __future__ import annotations import json -import logging import os import secrets import zipfile @@ -13,6 +12,7 @@ from boto3 import Session from botocore.exceptions import ClientError, ParamValidationError from fideslib.cryptography.cryptographic_util import bytes_to_b64_str +from loguru import logger from fides.api.ops.schemas.storage.storage import ( ResponseFormat, @@ -26,8 +26,6 @@ from fides.api.ops.util.storage_authenticator import get_s3_session from fides.ctl.core.config import get_config -logger = logging.getLogger(__name__) - CONFIG = 
get_config() LOCAL_FIDES_UPLOAD_DIRECTORY = "fides_uploads" @@ -126,7 +124,7 @@ def upload_to_s3( # pylint: disable=R0913 auth_method: S3AuthMethod, ) -> str: """Uploads arbitrary data to s3 returned from an access request""" - logger.info("Starting S3 Upload of %s", file_key) + logger.info("Starting S3 Upload of {}", file_key) try: my_session = get_s3_session(auth_method, storage_secrets) @@ -140,7 +138,7 @@ def upload_to_s3( # pylint: disable=R0913 Key=file_key, ) except Exception as e: - logger.error("Encountered error while uploading s3 object: %s", e) + logger.error("Encountered error while uploading s3 object: {}", e) raise e presigned_url: str = create_presigned_url_for_s3( @@ -150,7 +148,7 @@ def upload_to_s3( # pylint: disable=R0913 return presigned_url except ClientError as e: logger.error( - "Encountered error while uploading and generating link for s3 object: %s", e + "Encountered error while uploading and generating link for s3 object: {}", e ) raise e except ParamValidationError as e: diff --git a/src/fides/api/ops/util/encryption/secrets_util.py b/src/fides/api/ops/util/encryption/secrets_util.py index 88369610beb..6b9bf285203 100644 --- a/src/fides/api/ops/util/encryption/secrets_util.py +++ b/src/fides/api/ops/util/encryption/secrets_util.py @@ -1,7 +1,8 @@ -import logging import secrets from typing import Dict, List, Optional, TypeVar +from loguru import logger + from fides.api.ops.schemas.masking.masking_secrets import ( MaskingSecretCache, MaskingSecretMeta, @@ -10,7 +11,6 @@ from fides.api.ops.util.cache import get_cache, get_masking_secret_cache_key T = TypeVar("T") -logger = logging.getLogger(__name__) class SecretsUtil: @@ -26,7 +26,7 @@ def get_or_generate_secret( ) if not secret: logger.warning( - "Secret type %s expected from cache but was not present for masking strategy %s", + "Secret type {} expected from cache but was not present for masking strategy {}", secret_type, masking_secret_meta.masking_strategy, ) diff --git 
a/src/fides/api/ops/util/identity_verification.py b/src/fides/api/ops/util/identity_verification.py index cf07c0996a8..3b1613f7b05 100644 --- a/src/fides/api/ops/util/identity_verification.py +++ b/src/fides/api/ops/util/identity_verification.py @@ -1,11 +1,11 @@ -import logging from typing import Optional +from loguru import logger + from fides.api.ops.common_exceptions import IdentityVerificationException from fides.api.ops.util.cache import FidesopsRedis, get_cache from fides.ctl.core.config import get_config -logger = logging.getLogger(__name__) CONFIG = get_config() @@ -69,7 +69,7 @@ def _get_cached_verification_code_attempt_count(self) -> int: def purge_verification_code(self) -> None: """Removes any verification codes from the cache so they can no longer be used.""" logger.debug( - "Removing cached identity verification code for record with ID: %s", + "Removing cached identity verification code for record with ID: {}", self.id, # type: ignore ) cache = get_cache() @@ -87,7 +87,7 @@ def _verify_identity(self, provided_code: str) -> None: attempt_count: int = self._get_cached_verification_code_attempt_count() if attempt_count >= CONFIG.security.identity_verification_attempt_limit: logger.debug( - "Failed identity verification attempt limit exceeded for record with ID: %s", + "Failed identity verification attempt limit exceeded for record with ID: {}", self.id, # type: ignore ) # When the attempt_count we can remove the verification code entirely diff --git a/src/fides/api/ops/util/logger.py b/src/fides/api/ops/util/logger.py index 6c87b9d379c..40c2f271a4d 100644 --- a/src/fides/api/ops/util/logger.py +++ b/src/fides/api/ops/util/logger.py @@ -1,71 +1,34 @@ from __future__ import annotations -import logging -import os -from typing import Any, Mapping, Union +from loguru import logger + +from fides.ctl.core.config import get_config MASKED = "MASKED" +CONFIG = get_config() + class Pii(str): """Mask pii data""" - -def get_fides_log_record_factory() -> Any: - 
"""intercepts default LogRecord for custom handling of params""" - - def factory( # pylint: disable=R0913 - name: str, - level: int, - fn: str, - lno: int, - msg: str, - args: Union[tuple[Any, ...], Mapping[str, Any]], - exc_info: Any, - func: str = None, - sinfo: str = None, - ) -> logging.LogRecord: - env_log_pii: bool = os.getenv("FIDESOPS__LOG_PII", "").lower() == "true" - new_args = args - if not env_log_pii and not name.startswith("uvicorn"): - new_args = tuple(_mask_pii_for_logs(arg) for arg in args) - return logging.LogRecord( - name=name, - level=level, - pathname=fn, - lineno=lno, - msg=msg, - args=new_args, - exc_info=exc_info, - func=func, - sinfo=sinfo, - ) - - return factory - - -def _mask_pii_for_logs(parameter: Any) -> Any: - """ - :param parameter: param that contains possible pii - :return: depending on ENV config, returns masked pii param. - - Logging args must be specifically wrapped in Pii in order to mask. - - """ - return MASKED if isinstance(parameter, Pii) else parameter + def __format__(self, __format_spec: str) -> str: + if CONFIG.logging.log_pii: + return super().__format__(__format_spec) + return MASKED def _log_exception(exc: BaseException, dev_mode: bool = False) -> None: """If dev mode, log the entire traceback""" if dev_mode: - logging.error(exc, exc_info=True) + logger.opt(exception=True).error(exc) else: - logging.error(exc) + logger.error(exc) def _log_warning(exc: BaseException, dev_mode: bool = False) -> None: """If dev mode, log the entire traceback""" if dev_mode: - logging.warning(exc, exc_info=True) + logger.opt(exception=True).warning(exc) else: - logging.warning(exc) + logger.warning(exc) diff --git a/src/fides/api/ops/util/saas_util.py b/src/fides/api/ops/util/saas_util.py index 0d35097ce2f..0f8be06332a 100644 --- a/src/fides/api/ops/util/saas_util.py +++ b/src/fides/api/ops/util/saas_util.py @@ -1,7 +1,6 @@ from __future__ import annotations import json -import logging import re from collections import defaultdict from
functools import reduce @@ -17,8 +16,6 @@ from fides.api.ops.schemas.saas.saas_config import SaaSRequest from fides.api.ops.schemas.saas.shared_schemas import SaaSRequestParams -logger = logging.getLogger(__name__) - FIDESOPS_GROUPED_INPUTS = "fidesops_grouped_inputs" PRIVACY_REQUEST_ID = "privacy_request_id" MASKED_OBJECT_FIELDS = "masked_object_fields" diff --git a/src/fides/api/ops/util/storage_authenticator.py b/src/fides/api/ops/util/storage_authenticator.py index 4b4594e8ce1..a7ac50c08c2 100644 --- a/src/fides/api/ops/util/storage_authenticator.py +++ b/src/fides/api/ops/util/storage_authenticator.py @@ -1,13 +1,11 @@ -import logging from typing import Any, Dict from boto3 import Session +from loguru import logger from fides.api.ops.common_exceptions import StorageUploadError from fides.api.ops.schemas.storage.storage import S3AuthMethod, StorageSecrets -logger = logging.getLogger(__name__) - def get_s3_session( auth_method: S3AuthMethod, storage_secrets: Dict[StorageSecrets, Any] diff --git a/src/fides/ctl/core/config/database_settings.py b/src/fides/ctl/core/config/database_settings.py index ab0e0658556..16ee8230b98 100644 --- a/src/fides/ctl/core/config/database_settings.py +++ b/src/fides/ctl/core/config/database_settings.py @@ -2,7 +2,6 @@ # pylint: disable=C0115,C0116, E0213 -import logging from typing import Dict, Optional from fideslib.core.config import FidesSettings @@ -10,8 +9,6 @@ from fides.ctl.core.config.utils import get_test_mode -logger = logging.getLogger(__name__) - ENV_PREFIX = "FIDES__DATABASE__" diff --git a/src/fides/ctl/core/config/logging_settings.py b/src/fides/ctl/core/config/logging_settings.py index 76f0daab001..21068f495ac 100644 --- a/src/fides/ctl/core/config/logging_settings.py +++ b/src/fides/ctl/core/config/logging_settings.py @@ -19,6 +19,7 @@ class LoggingSettings(FidesSettings): destination: str = "" level: str = "INFO" serialization: str = "" + log_pii: bool = False @validator("destination", pre=True) @classmethod diff 
--git a/src/fides/ctl/core/config/notification_settings.py b/src/fides/ctl/core/config/notification_settings.py index efd0e71390c..334b764b1e6 100644 --- a/src/fides/ctl/core/config/notification_settings.py +++ b/src/fides/ctl/core/config/notification_settings.py @@ -1,12 +1,9 @@ -import logging from typing import Optional from pydantic import validator from .fides_settings import FidesSettings -logger = logging.getLogger(__name__) - ENV_PREFIX = "FIDES__NOTIFICATIONS__" diff --git a/src/fides/ctl/core/utils.py b/src/fides/ctl/core/utils.py index 0a244d5b56a..9dbf5ed7437 100644 --- a/src/fides/ctl/core/utils.py +++ b/src/fides/ctl/core/utils.py @@ -1,6 +1,5 @@ """Utils to help with API calls.""" import glob -import logging import re from functools import partial from hashlib import sha1 @@ -14,12 +13,13 @@ import sqlalchemy from fideslang.models import DatasetField, FidesModel from fideslang.validation import FidesValidationError +from loguru import logger from sqlalchemy.engine import Engine from sqlalchemy.exc import SQLAlchemyError from fides.ctl.connectors.models import ConnectorAuthFailureException -logger = logging.getLogger("server_api") +logger = logger.bind(name="server_api") echo_red = partial(click.secho, fg="red", bold=True) echo_green = partial(click.secho, fg="green", bold=True) diff --git a/tests/ops/util/test_logger.py b/tests/ops/util/test_logger.py index 49a7b7c12e1..75c98ffbc80 100644 --- a/tests/ops/util/test_logger.py +++ b/tests/ops/util/test_logger.py @@ -2,25 +2,37 @@ import pytest -from fides.api.ops.util import logger from fides.api.ops.util.logger import MASKED, Pii +from fides.ctl.core.config import get_config + +CONFIG = get_config() + + +@pytest.fixture(scope="function") +def log_pii_true() -> None: + original_value = CONFIG.logging.log_pii + CONFIG.logging.log_pii = True + yield + CONFIG.logging.log_pii = original_value @pytest.fixture(scope="function") -def toggle_testing_envvar() -> None: - original_value = os.getenv("TESTING") - del 
os.environ["TESTING"] +def log_pii_false() -> None: + original_value = CONFIG.logging.log_pii + CONFIG.logging.log_pii = False yield - os.environ["TESTING"] = original_value + CONFIG.logging.log_pii = original_value -def test_logger_masks_pii(toggle_testing_envvar) -> None: +@pytest.mark.usefixtures("log_pii_false") +def test_logger_masks_pii() -> None: some_data = "some_data" - result = logger._mask_pii_for_logs(Pii(some_data)) + result = "{}".format((Pii(some_data))) assert result == MASKED -def test_logger_does_not_mask_by_default(toggle_testing_envvar) -> None: +@pytest.mark.usefixtures("log_pii_true") +def test_logger_doesnt_mask_pii() -> None: some_data = "some_data" - result = logger._mask_pii_for_logs(some_data) - assert result == some_data + result = "{}".format((Pii(some_data))) + assert result == "some_data" From 925e8cc728e94abb415743832e8960567380fdab Mon Sep 17 00:00:00 2001 From: Adam Sachs Date: Tue, 13 Dec 2022 08:24:34 -0500 Subject: [PATCH 3/8] update changelog --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 17fe394fd53..0e75173b3e3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,9 @@ The types of changes are: ## [Unreleased](https://github.com/ethyca/fides/compare/2.2.2...main) +### Changed +* Updated to use `loguru` logging library throughout codebase [#2031](https://github.com/ethyca/fides/pull/2031) + ## [2.2.2](https://github.com/ethyca/fides/compare/2.2.1...2.2.2) From 7a627dd82408580b95844c21252be752a1a63596 Mon Sep 17 00:00:00 2001 From: Adam Sachs Date: Tue, 13 Dec 2022 13:11:33 -0500 Subject: [PATCH 4/8] Remove fstring log messages to allow lazy evaluation --- src/fides/api/main.py | 30 ++++++++++++++++++---------- src/fides/api/ops/tasks/__init__.py | 2 +- src/fides/api/ops/util/oauth_util.py | 4 +++- 3 files changed, 23 insertions(+), 13 deletions(-) diff --git a/src/fides/api/main.py b/src/fides/api/main.py index 80d12b145a7..865d2dd100c 100644 --- 
a/src/fides/api/main.py +++ b/src/fides/api/main.py @@ -193,11 +193,11 @@ async def setup_server() -> None: "Run all of the required setup steps for the webserver." logger.warning( - f"Startup configuration: reloading = {CONFIG.hot_reloading}, dev_mode = {CONFIG.dev_mode}", - ) - logger.warning( - f"Startup configuration: pii logging = {CONFIG.logging.log_pii}", + "Startup configuration: reloading = {}, dev_mode = {}", + CONFIG.hot_reloading, + CONFIG.dev_mode, ) + logger.warning("Startup configuration: pii logging = {}", CONFIG.logging.log_pii) if CONFIG.logging.level == DEBUG: logger.warning( @@ -214,7 +214,7 @@ async def setup_server() -> None: try: create_or_update_parent_user() except Exception as e: - logger.error(f"Error creating parent user: {str(e)}") + logger.error("Error creating parent user: {}", str(e)) raise FidesError(f"Error creating parent user: {str(e)}") logger.info("Validating SaaS connector templates...") @@ -224,7 +224,8 @@ async def setup_server() -> None: update_saas_configs(registry, db) except Exception as e: logger.error( - f"Error occurred during SaaS connector template validation: {str(e)}", + "Error occurred during SaaS connector template validation: {}", + str(e), ) return finally: @@ -235,7 +236,7 @@ async def setup_server() -> None: try: get_cache() except (RedisConnectionError, RedisError, ResponseError) as e: - logger.error(f"Connection to cache failed: {str(e)}") + logger.error("Connection to cache failed: {}", str(e)) return else: logger.debug("Connection to cache succeeded") @@ -308,14 +309,17 @@ def read_other_paths(request: Request) -> Response: # If any of those worked, serve the file. 
if ui_file and ui_file.is_file(): logger.debug( - f"catchall request path '{path}' matched static admin UI file: {ui_file}" + "catchall request path '{}' matched static admin UI file: {}", + path, + ui_file, ) return FileResponse(ui_file) # raise 404 for anything that should be backend endpoint but we can't find it if path.startswith(API_PREFIX[1:]): logger.debug( - f"catchall request path '{path}' matched an invalid API route, return 404" + "catchall request path '{}' matched an invalid API route, return 404", + path, ) raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Item not found" @@ -323,7 +327,8 @@ def read_other_paths(request: Request) -> Response: # otherwise return the index logger.debug( - f"catchall request path '{path}' did not match any admin UI routes, return generic admin UI index" + "catchall request path '{}' did not match any admin UI routes, return generic admin UI index", + path, ) return get_admin_index_as_response() @@ -334,6 +339,9 @@ def start_webserver(port: int = 8080) -> None: server = Server(Config(app, host="0.0.0.0", port=port, log_level=WARNING)) logger.info( - f"Starting webserver - Host: {server.config.host}, Port: {server.config.port}, Log Level: {server.config.log_level}" + "Starting webserver - Host: {}, Port: {}, Log Level: {}", + server.config.host, + server.config.port, + server.config.log_level, ) server.run() diff --git a/src/fides/api/ops/tasks/__init__.py b/src/fides/api/ops/tasks/__init__.py index 166c55a6f4c..c595e9a182e 100644 --- a/src/fides/api/ops/tasks/__init__.py +++ b/src/fides/api/ops/tasks/__init__.py @@ -46,7 +46,7 @@ def _create_celery(config_path: str = CONFIG.execution.celery_config_path) -> Ce try: celery_config_overrides: MutableMapping[str, Any] = load_toml([config_path]) except FileNotFoundError as e: - logger.warning(f"{config_path} could not be loaded: %s", e) + logger.warning("{} could not be loaded: {}", config_path, e) else: celery_config.update(celery_config_overrides) diff --git 
a/src/fides/api/ops/util/oauth_util.py b/src/fides/api/ops/util/oauth_util.py index 18507eb5034..3ab6ecb6c85 100644 --- a/src/fides/api/ops/util/oauth_util.py +++ b/src/fides/api/ops/util/oauth_util.py @@ -153,7 +153,9 @@ async def verify_oauth_client( scopes_required = ",".join(security_scopes.scopes) scopes_provided = ",".join(assigned_scopes) logger.debug( - f"Auth token missing required scopes: {scopes_required}. Scopes provided: {scopes_provided}." + "Auth token missing required scopes: {}. Scopes provided: {}.", + scopes_required, + scopes_provided, ) raise AuthorizationError(detail="Not Authorized for this action") From e8b3339cc7a38cdfd5c84c36e8ecdda3e76463b6 Mon Sep 17 00:00:00 2001 From: Thomas Date: Wed, 14 Dec 2022 11:33:30 +0800 Subject: [PATCH 5/8] fix static checks --- src/fides/api/main.py | 1 - src/fides/api/ops/api/v1/endpoints/oauth_endpoints.py | 1 - src/fides/ctl/core/config/database_settings.py | 1 + 3 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/fides/api/main.py b/src/fides/api/main.py index 45176a6ee9c..917e55d4397 100644 --- a/src/fides/api/main.py +++ b/src/fides/api/main.py @@ -52,7 +52,6 @@ ) from fides.api.ops.tasks.scheduled.scheduler import scheduler from fides.api.ops.util.cache import get_cache -from fides.ctl.core.config import FidesConfig from fides.ctl.core.config import FidesConfig, get_config from fides.ctl.core.config.helpers import check_required_webserver_config_values from fides.lib.oauth.api.routes.user_endpoints import router as user_router diff --git a/src/fides/api/ops/api/v1/endpoints/oauth_endpoints.py b/src/fides/api/ops/api/v1/endpoints/oauth_endpoints.py index 0ca8721c4f9..1771d851d5c 100644 --- a/src/fides/api/ops/api/v1/endpoints/oauth_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/oauth_endpoints.py @@ -3,7 +3,6 @@ from fastapi import Body, Depends, HTTPException, Request, Security from fastapi.security import HTTPBasic from loguru import logger - from sqlalchemy.orm import Session from 
starlette.status import ( HTTP_400_BAD_REQUEST, diff --git a/src/fides/ctl/core/config/database_settings.py b/src/fides/ctl/core/config/database_settings.py index b2729a6e551..f7ffe2f749c 100644 --- a/src/fides/ctl/core/config/database_settings.py +++ b/src/fides/ctl/core/config/database_settings.py @@ -7,6 +7,7 @@ from pydantic import PostgresDsn, validator from fides.ctl.core.config.utils import get_test_mode + from .fides_settings import FidesSettings ENV_PREFIX = "FIDES__DATABASE__" From d209987594dd36cca544853edd2e7547eb18586f Mon Sep 17 00:00:00 2001 From: Thomas Date: Wed, 14 Dec 2022 11:37:02 +0800 Subject: [PATCH 6/8] run isort/black "check" in CI instead of "fix" --- noxfiles/ci_nox.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/noxfiles/ci_nox.py b/noxfiles/ci_nox.py index aff30161e1f..be491cb4834 100644 --- a/noxfiles/ci_nox.py +++ b/noxfiles/ci_nox.py @@ -32,8 +32,8 @@ def static_checks(session: nox.Session) -> None: @nox.parametrize( "mode", [ - nox.param("fix", id="fix"), nox.param("check", id="check"), + nox.param("fix", id="fix"), ], ) def black(session: nox.Session, mode: str) -> None: @@ -49,8 +49,8 @@ def black(session: nox.Session, mode: str) -> None: @nox.parametrize( "mode", [ - nox.param("fix", id="fix"), nox.param("check", id="check"), + nox.param("fix", id="fix"), ], ) def isort(session: nox.Session, mode: str) -> None: From d6a25672f5601bf85caf0782f972ee483be75a81 Mon Sep 17 00:00:00 2001 From: Adam Sachs Date: Wed, 14 Dec 2022 08:37:37 -0500 Subject: [PATCH 7/8] remove some additional logging references added in merge --- src/fides/ctl/core/config/helpers.py | 5 ++--- src/fides/ctl/core/config/utils.py | 2 -- src/fides/lib/db/session.py | 7 ++----- src/fides/lib/oauth/api/routes/user_endpoints.py | 10 ++++------ 4 files changed, 8 insertions(+), 16 deletions(-) diff --git a/src/fides/ctl/core/config/helpers.py b/src/fides/ctl/core/config/helpers.py index a2798b6b1ab..ead298cf767 100644 --- 
a/src/fides/ctl/core/config/helpers.py +++ b/src/fides/ctl/core/config/helpers.py @@ -1,5 +1,4 @@ """This module contains logic related to loading/manipulation/writing the config.""" -import logging import os from os import environ, getenv from pathlib import Path @@ -8,13 +7,13 @@ import toml from click import echo +from loguru import logger from pydantic import BaseSettings from toml import dump, load from fides.ctl.core.utils import echo_red DEFAULT_CONFIG_PATH = ".fides/fides.toml" -logger = logging.getLogger(__name__) def load_file(file_names: Union[List[Path], List[str]]) -> str: @@ -46,7 +45,7 @@ def load_file(file_names: Union[List[Path], List[str]]) -> str: for file_name in file_names: possible_location = os.path.join(dir_str, file_name) if possible_location and os.path.isfile(possible_location): - logger.info("Loading file %s from %s", file_name, dir_str) + logger.info("Loading file {} from {}", file_name, dir_str) return possible_location raise FileNotFoundError diff --git a/src/fides/ctl/core/config/utils.py b/src/fides/ctl/core/config/utils.py index b8ce1ff42ae..51377ebe986 100644 --- a/src/fides/ctl/core/config/utils.py +++ b/src/fides/ctl/core/config/utils.py @@ -1,9 +1,7 @@ -import logging from os import getenv DEFAULT_CONFIG_PATH = ".fides/fides.toml" DEFAULT_CONFIG_PATH_ENV_VAR = "FIDES__CONFIG_PATH" -logger = logging.getLogger(__name__) def get_test_mode() -> bool: diff --git a/src/fides/lib/db/session.py b/src/fides/lib/db/session.py index a77a18b2881..b55244ffc29 100644 --- a/src/fides/lib/db/session.py +++ b/src/fides/lib/db/session.py @@ -1,7 +1,6 @@ from __future__ import annotations -import logging - +from loguru import logger from sqlalchemy import create_engine from sqlalchemy.engine import Engine from sqlalchemy.engine.url import URL @@ -10,8 +9,6 @@ from fides.ctl.core.config import FidesConfig from fides.lib.exceptions import MissingConfig -logger = logging.getLogger(__name__) - def get_db_engine( *, @@ -62,7 +59,7 @@ def commit(self) 
-> None: try: return super().commit() except Exception as exc: - logger.error("Exception: %s", exc) + logger.error("Exception: {}", exc) # Rollback the current transaction after each failed commit self.rollback() raise diff --git a/src/fides/lib/oauth/api/routes/user_endpoints.py b/src/fides/lib/oauth/api/routes/user_endpoints.py index 146791fc490..9bbd71634cb 100644 --- a/src/fides/lib/oauth/api/routes/user_endpoints.py +++ b/src/fides/lib/oauth/api/routes/user_endpoints.py @@ -1,4 +1,3 @@ -import logging from datetime import datetime from typing import Optional @@ -6,6 +5,7 @@ from fastapi_pagination import Page, Params from fastapi_pagination.bases import AbstractPage from fastapi_pagination.ext.sqlalchemy import paginate +from loguru import logger from sqlalchemy.orm import Session from sqlalchemy_utils import escape_like from starlette.status import ( @@ -38,8 +38,6 @@ USER_READ, ) -logger = logging.getLogger(__name__) - router = APIRouter() @@ -80,7 +78,7 @@ def create_user( ) user = FidesUser.create(db=db, data=user_data.dict()) - logger.info("Created user with id: '%s'.", user.id) + logger.info("Created user with id: '{}'.", user.id) FidesUserPermissions.create( db=db, data={"user_id": user.id, "scopes": [PRIVACY_REQUEST_READ]} ) @@ -114,7 +112,7 @@ def delete_user( detail="Users can only remove themselves, or be the Admin UI Root User.", ) - logger.info("Deleting user with id: '%s'.", user_id) + logger.info("Deleting user with id: '{}'.", user_id) user.delete(db) @@ -130,7 +128,7 @@ def get_user(*, db: Session = Depends(get_db), user_id: str) -> FidesUser: if user is None: raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="User not found") - logger.info("Returning user with id: '%s'.", user_id) + logger.info("Returning user with id: '{}'.", user_id) return user From c1b2d4fd2dd769e96d378ee491ad032e37b49769 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Wed, 14 Dec 2022 09:39:57 -0600 Subject: [PATCH 8/8] Remove some f strings and switch some 
more instances of %-style to {}-style. --- src/fides/api/ctl/database/database.py | 4 ++-- src/fides/api/ops/analytics.py | 6 +++--- src/fides/api/ops/api/v1/endpoints/messaging_endpoints.py | 4 ++-- src/fides/api/ops/models/storage.py | 2 +- src/fides/api/ops/service/connectors/fides_connector.py | 6 +++--- src/fides/api/ops/task/refine_target_path.py | 2 +- src/fides/api/ops/util/storage_authenticator.py | 2 +- 7 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/fides/api/ctl/database/database.py b/src/fides/api/ctl/database/database.py index 5692a174029..aa5d81c7bf0 100644 --- a/src/fides/api/ctl/database/database.py +++ b/src/fides/api/ctl/database/database.py @@ -90,7 +90,7 @@ def get_db_health(database_url: str, db: Session) -> str: return "healthy" except Exception as error: # pylint: disable=broad-except error_type = get_full_exception_name(error) - log.error(f"Unable to reach the database: {error_type}: {error}") + log.error("Unable to reach the database: {}: {}", error_type, error) return "unhealthy" @@ -101,4 +101,4 @@ async def configure_db(database_url: str) -> None: await init_db(database_url) except Exception as error: # pylint: disable=broad-except error_type = get_full_exception_name(error) - log.error(f"Unable to configure database: {error_type}: {error}") + log.error("Unable to configure database: {}: {}", error_type, error) diff --git a/src/fides/api/ops/analytics.py b/src/fides/api/ops/analytics.py index 2b895df2b25..96f8063134b 100644 --- a/src/fides/api/ops/analytics.py +++ b/src/fides/api/ops/analytics.py @@ -42,9 +42,9 @@ async def send_analytics_event(event: AnalyticsEvent) -> None: try: await analytics_client.send_async(event) except AnalyticsError as err: - log.warning(f"Error sending analytics event: {err}") + log.warning("Error sending analytics event: {}", err) else: - log.info(f"Analytics event sent with client id: {analytics_client.client_id}") + log.info("Analytics event sent with client id: {}", 
analytics_client.client_id) async def send_registration(registration: UserRegistration) -> None: @@ -53,7 +53,7 @@ async def send_registration(registration: UserRegistration) -> None: try: await analytics_client.register_async(registration.as_fideslog()) except AnalyticsError as err: - log.warning(f"Error sending registration event: {err}") + log.warning("Error sending registration event: {}", err) else: log.info( f"Analytics registration sent with client id: {analytics_client.client_id}" diff --git a/src/fides/api/ops/api/v1/endpoints/messaging_endpoints.py b/src/fides/api/ops/api/v1/endpoints/messaging_endpoints.py index a4f54e56406..09089c28721 100644 --- a/src/fides/api/ops/api/v1/endpoints/messaging_endpoints.py +++ b/src/fides/api/ops/api/v1/endpoints/messaging_endpoints.py @@ -71,7 +71,7 @@ def post_config( return create_or_update_messaging_config(db=db, config=messaging_config) except ValueError as e: logger.warning( - "Create failed for messaging config %s: %s", + "Create failed for messaging config {}: {}", messaging_config.key, Pii(str(e)), ) @@ -81,7 +81,7 @@ def post_config( ) except Exception as exc: logger.warning( - "Create failed for messaging config %s: %s", + "Create failed for messaging config {}: {}", messaging_config.key, Pii(str(exc)), ) diff --git a/src/fides/api/ops/models/storage.py b/src/fides/api/ops/models/storage.py index 75666d71666..c8d20443a02 100644 --- a/src/fides/api/ops/models/storage.py +++ b/src/fides/api/ops/models/storage.py @@ -97,7 +97,7 @@ def set_secrets( KeyError, ValidationError, ) as exc: - logger.error("Error: %s", Pii(str(exc))) + logger.error("Error: {}", Pii(str(exc))) # We don't want to handle these explicitly here, only in the API view raise diff --git a/src/fides/api/ops/service/connectors/fides_connector.py b/src/fides/api/ops/service/connectors/fides_connector.py index c55bfc60c2c..c0c75292482 100644 --- a/src/fides/api/ops/service/connectors/fides_connector.py +++ 
b/src/fides/api/ops/service/connectors/fides_connector.py @@ -61,15 +61,15 @@ def test_connection(self) -> Optional[ConnectionTestStatus]: Tests connection to the configured Fides server with configured credentials by attempting an authorized API call and ensuring success """ - log.info(f"Starting test connection to {self.configuration.key}") + log.info("Starting test connection to {}", self.configuration.key) try: client: FidesClient = self.client() client.request_status() except Exception as e: - log.error(f"Error testing connection to remote Fides {str(e)}") + log.error("Error testing connection to remote Fides {}", str(e)) return ConnectionTestStatus.failed - log.info(f"Successful connection test for {self.configuration.key}") + log.info("Successful connection test for {}", self.configuration.key) return ConnectionTestStatus.succeeded def retrieve_data( diff --git a/src/fides/api/ops/task/refine_target_path.py b/src/fides/api/ops/task/refine_target_path.py index b5c1aa86dc3..a5115fbfca8 100644 --- a/src/fides/api/ops/task/refine_target_path.py +++ b/src/fides/api/ops/task/refine_target_path.py @@ -101,7 +101,7 @@ def refine_target_path( IndexError, TypeError, ): # No/invalid field path. Expected when the path has been eliminated. 
- logger.warning("Could not locate target path %s on row", target_path) + logger.warning("Could not locate target path {} on row", target_path) return [] if isinstance(current_elem, dict): diff --git a/src/fides/api/ops/util/storage_authenticator.py b/src/fides/api/ops/util/storage_authenticator.py index a7ac50c08c2..caa41bee482 100644 --- a/src/fides/api/ops/util/storage_authenticator.py +++ b/src/fides/api/ops/util/storage_authenticator.py @@ -35,5 +35,5 @@ def get_s3_session( logger.info("Successfully created automatic session") return session - logger.error("Auth method not supported for S3: %s", auth_method) + logger.error("Auth method not supported for S3: {}", auth_method) raise ValueError(f"Auth method not supported for S3: {auth_method}")