Update all logging references to use loguru #2031

1 change: 1 addition & 0 deletions .fides/fides.toml
@@ -11,6 +11,7 @@ app_postgres = {connection_string="postgresql+psycopg2://postgres:fides@fides-db

[logging]
level = "INFO"
log_pii = true

[cli]
server_host = "localhost"
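
For context, the new `log_pii` key surfaces later in this diff as `CONFIG.logging.log_pii`, and values wrapped in `Pii(...)` (as in the health check below) are presumably only logged in clear text when that flag is enabled. A minimal sketch of how such a toggle can gate masking under brace-style formatting (the `LoggingSettings` model and the mask string are illustrative assumptions, not fides' actual implementation):

```python
from pydantic import BaseSettings


class LoggingSettings(BaseSettings):
    """Illustrative settings model mirroring the [logging] section above."""

    level: str = "INFO"
    log_pii: bool = False

    class Config:
        env_prefix = "FIDES__LOGGING__"  # e.g. FIDES__LOGGING__LOG_PII=true


SETTINGS = LoggingSettings()


class Pii(str):
    """Mark a value as potentially personal data so formatting can mask it."""

    def __format__(self, spec: str) -> str:
        # str.format()-style rendering (which loguru uses) calls __format__,
        # so masking happens at the moment the log message is built.
        if SETTINGS.log_pii:
            return super().__format__(spec)
        return "*****"
```

Under these assumptions, `logger.error("Unable to reach cache: {}", Pii(str(exc)))` renders `*****` until `log_pii = true` is set.
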
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -17,6 +17,10 @@ The types of changes are:

## [Unreleased](https://github.com/ethyca/fides/compare/2.2.2...main)

### Changed

* Updated the codebase to use the `loguru` logging library [#2031](https://github.com/ethyca/fides/pull/2031)

### Fixed

* Timing issues with bulk DSR reprocessing, specifically when analytics are enabled [#2015](https://github.com/ethyca/fides/pull/2015)
4 changes: 2 additions & 2 deletions noxfiles/ci_nox.py
@@ -32,8 +32,8 @@ def static_checks(session: nox.Session) -> None:
@nox.parametrize(
"mode",
[
nox.param("fix", id="fix"),
nox.param("check", id="check"),
nox.param("fix", id="fix"),
],
)
def black(session: nox.Session, mode: str) -> None:
@@ -49,8 +49,8 @@ def black(session: nox.Session, mode: str) -> None:
@nox.parametrize(
"mode",
[
nox.param("fix", id="fix"),
nox.param("check", id="check"),
nox.param("fix", id="fix"),
],
)
def isort(session: nox.Session, mode: str) -> None:
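
For reference, the two hunks above only reorder the `check` and `fix` parameters. `@nox.parametrize` expands the decorated session into one generated session per parameter (e.g. `black(check)` and `black(fix)`), and parameter order controls the order in which those generated sessions run when the parametrized session is invoked as a whole. A minimal sketch with an illustrative session body (not the actual noxfile contents):

```python
import nox


@nox.session()
@nox.parametrize(
    "mode",
    [
        nox.param("check", id="check"),
        nox.param("fix", id="fix"),
    ],
)
def black(session: nox.Session, mode: str) -> None:
    # Generates the sessions "black(check)" and "black(fix)".
    # Run a single one with: nox -s "black(check)"
    session.install("black")
    command = ["black", "src", "noxfiles"]
    if mode == "check":
        command.append("--check")
    session.run(*command)
```
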
10 changes: 3 additions & 7 deletions src/fides/api/ctl/routes/health.py
@@ -1,7 +1,7 @@
import logging
from typing import Any, Dict, List, Union

from fastapi import Depends, HTTPException, status
from loguru import logger
from redis.exceptions import ResponseError
from sqlalchemy.orm import Session

@@ -19,21 +19,17 @@

router = APIRouter(tags=["Health"])

logger = logging.getLogger(__name__)
# stops polluting logs with sqlalchemy / alembic info-level logs
logging.getLogger("sqlalchemy.engine").setLevel(logging.ERROR)
logging.getLogger("alembic").setLevel(logging.WARNING)


def get_cache_health() -> str:
"""Checks if the cache is reachable"""

if not CONFIG.redis.enabled:
return "no cache configured"
try:
get_cache()
return "healthy"
except (RedisConnectionError, ResponseError) as e:
logger.error("Unable to reach cache: %s", Pii(str(e)))
logger.error("Unable to reach cache: {}", Pii(str(e)))
return "unhealthy"


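
The replacement line above, `logger.error("Unable to reach cache: {}", Pii(str(e)))`, keeps the argument out of the format string just as the old `%s` style did, but loguru uses `str.format`-style placeholders. The benefit over an f-string is that loguru can skip rendering entirely when the record's level is below every sink's threshold, and formatting wrappers (such as the `Pii` sketch earlier) get a chance to run via `__format__`. A small illustration with an arbitrary error value:

```python
from loguru import logger

err = ConnectionError("connection refused")

# Placeholder form: the argument is only formatted if the record is emitted.
logger.error("Unable to reach cache: {}", str(err))

# f-string form: the message is rendered eagerly, before loguru sees it.
logger.error(f"Unable to reach cache: {err}")
```
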
70 changes: 35 additions & 35 deletions src/fides/api/main.py
@@ -1,16 +1,14 @@
"""
Contains the code that sets up the API.
"""
import logging
from datetime import datetime, timezone
from logging import WARNING
from os import getenv
from logging import DEBUG, WARNING
from typing import Callable, Optional

from fastapi import FastAPI, HTTPException, Request, Response, status
from fastapi.responses import FileResponse
from fideslog.sdk.python.event import AnalyticsEvent
from loguru import logger as log
from loguru import logger
from redis.exceptions import RedisError, ResponseError
from slowapi.errors import RateLimitExceeded # type: ignore
from slowapi.extension import Limiter, _rate_limit_exceeded_handler # type: ignore
@@ -54,17 +52,12 @@
)
from fides.api.ops.tasks.scheduled.scheduler import scheduler
from fides.api.ops.util.cache import get_cache
from fides.api.ops.util.logger import get_fides_log_record_factory
from fides.ctl.core.config import FidesConfig, get_config
from fides.ctl.core.config.helpers import check_required_webserver_config_values
from fides.lib.oauth.api.routes.user_endpoints import router as user_router

CONFIG: FidesConfig = get_config()

logging.basicConfig(level=CONFIG.logging.level)
logging.setLogRecordFactory(get_fides_log_record_factory())
logger = logging.getLogger(__name__)

app = FastAPI(title="fides")
app.state.limiter = Limiter(
default_limits=[CONFIG.security.request_rate_limit],
@@ -190,16 +183,15 @@ def configure_routes() -> None:
async def setup_server() -> None:
"Run all of the required setup steps for the webserver."

log.warning(
f"Startup configuration: reloading = {CONFIG.hot_reloading}, dev_mode = {CONFIG.dev_mode}",
)
log_pii = getenv("FIDES__LOG_PII", "").lower() == "true"
log.warning(
f"Startup configuration: pii logging = {log_pii}",
logger.warning(
"Startup configuration: reloading = {}, dev_mode = {}",
CONFIG.hot_reloading,
CONFIG.dev_mode,
)
logger.warning("Startup configuration: pii logging = {}", CONFIG.logging.log_pii)

if logger.getEffectiveLevel() == logging.DEBUG:
log.warning(
if CONFIG.logging.level == DEBUG:
logger.warning(
"WARNING: log level is DEBUG, so sensitive or personal data may be logged. "
"Set FIDES__LOGGING__LEVEL to INFO or higher in production."
)
@@ -213,36 +205,37 @@ async def setup_server() -> None:
try:
create_or_update_parent_user()
except Exception as e:
log.error(f"Error creating parent user: {str(e)}")
logger.error("Error creating parent user: {}", str(e))
raise FidesError(f"Error creating parent user: {str(e)}")

log.info("Validating SaaS connector templates...")
logger.info("Validating SaaS connector templates...")
try:
registry = load_registry(registry_file)
db = get_api_session()
update_saas_configs(registry, db)
except Exception as e:
log.error(
f"Error occurred during SaaS connector template validation: {str(e)}",
logger.error(
"Error occurred during SaaS connector template validation: {}",
str(e),
)
return
finally:
db.close()

log.info("Running Cache connection test...")
logger.info("Running Cache connection test...")

try:
get_cache()
except (RedisConnectionError, RedisError, ResponseError) as e:
log.error(f"Connection to cache failed: {str(e)}")
logger.error("Connection to cache failed: {}", str(e))
return
else:
log.debug("Connection to cache succeeded")
logger.debug("Connection to cache succeeded")

if not scheduler.running:
scheduler.start()

log.debug("Sending startup analytics events...")
logger.debug("Sending startup analytics events...")
await send_analytics_event(
AnalyticsEvent(
docker=in_docker_container(),
Expand All @@ -257,7 +250,7 @@ async def setup_server() -> None:
desination=CONFIG.logging.destination,
)

log.bind(api_config=CONFIG.logging.json()).debug("Configuration options in use")
logger.bind(api_config=CONFIG.logging.json()).debug("Configuration options in use")


@app.middleware("http")
@@ -266,7 +259,7 @@ async def log_request(request: Request, call_next: Callable) -> Response:
start = datetime.now()
response = await call_next(request)
handler_time = datetime.now() - start
log.bind(
logger.bind(
method=request.method,
status_code=response.status_code,
handler_time=f"{handler_time.microseconds * 0.001}ms",
@@ -306,23 +299,27 @@ def read_other_paths(request: Request) -> Response:

# If any of those worked, serve the file.
if ui_file and ui_file.is_file():
log.debug(
f"catchall request path '{path}' matched static admin UI file: {ui_file}"
logger.debug(
"catchall request path '{}' matched static admin UI file: {}",
path,
ui_file,
)
return FileResponse(ui_file)

# raise 404 for anything that should be a backend endpoint but can't be found
if path.startswith(API_PREFIX[1:]):
log.debug(
f"catchall request path '{path}' matched an invalid API route, return 404"
logger.debug(
"catchall request path '{}' matched an invalid API route, return 404",
path,
)
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Item not found"
)

# otherwise return the index
log.debug(
f"catchall request path '{path}' did not match any admin UI routes, return generic admin UI index"
logger.debug(
"catchall request path '{}' did not match any admin UI routes, return generic admin UI index",
path,
)
return get_admin_index_as_response()

@@ -332,7 +329,10 @@ def start_webserver(port: int = 8080) -> None:
check_required_webserver_config_values()
server = Server(Config(app, host="0.0.0.0", port=port, log_level=WARNING))

log.info(
f"Starting webserver - Host: {server.config.host}, Port: {server.config.port}, Log Level: {server.config.log_level}"
logger.info(
"Starting webserver - Host: {}, Port: {}, Log Level: {}",
server.config.host,
server.config.port,
server.config.log_level,
)
server.run()
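
The request-logging middleware and the startup configuration dump above use `logger.bind()`, which attaches key/value context to the records a logger produces instead of interpolating it into the message text. A brief usage sketch; the sink format shown is just one way to surface the bound fields:

```python
import sys

from loguru import logger

# Replace the default sink with one that renders bound context from "extra".
logger.remove()
logger.add(
    sys.stderr,
    format="{time:HH:mm:ss} | {level} | {extra[method]} {extra[path]} | {message}",
)

# bind() returns a logger whose records carry these key/value pairs.
request_logger = logger.bind(method="GET", path="/health", status_code=200)
request_logger.info("Request received")
```
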
4 changes: 1 addition & 3 deletions src/fides/api/ops/api/v1/endpoints/config_endpoints.py
@@ -1,7 +1,7 @@
import logging
from typing import Any, Dict

from fastapi.params import Security
from loguru import logger

from fides.api.ops.api.v1 import scope_registry as scopes
from fides.api.ops.api.v1 import urn_registry as urls
@@ -12,8 +12,6 @@

router = APIRouter(tags=["Config"], prefix=urls.V1_URL_PREFIX)

logger = logging.getLogger(__name__)


@router.get(
urls.CONFIG,
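
A recurring change in these endpoint modules is dropping the per-module `logger = logging.getLogger(__name__)` in favor of importing loguru's shared `logger` object. The calling module, function, and line are still captured on every record, so a sink can keep the familiar per-module context; a small sketch:

```python
import sys

from loguru import logger

# One shared logger; each call site's module/function/line is recorded
# automatically, with no per-module getLogger(__name__) boilerplate.
logger.remove()
logger.add(sys.stderr, format="{time} | {level} | {name}:{function}:{line} | {message}")

logger.info("Finding connection configuration with key '{}'", "app_postgres")
```
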
26 changes: 12 additions & 14 deletions src/fides/api/ops/api/v1/endpoints/connection_endpoints.py
@@ -1,13 +1,13 @@
from __future__ import annotations

import logging
from typing import List, Optional

from fastapi import Depends, HTTPException
from fastapi.params import Query, Security
from fastapi_pagination import Page, Params
from fastapi_pagination.bases import AbstractPage
from fastapi_pagination.ext.sqlalchemy import paginate
from loguru import logger
from pydantic import ValidationError, conlist
from sqlalchemy import or_
from sqlalchemy.orm import Session
@@ -76,15 +76,13 @@

router = APIRouter(tags=["Connections"], prefix=V1_URL_PREFIX)

logger = logging.getLogger(__name__)


def get_connection_config_or_error(
db: Session, connection_key: FidesOpsKey
) -> ConnectionConfig:
"""Helper to load the ConnectionConfig object or throw a 404"""
connection_config = ConnectionConfig.get_by(db, field="key", value=connection_key)
logger.info("Finding connection configuration with key '%s'", connection_key)
logger.info("Finding connection configuration with key '{}'", connection_key)
if not connection_config:
raise HTTPException(
status_code=HTTP_404_NOT_FOUND,
@@ -119,7 +117,7 @@ def get_connections(
SaaS connector types.
"""
logger.info(
"Finding connection configurations with pagination params %s and search query: '%s'.",
"Finding connection configurations with pagination params {} and search query: '{}'.",
params,
search if search else "",
)
@@ -217,7 +215,7 @@ def patch_connections(
"""
created_or_updated: List[ConnectionConfig] = []
failed: List[BulkUpdateFailed] = []
logger.info("Starting bulk upsert for %s connection configuration(s)", len(configs))
logger.info("Starting bulk upsert for {} connection configuration(s)", len(configs))

for config in configs:
orig_data = config.dict().copy()
@@ -228,7 +226,7 @@
created_or_updated.append(connection_config)
except KeyOrNameAlreadyExists as exc:
logger.warning(
"Create/update failed for connection config with key '%s': %s",
"Create/update failed for connection config with key '{}': {}",
config.key,
exc,
)
@@ -240,7 +238,7 @@
)
except Exception:
logger.warning(
"Create/update failed for connection config with key '%s'.", config.key
"Create/update failed for connection config with key '{}'.", config.key
)
failed.append(
BulkUpdateFailed(
@@ -269,7 +267,7 @@ def delete_connection(
"""Removes the connection configuration with matching key."""
connection_config = get_connection_config_or_error(db, connection_key)
connection_type = connection_config.connection_type
logger.info("Deleting connection config with key '%s'.", connection_key)
logger.info("Deleting connection config with key '{}'.", connection_key)
connection_config.delete(db)

# Access Manual Webhooks are cascade deleted if their ConnectionConfig is deleted,
@@ -297,7 +295,7 @@ def validate_secrets(
try:
schema = get_connection_secrets_schema(connection_type.value, saas_config) # type: ignore
logger.info(
"Validating secrets on connection config with key '%s'",
"Validating secrets on connection config with key '{}'",
connection_config.key,
)
connection_secrets = schema.parse_obj(request_body)
@@ -329,7 +327,7 @@ def connection_status(

except (ConnectionException, ClientUnsuccessfulException) as exc:
logger.warning(
"Connection test failed on %s: %s",
"Connection test failed on {}: {}",
connection_config.key,
Pii(str(exc)),
)
Expand All @@ -342,7 +340,7 @@ def connection_status(
failure_reason=str(exc),
)

logger.info("Connection test %s on %s", status.value, connection_config.key) # type: ignore
logger.info("Connection test {} on {}", status.value, connection_config.key) # type: ignore
connection_config.update_test_status(test_status=status, db=db) # type: ignore

return TestStatusMessage(
@@ -376,7 +374,7 @@ def put_connection_config_secrets(
db, unvalidated_secrets, connection_config
).dict()
# Save validated secrets, regardless of whether they've been verified.
logger.info("Updating connection config secrets for '%s'", connection_key)
logger.info("Updating connection config secrets for '{}'", connection_key)
connection_config.save(db=db)

msg = f"Secrets updated for ConnectionConfig with key: {connection_key}."
@@ -419,7 +417,7 @@ def requeue_requires_input_requests(db: Session) -> None:
conditions=(PrivacyRequest.status == PrivacyRequestStatus.requires_input),
):
logger.info(
"Queuing privacy request '%s with '%s' status now that manual inputs are no longer required.",
"Queuing privacy request '{} with '{}' status now that manual inputs are no longer required.",
pr.id,
pr.status.value,
)
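
The bulk of this file is a mechanical swap of printf-style `%s` placeholders for `str.format`-style `{}` placeholders, since loguru formats its positional arguments with `str.format`. Named placeholders filled by keyword arguments work too, and literal braces must be doubled whenever arguments are passed. A short illustration with made-up values:

```python
from loguru import logger

key = "app_postgres"

# printf style (stdlib logging):  logger.info("Deleting '%s'.", key)
# loguru / str.format style:
logger.info("Deleting connection config with key '{}'.", key)

# Named placeholders can be filled with keyword arguments:
logger.info("Connection test {status} on {key}", status="succeeded", key=key)

# When arguments are supplied, literal braces need doubling:
logger.info("Payload was {{'key': '{}'}}", key)
```
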
@@ -1,4 +1,3 @@
import logging
from typing import Any, Dict, List, Optional

from fastapi import APIRouter, Depends, HTTPException
@@ -33,8 +32,6 @@

router = APIRouter(tags=["Connection Types"], prefix=V1_URL_PREFIX)

logger = logging.getLogger(__name__)


def get_connection_types(
search: Optional[str] = None, system_type: Optional[SystemType] = None