
Commit

237 fix logging (#238)
* update to ensure old loggers remain active

* set instruction input to debug - no more output in text

* fixup: Format Python code with Black

* test with new conda-only dependencies

* finally have logging working as desired

---------

Co-authored-by: github-actions <github-actions@github.com>
rosepearson and github-actions committed Jan 26, 2024
1 parent 1a3c6a5 commit ca46bf0
Showing 6 changed files with 34 additions and 38 deletions.
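The core of the fix shows up in src/geofabrics/runner.py below: module-level loggers created at import time were being silenced because the dictConfig call disabled existing loggers. A minimal, self-contained sketch of the behaviour that flag controls (not the project's code; the logger name is illustrative):

import logging
import logging.config

# A module-level logger created BEFORE logging is configured, as happens
# when a module such as geofabrics.dem is imported.
early_logger = logging.getLogger("geofabrics.dem")

logging.config.dictConfig(
    {
        "version": 1,
        # The old value, True, would detach early_logger from the logging
        # tree; False (the fix) leaves pre-existing loggers active.
        "disable_existing_loggers": False,
        "handlers": {"console": {"class": "logging.StreamHandler", "level": "INFO"}},
        "root": {"handlers": ["console"], "level": "DEBUG"},
    }
)

early_logger.info("Emitted - existing loggers stay attached to the root handlers.")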
7 changes: 3 additions & 4 deletions environment_linux.yml
@@ -16,7 +16,6 @@ dependencies:
   - pytest
   - python-dotenv
   - python-pdal
-  - pip:
-      - netcdf4
-      - geoapis >=0.3.2
-      - osmpythontools >=0.3.5
+  - netcdf4
+  - geoapis >=0.3.2
+  - osmpythontools >=0.3.5
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "geofabrics"
-version = "1.1.8"
+version = "1.1.9"
 description = "A package for creating geofabrics for flood modelling."
 readme = "README.md"
 authors = [{ name = "Rose pearson", email = "rose.pearson@niwa.co.nz" }]
36 changes: 19 additions & 17 deletions src/geofabrics/dem.py
@@ -1550,7 +1550,7 @@ def _add_tiled_lidar_chunked(
         for i, dim_y in enumerate(chunked_dim_y):
             delayed_chunked_x = []
             for j, dim_x in enumerate(chunked_dim_x):
-                self.logger.info(f"\tLiDAR chunk {[i, j]}")
+                self.logger.debug(f"\tLiDAR chunk {[i, j]}")
 
                 # Define the region to tile
                 chunk_region_to_tile = self._define_chunk_region(
@@ -1697,7 +1697,9 @@ def _elevation_over_tile(
 
         # Perform the specified rasterisation over the grid locations
         z_flat = elevation_from_points(
-            point_cloud=tile_points, xy_out=xy_out, options=options
+            point_cloud=tile_points,
+            xy_out=xy_out,
+            options=options,
         )
         grid_z = z_flat.reshape(grid_x.shape)
 
@@ -2238,7 +2240,7 @@ def _add_tiled_lidar_chunked(
         for i, dim_y in enumerate(chunked_dim_y):
             delayed_chunked_x = []
             for j, dim_x in enumerate(chunked_dim_x):
-                self.logger.info(f"\tChunk {[i, j]}")
+                self.logger.debug(f"\tChunk {[i, j]}")
 
                 # Define the region to tile
                 chunk_region_to_tile = self._define_chunk_region(
@@ -2637,8 +2639,10 @@ def calculate_linear(
             method="linear",
         )[0]
     except (scipy.spatial.QhullError, Exception) as caught_exception:
-        logging.warning(
-            f" [dem.calculate_linear]:\tException {caught_exception} during "
+        logger = logging.getLogger(__name__)
+        logger.setLevel(logging.DEBUG)
+        logger.warning(
+            f"Exception {caught_exception} during "
             "linear interpolation. Set to NaN."
         )
         linear = numpy.nan
@@ -2684,16 +2688,16 @@ def load_tiles_in_chunk(
     """Read in all LiDAR files within the chunked region - clipped to within
     the region within which to rasterise."""
 
-    logging.info(
-        f" [dem.load_tiles_in_chunk]:\tReading all {len(lidar_files)} files in chunk."
-    )
+    logger = logging.getLogger(__name__)
+    logger.setLevel(logging.DEBUG)
+    logger.debug(f"Reading all {len(lidar_files)} files in chunk.")
 
     # Initialise LiDAR points
     lidar_points = []
 
     # Cycle through each file loading it in and adding it to a numpy array
     for lidar_file in lidar_files:
-        logging.info(f"dem.load_tiles_in_chunk]:\tLoading in file {lidar_file}")
+        logger.debug(f"Loading in file {lidar_file}")
 
         # read in the LiDAR file
         pdal_pipeline = read_file_with_pdal(
@@ -2726,10 +2730,9 @@ def roughness_over_chunk(
 
     # If no points return an array of NaN
     if len(tile_points) == 0:
-        logging.warning(
-            " [dem.roughness_over_chunk]:\tThe latest chunk has no data and is being "
-            "ignored."
-        )
+        logger = logging.getLogger(__name__)
+        logger.setLevel(logging.DEBUG)
+        logger.debug("The latest chunk has no data and is being ignored.")
         return grid_z
     # keep only the specified classifications (should be ground cover)
     classification_mask = numpy.zeros_like(tile_points["Classification"], dtype=bool)
@@ -2774,10 +2777,9 @@ def elevation_over_chunk(
 
     # If no points return an array of NaN
     if len(tile_points) == 0:
-        logging.warning(
-            " [dem.elevation_over_chunk]:\tThe latest chunk has no data and is being "
-            "ignored."
-        )
+        logger = logging.getLogger(__name__)
+        logger.setLevel(logging.DEBUG)
+        logger.debug(" The latest chunk has no data and is being ignored.")
         return grid_z
     # keep only the specified classifications (should be ground / water)
     # Not used for coarse DEM
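The dem.py changes above replace calls on the root logger - and the hand-written "[dem.function]:" prefixes - with named module loggers, so the "%(name)s.%(funcName)s:%(lineno)d" fields of the standard formatter supply that context automatically. A hedged sketch of the pattern (the function and file names here are illustrative, not the commit's code):

import logging

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)  # as in the commit: let DEBUG records through


def load_tiles(lidar_files: list) -> None:
    # The formatter, not the message text, now identifies module and function.
    logger.debug(f"Reading all {len(lidar_files)} files in chunk.")
    for lidar_file in lidar_files:
        logger.debug(f"Loading in file {lidar_file}")


if __name__ == "__main__":
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s - %(levelname)s - %(name)s.%(funcName)s:%(lineno)d: %(message)s",
    )
    load_tiles(["tile_a.laz", "tile_b.laz"])

In the commit the logger is created inside each function rather than at module scope, presumably because these functions run as dask tasks on worker processes; the records only become visible once they are forwarded to the client (see the processor.py changes below).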
3 changes: 3 additions & 0 deletions src/geofabrics/processor.py
@@ -906,6 +906,7 @@ def run(self):
             }
             cluster = distributed.LocalCluster(**cluster_kwargs)
             with cluster, distributed.Client(cluster) as client:
+                client.forward_logging()  # Ensure root logging configuration is used
                 self.logger.info(f"Dask client: {client}")
                 self.logger.info(f"Dask dashboard: {client.dashboard_link}")
 
@@ -1134,6 +1135,7 @@ def run(self):
             with cluster, distributed.Client(cluster) as client:
                 self.logger.info(f"Dask client: {client}")
                 self.logger.info(f"Dask dashboard: {client.dashboard_link}")
+                client.forward_logging()  # Ensure root logging configuration is used
 
                 # setup the hydrologically conditioned DEM generator
                 self.hydrologic_dem = dem.HydrologicallyConditionedDem(
@@ -1311,6 +1313,7 @@ def run(self):
             with cluster, distributed.Client(cluster) as client:
                 self.logger.info(f"Dask client: {client}")
                 self.logger.info(f"Dask dashboard: {client.dashboard_link}")
+                client.forward_logging()  # Ensure root logging configuration is used
 
                 # setup the roughness DEM generator
                 self.roughness_dem = dem.RoughnessDem(
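Client.forward_logging(), available in recent dask.distributed releases, installs a handler on each worker's root logger that ships log records back to the client process, where the root configuration set up by runner.config_logging can format and write them. A minimal sketch, assuming a local cluster and an illustrative task function:

import logging

from dask import distributed


def work(chunk_id: int) -> int:
    # This record is emitted on a worker process...
    logging.getLogger("geofabrics.dem").info(f"Processing chunk {chunk_id}")
    return chunk_id * 2


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    cluster = distributed.LocalCluster(n_workers=2)
    with cluster, distributed.Client(cluster) as client:
        client.forward_logging()  # ...but is handled here, on the client
        results = client.gather(client.map(work, range(4)))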
22 changes: 7 additions & 15 deletions src/geofabrics/runner.py
@@ -17,41 +17,33 @@ def config_logging(logging_filepath: pathlib):
     """Configure the root logger inherited by all other loggers."""
     log_dict = {
         "version": 1,
-        "disable_existing_loggers": True,
+        "disable_existing_loggers": False,
         "formatters": {
             "standard": {
                 "format": "%(asctime)s - %(levelname)s - %(name)s.%(funcName)s:%(lineno)d: %(message)s",
                 "datefmt": "%Y-%m-%d %H:%M:%S",
             },
         },
         "handlers": {
-            "default": {
-                "level": "INFO",
-                "formatter": "standard",
-                "class": "logging.StreamHandler",
-                "stream": "ext://sys.stdout",  # Default is stderr
-            },
             "stream_handler": {
                 "level": "INFO",
                 "formatter": "standard",
                 "class": "logging.StreamHandler",
                 "stream": "ext://sys.stdout",  # Default is stderr
             },
             "file_handler": {
-                "level": "INFO",
+                "level": "DEBUG",
                 "filename": logging_filepath,
                 "class": "logging.FileHandler",
                 "formatter": "standard",
                 "encoding": "utf-8",
                 "mode": "a",
             },
         },
-        "loggers": {
-            "": {
-                "handlers": ["file_handler", "stream_handler"],
-                "level": "INFO",
-                "propagate": True,
-            },
-        },
+        "root": {
+            "handlers": ["file_handler", "stream_handler"],
+            "level": "DEBUG",
+            "propagate": False,
+        },
     }
     logging.config.dictConfig(log_dict)
@@ -83,7 +75,7 @@ def setup_logging_for_run(instructions: dict, label: str):
     log_path.mkdir(parents=True, exist_ok=True)
 
     config_logging(log_path / f"geofabrics_{label}.log")
-    logger = logging.getLogger(__name__)
+    logger = logging.getLogger(f"{__name__}.{label}")
 
     logger.info(f"Log file is located at: geofabrics_{label}.log")
     logger.debug(instructions)
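The reworked config splits verbosity by handler: the stream handler stays at INFO while the file handler drops to DEBUG, so logger.debug(instructions) reaches the log file without flooding the terminal. A trimmed sketch of that effect (the log file name is illustrative):

import logging
import logging.config

logging.config.dictConfig(
    {
        "version": 1,
        "disable_existing_loggers": False,
        "handlers": {
            "stream_handler": {"class": "logging.StreamHandler", "level": "INFO"},
            "file_handler": {
                "class": "logging.FileHandler",
                "level": "DEBUG",
                "filename": "geofabrics_example.log",  # illustrative name
            },
        },
        "root": {"handlers": ["file_handler", "stream_handler"], "level": "DEBUG"},
    }
)

logger = logging.getLogger("geofabrics.runner")
logger.info("Appears on the terminal and in the log file.")
logger.debug("Appears only in the log file - e.g. the full instructions dictionary.")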
2 changes: 1 addition & 1 deletion src/geofabrics/version.py
@@ -3,4 +3,4 @@
 Contains the package version information
 """
 
-__version__ = "1.1.8"
+__version__ = "1.1.9"
