Adds two QAQC scripts to tools directory
This merge adds two new scripts to /tools/ for use in QAQC: inundate_nation.py produces inundation maps for the entire country, and check_deep_flooding.py checks for inundation depths greater than a user-supplied threshold within areas defined by a user-supplied shapefile. This resolves #439 and resolves #438. (A sketch of how the two scripts fit together follows the change summary below.)
1 parent 493dc27 · commit f67a35d
Showing 4 changed files with 226 additions and 5 deletions.
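The commit message above describes the intended QAQC workflow: inundate_nation.py writes depth rasters per HUC and magnitude, and check_deep_flooding.py then scans a directory of those depth rasters against a threshold. The sketch below is a minimal, hypothetical illustration of chaining the two scripts from Python; the paths, the job count, the 3 m threshold, and the 1_5_ms subdirectory name are illustrative assumptions, not values taken from this commit.

import subprocess

# Hedged sketch: chaining the two new QAQC scripts (all paths below are hypothetical).
fim_run_dir = '/data/outputs/fim_3_0_ms'                  # outputs of fim_run.sh, one subdirectory per HUC
nation_output_dir = '/data/inundation_review/fim_3_0_ms'  # where inundate_nation.py will write depth grids
aoi_shapefile = '/data/inputs/aoi_polygons.shp'           # areas in which to check for deep flooding

# 1) Produce nationwide depth rasters; one subdirectory per magnitude/config (e.g. 1_5_ms).
subprocess.run(['python', '/tools/inundate_nation.py',
                '-r', fim_run_dir, '-o', nation_output_dir, '-j', '6'], check=True)

# 2) Flag cells deeper than 3 m inside the areas of interest.
subprocess.run(['python', '/tools/check_deep_flooding.py',
                '-d', nation_output_dir + '/1_5_ms', '-s', aoi_shapefile,
                '-t', '3', '-o', '/data/inundation_review/deep_checks', '-j', '6'], check=True)

Note that check_deep_flooding.py only picks up files whose names contain 'depth', which matches the *_inund_depth.tif rasters that inundate_nation.py writes.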
tools/check_deep_flooding.py
@@ -0,0 +1,110 @@
import argparse
import os
from multiprocessing import Pool

import numpy as np
import rasterio.shutil
from rasterio.warp import calculate_default_transform, reproject, Resampling
import rasterio.crs
import rasterio
import rasterio.mask
import geopandas as gpd
from shapely.geometry import box


def check_deep_flooding(args):

    depth_grid_path = args[0]
    shapefile_path = args[1]
    depth_threshold = args[2]
    output_dir = args[3]

    print("Checking " + depth_grid_path + "...")

    # Open depth_grid_path and shapefile_path and perform np.wheres.
    depth_src = rasterio.open(depth_grid_path)
    depth_array = depth_src.read(1)
    reference = depth_src

    # Read layer using the bbox option. CRS mismatches are handled if bbox is passed a geodataframe (which it is).
    bounding_box = gpd.GeoDataFrame({'geometry': box(*reference.bounds)}, index=[0], crs=reference.crs)
    poly_all = gpd.read_file(shapefile_path, bbox=bounding_box)

    # Make sure features are present in bounding box area before projecting. Return if features are absent.
    if poly_all.empty:
        return

    # Project layer to reference crs.
    poly_all_proj = poly_all.to_crs(reference.crs)
    # Check if there are any features within our reference raster extent.
    if poly_all_proj.empty:
        # If no features within reference raster extent, create a zero array of same shape as reference raster.
        poly_mask = np.zeros(reference.shape)
    else:
        # Perform mask operation on the reference raster using the previously declared geometry geoseries.
        # Invert is set to True so that areas outside the polygons are False and areas inside are True.
        geometry = poly_all_proj.geometry
        in_poly, transform, c = rasterio.mask.raster_geometry_mask(reference, geometry, invert=True)
        # Write mask array: areas inside polygons are set to 1 and areas outside are set to 0.
        poly_mask = np.where(in_poly, 1, 0)

    # Filter depth_array by depth_threshold.
    filtered_depth_array = np.where(depth_array > depth_threshold, depth_array, -1)

    # Perform mask.
    masked_depth_array = np.where(poly_mask == 1, filtered_depth_array, -1)

    # Only write an output raster if deep flooding was found.
    if np.amax(masked_depth_array) > 0:

        file_handle = os.path.split(depth_grid_path)[1]

        checked_depth_raster = os.path.join(output_dir, "checked_" + str(depth_threshold) + "_" + file_handle)

        print("Writing " + checked_depth_raster + "...")
        # Write output.
        with rasterio.Env():
            profile = depth_src.profile
            profile.update(nodata=-1)
            with rasterio.open(checked_depth_raster, 'w', **profile) as dst:
                dst.write(masked_depth_array, 1)


if __name__ == '__main__':

    # Parse arguments.
    parser = argparse.ArgumentParser(description='Checks for deep flooding within areas defined by a shapefile. Requires a directory of depth grids and a shapefile.')
    parser.add_argument('-d', '--depth-grid-dir', help='Directory containing depth grid outputs of inundation.py', required=True)
    parser.add_argument('-s', '--shapefile-path', help='Path to shapefile to be used as the overlay.', required=True)
    parser.add_argument('-t', '--depth-threshold', help='Depth in meters to use as checking threshold.', required=True)
    parser.add_argument('-o', '--output-dir', help='The path to a directory to write the outputs. It is created if it does not exist.', required=True)
    parser.add_argument('-j', '--job-number', help='The number of jobs', required=False, default=1)

    args = vars(parser.parse_args())

    depth_grid_dir = args['depth_grid_dir']
    shapefile_path = args['shapefile_path']
    depth_threshold = int(args['depth_threshold'])
    output_dir = args['output_dir']
    job_number = int(args['job_number'])

    # Get list of files in depth_grid_dir, determine which ones are depth grids, and add them to a list.
    depth_grid_dir_list = os.listdir(depth_grid_dir)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    procs_list = []

    for f in depth_grid_dir_list:
        if 'depth' in f:
            full_f_path = os.path.join(depth_grid_dir, f)
            procs_list.append([full_f_path, shapefile_path, depth_threshold, output_dir])

    # Multiprocess.
    with Pool(processes=job_number) as pool:
        pool.map(check_deep_flooding, procs_list)
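For review purposes, the new function can also be driven directly from Python for a single depth grid rather than through argparse. The sketch below is a minimal illustration; the import assumes the script is on the Python path, and the paths and threshold value are hypothetical.

from check_deep_flooding import check_deep_flooding

# Minimal single-file run (hypothetical paths; the output directory must already exist).
check_deep_flooding([
    '/data/depth_grids/1_5_ms_inund_depth.tif',  # a depth raster produced by inundation.py
    '/data/inputs/aoi_polygons.shp',             # polygons defining the areas to check
    3,                                           # depth threshold in meters
    '/data/deep_flooding_checks'                 # directory for the "checked_*" output raster
])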
tools/inundate_nation.py
@@ -0,0 +1,97 @@
import argparse
import os

from inundation import inundate
from multiprocessing import Pool

INUN_REVIEW_DIR = r'/data/inundation_review/inundation_nwm_recurr/'  # Will update.
INPUTS_DIR = r'/data/inputs'


def run_inundation(args):

    fim_run_dir = args[0]
    huc = args[1]
    magnitude = args[2]
    magnitude_output_dir = args[3]
    config = args[4]

    # Build paths to the fim_run.sh outputs for this HUC and to the NWM recurrence flow data.
    fim_run_parent = os.path.join(fim_run_dir, huc)
    rem = os.path.join(fim_run_parent, 'rem_zeroed_masked.tif')
    catchments = os.path.join(fim_run_parent, 'gw_catchments_reaches_filtered_addedAttributes.tif')
    mask_type = 'huc'
    hydro_table = os.path.join(fim_run_parent, 'hydroTable.csv')
    catchment_poly = os.path.join(fim_run_parent, 'gw_catchments_reaches_filtered_addedAttributes_crosswalked.gpkg')
    inundation_raster = os.path.join(magnitude_output_dir, magnitude + '_' + config + '_inund_extent.tif')
    depth_raster = os.path.join(magnitude_output_dir, magnitude + '_' + config + '_inund_depth.tif')
    forecast = os.path.join(INUN_REVIEW_DIR, 'nwm_recurr_flow_data', 'recurr_' + magnitude + '_cms.csv')
    hucs, hucs_layerName = os.path.join(INPUTS_DIR, 'wbd', 'WBD_National.gpkg'), 'WBDHU8'

    print(depth_raster)

    # Only run inundate() if the depth raster does not already exist.
    if not os.path.exists(depth_raster):
        print("Running the NWM recurrence intervals for HUC: " + huc + ", " + magnitude + "...")
        inundate(
            rem, catchments, catchment_poly, hydro_table, forecast, mask_type, hucs=hucs, hucs_layerName=hucs_layerName,
            subset_hucs=huc, num_workers=1, aggregate=False, inundation_raster=None, inundation_polygon=None,
            depths=depth_raster, out_raster_profile=None, out_vector_profile=None, quiet=True
        )
    else:
        print(depth_raster + " exists. Moving on...")


if __name__ == '__main__':

    # Parse arguments.
    parser = argparse.ArgumentParser(description='Inundation mapping for FOSS FIM using streamflow recurrence interval flow data. Inundation outputs are stored in the /inundation_review/inundation_nwm_recurr/ directory.')
    parser.add_argument('-r', '--fim-run-dir', help='Name of directory containing outputs of fim_run.sh (e.g. data/outputs/dev_abc/12345678_dev_test)', required=True)
    parser.add_argument('-o', '--output-dir', help='The path to a directory to write the outputs. If not used, the inundation_review directory is used by default.', required=False, default="")
    parser.add_argument('-j', '--job-number', help='The number of jobs', required=False, default=1)

    args = vars(parser.parse_args())

    fim_run_dir = args['fim_run_dir']
    output_dir = args['output_dir']
    magnitude_list = ['1_5']

    job_number = int(args['job_number'])

    huc_list = os.listdir(fim_run_dir)

    print(fim_run_dir)
    fim_version = os.path.split(fim_run_dir)[1]
    print(fim_version)
    print(output_dir)

    if output_dir == "":
        output_dir = os.path.join(INUN_REVIEW_DIR, fim_version)
    print(output_dir)

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    # Determine the configuration (ms or fr) from the FIM version name.
    if 'ms' in fim_version:
        config = 'ms'
    if 'fr' in fim_version:
        config = 'fr'

    procs_list = []

    for huc in huc_list:
        if huc != 'logs':
            for magnitude in magnitude_list:
                magnitude_output_dir = os.path.join(output_dir, magnitude + '_' + config)
                if not os.path.exists(magnitude_output_dir):
                    os.mkdir(magnitude_output_dir)
                print(magnitude_output_dir)
                procs_list.append([fim_run_dir, huc, magnitude, magnitude_output_dir, config])

    # Multiprocess.
    with Pool(processes=job_number) as pool:
        pool.map(run_inundation, procs_list)
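As with check_deep_flooding.py, the worker function can also be exercised directly for a single HUC when reviewing results. The sketch below is a minimal illustration; the import assumes the script is on the Python path, and the FIM run directory, HUC code, and output directory are hypothetical.

from inundate_nation import run_inundation

# Minimal single-HUC run (hypothetical paths; the magnitude output directory must already exist).
run_inundation([
    '/data/outputs/fim_3_0_ms',                   # fim_run.sh output directory, one subdirectory per HUC
    '12345678',                                   # HUC8 code to map
    '1_5',                                        # NWM recurrence-interval magnitude (the only one in magnitude_list)
    '/data/inundation_review/fim_3_0_ms/1_5_ms',  # where the *_inund_depth.tif raster will be written
    'ms'                                          # configuration, normally inferred from the FIM version name
])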