[1pt] PR: Change the projection of HAND processing to EPSG:5070 #782

Merged 10 commits on Jan 13, 2023
5 changes: 3 additions & 2 deletions data/nld/preprocess_levee_protected_areas.py
@@ -9,13 +9,14 @@
from datetime import datetime

sys.path.append('/foss_fim/src')
from utils.shared_variables import DEFAULT_FIM_PROJECTION_CRS
import utils.shared_functions as sf
from utils.shared_functions import FIM_Helpers as fh

def preprocess_levee_protected_areas(source_file_name_and_path,
target_output_folder_path = '',
target_output_filename = 'levee_protected_areas.gpkg',
projection_crs_id = 'EPSG:5070',
projection_crs_id = DEFAULT_FIM_PROJECTION_CRS,
overwrite = False):

'''
@@ -77,7 +78,7 @@ def preprocess_levee_protected_areas(source_file_name_and_path,

# projection crs
if (projection_crs_id is None) or (projection_crs_id == ''):
projection_crs_id = 'EPSG:5070'
projection_crs_id = DEFAULT_FIM_PROJECTION_CRS

# -------------------
# setup logs
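For illustration only (not part of the diff above), a minimal sketch of the None/empty-string fallback pattern used in `preprocess_levee_protected_areas()`: `resolve_projection` is a hypothetical helper, and the import assumes the repository's `/foss_fim/src` path setup.

```python
# Illustrative sketch only: mirrors the fallback in the diff above, in isolation.
import sys
sys.path.append('/foss_fim/src')   # same path setup the script above uses

from utils.shared_variables import DEFAULT_FIM_PROJECTION_CRS   # 'EPSG:5070'

def resolve_projection(projection_crs_id=DEFAULT_FIM_PROJECTION_CRS):
    # Treat None or '' as "use the project-wide default CRS" (hypothetical helper).
    if (projection_crs_id is None) or (projection_crs_id == ''):
        projection_crs_id = DEFAULT_FIM_PROJECTION_CRS
    return projection_crs_id

print(resolve_projection(''))   # EPSG:5070
```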
7 changes: 4 additions & 3 deletions data/usgs/acquire_and_preprocess_3dep_dems.py
@@ -143,7 +143,7 @@ def __download_usgs_dems(extent_files, output_folder_path, number_of_jobs, retry
base_cmd += ' -cutline {2} -crop_to_cutline -ot Float32 -r bilinear'
base_cmd += ' -of "GTiff" -overwrite -co "BLOCKXSIZE=256" -co "BLOCKYSIZE=256"'
base_cmd += ' -co "TILED=YES" -co "COMPRESS=LZW" -co "BIGTIFF=YES" -tr 10 10'
base_cmd += ' -t_srs EPSG:5070 -cblend 6'
base_cmd += ' -t_srs {3} -cblend 6'

with ProcessPoolExecutor(max_workers=number_of_jobs) as executor:

@@ -212,7 +212,7 @@ def download_usgs_dem_file(extent_file,
base_cmd += ' -cutline {2} -crop_to_cutline -ot Float32 -r bilinear'
base_cmd += ' -of "GTiff" -overwrite -co "BLOCKXSIZE=256" -co "BLOCKYSIZE=256"'
base_cmd += ' -co "TILED=YES" -co "COMPRESS=LZW" -co "BIGTIFF=YES" -tr 10 10'
base_cmd += ' -t_srs EPSG:5070 -cblend 6'
base_cmd += ' -t_srs {3} -cblend 6'
- retry (bool)
If True, and the file exists (and is over 0k), downloading will be skipped.
@@ -246,7 +246,8 @@ def download_usgs_dem_file(extent_file,

cmd = base_cmd.format(download_url,
target_path_raw,
extent_file)
extent_file,
sv.DEFAULT_FIM_PROJECTION_CRS)
#PREP_PROJECTION_EPSG
#fh.vprint(f"cmd is {cmd}", self.is_verbose, True)
#print(f"cmd is {cmd}")
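For illustration, this is roughly how the templated `gdalwarp` command resolves once `{3}` is filled with `sv.DEFAULT_FIM_PROJECTION_CRS`. The leading `gdalwarp {0} {1}` portion of `base_cmd` is not shown in the hunks above and is assumed here, and the URL and paths are placeholders.

```python
# Illustrative only: the leading 'gdalwarp {0} {1}' line and all values passed to
# format() are assumptions/placeholders; the point is the '-t_srs {3}' substitution.
DEFAULT_FIM_PROJECTION_CRS = 'EPSG:5070'   # mirrors utils.shared_variables

base_cmd = 'gdalwarp {0} {1}'
base_cmd += ' -cutline {2} -crop_to_cutline -ot Float32 -r bilinear'
base_cmd += ' -of "GTiff" -overwrite -co "BLOCKXSIZE=256" -co "BLOCKYSIZE=256"'
base_cmd += ' -co "TILED=YES" -co "COMPRESS=LZW" -co "BIGTIFF=YES" -tr 10 10'
base_cmd += ' -t_srs {3} -cblend 6'

cmd = base_cmd.format('/vsicurl/https://example.com/dem.tif',  # placeholder download_url
                      '/outputs/HUC6_dem.tif',                 # placeholder target_path_raw
                      '/outputs/HUC6_domain.gpkg',             # placeholder extent_file
                      DEFAULT_FIM_PROJECTION_CRS)
print(cmd)   # ends with: -t_srs EPSG:5070 -cblend 6
```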
18 changes: 18 additions & 0 deletions docs/CHANGELOG.md
@@ -1,6 +1,24 @@
All notable changes to this project will be documented in this file.
We follow the [Semantic Versioning 2.0.0](http://semver.org/) format.

## v4.0.19.0 - 2023-01-06 - [PR#782](https://github.com/NOAA-OWP/inundation-mapping/pull/782)

Changes the projection of HAND processing to EPSG:5070.

### Changes

- `gms_run_post_processing.sh`: Adds the target projection for `points`
- `data/nld/preprocess_levee_protected_areas.py`: Now uses `utils.shared_variables.DEFAULT_FIM_PROJECTION_CRS`
- `src/`
  - `clip_vectors_to_wbd.py`: Saves intermediate outputs in EPSG:5070
  - `src_adjust_spatial_obs.py`: Now uses `utils.shared_variables.DEFAULT_FIM_PROJECTION_CRS`
  - `utils/shared_variables.py`: Changes the designated projection variables
  - `gms/`
    - `stream_branches.py`: Checks the projection of the input streams and reprojects if necessary
    - `run_by_unit.py`: Changes the default projection CRS variable and adds it as the HUC target projection
- `tools/inundate_nation.py`: Now uses `utils.shared_variables.PREP_PROJECTION`

<br/><br/>

## v4.0.18.2 - 2023-01-11 - [PR#790](https://github.com/NOAA-OWP/inundation-mapping/pull/790)

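To make the changelog entry above concrete, here is a minimal geopandas sketch of standardizing a vector layer on the new default CRS. The paths are placeholders, and only the `DEFAULT_FIM_PROJECTION_CRS` value mirrors `src/utils/shared_variables.py`.

```python
import geopandas as gpd

DEFAULT_FIM_PROJECTION_CRS = 'EPSG:5070'   # value now set in src/utils/shared_variables.py

# Placeholder input/output paths for illustration.
gdf = gpd.read_file('/data/inputs/some_layer.gpkg')

# Reproject only when the layer is not already in the target CRS, then write
# the intermediate output in EPSG:5070. to_crs() is not in-place, so the
# result must be assigned back. (A layer with no CRS at all would need
# set_crs() before to_crs().)
if gdf.crs is not None and gdf.crs != DEFAULT_FIM_PROJECTION_CRS:
    gdf = gdf.to_crs(DEFAULT_FIM_PROJECTION_CRS)

gdf.to_file('/outputs/some_layer_5070.gpkg', driver='GPKG', index=False)
```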
4 changes: 2 additions & 2 deletions gms_run_post_processing.sh
@@ -141,9 +141,9 @@ if [ "$src_adjust_spatial" = "True" ]; then
export CALIBRATION_DB_PASS=$CALIBRATION_DB_PASS
echo "Populate PostgrSQL database with benchmark FIM extent points and HUC attributes (the calibration database)"
echo "Loading HUC Data"
time ogr2ogr -overwrite -nln hucs -a_srs ESRI:102039 -f PostgreSQL PG:"host=$CALIBRATION_DB_HOST dbname=$CALIBRATION_DB_NAME user=$CALIBRATION_DB_USER_NAME password=$CALIBRATION_DB_PASS" $inputDataDir/wbd/WBD_National.gpkg WBDHU8
time ogr2ogr -overwrite -nln hucs -t_srs EPSG:5070 -f PostgreSQL PG:"host=$CALIBRATION_DB_HOST dbname=$CALIBRATION_DB_NAME user=$CALIBRATION_DB_USER_NAME password=$CALIBRATION_DB_PASS" $inputDataDir/wbd/WBD_National.gpkg WBDHU8
echo "Loading Point Data"
time ogr2ogr -overwrite -f PostgreSQL PG:"host=$CALIBRATION_DB_HOST dbname=$CALIBRATION_DB_NAME user=$CALIBRATION_DB_USER_NAME password=$CALIBRATION_DB_PASS" $fim_obs_pnt_data usgs_nws_benchmark_points -nln points
time ogr2ogr -overwrite -t_srs EPSG:5070 -f PostgreSQL PG:"host=$CALIBRATION_DB_HOST dbname=$CALIBRATION_DB_NAME user=$CALIBRATION_DB_USER_NAME password=$CALIBRATION_DB_PASS" $fim_obs_pnt_data usgs_nws_benchmark_points -nln points
fi
fi

20 changes: 10 additions & 10 deletions src/clip_vectors_to_wbd.py
@@ -6,6 +6,7 @@
import rasterio as rio

from shapely.geometry import MultiPolygon,Polygon
from utils.shared_variables import DEFAULT_FIM_PROJECTION_CRS
from utils.shared_functions import getDriver, mem_profile

@mem_profile
@@ -39,7 +40,7 @@ def subset_vector_layers(subset_nwm_lakes,
wbd = gpd.read_file(wbd_filename)
dem_domain = gpd.read_file(dem_domain)
wbd = gpd.clip(wbd, dem_domain)
wbd.to_file(wbd_filename, layer='WBDHU8')
wbd.to_file(wbd_filename, layer='WBDHU8', crs=DEFAULT_FIM_PROJECTION_CRS)

# Get wbd buffer
wbd_buffer = wbd.copy()
@@ -52,21 +53,20 @@

wbd_buffer = wbd_buffer[['geometry']]
wbd_streams_buffer = wbd_streams_buffer[['geometry']]
wbd_buffer.to_file(wbd_buffer_filename, driver=getDriver(wbd_buffer_filename), index=False)
wbd_buffer.to_file(wbd_buffer_filename, driver=getDriver(wbd_buffer_filename), index=False, crs=DEFAULT_FIM_PROJECTION_CRS)

# Clip ocean water polygon for future masking ocean areas (where applicable)
landsea = gpd.read_file(landsea, mask=wbd_buffer)
if not landsea.empty:
landsea.to_file(subset_landsea, driver = getDriver(subset_landsea), index=False)
landsea.to_file(subset_landsea, driver = getDriver(subset_landsea), index=False, crs=DEFAULT_FIM_PROJECTION_CRS)
del landsea

# Clip levee-protected areas polygons for future masking ocean areas (where applicable)
print("Subsetting Levee Protected Areas", flush=True)
levee_protected_areas = gpd.read_file(levee_protected_areas, mask=wbd_buffer)
if not levee_protected_areas.empty:
levee_protected_areas = levee_protected_areas.to_crs('ESRI:102039')
levee_protected_areas.to_file(subset_levee_protected_areas, driver = getDriver
(subset_levee_protected_areas), index=False)
(subset_levee_protected_areas), index=False, crs=DEFAULT_FIM_PROJECTION_CRS)
del levee_protected_areas

# Find intersecting lakes and writeout
@@ -81,22 +81,22 @@ def subset_vector_layers(subset_nwm_lakes,
# Loop through the filled polygons and insert the new geometry
for i in range(len(nwm_lakes_fill_holes)):
nwm_lakes.loc[i, 'geometry'] = nwm_lakes_fill_holes[i]
nwm_lakes.to_file(subset_nwm_lakes, driver = getDriver(subset_nwm_lakes), index=False)
nwm_lakes.to_file(subset_nwm_lakes, driver = getDriver(subset_nwm_lakes), index=False, crs=DEFAULT_FIM_PROJECTION_CRS)
del nwm_lakes

# Find intersecting levee lines
print("Subsetting NLD levee lines for HUC{} {}".format(hucUnitLength, hucCode), flush=True)
nld_lines = gpd.read_file(nld_lines, mask = wbd_buffer)
if not nld_lines.empty:
nld_lines.to_file(subset_nld_lines, driver = getDriver(subset_nld_lines), index=False)
nld_lines.to_file(subset_nld_lines, driver = getDriver(subset_nld_lines), index=False, crs=DEFAULT_FIM_PROJECTION_CRS)
del nld_lines

# Subset NWM headwaters
print("Subsetting NWM Headwater Points for HUC{} {}".format(hucUnitLength, hucCode), flush=True)
nwm_headwaters = gpd.read_file(nwm_headwaters, mask=wbd_streams_buffer)

if len(nwm_headwaters) > 0:
nwm_headwaters.to_file(subset_nwm_headwaters, driver=getDriver(subset_nwm_headwaters), index=False)
nwm_headwaters.to_file(subset_nwm_headwaters, driver=getDriver(subset_nwm_headwaters), index=False, crs=DEFAULT_FIM_PROJECTION_CRS)
else:
print ("No headwater point(s) within HUC " + str(hucCode) + " boundaries.")
sys.exit(0)
@@ -107,7 +107,7 @@ def subset_vector_layers(subset_nwm_lakes,
nwm_catchments = gpd.read_file(nwm_catchments, mask=wbd_buffer)

if len(nwm_catchments) > 0:
nwm_catchments.to_file(subset_nwm_catchments, driver=getDriver(subset_nwm_catchments), index=False)
nwm_catchments.to_file(subset_nwm_catchments, driver=getDriver(subset_nwm_catchments), index=False, crs=DEFAULT_FIM_PROJECTION_CRS)
else:
print ("No NWM catchments within HUC " + str(hucCode) + " boundaries.")
sys.exit(0)
@@ -124,7 +124,7 @@ def subset_vector_layers(subset_nwm_lakes,
if len(nwm_streams) > 0:
nwm_streams = gpd.clip(nwm_streams, wbd_streams_buffer)

nwm_streams.to_file(subset_nwm_streams, driver=getDriver(subset_nwm_streams), index=False)
nwm_streams.to_file(subset_nwm_streams, driver=getDriver(subset_nwm_streams), index=False, crs=DEFAULT_FIM_PROJECTION_CRS)
else:
print ("No NWM stream segments within HUC " + str(hucCode) + " boundaries.")
sys.exit(0)
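One point worth spelling out for the `clip_vectors_to_wbd.py` hunks above: in recent geopandas versions, `to_crs()` reprojects the geometries (and returns a new GeoDataFrame), while the `crs=` keyword on `to_file()` only controls the CRS metadata written to the output file. A minimal sketch of the distinction, with placeholder paths and a standalone constant:

```python
import geopandas as gpd

DEFAULT_FIM_PROJECTION_CRS = 'EPSG:5070'

gdf = gpd.read_file('/outputs/levee_protected_areas.gpkg')  # placeholder path

# to_crs() returns a NEW GeoDataFrame with reprojected coordinates;
# it must be assigned back (it is not in-place).
gdf = gdf.to_crs(DEFAULT_FIM_PROJECTION_CRS)

# The crs= keyword on to_file() is passed through to the writer and only sets
# the CRS recorded in the output; it does not transform coordinates, so it is
# safest when the data is already in (or has been reprojected to) EPSG:5070.
gdf.to_file('/outputs/levee_protected_areas_5070.gpkg',
            driver='GPKG', index=False, crs=DEFAULT_FIM_PROJECTION_CRS)
```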
4 changes: 2 additions & 2 deletions src/gms/run_by_unit.sh
@@ -46,7 +46,7 @@ huc4Identifier=${hucNumber:0:4}
huc2Identifier=${hucNumber:0:2}
input_NHD_WBHD_layer=WBDHU$hucUnitLength

default_projection_crs="ESRI:102039"
default_projection_crs="EPSG:5070"
input_DEM=$inputDataDir/3dep_dems/10m_5070/fim_seamless_3dep_dem_10m_5070.vrt
input_DEM_domain=$inputDataDir/3dep_dems/10m_5070/HUC6_dem_domain.gpkg
input_NLD=$inputDataDir/nld_vectors/huc2_levee_lines/nld_preprocessed_"$huc2Identifier".gpkg
@@ -108,7 +108,7 @@ python3 $srcDir/clip_vectors_to_wbd.py -d $hucNumber -w $input_nwm_flows -l $inp
echo -e $startDiv"Clip WBD8"$stopDiv
date -u
Tstart
ogr2ogr -f GPKG -clipsrc $outputHucDataDir/wbd_buffered.gpkg $outputHucDataDir/wbd8_clp.gpkg $inputDataDir/wbd/WBD_National.gpkg WBDHU8
ogr2ogr -f GPKG -clipsrc $outputHucDataDir/wbd_buffered.gpkg $outputHucDataDir/wbd8_clp.gpkg $inputDataDir/wbd/WBD_National.gpkg WBDHU8 -t_srs $default_projection_crs
Tcount

## DERIVE LEVELPATH ##
6 changes: 6 additions & 0 deletions src/gms/stream_branches.py
@@ -16,6 +16,7 @@
from shapely.strtree import STRtree
from random import sample
from scipy.stats import mode
from utils.shared_variables import PREP_CRS

class StreamNetwork(gpd.GeoDataFrame):

@@ -73,6 +74,11 @@ def from_file(cls, filename, branch_id_attribute=None, values_excluded=None,
print('Loading file')

raw_df = gpd.read_file(filename,*args,**kwargs)

# Reproject to the standard FIM CRS if the source CRS differs
if raw_df.crs.to_authority() != PREP_CRS.to_authority():
    raw_df = raw_df.to_crs(PREP_CRS)  # to_crs() is not in-place; reassign the result

filtered_df = gpd.GeoDataFrame()

if (branch_id_attribute is not None) and (values_excluded is not None):
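A standalone sketch of the CRS check added to `StreamNetwork.from_file()` above; the GPKG path is a placeholder and the local `PREP_CRS` stands in for `utils.shared_variables.PREP_CRS`.

```python
import geopandas as gpd
from pyproj import CRS

PREP_CRS = CRS('EPSG:5070')  # stands in for utils.shared_variables.PREP_CRS

raw_df = gpd.read_file('/outputs/nwm_subset_streams.gpkg')  # placeholder path

# to_authority() yields a tuple such as ('EPSG', '5070'), giving a cheap way to
# compare authority codes without a full WKT comparison. (It can return None
# for a CRS pyproj cannot identify; a layer with no CRS at all would need
# set_crs() before to_crs().)
if raw_df.crs is not None and raw_df.crs.to_authority() != PREP_CRS.to_authority():
    raw_df = raw_df.to_crs(PREP_CRS)  # to_crs() is not in-place; reassign
```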
27 changes: 9 additions & 18 deletions src/src_adjust_spatial_obs.py
@@ -18,7 +18,7 @@
from geopandas.tools import sjoin
from multiprocessing import Pool
from src_roughness_optimization import update_rating_curve
from utils.shared_variables import DOWNSTREAM_THRESHOLD, ROUGHNESS_MIN_THRESH, ROUGHNESS_MAX_THRESH
from utils.shared_variables import DOWNSTREAM_THRESHOLD, ROUGHNESS_MIN_THRESH, ROUGHNESS_MAX_THRESH, DEFAULT_FIM_PROJECTION_CRS

#import variables from .env file
load_dotenv()
@@ -52,7 +52,7 @@
def process_points(args):

'''
The funciton ingests geodataframe and attributes the point data with its hydroid and HAND values before passing a dataframe to the src_roughness_optimization.py workflow
The function ingests geodataframe and attributes the point data with its hydroid and HAND values before passing a dataframe to the src_roughness_optimization.py workflow
Processing
- Extract x,y coordinates from geometry
@@ -74,21 +74,12 @@ def process_points(args):
## Define coords variable to be used in point raster value attribution.
coords = [(x,y) for x, y in zip(water_edge_df.X, water_edge_df.Y)]

water_edge_df = water_edge_df.to_crs(DEFAULT_FIM_PROJECTION_CRS)  # to_crs() is not in-place; reassign

## Use point geometry to determine HAND raster pixel values.
hand_src = rasterio.open(hand_path)
hand_crs = hand_src.crs
water_edge_df.to_crs(hand_crs) # Reproject geodataframe to match hand_src. Should be the same, but this is a double check.
water_edge_df['hand'] = [h[0] for h in hand_src.sample(coords)]
hand_src.close()
del hand_src, hand_crs,

## Use point geometry to determine catchment raster pixel values.
catchments_src = rasterio.open(catchments_path)
catchments_crs = catchments_src.crs
water_edge_df.to_crs(catchments_crs)
water_edge_df['hydroid'] = [c[0] for c in catchments_src.sample(coords)]
catchments_src.close()
del catchments_src, catchments_crs
with rasterio.open(hand_path) as hand_src, rasterio.open(catchments_path) as catchments_src:
water_edge_df['hand'] = [h[0] for h in hand_src.sample(coords)]
water_edge_df['hydroid'] = [c[0] for c in catchments_src.sample(coords)]

water_edge_df = water_edge_df[(water_edge_df['hydroid'].notnull()) & (water_edge_df['hand'] > 0) & (water_edge_df['hydroid'] > 0)]

@@ -156,10 +147,10 @@ def find_points_in_huc(huc_id, conn):
JOIN hucs H ON ST_Contains(H.geom, P.geom)
WHERE H.huc8 = %s """

# Need to hard code the CRS to use EPSG:5070 instead of the default ESRI:102039 (gdal pyproj throws an error with crs 102039)
# Use EPSG:5070 instead of the default ESRI:102039 (gdal pyproj throws an error with crs 102039)
# Appears that EPSG:5070 is functionally equivalent to ESRI:102039: https://gis.stackexchange.com/questions/329123/crs-interpretation-in-qgis
water_edge_df = gpd.GeoDataFrame.from_postgis(huc_pt_query, con=conn,
params=[huc_id], crs="EPSG:5070",
params=[huc_id], crs=DEFAULT_FIM_PROJECTION_CRS,
parse_dates=['coll_time'])
water_edge_df = water_edge_df.drop(columns=['st_x','st_y'])

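The replaced comment above treats EPSG:5070 and ESRI:102039 as functionally equivalent Albers definitions. A small pyproj sketch for inspecting the two side by side; whether `equals()` reports True depends on the pyproj/PROJ version and its name/datum matching, so the printed booleans are not asserted here.

```python
from pyproj import CRS

epsg_albers = CRS('EPSG:5070')    # NAD83 / Conus Albers
esri_albers = CRS('ESRI:102039')  # USA_Contiguous_Albers_Equal_Area_Conic_USGS_version

# Compare the projection parameters (central meridian, standard parallels,
# latitude of origin) rather than relying on name strings.
print(epsg_albers.to_proj4())
print(esri_albers.to_proj4())

# Strict equality may or may not hold across PROJ versions; the projection
# parameters are what determine whether coordinates land in the same place.
print(epsg_albers.equals(esri_albers))
```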
12 changes: 5 additions & 7 deletions src/utils/shared_variables.py
@@ -1,14 +1,12 @@
#!/usr/bin/env python3

import os
from pyproj import CRS

# Projections.
#PREP_PROJECTION = "+proj=aea +datum=NAD83 +x_0=0.0 +y_0=0.0 +lon_0=96dW +lat_0=23dN +lat_1=29d30'N +lat_2=45d30'N +towgs84=-0.9956000824677655,1.901299877314078,0.5215002840524426,0.02591500053005733,0.009425998542707753,0.01159900118427752,-0.00062000005129903 +no_defs +units=m"

# These two are ESRI:102039 (USA_Contiguous_Albers_Equal_Area_Conic_USGS_version)
PREP_PROJECTION_CM = 'PROJCS["USA_Contiguous_Albers_Equal_Area_Conic_USGS_version",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Albers"],PARAMETER["false_easting",0.0],PARAMETER["false_northing",0.0],PARAMETER["central_meridian",-96.0],PARAMETER["standard_parallel_1",29.5],PARAMETER["standard_parallel_2",45.5],PARAMETER["latitude_of_origin",23.0],UNIT["Meter",1.0],VERTCS["NAVD_1988",VDATUM["North_American_Vertical_Datum_1988"],PARAMETER["Vertical_Shift",0.0],PARAMETER["Direction",1.0],UNIT["Centimeter",0.01]]]'
PREP_PROJECTION = 'PROJCS["USA_Contiguous_Albers_Equal_Area_Conic_USGS_version",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.2572221010042,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4269"]],PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",29.5],PARAMETER["standard_parallel_2",45.5],PARAMETER["latitude_of_center",23],PARAMETER["longitude_of_center",-96],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]]]'
DEFAULT_FIM_PROJECTION_CRS = 'ESRI:102039'
# -- Projections: EPSG 5070-- #
PREP_PROJECTION = 'PROJCRS["NAD83 / Conus Albers",BASEGEOGCRS["NAD83",DATUM["North American Datum 1983",ELLIPSOID["GRS 1980",6378137,298.257222101,LENGTHUNIT["metre",1]]],PRIMEM["Greenwich",0,ANGLEUNIT["degree",0.0174532925199433]],ID["EPSG",4269]],CONVERSION["Conus Albers",METHOD["Albers Equal Area",ID["EPSG",9822]],PARAMETER["Latitude of false origin",23,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8821]],PARAMETER["Longitude of false origin",-96,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8822]],PARAMETER["Latitude of 1st standard parallel",29.5,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8823]],PARAMETER["Latitude of 2nd standard parallel",45.5,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8824]],PARAMETER["Easting at false origin",0,LENGTHUNIT["metre",1],ID["EPSG",8826]],PARAMETER["Northing at false origin",0,LENGTHUNIT["metre",1],ID["EPSG",8827]]],CS[Cartesian,2],AXIS["easting (X)",east,ORDER[1],LENGTHUNIT["metre",1]],AXIS["northing (Y)",north,ORDER[2],LENGTHUNIT["metre",1]],USAGE[SCOPE["Data analysis and small scale data presentation for contiguous lower 48 states."],AREA["United States (USA) - CONUS onshore - Alabama; Arizona; Arkansas; California; Colorado; Connecticut; Delaware; Florida; Georgia; Idaho; Illinois; Indiana; Iowa; Kansas; Kentucky; Louisiana; Maine; Maryland; Massachusetts; Michigan; Minnesota; Mississippi; Missouri; Montana; Nebraska; Nevada; New Hampshire; New Jersey; New Mexico; New York; North Carolina; North Dakota; Ohio; Oklahoma; Oregon; Pennsylvania; Rhode Island; South Carolina; South Dakota; Tennessee; Texas; Utah; Vermont; Virginia; Washington; West Virginia; Wisconsin; Wyoming."],BBOX[24.41,-124.79,49.38,-66.91]],ID["EPSG",5070]]'
DEFAULT_FIM_PROJECTION_CRS = 'EPSG:5070'
PREP_CRS = CRS(PREP_PROJECTION)
VIZ_PROJECTION ='PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]]'

# -- Data URLs-- #
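As a sanity check on the new `PREP_PROJECTION` WKT above, which embeds `ID["EPSG",5070]`, a short pyproj sketch follows. It assumes the repository's `/foss_fim/src` path setup, and the commented results are expectations rather than verified output.

```python
# Assumes the repo's src/ directory is importable, as the scripts above arrange
# via sys.path.append('/foss_fim/src').
import sys
sys.path.append('/foss_fim/src')

from pyproj import CRS
from utils.shared_variables import PREP_PROJECTION, PREP_CRS, DEFAULT_FIM_PROJECTION_CRS

# The WKT2 string carries an explicit EPSG identifier, so pyproj should
# identify it directly rather than guessing from parameters.
print(PREP_CRS.to_authority())                                   # expected: ('EPSG', '5070')
print(CRS(PREP_PROJECTION) == CRS(DEFAULT_FIM_PROJECTION_CRS))   # expected: True
```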
5 changes: 2 additions & 3 deletions tools/inundate_nation.py
@@ -16,16 +16,15 @@
from gms_tools.mosaic_inundation import Mosaic_inundation
from gms_tools.inundate_gms import Inundate_gms
from inundation import inundate
from tools_shared_variables import elev_raster_ndv
from utils.shared_variables import elev_raster_ndv, PREP_PROJECTION
from utils.shared_functions import FIM_Helpers as fh


#INUN_REVIEW_DIR = r'/data/inundation_review/inundation_nwm_recurr/'
#INUN_OUTPUT_DIR = r'/data/inundation_review/inundate_nation/'
#INPUTS_DIR = r'/data/inputs'
#OUTPUT_BOOL_PARENT_DIR = '/data/inundation_review/inundate_nation/bool_temp/'
#OUTPUT_BOOL_PARENT_DIR = '/data/inundation_review/inundate_nation/bool_temp/
#DEFAULT_OUTPUT_DIR = '/data/inundation_review/inundate_nation/mosaic_output/'
PREP_PROJECTION = 'PROJCS["USA_Contiguous_Albers_Equal_Area_Conic_USGS_version",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.2572221010042,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4269"]],PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["standard_parallel_1",29.5],PARAMETER["standard_parallel_2",45.5],PARAMETER["latitude_of_center",23],PARAMETER["longitude_of_center",-96],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]]]'


def inundate_nation(fim_run_dir, output_dir, magnitude_key, flow_file, inc_mosaic, job_number):